eb09257 Add gunicorn to pyextra
9558197 add flask to pyextra

git-subtree-dir: pyextra
git-subtree-split: eb092578c359bd54db22569d696ecacbd90237c3
old-commit-hash: ea6c19638c

branch: commatwo_master
parent: c7fbd8ab8e
commit: 7c4a4bf2f0

168 changed files with 51067 additions and 0 deletions
@@ -0,0 +1,103 @@
Metadata-Version: 1.1
Name: Flask
Version: 1.0.2
Summary: A simple framework for building complex web applications.
Home-page: https://www.palletsprojects.com/p/flask/
Author: Pallets team
Author-email: contact@palletsprojects.com
License: BSD
Description: Flask
=====

Flask is a lightweight `WSGI`_ web application framework. It is designed
to make getting started quick and easy, with the ability to scale up to
complex applications. It began as a simple wrapper around `Werkzeug`_
and `Jinja`_ and has become one of the most popular Python web
application frameworks.

Flask offers suggestions, but doesn't enforce any dependencies or
project layout. It is up to the developer to choose the tools and
libraries they want to use. There are many extensions provided by the
community that make adding new functionality easy.


Installing
----------

Install and update using `pip`_:

.. code-block:: text

    pip install -U Flask


A Simple Example
----------------

.. code-block:: python

    from flask import Flask

    app = Flask(__name__)

    @app.route('/')
    def hello():
        return 'Hello, World!'

.. code-block:: text

    $ FLASK_APP=hello.py flask run
     * Serving Flask app "hello"
     * Running on http://127.0.0.1:5000/ (Press CTRL+C to quit)


Donate
------

The Pallets organization develops and supports Flask and the libraries
it uses. In order to grow the community of contributors and users, and
allow the maintainers to devote more time to the projects, `please
donate today`_.

.. _please donate today: https://psfmember.org/civicrm/contribute/transact?reset=1&id=20


Links
-----

* Website: https://www.palletsprojects.com/p/flask/
* Documentation: http://flask.pocoo.org/docs/
* License: `BSD <https://github.com/pallets/flask/blob/master/LICENSE>`_
* Releases: https://pypi.org/project/Flask/
* Code: https://github.com/pallets/flask
* Issue tracker: https://github.com/pallets/flask/issues
* Test status:

  * Linux, Mac: https://travis-ci.org/pallets/flask
  * Windows: https://ci.appveyor.com/project/pallets/flask

* Test coverage: https://codecov.io/gh/pallets/flask

.. _WSGI: https://wsgi.readthedocs.io
.. _Werkzeug: https://www.palletsprojects.com/p/werkzeug/
.. _Jinja: https://www.palletsprojects.com/p/jinja/
.. _pip: https://pip.pypa.io/en/stable/quickstart/

Platform: any
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Framework :: Flask
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application
Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
Classifier: Topic :: Software Development :: Libraries :: Python Modules
@ -0,0 +1,223 @@ |
||||
AUTHORS |
||||
CHANGES.rst |
||||
LICENSE |
||||
MANIFEST.in |
||||
Makefile |
||||
README.rst |
||||
setup.cfg |
||||
setup.py |
||||
tox.ini |
||||
Flask.egg-info/PKG-INFO |
||||
Flask.egg-info/SOURCES.txt |
||||
Flask.egg-info/dependency_links.txt |
||||
Flask.egg-info/entry_points.txt |
||||
Flask.egg-info/not-zip-safe |
||||
Flask.egg-info/requires.txt |
||||
Flask.egg-info/top_level.txt |
||||
artwork/LICENSE |
||||
artwork/logo-full.svg |
||||
artwork/logo-lineart.svg |
||||
docs/Makefile |
||||
docs/advanced_foreword.rst |
||||
docs/api.rst |
||||
docs/appcontext.rst |
||||
docs/becomingbig.rst |
||||
docs/blueprints.rst |
||||
docs/changelog.rst |
||||
docs/cli.rst |
||||
docs/conf.py |
||||
docs/config.rst |
||||
docs/contents.rst.inc |
||||
docs/contributing.rst |
||||
docs/design.rst |
||||
docs/errorhandling.rst |
||||
docs/extensiondev.rst |
||||
docs/extensions.rst |
||||
docs/flaskstyle.sty |
||||
docs/foreword.rst |
||||
docs/htmlfaq.rst |
||||
docs/index.rst |
||||
docs/installation.rst |
||||
docs/latexindex.rst |
||||
docs/license.rst |
||||
docs/logging.rst |
||||
docs/logo.pdf |
||||
docs/make.bat |
||||
docs/quickstart.rst |
||||
docs/reqcontext.rst |
||||
docs/security.rst |
||||
docs/server.rst |
||||
docs/shell.rst |
||||
docs/signals.rst |
||||
docs/styleguide.rst |
||||
docs/templating.rst |
||||
docs/testing.rst |
||||
docs/unicode.rst |
||||
docs/upgrading.rst |
||||
docs/views.rst |
||||
docs/_static/debugger.png |
||||
docs/_static/flask-favicon.ico |
||||
docs/_static/flask.png |
||||
docs/_static/logo-full.png |
||||
docs/_static/no.png |
||||
docs/_static/pycharm-runconfig.png |
||||
docs/_static/touch-icon.png |
||||
docs/_static/yes.png |
||||
docs/deploying/cgi.rst |
||||
docs/deploying/fastcgi.rst |
||||
docs/deploying/index.rst |
||||
docs/deploying/mod_wsgi.rst |
||||
docs/deploying/uwsgi.rst |
||||
docs/deploying/wsgi-standalone.rst |
||||
docs/patterns/apierrors.rst |
||||
docs/patterns/appdispatch.rst |
||||
docs/patterns/appfactories.rst |
||||
docs/patterns/caching.rst |
||||
docs/patterns/celery.rst |
||||
docs/patterns/deferredcallbacks.rst |
||||
docs/patterns/distribute.rst |
||||
docs/patterns/errorpages.rst |
||||
docs/patterns/fabric.rst |
||||
docs/patterns/favicon.rst |
||||
docs/patterns/fileuploads.rst |
||||
docs/patterns/flashing.rst |
||||
docs/patterns/index.rst |
||||
docs/patterns/jquery.rst |
||||
docs/patterns/lazyloading.rst |
||||
docs/patterns/methodoverrides.rst |
||||
docs/patterns/mongokit.rst |
||||
docs/patterns/packages.rst |
||||
docs/patterns/requestchecksum.rst |
||||
docs/patterns/sqlalchemy.rst |
||||
docs/patterns/sqlite3.rst |
||||
docs/patterns/streaming.rst |
||||
docs/patterns/subclassing.rst |
||||
docs/patterns/templateinheritance.rst |
||||
docs/patterns/urlprocessors.rst |
||||
docs/patterns/viewdecorators.rst |
||||
docs/patterns/wtforms.rst |
||||
docs/tutorial/blog.rst |
||||
docs/tutorial/database.rst |
||||
docs/tutorial/deploy.rst |
||||
docs/tutorial/factory.rst |
||||
docs/tutorial/flaskr_edit.png |
||||
docs/tutorial/flaskr_index.png |
||||
docs/tutorial/flaskr_login.png |
||||
docs/tutorial/index.rst |
||||
docs/tutorial/install.rst |
||||
docs/tutorial/layout.rst |
||||
docs/tutorial/next.rst |
||||
docs/tutorial/static.rst |
||||
docs/tutorial/templates.rst |
||||
docs/tutorial/tests.rst |
||||
docs/tutorial/views.rst |
||||
examples/javascript/.gitignore |
||||
examples/javascript/LICENSE |
||||
examples/javascript/MANIFEST.in |
||||
examples/javascript/README.rst |
||||
examples/javascript/setup.cfg |
||||
examples/javascript/setup.py |
||||
examples/javascript/js_example/__init__.py |
||||
examples/javascript/js_example/views.py |
||||
examples/javascript/js_example/templates/base.html |
||||
examples/javascript/js_example/templates/fetch.html |
||||
examples/javascript/js_example/templates/jquery.html |
||||
examples/javascript/js_example/templates/plain.html |
||||
examples/javascript/tests/conftest.py |
||||
examples/javascript/tests/test_js_example.py |
||||
examples/tutorial/.gitignore |
||||
examples/tutorial/LICENSE |
||||
examples/tutorial/MANIFEST.in |
||||
examples/tutorial/README.rst |
||||
examples/tutorial/setup.cfg |
||||
examples/tutorial/setup.py |
||||
examples/tutorial/flaskr/__init__.py |
||||
examples/tutorial/flaskr/auth.py |
||||
examples/tutorial/flaskr/blog.py |
||||
examples/tutorial/flaskr/db.py |
||||
examples/tutorial/flaskr/schema.sql |
||||
examples/tutorial/flaskr/static/style.css |
||||
examples/tutorial/flaskr/templates/base.html |
||||
examples/tutorial/flaskr/templates/auth/login.html |
||||
examples/tutorial/flaskr/templates/auth/register.html |
||||
examples/tutorial/flaskr/templates/blog/create.html |
||||
examples/tutorial/flaskr/templates/blog/index.html |
||||
examples/tutorial/flaskr/templates/blog/update.html |
||||
examples/tutorial/tests/conftest.py |
||||
examples/tutorial/tests/data.sql |
||||
examples/tutorial/tests/test_auth.py |
||||
examples/tutorial/tests/test_blog.py |
||||
examples/tutorial/tests/test_db.py |
||||
examples/tutorial/tests/test_factory.py |
||||
flask/__init__.py |
||||
flask/__main__.py |
||||
flask/_compat.py |
||||
flask/app.py |
||||
flask/blueprints.py |
||||
flask/cli.py |
||||
flask/config.py |
||||
flask/ctx.py |
||||
flask/debughelpers.py |
||||
flask/globals.py |
||||
flask/helpers.py |
||||
flask/logging.py |
||||
flask/sessions.py |
||||
flask/signals.py |
||||
flask/templating.py |
||||
flask/testing.py |
||||
flask/views.py |
||||
flask/wrappers.py |
||||
flask/json/__init__.py |
||||
flask/json/tag.py |
||||
tests/conftest.py |
||||
tests/test_appctx.py |
||||
tests/test_basic.py |
||||
tests/test_blueprints.py |
||||
tests/test_cli.py |
||||
tests/test_config.py |
||||
tests/test_helpers.py |
||||
tests/test_instance_config.py |
||||
tests/test_json_tag.py |
||||
tests/test_logging.py |
||||
tests/test_regression.py |
||||
tests/test_reqctx.py |
||||
tests/test_signals.py |
||||
tests/test_subclassing.py |
||||
tests/test_templating.py |
||||
tests/test_testing.py |
||||
tests/test_user_error_handler.py |
||||
tests/test_views.py |
||||
tests/static/config.json |
||||
tests/static/index.html |
||||
tests/templates/_macro.html |
||||
tests/templates/context_template.html |
||||
tests/templates/escaping_template.html |
||||
tests/templates/mail.txt |
||||
tests/templates/non_escaping_template.txt |
||||
tests/templates/simple_template.html |
||||
tests/templates/template_filter.html |
||||
tests/templates/template_test.html |
||||
tests/templates/nested/nested.txt |
||||
tests/test_apps/.env |
||||
tests/test_apps/.flaskenv |
||||
tests/test_apps/blueprintapp/__init__.py |
||||
tests/test_apps/blueprintapp/apps/__init__.py |
||||
tests/test_apps/blueprintapp/apps/admin/__init__.py |
||||
tests/test_apps/blueprintapp/apps/admin/static/test.txt |
||||
tests/test_apps/blueprintapp/apps/admin/static/css/test.css |
||||
tests/test_apps/blueprintapp/apps/admin/templates/admin/index.html |
||||
tests/test_apps/blueprintapp/apps/frontend/__init__.py |
||||
tests/test_apps/blueprintapp/apps/frontend/templates/frontend/index.html |
||||
tests/test_apps/cliapp/__init__.py |
||||
tests/test_apps/cliapp/app.py |
||||
tests/test_apps/cliapp/factory.py |
||||
tests/test_apps/cliapp/importerrorapp.py |
||||
tests/test_apps/cliapp/message.txt |
||||
tests/test_apps/cliapp/multiapp.py |
||||
tests/test_apps/cliapp/inner1/__init__.py |
||||
tests/test_apps/cliapp/inner1/inner2/__init__.py |
||||
tests/test_apps/cliapp/inner1/inner2/flask.py |
||||
tests/test_apps/helloworld/hello.py |
||||
tests/test_apps/helloworld/wsgi.py |
||||
tests/test_apps/subdomaintestmodule/__init__.py |
||||
tests/test_apps/subdomaintestmodule/static/hello.txt |
@@ -0,0 +1 @@

@@ -0,0 +1,3 @@
[console_scripts]
flask = flask.cli:main

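
The ``[console_scripts]`` section above is what turns an installed Flask into a ``flask`` executable: setuptools generates a wrapper script that resolves this entry point and calls ``flask.cli:main``. The generated wrapper appears later in this diff as ``bin/flask``; a minimal sketch of the same resolution step, assuming Flask 1.0.2 is installed:

.. code-block:: python

    from pkg_resources import load_entry_point

    # Resolve the 'flask' console script declared in entry_points.txt and run it,
    # which is exactly what the generated bin/flask wrapper does.
    main = load_entry_point('Flask==1.0.2', 'console_scripts', 'flask')
    main()
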
@ -0,0 +1,48 @@ |
||||
../flask/testing.py |
||||
../flask/templating.py |
||||
../flask/__main__.py |
||||
../flask/sessions.py |
||||
../flask/signals.py |
||||
../flask/helpers.py |
||||
../flask/debughelpers.py |
||||
../flask/wrappers.py |
||||
../flask/app.py |
||||
../flask/ctx.py |
||||
../flask/config.py |
||||
../flask/logging.py |
||||
../flask/blueprints.py |
||||
../flask/views.py |
||||
../flask/cli.py |
||||
../flask/_compat.py |
||||
../flask/globals.py |
||||
../flask/__init__.py |
||||
../flask/json/tag.py |
||||
../flask/json/__init__.py |
||||
../flask/testing.pyc |
||||
../flask/templating.pyc |
||||
../flask/__main__.pyc |
||||
../flask/sessions.pyc |
||||
../flask/signals.pyc |
||||
../flask/helpers.pyc |
||||
../flask/debughelpers.pyc |
||||
../flask/wrappers.pyc |
||||
../flask/app.pyc |
||||
../flask/ctx.pyc |
||||
../flask/config.pyc |
||||
../flask/logging.pyc |
||||
../flask/blueprints.pyc |
||||
../flask/views.pyc |
||||
../flask/cli.pyc |
||||
../flask/_compat.pyc |
||||
../flask/globals.pyc |
||||
../flask/__init__.pyc |
||||
../flask/json/tag.pyc |
||||
../flask/json/__init__.pyc |
||||
not-zip-safe |
||||
entry_points.txt |
||||
dependency_links.txt |
||||
PKG-INFO |
||||
top_level.txt |
||||
requires.txt |
||||
SOURCES.txt |
||||
../../../../bin/flask |
@@ -0,0 +1 @@

@@ -0,0 +1,20 @@
Werkzeug>=0.14
Jinja2>=2.10
itsdangerous>=0.24
click>=5.1

[dev]
pytest>=3
coverage
tox
sphinx
pallets-sphinx-themes
sphinxcontrib-log-cabinet

[docs]
sphinx
pallets-sphinx-themes
sphinxcontrib-log-cabinet

[dotenv]
python-dotenv
@@ -0,0 +1 @@
flask
@@ -0,0 +1,62 @@
Metadata-Version: 1.1
Name: Jinja2
Version: 2.10
Summary: A small but fast and easy to use stand-alone template engine written in pure python.
Home-page: http://jinja.pocoo.org/
Author: Armin Ronacher
Author-email: armin.ronacher@active-4.com
License: BSD
Description:
Jinja2
~~~~~~

Jinja2 is a template engine written in pure Python. It provides a
`Django`_ inspired non-XML syntax but supports inline expressions and
an optional `sandboxed`_ environment.

Nutshell
--------

Here is a small example of a Jinja template::

    {% extends 'base.html' %}
    {% block title %}Memberlist{% endblock %}
    {% block content %}
      <ul>
      {% for user in users %}
        <li><a href="{{ user.url }}">{{ user.username }}</a></li>
      {% endfor %}
      </ul>
    {% endblock %}

Philosophy
----------

Application logic is for the controller, but don't make life too hard
for the template designer by giving them too little functionality.

For more information visit the new `Jinja2 webpage`_ and `documentation`_.

.. _sandboxed: https://en.wikipedia.org/wiki/Sandbox_(computer_security)
.. _Django: https://www.djangoproject.com/
.. _Jinja2 webpage: http://jinja.pocoo.org/
.. _documentation: http://jinja.pocoo.org/2/documentation/

Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: Text Processing :: Markup :: HTML
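
The nutshell template above only shows the syntax; rendering happens from Python. A minimal sketch using the public ``jinja2`` API (the inline template is a cut-down version of the example, since ``{% extends %}`` would need a template loader):

.. code-block:: python

    from collections import namedtuple

    from jinja2 import Template

    User = namedtuple('User', ['url', 'username'])

    # Inline template: a list of links, one per user.
    template = Template(
        '<ul>\n'
        '{% for user in users %}'
        '  <li><a href="{{ user.url }}">{{ user.username }}</a></li>\n'
        '{% endfor %}'
        '</ul>'
    )

    print(template.render(users=[User('/users/alice', 'alice'),
                                 User('/users/bob', 'bob')]))
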
@ -0,0 +1,133 @@ |
||||
AUTHORS |
||||
CHANGES.rst |
||||
LICENSE |
||||
MANIFEST.in |
||||
README.rst |
||||
setup.cfg |
||||
setup.py |
||||
Jinja2.egg-info/PKG-INFO |
||||
Jinja2.egg-info/SOURCES.txt |
||||
Jinja2.egg-info/dependency_links.txt |
||||
Jinja2.egg-info/entry_points.txt |
||||
Jinja2.egg-info/not-zip-safe |
||||
Jinja2.egg-info/requires.txt |
||||
Jinja2.egg-info/top_level.txt |
||||
artwork/jinjalogo.svg |
||||
docs/Makefile |
||||
docs/api.rst |
||||
docs/cache_extension.py |
||||
docs/changelog.rst |
||||
docs/conf.py |
||||
docs/contents.rst.inc |
||||
docs/extensions.rst |
||||
docs/faq.rst |
||||
docs/index.rst |
||||
docs/integration.rst |
||||
docs/intro.rst |
||||
docs/jinjaext.py |
||||
docs/jinjastyle.sty |
||||
docs/latexindex.rst |
||||
docs/logo.pdf |
||||
docs/nativetypes.rst |
||||
docs/sandbox.rst |
||||
docs/switching.rst |
||||
docs/templates.rst |
||||
docs/tricks.rst |
||||
docs/_static/.ignore |
||||
docs/_static/jinja-small.png |
||||
docs/_templates/sidebarintro.html |
||||
docs/_templates/sidebarlogo.html |
||||
docs/_themes/LICENSE |
||||
docs/_themes/README |
||||
docs/_themes/jinja/layout.html |
||||
docs/_themes/jinja/relations.html |
||||
docs/_themes/jinja/theme.conf |
||||
docs/_themes/jinja/static/jinja.css_t |
||||
examples/bench.py |
||||
examples/profile.py |
||||
examples/basic/cycle.py |
||||
examples/basic/debugger.py |
||||
examples/basic/inheritance.py |
||||
examples/basic/test.py |
||||
examples/basic/test_filter_and_linestatements.py |
||||
examples/basic/test_loop_filter.py |
||||
examples/basic/translate.py |
||||
examples/basic/templates/broken.html |
||||
examples/basic/templates/subbroken.html |
||||
examples/rwbench/djangoext.py |
||||
examples/rwbench/rwbench.py |
||||
examples/rwbench/django/_form.html |
||||
examples/rwbench/django/_input_field.html |
||||
examples/rwbench/django/_textarea.html |
||||
examples/rwbench/django/index.html |
||||
examples/rwbench/django/layout.html |
||||
examples/rwbench/genshi/helpers.html |
||||
examples/rwbench/genshi/index.html |
||||
examples/rwbench/genshi/layout.html |
||||
examples/rwbench/jinja/helpers.html |
||||
examples/rwbench/jinja/index.html |
||||
examples/rwbench/jinja/layout.html |
||||
examples/rwbench/mako/helpers.html |
||||
examples/rwbench/mako/index.html |
||||
examples/rwbench/mako/layout.html |
||||
ext/djangojinja2.py |
||||
ext/inlinegettext.py |
||||
ext/jinja.el |
||||
ext/Vim/jinja.vim |
||||
ext/django2jinja/django2jinja.py |
||||
ext/django2jinja/example.py |
||||
ext/django2jinja/templates/index.html |
||||
ext/django2jinja/templates/layout.html |
||||
ext/django2jinja/templates/subtemplate.html |
||||
jinja2/__init__.py |
||||
jinja2/_compat.py |
||||
jinja2/_identifier.py |
||||
jinja2/asyncfilters.py |
||||
jinja2/asyncsupport.py |
||||
jinja2/bccache.py |
||||
jinja2/compiler.py |
||||
jinja2/constants.py |
||||
jinja2/debug.py |
||||
jinja2/defaults.py |
||||
jinja2/environment.py |
||||
jinja2/exceptions.py |
||||
jinja2/ext.py |
||||
jinja2/filters.py |
||||
jinja2/idtracking.py |
||||
jinja2/lexer.py |
||||
jinja2/loaders.py |
||||
jinja2/meta.py |
||||
jinja2/nativetypes.py |
||||
jinja2/nodes.py |
||||
jinja2/optimizer.py |
||||
jinja2/parser.py |
||||
jinja2/runtime.py |
||||
jinja2/sandbox.py |
||||
jinja2/tests.py |
||||
jinja2/utils.py |
||||
jinja2/visitor.py |
||||
tests/conftest.py |
||||
tests/test_api.py |
||||
tests/test_async.py |
||||
tests/test_asyncfilters.py |
||||
tests/test_bytecode_cache.py |
||||
tests/test_core_tags.py |
||||
tests/test_debug.py |
||||
tests/test_ext.py |
||||
tests/test_features.py |
||||
tests/test_filters.py |
||||
tests/test_idtracking.py |
||||
tests/test_imports.py |
||||
tests/test_inheritance.py |
||||
tests/test_lexnparse.py |
||||
tests/test_loader.py |
||||
tests/test_nativetypes.py |
||||
tests/test_regression.py |
||||
tests/test_security.py |
||||
tests/test_tests.py |
||||
tests/test_utils.py |
||||
tests/res/__init__.py |
||||
tests/res/templates/broken.html |
||||
tests/res/templates/syntaxerror.html |
||||
tests/res/templates/test.html |
||||
tests/res/templates/foo/test.html |
@@ -0,0 +1 @@

@@ -0,0 +1,4 @@

[babel.extractors]
jinja2 = jinja2.ext:babel_extract[i18n]

@ -0,0 +1,61 @@ |
||||
../jinja2/lexer.py |
||||
../jinja2/idtracking.py |
||||
../jinja2/_identifier.py |
||||
../jinja2/nodes.py |
||||
../jinja2/asyncfilters.py |
||||
../jinja2/loaders.py |
||||
../jinja2/defaults.py |
||||
../jinja2/meta.py |
||||
../jinja2/compiler.py |
||||
../jinja2/environment.py |
||||
../jinja2/tests.py |
||||
../jinja2/sandbox.py |
||||
../jinja2/filters.py |
||||
../jinja2/exceptions.py |
||||
../jinja2/asyncsupport.py |
||||
../jinja2/visitor.py |
||||
../jinja2/constants.py |
||||
../jinja2/utils.py |
||||
../jinja2/ext.py |
||||
../jinja2/optimizer.py |
||||
../jinja2/nativetypes.py |
||||
../jinja2/parser.py |
||||
../jinja2/runtime.py |
||||
../jinja2/debug.py |
||||
../jinja2/_compat.py |
||||
../jinja2/bccache.py |
||||
../jinja2/__init__.py |
||||
../jinja2/lexer.pyc |
||||
../jinja2/idtracking.pyc |
||||
../jinja2/_identifier.pyc |
||||
../jinja2/nodes.pyc |
||||
../jinja2/asyncfilters.pyc |
||||
../jinja2/loaders.pyc |
||||
../jinja2/defaults.pyc |
||||
../jinja2/meta.pyc |
||||
../jinja2/compiler.pyc |
||||
../jinja2/environment.pyc |
||||
../jinja2/tests.pyc |
||||
../jinja2/sandbox.pyc |
||||
../jinja2/filters.pyc |
||||
../jinja2/exceptions.pyc |
||||
../jinja2/asyncsupport.pyc |
||||
../jinja2/visitor.pyc |
||||
../jinja2/constants.pyc |
||||
../jinja2/utils.pyc |
||||
../jinja2/ext.pyc |
||||
../jinja2/optimizer.pyc |
||||
../jinja2/nativetypes.pyc |
||||
../jinja2/parser.pyc |
||||
../jinja2/runtime.pyc |
||||
../jinja2/debug.pyc |
||||
../jinja2/_compat.pyc |
||||
../jinja2/bccache.pyc |
||||
../jinja2/__init__.pyc |
||||
not-zip-safe |
||||
entry_points.txt |
||||
dependency_links.txt |
||||
PKG-INFO |
||||
top_level.txt |
||||
requires.txt |
||||
SOURCES.txt |
@@ -0,0 +1 @@

@@ -0,0 +1,4 @@
MarkupSafe>=0.23

[i18n]
Babel>=0.8
@@ -0,0 +1 @@
jinja2
@@ -0,0 +1,104 @@
Metadata-Version: 1.1
Name: Werkzeug
Version: 0.14.1
Summary: The comprehensive WSGI web application library.
Home-page: https://www.palletsprojects.org/p/werkzeug/
Author: Armin Ronacher
Author-email: armin.ronacher@active-4.com
License: BSD
Description: Werkzeug
========

Werkzeug is a comprehensive `WSGI`_ web application library. It began as
a simple collection of various utilities for WSGI applications and has
become one of the most advanced WSGI utility libraries.

It includes:

* An interactive debugger that allows inspecting stack traces and source
  code in the browser with an interactive interpreter for any frame in
  the stack.
* A full-featured request object with objects to interact with headers,
  query args, form data, files, and cookies.
* A response object that can wrap other WSGI applications and handle
  streaming data.
* A routing system for matching URLs to endpoints and generating URLs
  for endpoints, with an extensible system for capturing variables from
  URLs.
* HTTP utilities to handle entity tags, cache control, dates, user
  agents, cookies, files, and more.
* A threaded WSGI server for use while developing applications locally.
* A test client for simulating HTTP requests during testing without
  requiring running a server.

Werkzeug is Unicode aware and doesn't enforce any dependencies. It is up
to the developer to choose a template engine, database adapter, and even
how to handle requests. It can be used to build all sorts of end user
applications such as blogs, wikis, or bulletin boards.

`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while
providing more structure and patterns for defining powerful
applications.


Installing
----------

Install and update using `pip`_:

.. code-block:: text

    pip install -U Werkzeug


A Simple Example
----------------

.. code-block:: python

    from werkzeug.wrappers import Request, Response

    @Request.application
    def application(request):
        return Response('Hello, World!')

    if __name__ == '__main__':
        from werkzeug.serving import run_simple
        run_simple('localhost', 4000, application)


Links
-----

* Website: https://www.palletsprojects.com/p/werkzeug/
* Releases: https://pypi.org/project/Werkzeug/
* Code: https://github.com/pallets/werkzeug
* Issue tracker: https://github.com/pallets/werkzeug/issues
* Test status:

  * Linux, Mac: https://travis-ci.org/pallets/werkzeug
  * Windows: https://ci.appveyor.com/project/davidism/werkzeug

* Test coverage: https://codecov.io/gh/pallets/werkzeug

.. _WSGI: https://wsgi.readthedocs.io/en/latest/
.. _Flask: https://www.palletsprojects.com/p/flask/
.. _pip: https://pip.pypa.io/en/stable/quickstart/

Platform: any
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
Classifier: Topic :: Software Development :: Libraries :: Python Modules
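
The routing system mentioned in the feature list above can be tried without a full application; a minimal sketch using ``werkzeug.routing`` (the endpoint names are illustrative):

.. code-block:: python

    from werkzeug.routing import Map, Rule

    # URL map with two illustrative endpoints.
    url_map = Map([
        Rule('/', endpoint='index'),
        Rule('/user/<username>', endpoint='user_detail'),
    ])

    # Bind to a host so the map can match and build concrete URLs.
    urls = url_map.bind('example.com')
    print(urls.match('/user/alice'))                       # ('user_detail', {'username': 'alice'})
    print(urls.build('user_detail', {'username': 'bob'}))  # '/user/bob'
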
@ -0,0 +1,299 @@ |
||||
AUTHORS |
||||
CHANGES.rst |
||||
LICENSE |
||||
MANIFEST.in |
||||
Makefile |
||||
README.rst |
||||
setup.cfg |
||||
setup.py |
||||
tox.ini |
||||
Werkzeug.egg-info/PKG-INFO |
||||
Werkzeug.egg-info/SOURCES.txt |
||||
Werkzeug.egg-info/dependency_links.txt |
||||
Werkzeug.egg-info/not-zip-safe |
||||
Werkzeug.egg-info/requires.txt |
||||
Werkzeug.egg-info/top_level.txt |
||||
artwork/.DS_Store |
||||
artwork/logo.png |
||||
artwork/logo.svg |
||||
docs/.DS_Store |
||||
docs/Makefile |
||||
docs/changes.rst |
||||
docs/conf.py |
||||
docs/contents.rst.inc |
||||
docs/datastructures.rst |
||||
docs/debug.rst |
||||
docs/exceptions.rst |
||||
docs/filesystem.rst |
||||
docs/http.rst |
||||
docs/index.rst |
||||
docs/installation.rst |
||||
docs/latexindex.rst |
||||
docs/levels.rst |
||||
docs/local.rst |
||||
docs/logo.pdf |
||||
docs/make.bat |
||||
docs/makearchive.py |
||||
docs/middlewares.rst |
||||
docs/python3.rst |
||||
docs/quickstart.rst |
||||
docs/request_data.rst |
||||
docs/routing.rst |
||||
docs/serving.rst |
||||
docs/terms.rst |
||||
docs/test.rst |
||||
docs/transition.rst |
||||
docs/tutorial.rst |
||||
docs/unicode.rst |
||||
docs/urls.rst |
||||
docs/utils.rst |
||||
docs/werkzeugext.py |
||||
docs/werkzeugstyle.sty |
||||
docs/wrappers.rst |
||||
docs/wsgi.rst |
||||
docs/_static/background.png |
||||
docs/_static/codebackground.png |
||||
docs/_static/contents.png |
||||
docs/_static/debug-screenshot.png |
||||
docs/_static/favicon.ico |
||||
docs/_static/header.png |
||||
docs/_static/navigation.png |
||||
docs/_static/navigation_active.png |
||||
docs/_static/shortly.png |
||||
docs/_static/shorty-screenshot.png |
||||
docs/_static/style.css |
||||
docs/_static/werkzeug.js |
||||
docs/_static/werkzeug.png |
||||
docs/_templates/sidebarintro.html |
||||
docs/_templates/sidebarlogo.html |
||||
docs/contrib/atom.rst |
||||
docs/contrib/cache.rst |
||||
docs/contrib/fixers.rst |
||||
docs/contrib/index.rst |
||||
docs/contrib/iterio.rst |
||||
docs/contrib/lint.rst |
||||
docs/contrib/profiler.rst |
||||
docs/contrib/securecookie.rst |
||||
docs/contrib/sessions.rst |
||||
docs/contrib/wrappers.rst |
||||
docs/deployment/cgi.rst |
||||
docs/deployment/fastcgi.rst |
||||
docs/deployment/index.rst |
||||
docs/deployment/mod_wsgi.rst |
||||
docs/deployment/proxying.rst |
||||
examples/README |
||||
examples/cookieauth.py |
||||
examples/httpbasicauth.py |
||||
examples/manage-coolmagic.py |
||||
examples/manage-couchy.py |
||||
examples/manage-cupoftee.py |
||||
examples/manage-i18nurls.py |
||||
examples/manage-plnt.py |
||||
examples/manage-shorty.py |
||||
examples/manage-simplewiki.py |
||||
examples/manage-webpylike.py |
||||
examples/upload.py |
||||
examples/contrib/README |
||||
examples/contrib/securecookie.py |
||||
examples/contrib/sessions.py |
||||
examples/coolmagic/__init__.py |
||||
examples/coolmagic/application.py |
||||
examples/coolmagic/helpers.py |
||||
examples/coolmagic/utils.py |
||||
examples/coolmagic/public/style.css |
||||
examples/coolmagic/templates/layout.html |
||||
examples/coolmagic/templates/static/about.html |
||||
examples/coolmagic/templates/static/index.html |
||||
examples/coolmagic/templates/static/not_found.html |
||||
examples/coolmagic/views/__init__.py |
||||
examples/coolmagic/views/static.py |
||||
examples/couchy/README |
||||
examples/couchy/__init__.py |
||||
examples/couchy/application.py |
||||
examples/couchy/models.py |
||||
examples/couchy/utils.py |
||||
examples/couchy/views.py |
||||
examples/couchy/static/style.css |
||||
examples/couchy/templates/display.html |
||||
examples/couchy/templates/layout.html |
||||
examples/couchy/templates/list.html |
||||
examples/couchy/templates/new.html |
||||
examples/couchy/templates/not_found.html |
||||
examples/cupoftee/__init__.py |
||||
examples/cupoftee/application.py |
||||
examples/cupoftee/db.py |
||||
examples/cupoftee/network.py |
||||
examples/cupoftee/pages.py |
||||
examples/cupoftee/utils.py |
||||
examples/cupoftee/shared/content.png |
||||
examples/cupoftee/shared/down.png |
||||
examples/cupoftee/shared/favicon.ico |
||||
examples/cupoftee/shared/header.png |
||||
examples/cupoftee/shared/logo.png |
||||
examples/cupoftee/shared/style.css |
||||
examples/cupoftee/shared/up.png |
||||
examples/cupoftee/templates/layout.html |
||||
examples/cupoftee/templates/missingpage.html |
||||
examples/cupoftee/templates/search.html |
||||
examples/cupoftee/templates/server.html |
||||
examples/cupoftee/templates/serverlist.html |
||||
examples/i18nurls/__init__.py |
||||
examples/i18nurls/application.py |
||||
examples/i18nurls/urls.py |
||||
examples/i18nurls/views.py |
||||
examples/i18nurls/templates/about.html |
||||
examples/i18nurls/templates/blog.html |
||||
examples/i18nurls/templates/index.html |
||||
examples/i18nurls/templates/layout.html |
||||
examples/partial/README |
||||
examples/partial/complex_routing.py |
||||
examples/plnt/__init__.py |
||||
examples/plnt/database.py |
||||
examples/plnt/sync.py |
||||
examples/plnt/utils.py |
||||
examples/plnt/views.py |
||||
examples/plnt/webapp.py |
||||
examples/plnt/shared/style.css |
||||
examples/plnt/templates/about.html |
||||
examples/plnt/templates/index.html |
||||
examples/plnt/templates/layout.html |
||||
examples/shortly/shortly.py |
||||
examples/shortly/static/style.css |
||||
examples/shortly/templates/404.html |
||||
examples/shortly/templates/layout.html |
||||
examples/shortly/templates/new_url.html |
||||
examples/shortly/templates/short_link_details.html |
||||
examples/shorty/__init__.py |
||||
examples/shorty/application.py |
||||
examples/shorty/models.py |
||||
examples/shorty/utils.py |
||||
examples/shorty/views.py |
||||
examples/shorty/static/style.css |
||||
examples/shorty/templates/display.html |
||||
examples/shorty/templates/layout.html |
||||
examples/shorty/templates/list.html |
||||
examples/shorty/templates/new.html |
||||
examples/shorty/templates/not_found.html |
||||
examples/simplewiki/__init__.py |
||||
examples/simplewiki/actions.py |
||||
examples/simplewiki/application.py |
||||
examples/simplewiki/database.py |
||||
examples/simplewiki/specialpages.py |
||||
examples/simplewiki/utils.py |
||||
examples/simplewiki/shared/style.css |
||||
examples/simplewiki/templates/action_diff.html |
||||
examples/simplewiki/templates/action_edit.html |
||||
examples/simplewiki/templates/action_log.html |
||||
examples/simplewiki/templates/action_revert.html |
||||
examples/simplewiki/templates/action_show.html |
||||
examples/simplewiki/templates/layout.html |
||||
examples/simplewiki/templates/macros.xml |
||||
examples/simplewiki/templates/missing_action.html |
||||
examples/simplewiki/templates/page_index.html |
||||
examples/simplewiki/templates/page_missing.html |
||||
examples/simplewiki/templates/recent_changes.html |
||||
examples/webpylike/example.py |
||||
examples/webpylike/webpylike.py |
||||
tests/__init__.py |
||||
tests/conftest.py |
||||
tests/test_compat.py |
||||
tests/test_datastructures.py |
||||
tests/test_debug.py |
||||
tests/test_exceptions.py |
||||
tests/test_formparser.py |
||||
tests/test_http.py |
||||
tests/test_internal.py |
||||
tests/test_local.py |
||||
tests/test_routing.py |
||||
tests/test_security.py |
||||
tests/test_serving.py |
||||
tests/test_test.py |
||||
tests/test_urls.py |
||||
tests/test_utils.py |
||||
tests/test_wrappers.py |
||||
tests/test_wsgi.py |
||||
tests/contrib/__init__.py |
||||
tests/contrib/test_atom.py |
||||
tests/contrib/test_cache.py |
||||
tests/contrib/test_fixers.py |
||||
tests/contrib/test_iterio.py |
||||
tests/contrib/test_securecookie.py |
||||
tests/contrib/test_sessions.py |
||||
tests/contrib/test_wrappers.py |
||||
tests/contrib/cache/conftest.py |
||||
tests/contrib/cache/test_cache.py |
||||
tests/hypothesis/__init__.py |
||||
tests/hypothesis/test_urls.py |
||||
tests/multipart/__init__.py |
||||
tests/multipart/ie7_full_path_request.txt |
||||
tests/multipart/test_collect.py |
||||
tests/multipart/firefox3-2png1txt/file1.png |
||||
tests/multipart/firefox3-2png1txt/file2.png |
||||
tests/multipart/firefox3-2png1txt/request.txt |
||||
tests/multipart/firefox3-2png1txt/text.txt |
||||
tests/multipart/firefox3-2pnglongtext/file1.png |
||||
tests/multipart/firefox3-2pnglongtext/file2.png |
||||
tests/multipart/firefox3-2pnglongtext/request.txt |
||||
tests/multipart/firefox3-2pnglongtext/text.txt |
||||
tests/multipart/ie6-2png1txt/file1.png |
||||
tests/multipart/ie6-2png1txt/file2.png |
||||
tests/multipart/ie6-2png1txt/request.txt |
||||
tests/multipart/ie6-2png1txt/text.txt |
||||
tests/multipart/opera8-2png1txt/file1.png |
||||
tests/multipart/opera8-2png1txt/file2.png |
||||
tests/multipart/opera8-2png1txt/request.txt |
||||
tests/multipart/opera8-2png1txt/text.txt |
||||
tests/multipart/webkit3-2png1txt/file1.png |
||||
tests/multipart/webkit3-2png1txt/file2.png |
||||
tests/multipart/webkit3-2png1txt/request.txt |
||||
tests/multipart/webkit3-2png1txt/text.txt |
||||
tests/res/chunked.txt |
||||
tests/res/test.txt |
||||
werkzeug/__init__.py |
||||
werkzeug/_compat.py |
||||
werkzeug/_internal.py |
||||
werkzeug/_reloader.py |
||||
werkzeug/datastructures.py |
||||
werkzeug/exceptions.py |
||||
werkzeug/filesystem.py |
||||
werkzeug/formparser.py |
||||
werkzeug/http.py |
||||
werkzeug/local.py |
||||
werkzeug/posixemulation.py |
||||
werkzeug/routing.py |
||||
werkzeug/security.py |
||||
werkzeug/serving.py |
||||
werkzeug/test.py |
||||
werkzeug/testapp.py |
||||
werkzeug/urls.py |
||||
werkzeug/useragents.py |
||||
werkzeug/utils.py |
||||
werkzeug/websocket.py |
||||
werkzeug/wrappers.py |
||||
werkzeug/wsgi.py |
||||
werkzeug/contrib/__init__.py |
||||
werkzeug/contrib/atom.py |
||||
werkzeug/contrib/cache.py |
||||
werkzeug/contrib/fixers.py |
||||
werkzeug/contrib/iterio.py |
||||
werkzeug/contrib/jsrouting.py |
||||
werkzeug/contrib/limiter.py |
||||
werkzeug/contrib/lint.py |
||||
werkzeug/contrib/profiler.py |
||||
werkzeug/contrib/securecookie.py |
||||
werkzeug/contrib/sessions.py |
||||
werkzeug/contrib/testtools.py |
||||
werkzeug/contrib/wrappers.py |
||||
werkzeug/debug/__init__.py |
||||
werkzeug/debug/console.py |
||||
werkzeug/debug/repr.py |
||||
werkzeug/debug/tbtools.py |
||||
werkzeug/debug/shared/FONT_LICENSE |
||||
werkzeug/debug/shared/console.png |
||||
werkzeug/debug/shared/debugger.js |
||||
werkzeug/debug/shared/jquery.js |
||||
werkzeug/debug/shared/less.png |
||||
werkzeug/debug/shared/more.png |
||||
werkzeug/debug/shared/source.png |
||||
werkzeug/debug/shared/style.css |
||||
werkzeug/debug/shared/ubuntu.ttf |
@ -0,0 +1 @@ |
||||
|
@ -0,0 +1,93 @@ |
||||
../werkzeug/_reloader.py |
||||
../werkzeug/_internal.py |
||||
../werkzeug/serving.py |
||||
../werkzeug/local.py |
||||
../werkzeug/filesystem.py |
||||
../werkzeug/security.py |
||||
../werkzeug/__init__.py |
||||
../werkzeug/test.py |
||||
../werkzeug/formparser.py |
||||
../werkzeug/posixemulation.py |
||||
../werkzeug/utils.py |
||||
../werkzeug/wrappers.py |
||||
../werkzeug/routing.py |
||||
../werkzeug/http.py |
||||
../werkzeug/useragents.py |
||||
../werkzeug/exceptions.py |
||||
../werkzeug/_compat.py |
||||
../werkzeug/datastructures.py |
||||
../werkzeug/urls.py |
||||
../werkzeug/websocket.py |
||||
../werkzeug/wsgi.py |
||||
../werkzeug/testapp.py |
||||
../werkzeug/contrib/sessions.py |
||||
../werkzeug/contrib/cache.py |
||||
../werkzeug/contrib/__init__.py |
||||
../werkzeug/contrib/testtools.py |
||||
../werkzeug/contrib/wrappers.py |
||||
../werkzeug/contrib/jsrouting.py |
||||
../werkzeug/contrib/fixers.py |
||||
../werkzeug/contrib/profiler.py |
||||
../werkzeug/contrib/iterio.py |
||||
../werkzeug/contrib/atom.py |
||||
../werkzeug/contrib/securecookie.py |
||||
../werkzeug/contrib/limiter.py |
||||
../werkzeug/contrib/lint.py |
||||
../werkzeug/debug/console.py |
||||
../werkzeug/debug/tbtools.py |
||||
../werkzeug/debug/__init__.py |
||||
../werkzeug/debug/repr.py |
||||
../werkzeug/debug/shared/FONT_LICENSE |
||||
../werkzeug/debug/shared/console.png |
||||
../werkzeug/debug/shared/debugger.js |
||||
../werkzeug/debug/shared/jquery.js |
||||
../werkzeug/debug/shared/less.png |
||||
../werkzeug/debug/shared/more.png |
||||
../werkzeug/debug/shared/source.png |
||||
../werkzeug/debug/shared/style.css |
||||
../werkzeug/debug/shared/ubuntu.ttf |
||||
../werkzeug/_reloader.pyc |
||||
../werkzeug/_internal.pyc |
||||
../werkzeug/serving.pyc |
||||
../werkzeug/local.pyc |
||||
../werkzeug/filesystem.pyc |
||||
../werkzeug/security.pyc |
||||
../werkzeug/__init__.pyc |
||||
../werkzeug/test.pyc |
||||
../werkzeug/formparser.pyc |
||||
../werkzeug/posixemulation.pyc |
||||
../werkzeug/utils.pyc |
||||
../werkzeug/wrappers.pyc |
||||
../werkzeug/routing.pyc |
||||
../werkzeug/http.pyc |
||||
../werkzeug/useragents.pyc |
||||
../werkzeug/exceptions.pyc |
||||
../werkzeug/_compat.pyc |
||||
../werkzeug/datastructures.pyc |
||||
../werkzeug/urls.pyc |
||||
../werkzeug/websocket.pyc |
||||
../werkzeug/wsgi.pyc |
||||
../werkzeug/testapp.pyc |
||||
../werkzeug/contrib/sessions.pyc |
||||
../werkzeug/contrib/cache.pyc |
||||
../werkzeug/contrib/__init__.pyc |
||||
../werkzeug/contrib/testtools.pyc |
||||
../werkzeug/contrib/wrappers.pyc |
||||
../werkzeug/contrib/jsrouting.pyc |
||||
../werkzeug/contrib/fixers.pyc |
||||
../werkzeug/contrib/profiler.pyc |
||||
../werkzeug/contrib/iterio.pyc |
||||
../werkzeug/contrib/atom.pyc |
||||
../werkzeug/contrib/securecookie.pyc |
||||
../werkzeug/contrib/limiter.pyc |
||||
../werkzeug/contrib/lint.pyc |
||||
../werkzeug/debug/console.pyc |
||||
../werkzeug/debug/tbtools.pyc |
||||
../werkzeug/debug/__init__.pyc |
||||
../werkzeug/debug/repr.pyc |
||||
PKG-INFO |
||||
not-zip-safe |
||||
SOURCES.txt |
||||
requires.txt |
||||
top_level.txt |
||||
dependency_links.txt |
@@ -0,0 +1 @@

@@ -0,0 +1,12 @@

[dev]
pytest
coverage
tox
sphinx

[termcolor]
termcolor

[watchdog]
watchdog
@@ -0,0 +1 @@
werkzeug
@@ -0,0 +1,10 @@
#!/usr/local/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'Flask==1.0.2','console_scripts','flask'
__requires__ = 'Flask==1.0.2'
import sys
from pkg_resources import load_entry_point

if __name__ == '__main__':
    sys.exit(
        load_entry_point('Flask==1.0.2', 'console_scripts', 'flask')()
    )
@@ -0,0 +1,13 @@
Metadata-Version: 1.1
Name: click
Version: 6.7
Summary: A simple wrapper around optparse for powerful command line utilities.
Home-page: http://github.com/mitsuhiko/click
Author: Armin Ronacher
Author-email: armin.ronacher@active-4.com
License: UNKNOWN
Description: UNKNOWN
Platform: UNKNOWN
Classifier: License :: OSI Approved :: BSD License
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
@ -0,0 +1,114 @@ |
||||
CHANGES |
||||
LICENSE |
||||
MANIFEST.in |
||||
Makefile |
||||
README |
||||
setup.cfg |
||||
setup.py |
||||
artwork/logo.svg |
||||
click/__init__.py |
||||
click/_bashcomplete.py |
||||
click/_compat.py |
||||
click/_termui_impl.py |
||||
click/_textwrap.py |
||||
click/_unicodefun.py |
||||
click/_winconsole.py |
||||
click/core.py |
||||
click/decorators.py |
||||
click/exceptions.py |
||||
click/formatting.py |
||||
click/globals.py |
||||
click/parser.py |
||||
click/termui.py |
||||
click/testing.py |
||||
click/types.py |
||||
click/utils.py |
||||
click.egg-info/PKG-INFO |
||||
click.egg-info/SOURCES.txt |
||||
click.egg-info/dependency_links.txt |
||||
click.egg-info/top_level.txt |
||||
docs/Makefile |
||||
docs/advanced.rst |
||||
docs/api.rst |
||||
docs/arguments.rst |
||||
docs/bashcomplete.rst |
||||
docs/changelog.rst |
||||
docs/clickdoctools.py |
||||
docs/commands.rst |
||||
docs/complex.rst |
||||
docs/conf.py |
||||
docs/contrib.rst |
||||
docs/documentation.rst |
||||
docs/exceptions.rst |
||||
docs/index.rst |
||||
docs/license.rst |
||||
docs/make.bat |
||||
docs/options.rst |
||||
docs/parameters.rst |
||||
docs/prompts.rst |
||||
docs/python3.rst |
||||
docs/quickstart.rst |
||||
docs/setuptools.rst |
||||
docs/testing.rst |
||||
docs/upgrading.rst |
||||
docs/utils.rst |
||||
docs/why.rst |
||||
docs/wincmd.rst |
||||
docs/_static/click-small.png |
||||
docs/_static/click-small@2x.png |
||||
docs/_static/click.png |
||||
docs/_static/click@2x.png |
||||
docs/_templates/sidebarintro.html |
||||
docs/_templates/sidebarlogo.html |
||||
examples/README |
||||
examples/aliases/README |
||||
examples/aliases/aliases.ini |
||||
examples/aliases/aliases.py |
||||
examples/aliases/setup.py |
||||
examples/colors/README |
||||
examples/colors/colors.py |
||||
examples/colors/setup.py |
||||
examples/complex/README |
||||
examples/complex/setup.py |
||||
examples/complex/complex/__init__.py |
||||
examples/complex/complex/cli.py |
||||
examples/complex/complex/commands/__init__.py |
||||
examples/complex/complex/commands/cmd_init.py |
||||
examples/complex/complex/commands/cmd_status.py |
||||
examples/imagepipe/.gitignore |
||||
examples/imagepipe/README |
||||
examples/imagepipe/example01.jpg |
||||
examples/imagepipe/example02.jpg |
||||
examples/imagepipe/imagepipe.py |
||||
examples/imagepipe/setup.py |
||||
examples/inout/README |
||||
examples/inout/inout.py |
||||
examples/inout/setup.py |
||||
examples/naval/README |
||||
examples/naval/naval.py |
||||
examples/naval/setup.py |
||||
examples/repo/README |
||||
examples/repo/repo.py |
||||
examples/repo/setup.py |
||||
examples/termui/README |
||||
examples/termui/setup.py |
||||
examples/termui/termui.py |
||||
examples/validation/README |
||||
examples/validation/setup.py |
||||
examples/validation/validation.py |
||||
tests/conftest.py |
||||
tests/test_arguments.py |
||||
tests/test_bashcomplete.py |
||||
tests/test_basic.py |
||||
tests/test_chain.py |
||||
tests/test_commands.py |
||||
tests/test_compat.py |
||||
tests/test_context.py |
||||
tests/test_defaults.py |
||||
tests/test_formatting.py |
||||
tests/test_imports.py |
||||
tests/test_normalization.py |
||||
tests/test_options.py |
||||
tests/test_termui.py |
||||
tests/test_testing.py |
||||
tests/test_utils.py |
@ -0,0 +1 @@ |
||||
|
@ -0,0 +1,38 @@ |
||||
../click/exceptions.py |
||||
../click/testing.py |
||||
../click/decorators.py |
||||
../click/parser.py |
||||
../click/formatting.py |
||||
../click/globals.py |
||||
../click/_termui_impl.py |
||||
../click/__init__.py |
||||
../click/_compat.py |
||||
../click/_winconsole.py |
||||
../click/_unicodefun.py |
||||
../click/_textwrap.py |
||||
../click/_bashcomplete.py |
||||
../click/core.py |
||||
../click/types.py |
||||
../click/termui.py |
||||
../click/utils.py |
||||
../click/exceptions.pyc |
||||
../click/testing.pyc |
||||
../click/decorators.pyc |
||||
../click/parser.pyc |
||||
../click/formatting.pyc |
||||
../click/globals.pyc |
||||
../click/_termui_impl.pyc |
||||
../click/__init__.pyc |
||||
../click/_compat.pyc |
||||
../click/_winconsole.pyc |
||||
../click/_unicodefun.pyc |
||||
../click/_textwrap.pyc |
||||
../click/_bashcomplete.pyc |
||||
../click/core.pyc |
||||
../click/types.pyc |
||||
../click/termui.pyc |
||||
../click/utils.pyc |
||||
SOURCES.txt |
||||
top_level.txt |
||||
PKG-INFO |
||||
dependency_links.txt |
@@ -0,0 +1 @@
click
@@ -0,0 +1,98 @@
# -*- coding: utf-8 -*-
"""
    click
    ~~~~~

    Click is a simple Python module that wraps the stdlib's optparse to make
    writing command line scripts fun. Unlike other modules, it's based around
    a simple API that does not come with too much magic and is composable.

    In case optparse ever gets removed from the stdlib, it will be shipped by
    this module.

    :copyright: (c) 2014 by Armin Ronacher.
    :license: BSD, see LICENSE for more details.
"""

# Core classes
from .core import Context, BaseCommand, Command, MultiCommand, Group, \
    CommandCollection, Parameter, Option, Argument

# Globals
from .globals import get_current_context

# Decorators
from .decorators import pass_context, pass_obj, make_pass_decorator, \
    command, group, argument, option, confirmation_option, \
    password_option, version_option, help_option

# Types
from .types import ParamType, File, Path, Choice, IntRange, Tuple, \
    STRING, INT, FLOAT, BOOL, UUID, UNPROCESSED

# Utilities
from .utils import echo, get_binary_stream, get_text_stream, open_file, \
    format_filename, get_app_dir, get_os_args

# Terminal functions
from .termui import prompt, confirm, get_terminal_size, echo_via_pager, \
    progressbar, clear, style, unstyle, secho, edit, launch, getchar, \
    pause

# Exceptions
from .exceptions import ClickException, UsageError, BadParameter, \
    FileError, Abort, NoSuchOption, BadOptionUsage, BadArgumentUsage, \
    MissingParameter

# Formatting
from .formatting import HelpFormatter, wrap_text

# Parsing
from .parser import OptionParser


__all__ = [
    # Core classes
    'Context', 'BaseCommand', 'Command', 'MultiCommand', 'Group',
    'CommandCollection', 'Parameter', 'Option', 'Argument',

    # Globals
    'get_current_context',

    # Decorators
    'pass_context', 'pass_obj', 'make_pass_decorator', 'command', 'group',
    'argument', 'option', 'confirmation_option', 'password_option',
    'version_option', 'help_option',

    # Types
    'ParamType', 'File', 'Path', 'Choice', 'IntRange', 'Tuple', 'STRING',
    'INT', 'FLOAT', 'BOOL', 'UUID', 'UNPROCESSED',

    # Utilities
    'echo', 'get_binary_stream', 'get_text_stream', 'open_file',
    'format_filename', 'get_app_dir', 'get_os_args',

    # Terminal functions
    'prompt', 'confirm', 'get_terminal_size', 'echo_via_pager',
    'progressbar', 'clear', 'style', 'unstyle', 'secho', 'edit', 'launch',
    'getchar', 'pause',

    # Exceptions
    'ClickException', 'UsageError', 'BadParameter', 'FileError',
    'Abort', 'NoSuchOption', 'BadOptionUsage', 'BadArgumentUsage',
    'MissingParameter',

    # Formatting
    'HelpFormatter', 'wrap_text',

    # Parsing
    'OptionParser',
]


# Controls if click should emit the warning about the use of unicode
# literals.
disable_unicode_literals_warning = False


__version__ = '6.7'
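
To see how the exports above fit together, here is a minimal sketch of a command line tool built on this API (the command, option, and argument names are made up for illustration):

.. code-block:: python

    import click

    @click.command()
    @click.option('--count', default=1, help='Number of greetings.')
    @click.argument('name')
    def hello(name, count):
        """Greet NAME a number of times."""
        for _ in range(count):
            click.echo('Hello, %s!' % name)

    if __name__ == '__main__':
        hello()
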
@@ -0,0 +1,83 @@
import os
import re
from .utils import echo
from .parser import split_arg_string
from .core import MultiCommand, Option


COMPLETION_SCRIPT = '''
%(complete_func)s() {
    COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \\
                   COMP_CWORD=$COMP_CWORD \\
                   %(autocomplete_var)s=complete $1 ) )
    return 0
}

complete -F %(complete_func)s -o default %(script_names)s
'''

_invalid_ident_char_re = re.compile(r'[^a-zA-Z0-9_]')


def get_completion_script(prog_name, complete_var):
    cf_name = _invalid_ident_char_re.sub('', prog_name.replace('-', '_'))
    return (COMPLETION_SCRIPT % {
        'complete_func': '_%s_completion' % cf_name,
        'script_names': prog_name,
        'autocomplete_var': complete_var,
    }).strip() + ';'


def resolve_ctx(cli, prog_name, args):
    ctx = cli.make_context(prog_name, args, resilient_parsing=True)
    while ctx.protected_args + ctx.args and isinstance(ctx.command, MultiCommand):
        a = ctx.protected_args + ctx.args
        cmd = ctx.command.get_command(ctx, a[0])
        if cmd is None:
            return None
        ctx = cmd.make_context(a[0], a[1:], parent=ctx, resilient_parsing=True)
    return ctx


def get_choices(cli, prog_name, args, incomplete):
    ctx = resolve_ctx(cli, prog_name, args)
    if ctx is None:
        return

    choices = []
    if incomplete and not incomplete[:1].isalnum():
        for param in ctx.command.params:
            if not isinstance(param, Option):
                continue
            choices.extend(param.opts)
            choices.extend(param.secondary_opts)
    elif isinstance(ctx.command, MultiCommand):
        choices.extend(ctx.command.list_commands(ctx))

    for item in choices:
        if item.startswith(incomplete):
            yield item


def do_complete(cli, prog_name):
    cwords = split_arg_string(os.environ['COMP_WORDS'])
    cword = int(os.environ['COMP_CWORD'])
    args = cwords[1:cword]
    try:
        incomplete = cwords[cword]
    except IndexError:
        incomplete = ''

    for item in get_choices(cli, prog_name, args, incomplete):
        echo(item)

    return True


def bashcomplete(cli, prog_name, complete_var, complete_instr):
    if complete_instr == 'source':
        echo(get_completion_script(prog_name, complete_var))
        return True
    elif complete_instr == 'complete':
        return do_complete(cli, prog_name)
    return False
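
For reference, ``get_completion_script`` above is what produces the bash snippet a user evals to enable tab completion. A minimal sketch of calling it directly (the ``_FLASK_COMPLETE`` variable name is an assumption following click's ``_<PROG>_COMPLETE`` convention; the real value is passed in by the CLI entry point rather than shown in this file):

.. code-block:: python

    from click._bashcomplete import get_completion_script

    # Print the bash completion function for a program named 'flask'.
    # '_FLASK_COMPLETE' is assumed here; click derives it from the program name.
    print(get_completion_script('flask', '_FLASK_COMPLETE'))
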
@ -0,0 +1,648 @@ |
||||
import re |
||||
import io |
||||
import os |
||||
import sys |
||||
import codecs |
||||
from weakref import WeakKeyDictionary |
||||
|
||||
|
||||
PY2 = sys.version_info[0] == 2 |
||||
WIN = sys.platform.startswith('win') |
||||
DEFAULT_COLUMNS = 80 |
||||
|
||||
|
||||
_ansi_re = re.compile('\033\[((?:\d|;)*)([a-zA-Z])') |
||||
|
||||
|
||||
def get_filesystem_encoding(): |
||||
return sys.getfilesystemencoding() or sys.getdefaultencoding() |
||||
|
||||
|
||||
def _make_text_stream(stream, encoding, errors): |
||||
if encoding is None: |
||||
encoding = get_best_encoding(stream) |
||||
if errors is None: |
||||
errors = 'replace' |
||||
return _NonClosingTextIOWrapper(stream, encoding, errors, |
||||
line_buffering=True) |
||||
|
||||
|
||||
def is_ascii_encoding(encoding): |
||||
"""Checks if a given encoding is ascii.""" |
||||
try: |
||||
return codecs.lookup(encoding).name == 'ascii' |
||||
except LookupError: |
||||
return False |
||||
|
||||
|
||||
def get_best_encoding(stream): |
||||
"""Returns the default stream encoding if not found.""" |
||||
rv = getattr(stream, 'encoding', None) or sys.getdefaultencoding() |
||||
if is_ascii_encoding(rv): |
||||
return 'utf-8' |
||||
return rv |
||||
|
||||
|
||||
class _NonClosingTextIOWrapper(io.TextIOWrapper): |
||||
|
||||
def __init__(self, stream, encoding, errors, **extra): |
||||
self._stream = stream = _FixupStream(stream) |
||||
io.TextIOWrapper.__init__(self, stream, encoding, errors, **extra) |
||||
|
||||
# The io module is a place where the Python 3 text behavior |
||||
# was forced upon Python 2, so we need to unbreak |
||||
# it to look like Python 2. |
||||
if PY2: |
||||
def write(self, x): |
||||
if isinstance(x, str) or is_bytes(x): |
||||
try: |
||||
self.flush() |
||||
except Exception: |
||||
pass |
||||
return self.buffer.write(str(x)) |
||||
return io.TextIOWrapper.write(self, x) |
||||
|
||||
def writelines(self, lines): |
||||
for line in lines: |
||||
self.write(line) |
||||
|
||||
def __del__(self): |
||||
try: |
||||
self.detach() |
||||
except Exception: |
||||
pass |
||||
|
||||
def isatty(self): |
||||
# https://bitbucket.org/pypy/pypy/issue/1803 |
||||
return self._stream.isatty() |
||||
|
||||
|
||||
class _FixupStream(object): |
||||
"""The new io interface needs more from streams than streams |
||||
traditionally implement. As such, this fix-up code is necessary in |
||||
some circumstances. |
||||
""" |
||||
|
||||
def __init__(self, stream): |
||||
self._stream = stream |
||||
|
||||
def __getattr__(self, name): |
||||
return getattr(self._stream, name) |
||||
|
||||
def read1(self, size): |
||||
f = getattr(self._stream, 'read1', None) |
||||
if f is not None: |
||||
return f(size) |
||||
# We only dispatch to readline instead of read in Python 2 as we |
||||
# do not want cause problems with the different implementation |
||||
# of line buffering. |
||||
if PY2: |
||||
return self._stream.readline(size) |
||||
return self._stream.read(size) |
||||
|
||||
def readable(self): |
||||
x = getattr(self._stream, 'readable', None) |
||||
if x is not None: |
||||
return x() |
||||
try: |
||||
self._stream.read(0) |
||||
except Exception: |
||||
return False |
||||
return True |
||||
|
||||
def writable(self): |
||||
x = getattr(self._stream, 'writable', None) |
||||
if x is not None: |
||||
return x() |
||||
try: |
||||
self._stream.write('') |
||||
except Exception: |
||||
try: |
||||
self._stream.write(b'') |
||||
except Exception: |
||||
return False |
||||
return True |
||||
|
||||
def seekable(self): |
||||
x = getattr(self._stream, 'seekable', None) |
||||
if x is not None: |
||||
return x() |
||||
try: |
||||
self._stream.seek(self._stream.tell()) |
||||
except Exception: |
||||
return False |
||||
return True |
||||
|
||||
|
||||
if PY2: |
||||
text_type = unicode |
||||
bytes = str |
||||
raw_input = raw_input |
||||
string_types = (str, unicode) |
||||
iteritems = lambda x: x.iteritems() |
||||
range_type = xrange |
||||
|
||||
def is_bytes(x): |
||||
return isinstance(x, (buffer, bytearray)) |
||||
|
||||
_identifier_re = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_]*$') |
||||
|
||||
# For Windows, we need to force stdout/stdin/stderr to binary if it's |
||||
# fetched for that. This obviously is not the most correct way to do |
||||
# it as it changes global state. Unfortunately, there does not seem to |
||||
# be a clear better way to do it as just reopening the file in binary |
||||
# mode does not change anything. |
||||
# |
||||
# An option would be to do what Python 3 does and to open the file as |
||||
# binary only, patch it back to the system, and then use a wrapper |
||||
# stream that converts newlines. It's not quite clear what's the |
||||
# correct option here. |
||||
# |
||||
# This code also lives in _winconsole for the fallback to the console |
||||
# emulation stream. |
||||
# |
||||
# There are also Windows environments where the `msvcrt` module is not |
||||
# available (which is why we use try/except instead of the WIN variable |
||||
# here), such as the Google App Engine development server on Windows. In |
||||
# those cases there is just nothing we can do. |
||||
try: |
||||
import msvcrt |
||||
except ImportError: |
||||
set_binary_mode = lambda x: x |
||||
else: |
||||
def set_binary_mode(f): |
||||
try: |
||||
fileno = f.fileno() |
||||
except Exception: |
||||
pass |
||||
else: |
||||
msvcrt.setmode(fileno, os.O_BINARY) |
||||
return f |
||||
|
||||
def isidentifier(x): |
||||
return _identifier_re.search(x) is not None |
||||
|
||||
def get_binary_stdin(): |
||||
return set_binary_mode(sys.stdin) |
||||
|
||||
def get_binary_stdout(): |
||||
return set_binary_mode(sys.stdout) |
||||
|
||||
def get_binary_stderr(): |
||||
return set_binary_mode(sys.stderr) |
||||
|
||||
def get_text_stdin(encoding=None, errors=None): |
||||
rv = _get_windows_console_stream(sys.stdin, encoding, errors) |
||||
if rv is not None: |
||||
return rv |
||||
return _make_text_stream(sys.stdin, encoding, errors) |
||||
|
||||
def get_text_stdout(encoding=None, errors=None): |
||||
rv = _get_windows_console_stream(sys.stdout, encoding, errors) |
||||
if rv is not None: |
||||
return rv |
||||
return _make_text_stream(sys.stdout, encoding, errors) |
||||
|
||||
def get_text_stderr(encoding=None, errors=None): |
||||
rv = _get_windows_console_stream(sys.stderr, encoding, errors) |
||||
if rv is not None: |
||||
return rv |
||||
return _make_text_stream(sys.stderr, encoding, errors) |
||||
|
||||
def filename_to_ui(value): |
||||
if isinstance(value, bytes): |
||||
value = value.decode(get_filesystem_encoding(), 'replace') |
||||
return value |
||||
else: |
||||
import io |
||||
text_type = str |
||||
raw_input = input |
||||
string_types = (str,) |
||||
range_type = range |
||||
isidentifier = lambda x: x.isidentifier() |
||||
iteritems = lambda x: iter(x.items()) |
||||
|
||||
def is_bytes(x): |
||||
return isinstance(x, (bytes, memoryview, bytearray)) |
||||
|
||||
def _is_binary_reader(stream, default=False): |
||||
try: |
||||
return isinstance(stream.read(0), bytes) |
||||
except Exception: |
||||
return default |
||||
# This happens in some cases where the stream was already |
||||
# closed. In this case, we assume the default. |
||||
|
||||
def _is_binary_writer(stream, default=False): |
||||
try: |
||||
stream.write(b'') |
||||
except Exception: |
||||
try: |
||||
stream.write('') |
||||
return False |
||||
except Exception: |
||||
pass |
||||
return default |
||||
return True |
||||
|
||||
def _find_binary_reader(stream): |
||||
# We need to figure out if the given stream is already binary. |
||||
# This can happen because the official docs recommend detaching |
||||
# the streams to get binary streams. Some code might do this, so |
||||
# we need to deal with this case explicitly. |
||||
if _is_binary_reader(stream, False): |
||||
return stream |
||||
|
||||
buf = getattr(stream, 'buffer', None) |
||||
|
||||
# Same situation here; this time we assume that the buffer is |
||||
# actually binary in case it's closed. |
||||
if buf is not None and _is_binary_reader(buf, True): |
||||
return buf |
||||
|
||||
def _find_binary_writer(stream): |
||||
# We need to figure out if the given stream is already binary. |
||||
# This can happen because the official docs recommend detatching |
||||
# the streams to get binary streams. Some code might do this, so |
||||
# we need to deal with this case explicitly. |
||||
if _is_binary_writer(stream, False): |
||||
return stream |
||||
|
||||
buf = getattr(stream, 'buffer', None) |
||||
|
||||
# Same situation here; this time we assume that the buffer is |
||||
# actually binary in case it's closed. |
||||
if buf is not None and _is_binary_writer(buf, True): |
||||
return buf |
||||
|
||||
def _stream_is_misconfigured(stream): |
||||
"""A stream is misconfigured if its encoding is ASCII.""" |
||||
# If the stream does not have an encoding set, we assume it's set |
||||
# to ASCII. This appears to happen in certain unittest |
||||
# environments. It's not quite clear what the correct behavior is |
||||
# but this at least will force Click to recover somehow. |
||||
return is_ascii_encoding(getattr(stream, 'encoding', None) or 'ascii') |
||||
|
||||
def _is_compatible_text_stream(stream, encoding, errors): |
||||
stream_encoding = getattr(stream, 'encoding', None) |
||||
stream_errors = getattr(stream, 'errors', None) |
||||
|
||||
# Perfect match. |
||||
if stream_encoding == encoding and stream_errors == errors: |
||||
return True |
||||
|
||||
# Otherwise, it's only a compatible stream if we did not ask for |
||||
# an encoding. |
||||
if encoding is None: |
||||
return stream_encoding is not None |
||||
|
||||
return False |
||||
|
||||
def _force_correct_text_reader(text_reader, encoding, errors): |
||||
if _is_binary_reader(text_reader, False): |
||||
binary_reader = text_reader |
||||
else: |
||||
# If there is no target encoding set, we need to verify that the |
||||
# reader is not actually misconfigured. |
||||
if encoding is None and not _stream_is_misconfigured(text_reader): |
||||
return text_reader |
||||
|
||||
if _is_compatible_text_stream(text_reader, encoding, errors): |
||||
return text_reader |
||||
|
||||
# If the reader has no encoding, we try to find the underlying |
||||
# binary reader for it. If that fails because the environment is |
||||
# misconfigured, we silently go with the same reader because this |
||||
# is too common to happen. In that case, mojibake is better than |
||||
# exceptions. |
||||
binary_reader = _find_binary_reader(text_reader) |
||||
if binary_reader is None: |
||||
return text_reader |
||||
|
||||
# At this point, we default the errors to replace instead of strict |
||||
# because nobody handles those errors anyway, and at this point the |
||||
# stream is too far gone for anything to repair it. |
||||
if errors is None: |
||||
errors = 'replace' |
||||
return _make_text_stream(binary_reader, encoding, errors) |
||||
|
||||
def _force_correct_text_writer(text_writer, encoding, errors): |
||||
if _is_binary_writer(text_writer, False): |
||||
binary_writer = text_writer |
||||
else: |
||||
# If there is no target encoding set, we need to verify that the |
||||
# writer is not actually misconfigured. |
||||
if encoding is None and not _stream_is_misconfigured(text_writer): |
||||
return text_writer |
||||
|
||||
if _is_compatible_text_stream(text_writer, encoding, errors): |
||||
return text_writer |
||||
|
||||
# If the writer has no encoding, we try to find the underlying |
||||
# binary writer for it. If that fails because the environment is |
||||
# misconfigured, we silently go with the same writer because this |
||||
# is too common to happen. In that case, mojibake is better than |
||||
# exceptions. |
||||
binary_writer = _find_binary_writer(text_writer) |
||||
if binary_writer is None: |
||||
return text_writer |
||||
|
||||
# At this point, we default the errors to replace instead of strict |
||||
# because nobody handles those errors anyway, and at this point the |
||||
# stream is too far gone for anything to repair it. |
||||
if errors is None: |
||||
errors = 'replace' |
||||
return _make_text_stream(binary_writer, encoding, errors) |
||||
|
||||
def get_binary_stdin(): |
||||
reader = _find_binary_reader(sys.stdin) |
||||
if reader is None: |
||||
raise RuntimeError('Was not able to determine binary ' |
||||
'stream for sys.stdin.') |
||||
return reader |
||||
|
||||
def get_binary_stdout(): |
||||
writer = _find_binary_writer(sys.stdout) |
||||
if writer is None: |
||||
raise RuntimeError('Was not able to determine binary ' |
||||
'stream for sys.stdout.') |
||||
return writer |
||||
|
||||
def get_binary_stderr(): |
||||
writer = _find_binary_writer(sys.stderr) |
||||
if writer is None: |
||||
raise RuntimeError('Was not able to determine binary ' |
||||
'stream for sys.stderr.') |
||||
return writer |
||||
|
||||
def get_text_stdin(encoding=None, errors=None): |
||||
rv = _get_windows_console_stream(sys.stdin, encoding, errors) |
||||
if rv is not None: |
||||
return rv |
||||
return _force_correct_text_reader(sys.stdin, encoding, errors) |
||||
|
||||
def get_text_stdout(encoding=None, errors=None): |
||||
rv = _get_windows_console_stream(sys.stdout, encoding, errors) |
||||
if rv is not None: |
||||
return rv |
||||
return _force_correct_text_writer(sys.stdout, encoding, errors) |
||||
|
||||
def get_text_stderr(encoding=None, errors=None): |
||||
rv = _get_windows_console_stream(sys.stderr, encoding, errors) |
||||
if rv is not None: |
||||
return rv |
||||
return _force_correct_text_writer(sys.stderr, encoding, errors) |
||||
|
||||
def filename_to_ui(value): |
||||
if isinstance(value, bytes): |
||||
value = value.decode(get_filesystem_encoding(), 'replace') |
||||
else: |
||||
value = value.encode('utf-8', 'surrogateescape') \ |
||||
.decode('utf-8', 'replace') |
||||
return value |
||||
|
||||
|
||||
def get_streerror(e, default=None): |
||||
if hasattr(e, 'strerror'): |
||||
msg = e.strerror |
||||
else: |
||||
if default is not None: |
||||
msg = default |
||||
else: |
||||
msg = str(e) |
||||
if isinstance(msg, bytes): |
||||
msg = msg.decode('utf-8', 'replace') |
||||
return msg |
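A small sketch of how the helper above behaves: it prefers the OS-level `strerror` when the exception carries one, and otherwise falls back to the given default or `str(e)` (the path below is a deliberately nonexistent example).

    try:
        open('/no/such/path')
    except (IOError, OSError) as e:
        print(get_streerror(e))                    # e.g. 'No such file or directory'
    print(get_streerror(ValueError('bad input')))  # no strerror -> 'bad input'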
||||
|
||||
|
||||
def open_stream(filename, mode='r', encoding=None, errors='strict', |
||||
atomic=False): |
||||
# Standard streams first. These are simple because they don't need |
||||
# special handling for the atomic flag. It's entirely ignored. |
||||
if filename == '-': |
||||
if 'w' in mode: |
||||
if 'b' in mode: |
||||
return get_binary_stdout(), False |
||||
return get_text_stdout(encoding=encoding, errors=errors), False |
||||
if 'b' in mode: |
||||
return get_binary_stdin(), False |
||||
return get_text_stdin(encoding=encoding, errors=errors), False |
||||
|
||||
# Non-atomic writes directly go out through the regular open functions. |
||||
if not atomic: |
||||
if encoding is None: |
||||
return open(filename, mode), True |
||||
return io.open(filename, mode, encoding=encoding, errors=errors), True |
||||
|
||||
# Some usability stuff for atomic writes |
||||
if 'a' in mode: |
||||
raise ValueError( |
||||
'Appending to an existing file is not supported, because that ' |
||||
'would involve an expensive `copy`-operation to a temporary ' |
||||
'file. Open the file in normal `w`-mode and copy explicitly ' |
||||
'if that\'s what you\'re after.' |
||||
) |
||||
if 'x' in mode: |
||||
raise ValueError('Use the `overwrite`-parameter instead.') |
||||
if 'w' not in mode: |
||||
raise ValueError('Atomic writes only make sense with `w`-mode.') |
||||
|
||||
# Atomic writes are more complicated. They work by opening a file |
||||
# as a proxy in the same folder and then using the fdopen |
||||
# functionality to wrap it in a Python file. Then we wrap it in an |
||||
# atomic file that moves the file over on close. |
||||
import tempfile |
||||
fd, tmp_filename = tempfile.mkstemp(dir=os.path.dirname(filename), |
||||
prefix='.__atomic-write') |
||||
|
||||
if encoding is not None: |
||||
f = io.open(fd, mode, encoding=encoding, errors=errors) |
||||
else: |
||||
f = os.fdopen(fd, mode) |
||||
|
||||
return _AtomicFile(f, tmp_filename, filename), True |
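A rough usage sketch of the atomic branch above (the filename and payload are made-up): the data goes to a temporary file in the same directory, which only replaces the real file when the returned `_AtomicFile` is closed cleanly.

    f, should_close = open_stream('settings.json', 'wb', atomic=True)
    try:
        f.write(b'{"key": "value"}\n')
    finally:
        if should_close:
            f.close()  # renames the temporary file over 'settings.json'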
||||
|
||||
|
||||
# Used in a destructor call, needs extra protection from interpreter cleanup. |
||||
if hasattr(os, 'replace'): |
||||
_replace = os.replace |
||||
_can_replace = True |
||||
else: |
||||
_replace = os.rename |
||||
_can_replace = not WIN |
||||
|
||||
|
||||
class _AtomicFile(object): |
||||
|
||||
def __init__(self, f, tmp_filename, real_filename): |
||||
self._f = f |
||||
self._tmp_filename = tmp_filename |
||||
self._real_filename = real_filename |
||||
self.closed = False |
||||
|
||||
@property |
||||
def name(self): |
||||
return self._real_filename |
||||
|
||||
def close(self, delete=False): |
||||
if self.closed: |
||||
return |
||||
self._f.close() |
||||
if not _can_replace: |
||||
try: |
||||
os.remove(self._real_filename) |
||||
except OSError: |
||||
pass |
||||
_replace(self._tmp_filename, self._real_filename) |
||||
self.closed = True |
||||
|
||||
def __getattr__(self, name): |
||||
return getattr(self._f, name) |
||||
|
||||
def __enter__(self): |
||||
return self |
||||
|
||||
def __exit__(self, exc_type, exc_value, tb): |
||||
self.close(delete=exc_type is not None) |
||||
|
||||
def __repr__(self): |
||||
return repr(self._f) |
||||
|
||||
|
||||
auto_wrap_for_ansi = None |
||||
colorama = None |
||||
get_winterm_size = None |
||||
|
||||
|
||||
def strip_ansi(value): |
||||
return _ansi_re.sub('', value) |
||||
|
||||
|
||||
def should_strip_ansi(stream=None, color=None): |
||||
if color is None: |
||||
if stream is None: |
||||
stream = sys.stdin |
||||
return not isatty(stream) |
||||
return not color |
||||
|
||||
|
||||
# If we're on Windows, we provide transparent integration through |
||||
# colorama. This will make ANSI colors through the echo function |
||||
# work automatically. |
||||
if WIN: |
||||
# Windows has a smaller terminal |
||||
DEFAULT_COLUMNS = 79 |
||||
|
||||
from ._winconsole import _get_windows_console_stream |
||||
|
||||
def _get_argv_encoding(): |
||||
import locale |
||||
return locale.getpreferredencoding() |
||||
|
||||
if PY2: |
||||
def raw_input(prompt=''): |
||||
sys.stderr.flush() |
||||
if prompt: |
||||
stdout = _default_text_stdout() |
||||
stdout.write(prompt) |
||||
stdin = _default_text_stdin() |
||||
return stdin.readline().rstrip('\r\n') |
||||
|
||||
try: |
||||
import colorama |
||||
except ImportError: |
||||
pass |
||||
else: |
||||
_ansi_stream_wrappers = WeakKeyDictionary() |
||||
|
||||
def auto_wrap_for_ansi(stream, color=None): |
||||
"""This function wraps a stream so that calls through colorama |
||||
are issued to the win32 console API to recolor on demand. It |
||||
also makes sure that the colors are reset if a write call is |
||||
interrupted, so the console is not left in a broken state afterwards. |
||||
""" |
||||
try: |
||||
cached = _ansi_stream_wrappers.get(stream) |
||||
except Exception: |
||||
cached = None |
||||
if cached is not None: |
||||
return cached |
||||
strip = should_strip_ansi(stream, color) |
||||
ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) |
||||
rv = ansi_wrapper.stream |
||||
_write = rv.write |
||||
|
||||
def _safe_write(s): |
||||
try: |
||||
return _write(s) |
||||
except: |
||||
ansi_wrapper.reset_all() |
||||
raise |
||||
|
||||
rv.write = _safe_write |
||||
try: |
||||
_ansi_stream_wrappers[stream] = rv |
||||
except Exception: |
||||
pass |
||||
return rv |
||||
|
||||
def get_winterm_size(): |
||||
win = colorama.win32.GetConsoleScreenBufferInfo( |
||||
colorama.win32.STDOUT).srWindow |
||||
return win.Right - win.Left, win.Bottom - win.Top |
||||
else: |
||||
def _get_argv_encoding(): |
||||
return getattr(sys.stdin, 'encoding', None) or get_filesystem_encoding() |
||||
|
||||
_get_windows_console_stream = lambda *x: None |
||||
|
||||
|
||||
def term_len(x): |
||||
return len(strip_ansi(x)) |
||||
|
||||
|
||||
def isatty(stream): |
||||
try: |
||||
return stream.isatty() |
||||
except Exception: |
||||
return False |
||||
|
||||
|
||||
def _make_cached_stream_func(src_func, wrapper_func): |
||||
cache = WeakKeyDictionary() |
||||
def func(): |
||||
stream = src_func() |
||||
try: |
||||
rv = cache.get(stream) |
||||
except Exception: |
||||
rv = None |
||||
if rv is not None: |
||||
return rv |
||||
rv = wrapper_func() |
||||
try: |
||||
cache[stream] = rv |
||||
except Exception: |
||||
pass |
||||
return rv |
||||
return func |
||||
|
||||
|
||||
_default_text_stdin = _make_cached_stream_func( |
||||
lambda: sys.stdin, get_text_stdin) |
||||
_default_text_stdout = _make_cached_stream_func( |
||||
lambda: sys.stdout, get_text_stdout) |
||||
_default_text_stderr = _make_cached_stream_func( |
||||
lambda: sys.stderr, get_text_stderr) |
||||
|
||||
|
||||
binary_streams = { |
||||
'stdin': get_binary_stdin, |
||||
'stdout': get_binary_stdout, |
||||
'stderr': get_binary_stderr, |
||||
} |
||||
|
||||
text_streams = { |
||||
'stdin': get_text_stdin, |
||||
'stdout': get_text_stdout, |
||||
'stderr': get_text_stderr, |
||||
} |
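These two maps back the public helpers `click.get_binary_stream` and `click.get_text_stream`, which is how callers get the platform-appropriate wrappers above without touching this module directly. A short sketch:

    import click

    out = click.get_binary_stream('stdout')
    out.write(b'raw bytes, no encoding step\n')

    err = click.get_text_stream('stderr', encoding='utf-8')
    err.write(u'text with an explicit encoding\n')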
@ -0,0 +1,547 @@ |
||||
""" |
||||
click._termui_impl |
||||
~~~~~~~~~~~~~~~~~~ |
||||
|
||||
This module contains implementations for the termui module. To keep the |
||||
import time of Click down, some infrequently used functionality is placed |
||||
in this module and only imported as needed. |
||||
|
||||
:copyright: (c) 2014 by Armin Ronacher. |
||||
:license: BSD, see LICENSE for more details. |
||||
""" |
||||
import os |
||||
import sys |
||||
import time |
||||
import math |
||||
from ._compat import _default_text_stdout, range_type, PY2, isatty, \ |
||||
open_stream, strip_ansi, term_len, get_best_encoding, WIN |
||||
from .utils import echo |
||||
from .exceptions import ClickException |
||||
|
||||
|
||||
if os.name == 'nt': |
||||
BEFORE_BAR = '\r' |
||||
AFTER_BAR = '\n' |
||||
else: |
||||
BEFORE_BAR = '\r\033[?25l' |
||||
AFTER_BAR = '\033[?25h\n' |
||||
|
||||
|
||||
def _length_hint(obj): |
||||
"""Returns the length hint of an object.""" |
||||
try: |
||||
return len(obj) |
||||
except (AttributeError, TypeError): |
||||
try: |
||||
get_hint = type(obj).__length_hint__ |
||||
except AttributeError: |
||||
return None |
||||
try: |
||||
hint = get_hint(obj) |
||||
except TypeError: |
||||
return None |
||||
if hint is NotImplemented or \ |
||||
not isinstance(hint, (int, long) if PY2 else int) or \ |
||||
hint < 0: |
||||
return None |
||||
return hint |
||||
|
||||
|
||||
class ProgressBar(object): |
||||
|
||||
def __init__(self, iterable, length=None, fill_char='#', empty_char=' ', |
||||
bar_template='%(bar)s', info_sep=' ', show_eta=True, |
||||
show_percent=None, show_pos=False, item_show_func=None, |
||||
label=None, file=None, color=None, width=30): |
||||
self.fill_char = fill_char |
||||
self.empty_char = empty_char |
||||
self.bar_template = bar_template |
||||
self.info_sep = info_sep |
||||
self.show_eta = show_eta |
||||
self.show_percent = show_percent |
||||
self.show_pos = show_pos |
||||
self.item_show_func = item_show_func |
||||
self.label = label or '' |
||||
if file is None: |
||||
file = _default_text_stdout() |
||||
self.file = file |
||||
self.color = color |
||||
self.width = width |
||||
self.autowidth = width == 0 |
||||
|
||||
if length is None: |
||||
length = _length_hint(iterable) |
||||
if iterable is None: |
||||
if length is None: |
||||
raise TypeError('iterable or length is required') |
||||
iterable = range_type(length) |
||||
self.iter = iter(iterable) |
||||
self.length = length |
||||
self.length_known = length is not None |
||||
self.pos = 0 |
||||
self.avg = [] |
||||
self.start = self.last_eta = time.time() |
||||
self.eta_known = False |
||||
self.finished = False |
||||
self.max_width = None |
||||
self.entered = False |
||||
self.current_item = None |
||||
self.is_hidden = not isatty(self.file) |
||||
self._last_line = None |
||||
|
||||
def __enter__(self): |
||||
self.entered = True |
||||
self.render_progress() |
||||
return self |
||||
|
||||
def __exit__(self, exc_type, exc_value, tb): |
||||
self.render_finish() |
||||
|
||||
def __iter__(self): |
||||
if not self.entered: |
||||
raise RuntimeError('You need to use progress bars in a with block.') |
||||
self.render_progress() |
||||
return self |
||||
|
||||
def render_finish(self): |
||||
if self.is_hidden: |
||||
return |
||||
self.file.write(AFTER_BAR) |
||||
self.file.flush() |
||||
|
||||
@property |
||||
def pct(self): |
||||
if self.finished: |
||||
return 1.0 |
||||
return min(self.pos / (float(self.length) or 1), 1.0) |
||||
|
||||
@property |
||||
def time_per_iteration(self): |
||||
if not self.avg: |
||||
return 0.0 |
||||
return sum(self.avg) / float(len(self.avg)) |
||||
|
||||
@property |
||||
def eta(self): |
||||
if self.length_known and not self.finished: |
||||
return self.time_per_iteration * (self.length - self.pos) |
||||
return 0.0 |
||||
|
||||
def format_eta(self): |
||||
if self.eta_known: |
||||
t = self.eta + 1 |
||||
seconds = t % 60 |
||||
t /= 60 |
||||
minutes = t % 60 |
||||
t /= 60 |
||||
hours = t % 24 |
||||
t /= 24 |
||||
if t > 0: |
||||
days = t |
||||
return '%dd %02d:%02d:%02d' % (days, hours, minutes, seconds) |
||||
else: |
||||
return '%02d:%02d:%02d' % (hours, minutes, seconds) |
||||
return '' |
||||
|
||||
def format_pos(self): |
||||
pos = str(self.pos) |
||||
if self.length_known: |
||||
pos += '/%s' % self.length |
||||
return pos |
||||
|
||||
def format_pct(self): |
||||
return ('% 4d%%' % int(self.pct * 100))[1:] |
||||
|
||||
def format_progress_line(self): |
||||
show_percent = self.show_percent |
||||
|
||||
info_bits = [] |
||||
if self.length_known: |
||||
bar_length = int(self.pct * self.width) |
||||
bar = self.fill_char * bar_length |
||||
bar += self.empty_char * (self.width - bar_length) |
||||
if show_percent is None: |
||||
show_percent = not self.show_pos |
||||
else: |
||||
if self.finished: |
||||
bar = self.fill_char * self.width |
||||
else: |
||||
bar = list(self.empty_char * (self.width or 1)) |
||||
if self.time_per_iteration != 0: |
||||
bar[int((math.cos(self.pos * self.time_per_iteration) |
||||
/ 2.0 + 0.5) * self.width)] = self.fill_char |
||||
bar = ''.join(bar) |
||||
|
||||
if self.show_pos: |
||||
info_bits.append(self.format_pos()) |
||||
if show_percent: |
||||
info_bits.append(self.format_pct()) |
||||
if self.show_eta and self.eta_known and not self.finished: |
||||
info_bits.append(self.format_eta()) |
||||
if self.item_show_func is not None: |
||||
item_info = self.item_show_func(self.current_item) |
||||
if item_info is not None: |
||||
info_bits.append(item_info) |
||||
|
||||
return (self.bar_template % { |
||||
'label': self.label, |
||||
'bar': bar, |
||||
'info': self.info_sep.join(info_bits) |
||||
}).rstrip() |
||||
|
||||
def render_progress(self): |
||||
from .termui import get_terminal_size |
||||
nl = False |
||||
|
||||
if self.is_hidden: |
||||
buf = [self.label] |
||||
nl = True |
||||
else: |
||||
buf = [] |
||||
# Update width in case the terminal has been resized |
||||
if self.autowidth: |
||||
old_width = self.width |
||||
self.width = 0 |
||||
clutter_length = term_len(self.format_progress_line()) |
||||
new_width = max(0, get_terminal_size()[0] - clutter_length) |
||||
if new_width < old_width: |
||||
buf.append(BEFORE_BAR) |
||||
buf.append(' ' * self.max_width) |
||||
self.max_width = new_width |
||||
self.width = new_width |
||||
|
||||
clear_width = self.width |
||||
if self.max_width is not None: |
||||
clear_width = self.max_width |
||||
|
||||
buf.append(BEFORE_BAR) |
||||
line = self.format_progress_line() |
||||
line_len = term_len(line) |
||||
if self.max_width is None or self.max_width < line_len: |
||||
self.max_width = line_len |
||||
buf.append(line) |
||||
|
||||
buf.append(' ' * (clear_width - line_len)) |
||||
line = ''.join(buf) |
||||
|
||||
# Render the line only if it changed. |
||||
if line != self._last_line: |
||||
self._last_line = line |
||||
echo(line, file=self.file, color=self.color, nl=nl) |
||||
self.file.flush() |
||||
|
||||
def make_step(self, n_steps): |
||||
self.pos += n_steps |
||||
if self.length_known and self.pos >= self.length: |
||||
self.finished = True |
||||
|
||||
if (time.time() - self.last_eta) < 1.0: |
||||
return |
||||
|
||||
self.last_eta = time.time() |
||||
self.avg = self.avg[-6:] + [-(self.start - time.time()) / (self.pos)] |
||||
|
||||
self.eta_known = self.length_known |
||||
|
||||
def update(self, n_steps): |
||||
self.make_step(n_steps) |
||||
self.render_progress() |
||||
|
||||
def finish(self): |
||||
self.eta_known = 0 |
||||
self.current_item = None |
||||
self.finished = True |
||||
|
||||
def next(self): |
||||
if self.is_hidden: |
||||
return next(self.iter) |
||||
try: |
||||
rv = next(self.iter) |
||||
self.current_item = rv |
||||
except StopIteration: |
||||
self.finish() |
||||
self.render_progress() |
||||
raise StopIteration() |
||||
else: |
||||
self.update(1) |
||||
return rv |
||||
|
||||
if not PY2: |
||||
__next__ = next |
||||
del next |
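`ProgressBar` is normally built by the public `click.progressbar` helper rather than instantiated directly; it is then used as a context manager and iterated, which drives `update()` and the rendering above. A small sketch with placeholder work:

    import time
    import click

    with click.progressbar(range(100), label='Processing') as bar:
        for item in bar:      # each step calls update(1) and re-renders
            time.sleep(0.01)  # stand-in for real work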
||||
|
||||
|
||||
def pager(text, color=None): |
||||
"""Decide what method to use for paging through text.""" |
||||
stdout = _default_text_stdout() |
||||
if not isatty(sys.stdin) or not isatty(stdout): |
||||
return _nullpager(stdout, text, color) |
||||
pager_cmd = (os.environ.get('PAGER', None) or '').strip() |
||||
if pager_cmd: |
||||
if WIN: |
||||
return _tempfilepager(text, pager_cmd, color) |
||||
return _pipepager(text, pager_cmd, color) |
||||
if os.environ.get('TERM') in ('dumb', 'emacs'): |
||||
return _nullpager(stdout, text, color) |
||||
if WIN or sys.platform.startswith('os2'): |
||||
return _tempfilepager(text, 'more <', color) |
||||
if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0: |
||||
return _pipepager(text, 'less', color) |
||||
|
||||
import tempfile |
||||
fd, filename = tempfile.mkstemp() |
||||
os.close(fd) |
||||
try: |
||||
if hasattr(os, 'system') and os.system('more "%s"' % filename) == 0: |
||||
return _pipepager(text, 'more', color) |
||||
return _nullpager(stdout, text, color) |
||||
finally: |
||||
os.unlink(filename) |
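This selection logic sits behind `click.echo_via_pager`; the `PAGER` environment variable (and, for `less`, its `-r`/`-R` flags) controls whether output is piped, dumped to a temporary file, or simply printed. A brief sketch:

    import click

    # Pages the text when stdin/stdout are terminals, otherwise falls
    # back to plain printing via _nullpager.
    click.echo_via_pager('\n'.join('line %d' % n for n in range(200)))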
||||
|
||||
|
||||
def _pipepager(text, cmd, color): |
||||
"""Page through text by feeding it to another program. Invoking a |
||||
pager through this might support colors. |
||||
""" |
||||
import subprocess |
||||
env = dict(os.environ) |
||||
|
||||
# If we're piping to less, we might support colors, provided less |
||||
# passes raw control characters through (the -r/-R flag, given on |
||||
# the command line or via the LESS environment variable). |
||||
cmd_detail = cmd.rsplit('/', 1)[-1].split() |
||||
if color is None and cmd_detail[0] == 'less': |
||||
less_flags = os.environ.get('LESS', '') + ' '.join(cmd_detail[1:]) |
||||
if not less_flags: |
||||
env['LESS'] = '-R' |
||||
color = True |
||||
elif 'r' in less_flags or 'R' in less_flags: |
||||
color = True |
||||
|
||||
if not color: |
||||
text = strip_ansi(text) |
||||
|
||||
c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, |
||||
env=env) |
||||
encoding = get_best_encoding(c.stdin) |
||||
try: |
||||
c.stdin.write(text.encode(encoding, 'replace')) |
||||
c.stdin.close() |
||||
except (IOError, KeyboardInterrupt): |
||||
pass |
||||
|
||||
# Less doesn't respect ^C, but catches it for its own UI purposes (aborting |
||||
# search or other commands inside less). |
||||
# |
||||
# That means when the user hits ^C, the parent process (click) terminates, |
||||
# but less is still alive, paging the output and messing up the terminal. |
||||
# |
||||
# If the user wants to make the pager exit on ^C, they should set |
||||
# `LESS='-K'`. It's not our decision to make. |
||||
while True: |
||||
try: |
||||
c.wait() |
||||
except KeyboardInterrupt: |
||||
pass |
||||
else: |
||||
break |
||||
|
||||
|
||||
def _tempfilepager(text, cmd, color): |
||||
"""Page through text by invoking a program on a temporary file.""" |
||||
import tempfile |
||||
filename = tempfile.mktemp() |
||||
if not color: |
||||
text = strip_ansi(text) |
||||
encoding = get_best_encoding(sys.stdout) |
||||
with open_stream(filename, 'wb')[0] as f: |
||||
f.write(text.encode(encoding)) |
||||
try: |
||||
os.system(cmd + ' "' + filename + '"') |
||||
finally: |
||||
os.unlink(filename) |
||||
|
||||
|
||||
def _nullpager(stream, text, color): |
||||
"""Simply print unformatted text. This is the ultimate fallback.""" |
||||
if not color: |
||||
text = strip_ansi(text) |
||||
stream.write(text) |
||||
|
||||
|
||||
class Editor(object): |
||||
|
||||
def __init__(self, editor=None, env=None, require_save=True, |
||||
extension='.txt'): |
||||
self.editor = editor |
||||
self.env = env |
||||
self.require_save = require_save |
||||
self.extension = extension |
||||
|
||||
def get_editor(self): |
||||
if self.editor is not None: |
||||
return self.editor |
||||
for key in 'VISUAL', 'EDITOR': |
||||
rv = os.environ.get(key) |
||||
if rv: |
||||
return rv |
||||
if WIN: |
||||
return 'notepad' |
||||
for editor in 'vim', 'nano': |
||||
if os.system('which %s >/dev/null 2>&1' % editor) == 0: |
||||
return editor |
||||
return 'vi' |
||||
|
||||
def edit_file(self, filename): |
||||
import subprocess |
||||
editor = self.get_editor() |
||||
if self.env: |
||||
environ = os.environ.copy() |
||||
environ.update(self.env) |
||||
else: |
||||
environ = None |
||||
try: |
||||
c = subprocess.Popen('%s "%s"' % (editor, filename), |
||||
env=environ, shell=True) |
||||
exit_code = c.wait() |
||||
if exit_code != 0: |
||||
raise ClickException('%s: Editing failed!' % editor) |
||||
except OSError as e: |
||||
raise ClickException('%s: Editing failed: %s' % (editor, e)) |
||||
|
||||
def edit(self, text): |
||||
import tempfile |
||||
|
||||
text = text or '' |
||||
if text and not text.endswith('\n'): |
||||
text += '\n' |
||||
|
||||
fd, name = tempfile.mkstemp(prefix='editor-', suffix=self.extension) |
||||
try: |
||||
if WIN: |
||||
encoding = 'utf-8-sig' |
||||
text = text.replace('\n', '\r\n') |
||||
else: |
||||
encoding = 'utf-8' |
||||
text = text.encode(encoding) |
||||
|
||||
f = os.fdopen(fd, 'wb') |
||||
f.write(text) |
||||
f.close() |
||||
timestamp = os.path.getmtime(name) |
||||
|
||||
self.edit_file(name) |
||||
|
||||
if self.require_save \ |
||||
and os.path.getmtime(name) == timestamp: |
||||
return None |
||||
|
||||
f = open(name, 'rb') |
||||
try: |
||||
rv = f.read() |
||||
finally: |
||||
f.close() |
||||
return rv.decode('utf-8-sig').replace('\r\n', '\n') |
||||
finally: |
||||
os.unlink(name) |
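`Editor` backs the public `click.edit` helper: the text is written to a temporary file, the configured editor is launched, and the contents are read back (or `None` is returned when the file was not saved). A usage sketch with placeholder text:

    import click

    message = click.edit('# Describe your change\n')
    if message is None:
        click.echo('Aborted: the file was not saved.')
    else:
        click.echo('Got %d characters.' % len(message))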
||||
|
||||
|
||||
def open_url(url, wait=False, locate=False): |
||||
import subprocess |
||||
|
||||
def _unquote_file(url): |
||||
try: |
||||
import urllib |
||||
except ImportError: |
||||
import urllib |
||||
if url.startswith('file://'): |
||||
url = urllib.unquote(url[7:]) |
||||
return url |
||||
|
||||
if sys.platform == 'darwin': |
||||
args = ['open'] |
||||
if wait: |
||||
args.append('-W') |
||||
if locate: |
||||
args.append('-R') |
||||
args.append(_unquote_file(url)) |
||||
null = open('/dev/null', 'w') |
||||
try: |
||||
return subprocess.Popen(args, stderr=null).wait() |
||||
finally: |
||||
null.close() |
||||
elif WIN: |
||||
if locate: |
||||
url = _unquote_file(url) |
||||
args = 'explorer /select,"%s"' % _unquote_file( |
||||
url.replace('"', '')) |
||||
else: |
||||
args = 'start %s "" "%s"' % ( |
||||
wait and '/WAIT' or '', url.replace('"', '')) |
||||
return os.system(args) |
||||
|
||||
try: |
||||
if locate: |
||||
url = os.path.dirname(_unquote_file(url)) or '.' |
||||
else: |
||||
url = _unquote_file(url) |
||||
c = subprocess.Popen(['xdg-open', url]) |
||||
if wait: |
||||
return c.wait() |
||||
return 0 |
||||
except OSError: |
||||
if url.startswith(('http://', 'https://')) and not locate and not wait: |
||||
import webbrowser |
||||
webbrowser.open(url) |
||||
return 0 |
||||
return 1 |
||||
|
||||
|
||||
def _translate_ch_to_exc(ch): |
||||
if ch == '\x03': |
||||
raise KeyboardInterrupt() |
||||
if ch == '\x04': |
||||
raise EOFError() |
||||
|
||||
|
||||
if WIN: |
||||
import msvcrt |
||||
|
||||
def getchar(echo): |
||||
rv = msvcrt.getch() |
||||
if echo: |
||||
msvcrt.putchar(rv) |
||||
_translate_ch_to_exc(rv) |
||||
if PY2: |
||||
enc = getattr(sys.stdin, 'encoding', None) |
||||
if enc is not None: |
||||
rv = rv.decode(enc, 'replace') |
||||
else: |
||||
rv = rv.decode('cp1252', 'replace') |
||||
return rv |
||||
else: |
||||
import tty |
||||
import termios |
||||
|
||||
def getchar(echo): |
||||
if not isatty(sys.stdin): |
||||
f = open('/dev/tty') |
||||
fd = f.fileno() |
||||
else: |
||||
fd = sys.stdin.fileno() |
||||
f = None |
||||
try: |
||||
old_settings = termios.tcgetattr(fd) |
||||
try: |
||||
tty.setraw(fd) |
||||
ch = os.read(fd, 32) |
||||
if echo and isatty(sys.stdout): |
||||
sys.stdout.write(ch) |
||||
finally: |
||||
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) |
||||
sys.stdout.flush() |
||||
if f is not None: |
||||
f.close() |
||||
except termios.error: |
||||
pass |
||||
_translate_ch_to_exc(ch) |
||||
return ch.decode(get_best_encoding(sys.stdin), 'replace') |
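Both platform branches are exposed as `click.getchar`, which reads one key without waiting for Enter and maps Ctrl-C/Ctrl-D to `KeyboardInterrupt`/`EOFError` via `_translate_ch_to_exc`. A small interactive sketch:

    import click

    click.echo('Continue? [y/n] ', nl=False)
    ch = click.getchar()
    click.echo(ch)
    if ch.lower() != 'y':
        raise SystemExit(1)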
@ -0,0 +1,38 @@ |
||||
import textwrap |
||||
from contextlib import contextmanager |
||||
|
||||
|
||||
class TextWrapper(textwrap.TextWrapper): |
||||
|
||||
def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width): |
||||
space_left = max(width - cur_len, 1) |
||||
|
||||
if self.break_long_words: |
||||
last = reversed_chunks[-1] |
||||
cut = last[:space_left] |
||||
res = last[space_left:] |
||||
cur_line.append(cut) |
||||
reversed_chunks[-1] = res |
||||
elif not cur_line: |
||||
cur_line.append(reversed_chunks.pop()) |
||||
|
||||
@contextmanager |
||||
def extra_indent(self, indent): |
||||
old_initial_indent = self.initial_indent |
||||
old_subsequent_indent = self.subsequent_indent |
||||
self.initial_indent += indent |
||||
self.subsequent_indent += indent |
||||
try: |
||||
yield |
||||
finally: |
||||
self.initial_indent = old_initial_indent |
||||
self.subsequent_indent = old_subsequent_indent |
||||
|
||||
def indent_only(self, text): |
||||
rv = [] |
||||
for idx, line in enumerate(text.splitlines()): |
||||
indent = self.initial_indent |
||||
if idx > 0: |
||||
indent = self.subsequent_indent |
||||
rv.append(indent + line) |
||||
return '\n'.join(rv) |
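A quick sketch of the two additions above, using the class as defined in this module: `extra_indent` temporarily deepens both indents (useful when wrapping nested help text), and `indent_only` indents existing lines without rewrapping them.

    wrapper = TextWrapper(width=40, initial_indent='  ', subsequent_indent='  ')
    with wrapper.extra_indent('    '):
        print(wrapper.fill('some long help text that should wrap at the '
                           'narrower, further-indented width'))
    print(wrapper.indent_only('already\nwrapped\nlines'))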
@ -0,0 +1,118 @@ |
||||
import os |
||||
import sys |
||||
import codecs |
||||
|
||||
from ._compat import PY2 |
||||
|
||||
|
||||
# If someone wants to vendor click, we want to ensure the |
||||
# correct package is discovered. Ideally we could use a |
||||
# relative import here but unfortunately Python does not |
||||
# support that. |
||||
click = sys.modules[__name__.rsplit('.', 1)[0]] |
||||
|
||||
|
||||
def _find_unicode_literals_frame(): |
||||
import __future__ |
||||
frm = sys._getframe(1) |
||||
idx = 1 |
||||
while frm is not None: |
||||
if frm.f_globals.get('__name__', '').startswith('click.'): |
||||
frm = frm.f_back |
||||
idx += 1 |
||||
elif frm.f_code.co_flags & __future__.unicode_literals.compiler_flag: |
||||
return idx |
||||
else: |
||||
break |
||||
return 0 |
||||
|
||||
|
||||
def _check_for_unicode_literals(): |
||||
if not __debug__: |
||||
return |
||||
if not PY2 or click.disable_unicode_literals_warning: |
||||
return |
||||
bad_frame = _find_unicode_literals_frame() |
||||
if bad_frame <= 0: |
||||
return |
||||
from warnings import warn |
||||
warn(Warning('Click detected the use of the unicode_literals ' |
||||
'__future__ import. This is heavily discouraged ' |
||||
'because it can introduce subtle bugs in your ' |
||||
'code. You should instead use explicit u"" literals ' |
||||
'for your unicode strings. For more information see ' |
||||
'http://click.pocoo.org/python3/'), |
||||
stacklevel=bad_frame) |
||||
|
||||
|
||||
def _verify_python3_env(): |
||||
"""Ensures that the environment is good for unicode on Python 3.""" |
||||
if PY2: |
||||
return |
||||
try: |
||||
import locale |
||||
fs_enc = codecs.lookup(locale.getpreferredencoding()).name |
||||
except Exception: |
||||
fs_enc = 'ascii' |
||||
if fs_enc != 'ascii': |
||||
return |
||||
|
||||
extra = '' |
||||
if os.name == 'posix': |
||||
import subprocess |
||||
rv = subprocess.Popen(['locale', '-a'], stdout=subprocess.PIPE, |
||||
stderr=subprocess.PIPE).communicate()[0] |
||||
good_locales = set() |
||||
has_c_utf8 = False |
||||
|
||||
# Make sure we're operating on text here. |
||||
if isinstance(rv, bytes): |
||||
rv = rv.decode('ascii', 'replace') |
||||
|
||||
for line in rv.splitlines(): |
||||
locale = line.strip() |
||||
if locale.lower().endswith(('.utf-8', '.utf8')): |
||||
good_locales.add(locale) |
||||
if locale.lower() in ('c.utf8', 'c.utf-8'): |
||||
has_c_utf8 = True |
||||
|
||||
extra += '\n\n' |
||||
if not good_locales: |
||||
extra += ( |
||||
'Additional information: on this system no suitable UTF-8\n' |
||||
'locales were discovered. This most likely requires resolving\n' |
||||
'by reconfiguring the locale system.' |
||||
) |
||||
elif has_c_utf8: |
||||
extra += ( |
||||
'This system supports the C.UTF-8 locale which is recommended.\n' |
||||
'You might be able to resolve your issue by exporting the\n' |
||||
'following environment variables:\n\n' |
||||
' export LC_ALL=C.UTF-8\n' |
||||
' export LANG=C.UTF-8' |
||||
) |
||||
else: |
||||
extra += ( |
||||
'This system lists a couple of UTF-8 supporting locales that\n' |
||||
'you can pick from. The following suitable locales were\n' |
||||
'discovered: %s' |
||||
) % ', '.join(sorted(good_locales)) |
||||
|
||||
bad_locale = None |
||||
for locale in os.environ.get('LC_ALL'), os.environ.get('LANG'): |
||||
if locale and locale.lower().endswith(('.utf-8', '.utf8')): |
||||
bad_locale = locale |
||||
if locale is not None: |
||||
break |
||||
if bad_locale is not None: |
||||
extra += ( |
||||
'\n\nClick discovered that you exported a UTF-8 locale\n' |
||||
'but the locale system could not pick it up because\n' |
||||
'it does not exist. The exported locale is "%s" but it\n' |
||||
'is not supported' |
||||
) % bad_locale |
||||
|
||||
raise RuntimeError('Click will abort further execution because Python 3 ' |
||||
'was configured to use ASCII as encoding for the ' |
||||
'environment. Consult http://click.pocoo.org/python3/' |
||||
' for mitigation steps.' + extra) |
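A quick way to see whether a given shell would trip this guard is to inspect the preferred encoding the same way the function does; the usual fix on affected systems is exporting a UTF-8 locale as suggested in the message above.

    import codecs
    import locale

    enc = codecs.lookup(locale.getpreferredencoding()).name
    print(enc)  # 'ascii' here means Click would raise the RuntimeError above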
@ -0,0 +1,273 @@ |
||||
# -*- coding: utf-8 -*- |
||||
# This module is based on the excellent work by Adam Bartoš who |
||||
# provided a lot of what went into the implementation here in |
||||
# the discussion of issue 1602 in the Python bug tracker. |
||||
# |
||||
# There are some general differences in regards to how this works |
||||
# compared to the original patches as we do not need to patch |
||||
# the entire interpreter but just work in our little world of |
||||
# echo and prompt. |
||||
|
||||
import io |
||||
import os |
||||
import sys |
||||
import zlib |
||||
import time |
||||
import ctypes |
||||
import msvcrt |
||||
from click._compat import _NonClosingTextIOWrapper, text_type, PY2 |
||||
from ctypes import byref, POINTER, c_int, c_char, c_char_p, \ |
||||
c_void_p, py_object, c_ssize_t, c_ulong, windll, WINFUNCTYPE |
||||
try: |
||||
from ctypes import pythonapi |
||||
PyObject_GetBuffer = pythonapi.PyObject_GetBuffer |
||||
PyBuffer_Release = pythonapi.PyBuffer_Release |
||||
except ImportError: |
||||
pythonapi = None |
||||
from ctypes.wintypes import LPWSTR, LPCWSTR |
||||
|
||||
|
||||
c_ssize_p = POINTER(c_ssize_t) |
||||
|
||||
kernel32 = windll.kernel32 |
||||
GetStdHandle = kernel32.GetStdHandle |
||||
ReadConsoleW = kernel32.ReadConsoleW |
||||
WriteConsoleW = kernel32.WriteConsoleW |
||||
GetLastError = kernel32.GetLastError |
||||
GetCommandLineW = WINFUNCTYPE(LPWSTR)( |
||||
('GetCommandLineW', windll.kernel32)) |
||||
CommandLineToArgvW = WINFUNCTYPE( |
||||
POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( |
||||
('CommandLineToArgvW', windll.shell32)) |
||||
|
||||
|
||||
STDIN_HANDLE = GetStdHandle(-10) |
||||
STDOUT_HANDLE = GetStdHandle(-11) |
||||
STDERR_HANDLE = GetStdHandle(-12) |
||||
|
||||
|
||||
PyBUF_SIMPLE = 0 |
||||
PyBUF_WRITABLE = 1 |
||||
|
||||
ERROR_SUCCESS = 0 |
||||
ERROR_NOT_ENOUGH_MEMORY = 8 |
||||
ERROR_OPERATION_ABORTED = 995 |
||||
|
||||
STDIN_FILENO = 0 |
||||
STDOUT_FILENO = 1 |
||||
STDERR_FILENO = 2 |
||||
|
||||
EOF = b'\x1a' |
||||
MAX_BYTES_WRITTEN = 32767 |
||||
|
||||
|
||||
class Py_buffer(ctypes.Structure): |
||||
_fields_ = [ |
||||
('buf', c_void_p), |
||||
('obj', py_object), |
||||
('len', c_ssize_t), |
||||
('itemsize', c_ssize_t), |
||||
('readonly', c_int), |
||||
('ndim', c_int), |
||||
('format', c_char_p), |
||||
('shape', c_ssize_p), |
||||
('strides', c_ssize_p), |
||||
('suboffsets', c_ssize_p), |
||||
('internal', c_void_p) |
||||
] |
||||
|
||||
if PY2: |
||||
_fields_.insert(-1, ('smalltable', c_ssize_t * 2)) |
||||
|
||||
|
||||
# On PyPy we cannot get buffers so our ability to operate here is |
||||
# severely limited. |
||||
if pythonapi is None: |
||||
get_buffer = None |
||||
else: |
||||
def get_buffer(obj, writable=False): |
||||
buf = Py_buffer() |
||||
flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE |
||||
PyObject_GetBuffer(py_object(obj), byref(buf), flags) |
||||
try: |
||||
buffer_type = c_char * buf.len |
||||
return buffer_type.from_address(buf.buf) |
||||
finally: |
||||
PyBuffer_Release(byref(buf)) |
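A rough sketch of what `get_buffer` hands back on CPython (this module is Windows-only): a ctypes `c_char` array laid over the object's own memory, so writes through it mutate the original object in place.

    data = bytearray(b'abc')
    raw = get_buffer(data, writable=True)
    raw[0] = b'A'                  # writes straight into the bytearray's buffer
    assert bytes(data) == b'Abc'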
||||
|
||||
|
||||
class _WindowsConsoleRawIOBase(io.RawIOBase): |
||||
|
||||
def __init__(self, handle): |
||||
self.handle = handle |
||||
|
||||
def isatty(self): |
||||
io.RawIOBase.isatty(self) |
||||
return True |
||||
|
||||
|
||||
class _WindowsConsoleReader(_WindowsConsoleRawIOBase): |
||||
|
||||
def readable(self): |
||||
return True |
||||
|
||||
def readinto(self, b): |
||||
bytes_to_be_read = len(b) |
||||
if not bytes_to_be_read: |
||||
return 0 |
||||
elif bytes_to_be_read % 2: |
||||
raise ValueError('cannot read odd number of bytes from ' |
||||
'UTF-16-LE encoded console') |
||||
|
||||
buffer = get_buffer(b, writable=True) |
||||
code_units_to_be_read = bytes_to_be_read // 2 |
||||
code_units_read = c_ulong() |
||||
|
||||
rv = ReadConsoleW(self.handle, buffer, code_units_to_be_read, |
||||
byref(code_units_read), None) |
||||
if GetLastError() == ERROR_OPERATION_ABORTED: |
||||
# wait for KeyboardInterrupt |
||||
time.sleep(0.1) |
||||
if not rv: |
||||
raise OSError('Windows error: %s' % GetLastError()) |
||||
|
||||
if buffer[0] == EOF: |
||||
return 0 |
||||
return 2 * code_units_read.value |
||||
|
||||
|
||||
class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): |
||||
|
||||
def writable(self): |
||||
return True |
||||
|
||||
@staticmethod |
||||
def _get_error_message(errno): |
||||
if errno == ERROR_SUCCESS: |
||||
return 'ERROR_SUCCESS' |
||||
elif errno == ERROR_NOT_ENOUGH_MEMORY: |
||||
return 'ERROR_NOT_ENOUGH_MEMORY' |
||||
return 'Windows error %s' % errno |
||||
|
||||
def write(self, b): |
||||
bytes_to_be_written = len(b) |
||||
buf = get_buffer(b) |
||||
code_units_to_be_written = min(bytes_to_be_written, |
||||
MAX_BYTES_WRITTEN) // 2 |
||||
code_units_written = c_ulong() |
||||
|
||||
WriteConsoleW(self.handle, buf, code_units_to_be_written, |
||||
byref(code_units_written), None) |
||||
bytes_written = 2 * code_units_written.value |
||||
|
||||
if bytes_written == 0 and bytes_to_be_written > 0: |
||||
raise OSError(self._get_error_message(GetLastError())) |
||||
return bytes_written |
||||
|
||||
|
||||
class ConsoleStream(object): |
||||
|
||||
def __init__(self, text_stream, byte_stream): |
||||
self._text_stream = text_stream |
||||
self.buffer = byte_stream |
||||
|
||||
@property |
||||
def name(self): |
||||
return self.buffer.name |
||||
|
||||
def write(self, x): |
||||
if isinstance(x, text_type): |
||||
return self._text_stream.write(x) |
||||
try: |
||||
self.flush() |
||||
except Exception: |
||||
pass |
||||
return self.buffer.write(x) |
||||
|
||||
def writelines(self, lines): |
||||
for line in lines: |
||||
self.write(line) |
||||
|
||||
def __getattr__(self, name): |
||||
return getattr(self._text_stream, name) |
||||
|
||||
def isatty(self): |
||||
return self.buffer.isatty() |
||||
|
||||
def __repr__(self): |
||||
return '<ConsoleStream name=%r encoding=%r>' % ( |
||||
self.name, |
||||
self.encoding, |
||||
) |
||||
|
||||
|
||||
def _get_text_stdin(buffer_stream): |
||||
text_stream = _NonClosingTextIOWrapper( |
||||
io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), |
||||
'utf-16-le', 'strict', line_buffering=True) |
||||
return ConsoleStream(text_stream, buffer_stream) |
||||
|
||||
|
||||
def _get_text_stdout(buffer_stream): |
||||
text_stream = _NonClosingTextIOWrapper( |
||||
_WindowsConsoleWriter(STDOUT_HANDLE), |
||||
'utf-16-le', 'strict', line_buffering=True) |
||||
return ConsoleStream(text_stream, buffer_stream) |
||||
|
||||
|
||||
def _get_text_stderr(buffer_stream): |
||||
text_stream = _NonClosingTextIOWrapper( |
||||
_WindowsConsoleWriter(STDERR_HANDLE), |
||||
'utf-16-le', 'strict', line_buffering=True) |
||||
return ConsoleStream(text_stream, buffer_stream) |
||||
|
||||
|
||||
if PY2: |
||||
def _hash_py_argv(): |
||||
return zlib.crc32('\x00'.join(sys.argv[1:])) |
||||
|
||||
_initial_argv_hash = _hash_py_argv() |
||||
|
||||
def _get_windows_argv(): |
||||
argc = c_int(0) |
||||
argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc)) |
||||
argv = [argv_unicode[i] for i in range(0, argc.value)] |
||||
|
||||
if not hasattr(sys, 'frozen'): |
||||
argv = argv[1:] |
||||
while len(argv) > 0: |
||||
arg = argv[0] |
||||
if not arg.startswith('-') or arg == '-': |
||||
break |
||||
argv = argv[1:] |
||||
if arg.startswith(('-c', '-m')): |
||||
break |
||||
|
||||
return argv[1:] |
||||
|
||||
|
||||
_stream_factories = { |
||||
0: _get_text_stdin, |
||||
1: _get_text_stdout, |
||||
2: _get_text_stderr, |
||||
} |
||||
|
||||
|
||||
def _get_windows_console_stream(f, encoding, errors): |
||||
if get_buffer is not None and \ |
||||
encoding in ('utf-16-le', None) \ |
||||
and errors in ('strict', None) and \ |
||||
hasattr(f, 'isatty') and f.isatty(): |
||||
func = _stream_factories.get(f.fileno()) |
||||
if func is not None: |
||||
if not PY2: |
||||
f = getattr(f, 'buffer') |
||||
if f is None: |
||||
return None |
||||
else: |
||||
# If we are on Python 2 we need to set the stream that we |
||||
# deal with to binary mode as otherwise the exercise is a |
||||
# bit moot. The same problems apply as for |
||||
# get_binary_stdin and friends from _compat. |
||||
msvcrt.setmode(f.fileno(), os.O_BINARY) |
||||
return func(f) |
File diff suppressed because it is too large
@ -0,0 +1,304 @@ |
||||
import sys |
||||
import inspect |
||||
|
||||
from functools import update_wrapper |
||||
|
||||
from ._compat import iteritems |
||||
from ._unicodefun import _check_for_unicode_literals |
||||
from .utils import echo |
||||
from .globals import get_current_context |
||||
|
||||
|
||||
def pass_context(f): |
||||
"""Marks a callback as wanting to receive the current context |
||||
object as first argument. |
||||
""" |
||||
def new_func(*args, **kwargs): |
||||
return f(get_current_context(), *args, **kwargs) |
||||
return update_wrapper(new_func, f) |
||||
|
||||
|
||||
def pass_obj(f): |
||||
"""Similar to :func:`pass_context`, but only pass the object on the |
||||
context onwards (:attr:`Context.obj`). This is useful if that object |
||||
represents the state of a nested system. |
||||
""" |
||||
def new_func(*args, **kwargs): |
||||
return f(get_current_context().obj, *args, **kwargs) |
||||
return update_wrapper(new_func, f) |
||||
|
||||
|
||||
def make_pass_decorator(object_type, ensure=False): |
||||
"""Given an object type this creates a decorator that will work |
||||
similar to :func:`pass_obj` but instead of passing the object of the |
||||
current context, it will find the innermost context of type |
||||
:func:`object_type`. |
||||
|
||||
This generates a decorator that works roughly like this:: |
||||
|
||||
from functools import update_wrapper |
||||
|
||||
def decorator(f): |
||||
@pass_context |
||||
def new_func(ctx, *args, **kwargs): |
||||
obj = ctx.find_object(object_type) |
||||
return ctx.invoke(f, obj, *args, **kwargs) |
||||
return update_wrapper(new_func, f) |
||||
return decorator |
||||
|
||||
:param object_type: the type of the object to pass. |
||||
:param ensure: if set to `True`, a new object will be created and |
||||
remembered on the context if it's not there yet. |
||||
""" |
||||
def decorator(f): |
||||
def new_func(*args, **kwargs): |
||||
ctx = get_current_context() |
||||
if ensure: |
||||
obj = ctx.ensure_object(object_type) |
||||
else: |
||||
obj = ctx.find_object(object_type) |
||||
if obj is None: |
||||
raise RuntimeError('Managed to invoke callback without a ' |
||||
'context object of type %r existing' |
||||
% object_type.__name__) |
||||
return ctx.invoke(f, obj, *args[1:], **kwargs) |
||||
return update_wrapper(new_func, f) |
||||
return decorator |
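A usage sketch with a hypothetical `Repo` state object: the generated decorator looks up the nearest `Repo` on the context (creating one, because of `ensure=True`) and passes it as the first argument.

    import click

    class Repo(object):
        """Hypothetical per-invocation state object."""
        def __init__(self):
            self.home = '.'

    pass_repo = click.make_pass_decorator(Repo, ensure=True)

    @click.command()
    @pass_repo
    def status(repo):
        click.echo('Repo home is %s' % repo.home)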
||||
|
||||
|
||||
def _make_command(f, name, attrs, cls): |
||||
if isinstance(f, Command): |
||||
raise TypeError('Attempted to convert a callback into a ' |
||||
'command twice.') |
||||
try: |
||||
params = f.__click_params__ |
||||
params.reverse() |
||||
del f.__click_params__ |
||||
except AttributeError: |
||||
params = [] |
||||
help = attrs.get('help') |
||||
if help is None: |
||||
help = inspect.getdoc(f) |
||||
if isinstance(help, bytes): |
||||
help = help.decode('utf-8') |
||||
else: |
||||
help = inspect.cleandoc(help) |
||||
attrs['help'] = help |
||||
_check_for_unicode_literals() |
||||
return cls(name=name or f.__name__.lower(), |
||||
callback=f, params=params, **attrs) |
||||
|
||||
|
||||
def command(name=None, cls=None, **attrs): |
||||
"""Creates a new :class:`Command` and uses the decorated function as |
||||
callback. This will also automatically attach all decorated |
||||
:func:`option`\s and :func:`argument`\s as parameters to the command. |
||||
|
||||
The name of the command defaults to the name of the function. If you |
||||
want to change that, you can pass the intended name as the first |
||||
argument. |
||||
|
||||
All keyword arguments are forwarded to the underlying command class. |
||||
|
||||
Once decorated the function turns into a :class:`Command` instance |
||||
that can be invoked as a command line utility or be attached to a |
||||
command :class:`Group`. |
||||
|
||||
:param name: the name of the command. This defaults to the function |
||||
name. |
||||
:param cls: the command class to instantiate. This defaults to |
||||
:class:`Command`. |
||||
""" |
||||
if cls is None: |
||||
cls = Command |
||||
def decorator(f): |
||||
cmd = _make_command(f, name, attrs, cls) |
||||
cmd.__doc__ = f.__doc__ |
||||
return cmd |
||||
return decorator |
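A minimal end-to-end sketch of this decorator together with `option` (defined further below); the decorated function becomes a `Command` instance that can serve as a console entry point:

    import click

    @click.command()
    @click.option('--count', default=1, help='Number of greetings.')
    def hello(count):
        """Print a greeting COUNT times."""
        for _ in range(count):
            click.echo('Hello!')

    if __name__ == '__main__':
        hello()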
||||
|
||||
|
||||
def group(name=None, **attrs): |
||||
"""Creates a new :class:`Group` with a function as callback. This |
||||
works otherwise the same as :func:`command` just that the `cls` |
||||
parameter is set to :class:`Group`. |
||||
""" |
||||
attrs.setdefault('cls', Group) |
||||
return command(name, **attrs) |
||||
|
||||
|
||||
def _param_memo(f, param): |
||||
if isinstance(f, Command): |
||||
f.params.append(param) |
||||
else: |
||||
if not hasattr(f, '__click_params__'): |
||||
f.__click_params__ = [] |
||||
f.__click_params__.append(param) |
||||
|
||||
|
||||
def argument(*param_decls, **attrs): |
||||
"""Attaches an argument to the command. All positional arguments are |
||||
passed as parameter declarations to :class:`Argument`; all keyword |
||||
arguments are forwarded unchanged (except ``cls``). |
||||
This is equivalent to creating an :class:`Argument` instance manually |
||||
and attaching it to the :attr:`Command.params` list. |
||||
|
||||
:param cls: the argument class to instantiate. This defaults to |
||||
:class:`Argument`. |
||||
""" |
||||
def decorator(f): |
||||
ArgumentClass = attrs.pop('cls', Argument) |
||||
_param_memo(f, ArgumentClass(param_decls, **attrs)) |
||||
return f |
||||
return decorator |
||||
|
||||
|
||||
def option(*param_decls, **attrs): |
||||
"""Attaches an option to the command. All positional arguments are |
||||
passed as parameter declarations to :class:`Option`; all keyword |
||||
arguments are forwarded unchanged (except ``cls``). |
||||
This is equivalent to creating an :class:`Option` instance manually |
||||
and attaching it to the :attr:`Command.params` list. |
||||
|
||||
:param cls: the option class to instantiate. This defaults to |
||||
:class:`Option`. |
||||
""" |
||||
def decorator(f): |
||||
if 'help' in attrs: |
||||
attrs['help'] = inspect.cleandoc(attrs['help']) |
||||
OptionClass = attrs.pop('cls', Option) |
||||
_param_memo(f, OptionClass(param_decls, **attrs)) |
||||
return f |
||||
return decorator |
||||
|
||||
|
||||
def confirmation_option(*param_decls, **attrs): |
||||
"""Shortcut for confirmation prompts that can be ignored by passing |
||||
``--yes`` as parameter. |
||||
|
||||
This is equivalent to decorating a function with :func:`option` with |
||||
the following parameters:: |
||||
|
||||
def callback(ctx, param, value): |
||||
if not value: |
||||
ctx.abort() |
||||
|
||||
@click.command() |
||||
@click.option('--yes', is_flag=True, callback=callback, |
||||
expose_value=False, prompt='Do you want to continue?') |
||||
def dropdb(): |
||||
pass |
||||
""" |
||||
def decorator(f): |
||||
def callback(ctx, param, value): |
||||
if not value: |
||||
ctx.abort() |
||||
attrs.setdefault('is_flag', True) |
||||
attrs.setdefault('callback', callback) |
||||
attrs.setdefault('expose_value', False) |
||||
attrs.setdefault('prompt', 'Do you want to continue?') |
||||
attrs.setdefault('help', 'Confirm the action without prompting.') |
||||
return option(*(param_decls or ('--yes',)), **attrs)(f) |
||||
return decorator |
||||
|
||||
|
||||
def password_option(*param_decls, **attrs): |
||||
"""Shortcut for password prompts. |
||||
|
||||
This is equivalent to decorating a function with :func:`option` with |
||||
the following parameters:: |
||||
|
||||
@click.command() |
||||
@click.option('--password', prompt=True, confirmation_prompt=True, |
||||
hide_input=True) |
||||
def changeadmin(password): |
||||
pass |
||||
""" |
||||
def decorator(f): |
||||
attrs.setdefault('prompt', True) |
||||
attrs.setdefault('confirmation_prompt', True) |
||||
attrs.setdefault('hide_input', True) |
||||
return option(*(param_decls or ('--password',)), **attrs)(f) |
||||
return decorator |
||||
|
||||
|
||||
def version_option(version=None, *param_decls, **attrs): |
||||
"""Adds a ``--version`` option which immediately ends the program |
||||
printing out the version number. This is implemented as an eager |
||||
option that prints the version and exits the program in the callback. |
||||
|
||||
:param version: the version number to show. If not provided Click |
||||
attempts an auto discovery via setuptools. |
||||
:param prog_name: the name of the program (defaults to autodetection) |
||||
:param message: custom message to show instead of the default |
||||
(``'%(prog)s, version %(version)s'``) |
||||
:param others: everything else is forwarded to :func:`option`. |
||||
""" |
||||
if version is None: |
||||
module = sys._getframe(1).f_globals.get('__name__') |
||||
def decorator(f): |
||||
prog_name = attrs.pop('prog_name', None) |
||||
message = attrs.pop('message', '%(prog)s, version %(version)s') |
||||
|
||||
def callback(ctx, param, value): |
||||
if not value or ctx.resilient_parsing: |
||||
return |
||||
prog = prog_name |
||||
if prog is None: |
||||
prog = ctx.find_root().info_name |
||||
ver = version |
||||
if ver is None: |
||||
try: |
||||
import pkg_resources |
||||
except ImportError: |
||||
pass |
||||
else: |
||||
for dist in pkg_resources.working_set: |
||||
scripts = dist.get_entry_map().get('console_scripts') or {} |
||||
for script_name, entry_point in iteritems(scripts): |
||||
if entry_point.module_name == module: |
||||
ver = dist.version |
||||
break |
||||
if ver is None: |
||||
raise RuntimeError('Could not determine version') |
||||
echo(message % { |
||||
'prog': prog, |
||||
'version': ver, |
||||
}, color=ctx.color) |
||||
ctx.exit() |
||||
|
||||
attrs.setdefault('is_flag', True) |
||||
attrs.setdefault('expose_value', False) |
||||
attrs.setdefault('is_eager', True) |
||||
attrs.setdefault('help', 'Show the version and exit.') |
||||
attrs['callback'] = callback |
||||
return option(*(param_decls or ('--version',)), **attrs)(f) |
||||
return decorator |
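A short sketch; passing `version` explicitly skips the `pkg_resources` auto-discovery above (the version string and program name are made-up examples):

    import click

    @click.command()
    @click.version_option(version='1.2.3', prog_name='mytool')
    def cli():
        pass

    # `mytool --version` would print: mytool, version 1.2.3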
||||
|
||||
|
||||
def help_option(*param_decls, **attrs): |
||||
"""Adds a ``--help`` option which immediately ends the program |
||||
printing out the help page. This is usually unnecessary to add as |
||||
this is added by default to all commands unless suppressed. |
||||
|
||||
Like :func:`version_option`, this is implemented as eager option that |
||||
prints in the callback and exits. |
||||
|
||||
All arguments are forwarded to :func:`option`. |
||||
""" |
||||
def decorator(f): |
||||
def callback(ctx, param, value): |
||||
if value and not ctx.resilient_parsing: |
||||
echo(ctx.get_help(), color=ctx.color) |
||||
ctx.exit() |
||||
attrs.setdefault('is_flag', True) |
||||
attrs.setdefault('expose_value', False) |
||||
attrs.setdefault('help', 'Show this message and exit.') |
||||
attrs.setdefault('is_eager', True) |
||||
attrs['callback'] = callback |
||||
return option(*(param_decls or ('--help',)), **attrs)(f) |
||||
return decorator |
||||
|
||||
|
||||
# Circular dependencies between core and decorators |
||||
from .core import Command, Group, Argument, Option |
@ -0,0 +1,201 @@ |
||||
from ._compat import PY2, filename_to_ui, get_text_stderr |
||||
from .utils import echo |
||||
|
||||
|
||||
class ClickException(Exception): |
||||
"""An exception that Click can handle and show to the user.""" |
||||
|
||||
#: The exit code for this exception |
||||
exit_code = 1 |
||||
|
||||
def __init__(self, message): |
||||
if PY2: |
||||
if message is not None: |
||||
message = message.encode('utf-8') |
||||
Exception.__init__(self, message) |
||||
self.message = message |
||||
|
||||
def format_message(self): |
||||
return self.message |
||||
|
||||
def show(self, file=None): |
||||
if file is None: |
||||
file = get_text_stderr() |
||||
echo('Error: %s' % self.format_message(), file=file) |
||||
|
||||
|
||||
class UsageError(ClickException): |
||||
"""An internal exception that signals a usage error. This typically |
||||
aborts any further handling. |
||||
|
||||
:param message: the error message to display. |
||||
:param ctx: optionally the context that caused this error. Click will |
||||
fill in the context automatically in some situations. |
||||
""" |
||||
exit_code = 2 |
||||
|
||||
def __init__(self, message, ctx=None): |
||||
ClickException.__init__(self, message) |
||||
self.ctx = ctx |
||||
|
||||
def show(self, file=None): |
||||
if file is None: |
||||
file = get_text_stderr() |
||||
color = None |
||||
if self.ctx is not None: |
||||
color = self.ctx.color |
||||
echo(self.ctx.get_usage() + '\n', file=file, color=color) |
||||
echo('Error: %s' % self.format_message(), file=file, color=color) |
||||
|
||||
|
||||
class BadParameter(UsageError): |
||||
"""An exception that formats out a standardized error message for a |
||||
bad parameter. This is useful when thrown from a callback or type as |
||||
Click will attach contextual information to it (for instance, which |
||||
parameter it is). |
||||
|
||||
.. versionadded:: 2.0 |
||||
|
||||
:param param: the parameter object that caused this error. This can |
||||
be left out, and Click will attach this info itself |
||||
if possible. |
||||
:param param_hint: a string that shows up as parameter name. This |
||||
can be used as an alternative to `param` in cases |
||||
where custom validation should happen. If it is |
||||
a string it's used as such, if it's a list then |
||||
each item is quoted and separated. |
||||
""" |
||||
|
||||
def __init__(self, message, ctx=None, param=None, |
||||
param_hint=None): |
||||
UsageError.__init__(self, message, ctx) |
||||
self.param = param |
||||
self.param_hint = param_hint |
||||
|
||||
def format_message(self): |
||||
if self.param_hint is not None: |
||||
param_hint = self.param_hint |
||||
elif self.param is not None: |
||||
param_hint = self.param.opts or [self.param.human_readable_name] |
||||
else: |
||||
return 'Invalid value: %s' % self.message |
||||
if isinstance(param_hint, (tuple, list)): |
||||
param_hint = ' / '.join('"%s"' % x for x in param_hint) |
||||
return 'Invalid value for %s: %s' % (param_hint, self.message) |
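# A sketch of raising BadParameter from an option callback (hedged:
# `validate_count` and `--count` are illustrative names only):
#
#     def validate_count(ctx, param, value):
#         if value < 0:
#             raise BadParameter('must be non-negative')
#         return value
#
#     @click.command()
#     @click.option('--count', type=int, default=0, callback=validate_count)
#     def repeat(count):
#         pass
#
# Click attaches the context and the parameter, so the failure is shown
# roughly as: Invalid value for "--count": must be non-negative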
||||
|
||||
|
||||
class MissingParameter(BadParameter): |
||||
"""Raised if click required an option or argument but it was not |
||||
provided when invoking the script. |
||||
|
||||
.. versionadded:: 4.0 |
||||
|
||||
:param param_type: a string that indicates the type of the parameter. |
||||
The default is to inherit the parameter type from |
||||
the given `param`. Valid values are ``'parameter'``, |
||||
``'option'`` or ``'argument'``. |
||||
""" |
||||
|
||||
def __init__(self, message=None, ctx=None, param=None, |
||||
param_hint=None, param_type=None): |
||||
BadParameter.__init__(self, message, ctx, param, param_hint) |
||||
self.param_type = param_type |
||||
|
||||
def format_message(self): |
||||
if self.param_hint is not None: |
||||
param_hint = self.param_hint |
||||
elif self.param is not None: |
||||
param_hint = self.param.opts or [self.param.human_readable_name] |
||||
else: |
||||
param_hint = None |
||||
if isinstance(param_hint, (tuple, list)): |
||||
param_hint = ' / '.join('"%s"' % x for x in param_hint) |
||||
|
||||
param_type = self.param_type |
||||
if param_type is None and self.param is not None: |
||||
param_type = self.param.param_type_name |
||||
|
||||
msg = self.message |
||||
if self.param is not None: |
||||
msg_extra = self.param.type.get_missing_message(self.param) |
||||
if msg_extra: |
||||
if msg: |
||||
msg += '. ' + msg_extra |
||||
else: |
||||
msg = msg_extra |
||||
|
||||
return 'Missing %s%s%s%s' % ( |
||||
param_type, |
||||
param_hint and ' %s' % param_hint or '', |
||||
msg and '. ' or '.', |
||||
msg or '', |
||||
) |
||||
|
||||
|
||||
class NoSuchOption(UsageError): |
||||
"""Raised if click attempted to handle an option that does not |
||||
exist. |
||||
|
||||
.. versionadded:: 4.0 |
||||
""" |
||||
|
||||
def __init__(self, option_name, message=None, possibilities=None, |
||||
ctx=None): |
||||
if message is None: |
||||
message = 'no such option: %s' % option_name |
||||
UsageError.__init__(self, message, ctx) |
||||
self.option_name = option_name |
||||
self.possibilities = possibilities |
||||
|
||||
def format_message(self): |
||||
bits = [self.message] |
||||
if self.possibilities: |
||||
if len(self.possibilities) == 1: |
||||
bits.append('Did you mean %s?' % self.possibilities[0]) |
||||
else: |
||||
possibilities = sorted(self.possibilities) |
||||
bits.append('(Possible options: %s)' % ', '.join(possibilities)) |
||||
return ' '.join(bits) |
||||
|
||||
|
||||
class BadOptionUsage(UsageError): |
||||
"""Raised if an option is generally supplied but the use of the option |
||||
was incorrect. This is for instance raised if the number of arguments |
||||
for an option is not correct. |
||||
|
||||
.. versionadded:: 4.0 |
||||
""" |
||||
|
||||
def __init__(self, message, ctx=None): |
||||
UsageError.__init__(self, message, ctx) |
||||
|
||||
|
||||
class BadArgumentUsage(UsageError): |
||||
"""Raised if an argument is generally supplied but the use of the argument |
||||
was incorrect. This is for instance raised if the number of values |
||||
for an argument is not correct. |
||||
|
||||
.. versionadded:: 6.0 |
||||
""" |
||||
|
||||
def __init__(self, message, ctx=None): |
||||
UsageError.__init__(self, message, ctx) |
||||
|
||||
|
||||
class FileError(ClickException): |
||||
"""Raised if a file cannot be opened.""" |
||||
|
||||
def __init__(self, filename, hint=None): |
||||
ui_filename = filename_to_ui(filename) |
||||
if hint is None: |
||||
hint = 'unknown error' |
||||
ClickException.__init__(self, hint) |
||||
self.ui_filename = ui_filename |
||||
self.filename = filename |
||||
|
||||
def format_message(self): |
||||
return 'Could not open file %s: %s' % (self.ui_filename, self.message) |
||||
|
||||
|
||||
class Abort(RuntimeError): |
||||
"""An internal signalling exception that signals Click to abort.""" |
@ -0,0 +1,256 @@ |
||||
from contextlib import contextmanager |
||||
from .termui import get_terminal_size |
||||
from .parser import split_opt |
||||
from ._compat import term_len |
||||
|
||||
|
||||
# Can force a width. This is used by the test system |
||||
FORCED_WIDTH = None |
||||
|
||||
|
||||
def measure_table(rows): |
||||
widths = {} |
||||
for row in rows: |
||||
for idx, col in enumerate(row): |
||||
widths[idx] = max(widths.get(idx, 0), term_len(col)) |
||||
return tuple(y for x, y in sorted(widths.items())) |
||||
|
||||
|
||||
def iter_rows(rows, col_count): |
||||
for row in rows: |
||||
row = tuple(row) |
||||
yield row + ('',) * (col_count - len(row)) |
||||
|
||||
|
||||
def wrap_text(text, width=78, initial_indent='', subsequent_indent='', |
||||
preserve_paragraphs=False): |
||||
"""A helper function that intelligently wraps text. By default, it |
||||
assumes that it operates on a single paragraph of text but if the |
||||
`preserve_paragraphs` parameter is provided it will intelligently |
||||
handle paragraphs (defined by two empty lines). |
||||
|
||||
If paragraphs are handled, a paragraph can be prefixed with an empty |
||||
line containing the ``\\b`` character (``\\x08``) to indicate that |
||||
no rewrapping should happen in that block. |
||||
|
||||
:param text: the text that should be rewrapped. |
||||
:param width: the maximum width for the text. |
||||
:param initial_indent: the initial indent that should be placed on the |
||||
first line as a string. |
||||
:param subsequent_indent: the indent string that should be placed on |
||||
each consecutive line. |
||||
:param preserve_paragraphs: if this flag is set then the wrapping will |
||||
intelligently handle paragraphs. |
||||
""" |
||||
from ._textwrap import TextWrapper |
||||
text = text.expandtabs() |
||||
wrapper = TextWrapper(width, initial_indent=initial_indent, |
||||
subsequent_indent=subsequent_indent, |
||||
replace_whitespace=False) |
||||
if not preserve_paragraphs: |
||||
return wrapper.fill(text) |
||||
|
||||
p = [] |
||||
buf = [] |
||||
indent = None |
||||
|
||||
def _flush_par(): |
||||
if not buf: |
||||
return |
||||
if buf[0].strip() == '\b': |
||||
p.append((indent or 0, True, '\n'.join(buf[1:]))) |
||||
else: |
||||
p.append((indent or 0, False, ' '.join(buf))) |
||||
del buf[:] |
||||
|
||||
for line in text.splitlines(): |
||||
if not line: |
||||
_flush_par() |
||||
indent = None |
||||
else: |
||||
if indent is None: |
||||
orig_len = term_len(line) |
||||
line = line.lstrip() |
||||
indent = orig_len - term_len(line) |
||||
buf.append(line) |
||||
_flush_par() |
||||
|
||||
rv = [] |
||||
for indent, raw, text in p: |
||||
with wrapper.extra_indent(' ' * indent): |
||||
if raw: |
||||
rv.append(wrapper.indent_only(text)) |
||||
else: |
||||
rv.append(wrapper.fill(text)) |
||||
|
||||
return '\n\n'.join(rv) |
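# A small sketch of wrap_text with paragraph handling (the ``\b`` marker
# line protects the following block from rewrapping, as described above):
#
#     sample = (
#         'This paragraph is rewrapped to the requested width.\n'
#         '\n'
#         '\b\n'
#         'this block\n'
#         '  keeps its layout\n'
#     )
#     print(wrap_text(sample, width=30, preserve_paragraphs=True))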
||||
|
||||
|
||||
class HelpFormatter(object): |
||||
"""This class helps with formatting text-based help pages. It's |
||||
usually just needed for very special internal cases, but it's also |
||||
exposed so that developers can write their own fancy outputs. |
||||
|
||||
At present, it always writes into memory. |
||||
|
||||
:param indent_increment: the additional increment for each level. |
||||
:param width: the width for the text. This defaults to the terminal |
||||
width clamped to a maximum of 78. |
||||
""" |
||||
|
||||
def __init__(self, indent_increment=2, width=None, max_width=None): |
||||
self.indent_increment = indent_increment |
||||
if max_width is None: |
||||
max_width = 80 |
||||
if width is None: |
||||
width = FORCED_WIDTH |
||||
if width is None: |
||||
width = max(min(get_terminal_size()[0], max_width) - 2, 50) |
||||
self.width = width |
||||
self.current_indent = 0 |
||||
self.buffer = [] |
||||
|
||||
def write(self, string): |
||||
"""Writes a unicode string into the internal buffer.""" |
||||
self.buffer.append(string) |
||||
|
||||
def indent(self): |
||||
"""Increases the indentation.""" |
||||
self.current_indent += self.indent_increment |
||||
|
||||
def dedent(self): |
||||
"""Decreases the indentation.""" |
||||
self.current_indent -= self.indent_increment |
||||
|
||||
def write_usage(self, prog, args='', prefix='Usage: '): |
||||
"""Writes a usage line into the buffer. |
||||
|
||||
:param prog: the program name. |
||||
:param args: whitespace separated list of arguments. |
||||
:param prefix: the prefix for the first line. |
||||
""" |
||||
usage_prefix = '%*s%s ' % (self.current_indent, prefix, prog) |
||||
text_width = self.width - self.current_indent |
||||
|
||||
if text_width >= (term_len(usage_prefix) + 20): |
||||
# The arguments will fit to the right of the prefix. |
||||
indent = ' ' * term_len(usage_prefix) |
||||
self.write(wrap_text(args, text_width, |
||||
initial_indent=usage_prefix, |
||||
subsequent_indent=indent)) |
||||
else: |
||||
# The prefix is too long, put the arguments on the next line. |
||||
self.write(usage_prefix) |
||||
self.write('\n') |
||||
indent = ' ' * (max(self.current_indent, term_len(prefix)) + 4) |
||||
self.write(wrap_text(args, text_width, |
||||
initial_indent=indent, |
||||
subsequent_indent=indent)) |
||||
|
||||
self.write('\n') |
||||
|
||||
def write_heading(self, heading): |
||||
"""Writes a heading into the buffer.""" |
||||
self.write('%*s%s:\n' % (self.current_indent, '', heading)) |
||||
|
||||
def write_paragraph(self): |
||||
"""Writes a paragraph into the buffer.""" |
||||
if self.buffer: |
||||
self.write('\n') |
||||
|
||||
def write_text(self, text): |
||||
"""Writes re-indented text into the buffer. This rewraps and |
||||
preserves paragraphs. |
||||
""" |
||||
text_width = max(self.width - self.current_indent, 11) |
||||
indent = ' ' * self.current_indent |
||||
self.write(wrap_text(text, text_width, |
||||
initial_indent=indent, |
||||
subsequent_indent=indent, |
||||
preserve_paragraphs=True)) |
||||
self.write('\n') |
||||
|
||||
def write_dl(self, rows, col_max=30, col_spacing=2): |
||||
"""Writes a definition list into the buffer. This is how options |
||||
and commands are usually formatted. |
||||
|
||||
:param rows: a list of two item tuples for the terms and values. |
||||
:param col_max: the maximum width of the first column. |
||||
:param col_spacing: the number of spaces between the first and |
||||
second column. |
||||
""" |
||||
rows = list(rows) |
||||
widths = measure_table(rows) |
||||
if len(widths) != 2: |
||||
raise TypeError('Expected two columns for definition list') |
||||
|
||||
first_col = min(widths[0], col_max) + col_spacing |
||||
|
||||
for first, second in iter_rows(rows, len(widths)): |
||||
self.write('%*s%s' % (self.current_indent, '', first)) |
||||
if not second: |
||||
self.write('\n') |
||||
continue |
||||
if term_len(first) <= first_col - col_spacing: |
||||
self.write(' ' * (first_col - term_len(first))) |
||||
else: |
||||
self.write('\n') |
||||
self.write(' ' * (first_col + self.current_indent)) |
||||
|
||||
text_width = max(self.width - first_col - 2, 10) |
||||
lines = iter(wrap_text(second, text_width).splitlines()) |
||||
if lines: |
||||
self.write(next(lines) + '\n') |
||||
for line in lines: |
||||
self.write('%*s%s\n' % ( |
||||
first_col + self.current_indent, '', line)) |
||||
else: |
||||
self.write('\n') |
||||
|
||||
@contextmanager |
||||
def section(self, name): |
||||
"""Helpful context manager that writes a paragraph, a heading, |
||||
and the indents. |
||||
|
||||
:param name: the section name that is written as heading. |
||||
""" |
||||
self.write_paragraph() |
||||
self.write_heading(name) |
||||
self.indent() |
||||
try: |
||||
yield |
||||
finally: |
||||
self.dedent() |
||||
|
||||
@contextmanager |
||||
def indentation(self): |
||||
"""A context manager that increases the indentation.""" |
||||
self.indent() |
||||
try: |
||||
yield |
||||
finally: |
||||
self.dedent() |
||||
|
||||
def getvalue(self): |
||||
"""Returns the buffer contents.""" |
||||
return ''.join(self.buffer) |
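# A sketch of driving HelpFormatter directly (hedged: "mytool" and the
# option rows are made-up example data):
#
#     formatter = HelpFormatter(width=60)
#     formatter.write_usage('mytool', '[OPTIONS] SRC DST')
#     with formatter.section('Options'):
#         formatter.write_dl([
#             ('--verbose', 'Enable verbose output.'),
#             ('--help', 'Show this message and exit.'),
#         ])
#     print(formatter.getvalue())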
||||
|
||||
|
||||
def join_options(options): |
||||
"""Given a list of option strings this joins them in the most appropriate |
||||
way and returns them in the form ``(formatted_string, |
||||
any_prefix_is_slash)`` where the second item in the tuple is a flag that |
||||
indicates if any of the option prefixes was a slash. |
||||
""" |
||||
rv = [] |
||||
any_prefix_is_slash = False |
||||
for opt in options: |
||||
prefix = split_opt(opt)[0] |
||||
if prefix == '/': |
||||
any_prefix_is_slash = True |
||||
rv.append((len(prefix), opt)) |
||||
|
||||
rv.sort(key=lambda x: x[0]) |
||||
|
||||
rv = ', '.join(x[1] for x in rv) |
||||
return rv, any_prefix_is_slash |
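# For example (results follow directly from the code above):
#
#     join_options(['-h', '--help'])   # -> ('-h, --help', False)
#     join_options(['/debug', '-d'])   # -> ('/debug, -d', True)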
@ -0,0 +1,48 @@ |
||||
from threading import local |
||||
|
||||
|
||||
_local = local() |
||||
|
||||
|
||||
def get_current_context(silent=False): |
||||
"""Returns the current click context. This can be used as a way to |
||||
access the current context object from anywhere. This is a more implicit |
||||
alternative to the :func:`pass_context` decorator. This function is |
||||
primarily useful for helpers such as :func:`echo` which might be |
||||
interested in changing its behavior based on the current context. |
||||
|
||||
To push the current context, :meth:`Context.scope` can be used. |
||||
|
||||
.. versionadded:: 5.0 |
||||
|
||||
:param silent: if set to `True` the return value is `None` if no context |
||||
is available. The default behavior is to raise a |
||||
:exc:`RuntimeError`. |
||||
""" |
||||
try: |
||||
return getattr(_local, 'stack')[-1] |
||||
except (AttributeError, IndexError): |
||||
if not silent: |
||||
raise RuntimeError('There is no active click context.') |
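# A usage sketch (hedged: assumes this function is re-exported as
# ``click.get_current_context``, and `sync` is a placeholder command):
#
#     @click.command()
#     def sync():
#         ctx = click.get_current_context()
#         click.echo('running %s' % ctx.info_name)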
||||
|
||||
|
||||
def push_context(ctx): |
||||
"""Pushes a new context to the current stack.""" |
||||
_local.__dict__.setdefault('stack', []).append(ctx) |
||||
|
||||
|
||||
def pop_context(): |
||||
"""Removes the top level from the stack.""" |
||||
_local.stack.pop() |
||||
|
||||
|
||||
def resolve_color_default(color=None): |
||||
""""Internal helper to get the default value of the color flag. If a |
||||
value is passed it's returned unchanged, otherwise it's looked up from |
||||
the current context. |
||||
""" |
||||
if color is not None: |
||||
return color |
||||
ctx = get_current_context(silent=True) |
||||
if ctx is not None: |
||||
return ctx.color |
@ -0,0 +1,426 @@ |
||||
# -*- coding: utf-8 -*- |
||||
""" |
||||
click.parser |
||||
~~~~~~~~~~~~ |
||||
|
||||
This module started out as largely a copy paste from the stdlib's |
||||
optparse module with the features removed that we do not need from |
||||
optparse because we implement them in Click on a higher level (for |
||||
instance type handling, help formatting and a lot more). |
||||
|
||||
The plan is to remove more and more from here over time. |
||||
|
||||
The reason this is a different module and not optparse from the stdlib |
||||
is that there are differences in 2.x and 3.x about the error messages |
||||
generated and optparse in the stdlib uses gettext for no good reason |
||||
and might cause us issues. |
||||
""" |
||||
import re |
||||
from collections import deque |
||||
from .exceptions import UsageError, NoSuchOption, BadOptionUsage, \ |
||||
BadArgumentUsage |
||||
|
||||
|
||||
def _unpack_args(args, nargs_spec): |
||||
"""Given an iterable of arguments and an iterable of nargs specifications, |
||||
it returns a tuple with all the unpacked arguments at the first index |
||||
and all remaining arguments as the second. |
||||
|
||||
The nargs specification is the number of arguments that should be consumed |
||||
or `-1` to indicate that this position should eat up all the remainders. |
||||
|
||||
Missing items are filled with `None`. |
||||
""" |
||||
args = deque(args) |
||||
nargs_spec = deque(nargs_spec) |
||||
rv = [] |
||||
spos = None |
||||
|
||||
def _fetch(c): |
||||
try: |
||||
if spos is None: |
||||
return c.popleft() |
||||
else: |
||||
return c.pop() |
||||
except IndexError: |
||||
return None |
||||
|
||||
while nargs_spec: |
||||
nargs = _fetch(nargs_spec) |
||||
if nargs == 1: |
||||
rv.append(_fetch(args)) |
||||
elif nargs > 1: |
||||
x = [_fetch(args) for _ in range(nargs)] |
||||
# If we're reversed, we're pulling in the arguments in reverse, |
||||
# so we need to turn them around. |
||||
if spos is not None: |
||||
x.reverse() |
||||
rv.append(tuple(x)) |
||||
elif nargs < 0: |
||||
if spos is not None: |
||||
raise TypeError('Cannot have two nargs < 0') |
||||
spos = len(rv) |
||||
rv.append(None) |
||||
|
||||
# spos is the position of the wildcard (star). If it's not `None`, |
||||
# we fill it with the remainder. |
||||
if spos is not None: |
||||
rv[spos] = tuple(args) |
||||
args = [] |
||||
rv[spos + 1:] = reversed(rv[spos + 1:]) |
||||
|
||||
return tuple(rv), list(args) |
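# For example, with one leading argument, a greedy middle position and one
# trailing argument (the result follows from the code above):
#
#     _unpack_args(['a', 'b', 'c', 'd'], [1, -1, 1])
#     # -> (('a', ('b', 'c'), 'd'), [])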
||||
|
||||
|
||||
def _error_opt_args(nargs, opt): |
||||
if nargs == 1: |
||||
raise BadOptionUsage('%s option requires an argument' % opt) |
||||
raise BadOptionUsage('%s option requires %d arguments' % (opt, nargs)) |
||||
|
||||
|
||||
def split_opt(opt): |
||||
first = opt[:1] |
||||
if first.isalnum(): |
||||
return '', opt |
||||
if opt[1:2] == first: |
||||
return opt[:2], opt[2:] |
||||
return first, opt[1:] |
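# For example:
#
#     split_opt('--verbose')  # -> ('--', 'verbose')
#     split_opt('-v')         # -> ('-', 'v')
#     split_opt('verbose')    # -> ('', 'verbose')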
||||
|
||||
|
||||
def normalize_opt(opt, ctx): |
||||
if ctx is None or ctx.token_normalize_func is None: |
||||
return opt |
||||
prefix, opt = split_opt(opt) |
||||
return prefix + ctx.token_normalize_func(opt) |
||||
|
||||
|
||||
def split_arg_string(string): |
||||
"""Given an argument string this attempts to split it into small parts.""" |
||||
rv = [] |
||||
for match in re.finditer(r"('([^'\\]*(?:\\.[^'\\]*)*)'" |
||||
r'|"([^"\\]*(?:\\.[^"\\]*)*)"' |
||||
r'|\S+)\s*', string, re.S): |
||||
arg = match.group().strip() |
||||
if arg[:1] == arg[-1:] and arg[:1] in '"\'': |
||||
arg = arg[1:-1].encode('ascii', 'backslashreplace') \ |
||||
.decode('unicode-escape') |
||||
try: |
||||
arg = type(string)(arg) |
||||
except UnicodeError: |
||||
pass |
||||
rv.append(arg) |
||||
return rv |
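# For example, quoted sections are kept together and the quotes removed:
#
#     split_arg_string('pip install "click >= 6.0"')
#     # -> ['pip', 'install', 'click >= 6.0']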
||||
|
||||
|
||||
class Option(object): |
||||
|
||||
def __init__(self, opts, dest, action=None, nargs=1, const=None, obj=None): |
||||
self._short_opts = [] |
||||
self._long_opts = [] |
||||
self.prefixes = set() |
||||
|
||||
for opt in opts: |
||||
prefix, value = split_opt(opt) |
||||
if not prefix: |
||||
raise ValueError('Invalid start character for option (%s)' |
||||
% opt) |
||||
self.prefixes.add(prefix[0]) |
||||
if len(prefix) == 1 and len(value) == 1: |
||||
self._short_opts.append(opt) |
||||
else: |
||||
self._long_opts.append(opt) |
||||
self.prefixes.add(prefix) |
||||
|
||||
if action is None: |
||||
action = 'store' |
||||
|
||||
self.dest = dest |
||||
self.action = action |
||||
self.nargs = nargs |
||||
self.const = const |
||||
self.obj = obj |
||||
|
||||
@property |
||||
def takes_value(self): |
||||
return self.action in ('store', 'append') |
||||
|
||||
def process(self, value, state): |
||||
if self.action == 'store': |
||||
state.opts[self.dest] = value |
||||
elif self.action == 'store_const': |
||||
state.opts[self.dest] = self.const |
||||
elif self.action == 'append': |
||||
state.opts.setdefault(self.dest, []).append(value) |
||||
elif self.action == 'append_const': |
||||
state.opts.setdefault(self.dest, []).append(self.const) |
||||
elif self.action == 'count': |
||||
state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 |
||||
else: |
||||
raise ValueError('unknown action %r' % self.action) |
||||
state.order.append(self.obj) |
||||
|
||||
|
||||
class Argument(object): |
||||
|
||||
def __init__(self, dest, nargs=1, obj=None): |
||||
self.dest = dest |
||||
self.nargs = nargs |
||||
self.obj = obj |
||||
|
||||
def process(self, value, state): |
||||
if self.nargs > 1: |
||||
holes = sum(1 for x in value if x is None) |
||||
if holes == len(value): |
||||
value = None |
||||
elif holes != 0: |
||||
raise BadArgumentUsage('argument %s takes %d values' |
||||
% (self.dest, self.nargs)) |
||||
state.opts[self.dest] = value |
||||
state.order.append(self.obj) |
||||
|
||||
|
||||
class ParsingState(object): |
||||
|
||||
def __init__(self, rargs): |
||||
self.opts = {} |
||||
self.largs = [] |
||||
self.rargs = rargs |
||||
self.order = [] |
||||
|
||||
|
||||
class OptionParser(object): |
||||
"""The option parser is an internal class that is ultimately used to |
||||
parse options and arguments. It's modelled after optparse and brings |
||||
a similar but vastly simplified API. It should generally not be used |
||||
directly as the high level Click classes wrap it for you. |
||||
|
||||
It's not nearly as extensible as optparse or argparse as it does not |
||||
implement features that are implemented on a higher level (such as |
||||
types or defaults). |
||||
|
||||
:param ctx: optionally the :class:`~click.Context` that this parser |
||||
belongs to. |
||||
""" |
||||
|
||||
def __init__(self, ctx=None): |
||||
#: The :class:`~click.Context` for this parser. This might be |
||||
#: `None` for some advanced use cases. |
||||
self.ctx = ctx |
||||
#: This controls how the parser deals with interspersed arguments. |
||||
#: If this is set to `False`, the parser will stop on the first |
||||
#: non-option. Click uses this to implement nested subcommands |
||||
#: safely. |
||||
self.allow_interspersed_args = True |
||||
#: This tells the parser how to deal with unknown options. By |
||||
#: default it will error out (which is sensible), but there is a |
||||
#: second mode where it will ignore it and continue processing |
||||
#: after shifting all the unknown options into the resulting args. |
||||
self.ignore_unknown_options = False |
||||
if ctx is not None: |
||||
self.allow_interspersed_args = ctx.allow_interspersed_args |
||||
self.ignore_unknown_options = ctx.ignore_unknown_options |
||||
self._short_opt = {} |
||||
self._long_opt = {} |
||||
self._opt_prefixes = set(['-', '--']) |
||||
self._args = [] |
||||
|
||||
def add_option(self, opts, dest, action=None, nargs=1, const=None, |
||||
obj=None): |
||||
"""Adds a new option named `dest` to the parser. The destination |
||||
is not inferred (unlike with optparse) and needs to be explicitly |
||||
provided. Action can be any of ``store``, ``store_const``, |
||||
``append``, ``append_const`` or ``count``. |
||||
|
||||
The `obj` can be used to identify the option in the order list |
||||
that is returned from the parser. |
||||
""" |
||||
if obj is None: |
||||
obj = dest |
||||
opts = [normalize_opt(opt, self.ctx) for opt in opts] |
||||
option = Option(opts, dest, action=action, nargs=nargs, |
||||
const=const, obj=obj) |
||||
self._opt_prefixes.update(option.prefixes) |
||||
for opt in option._short_opts: |
||||
self._short_opt[opt] = option |
||||
for opt in option._long_opts: |
||||
self._long_opt[opt] = option |
||||
|
||||
def add_argument(self, dest, nargs=1, obj=None): |
||||
"""Adds a positional argument named `dest` to the parser. |
||||
|
||||
The `obj` can be used to identify the argument in the order list |
||||
that is returned from the parser. |
||||
""" |
||||
if obj is None: |
||||
obj = dest |
||||
self._args.append(Argument(dest=dest, nargs=nargs, obj=obj)) |
||||
|
||||
def parse_args(self, args): |
||||
"""Parses positional arguments and returns ``(values, args, order)`` |
||||
for the parsed options and arguments as well as the leftover |
||||
arguments if there are any. The order is a list of objects as they |
||||
appear on the command line. If arguments appear multiple times they |
||||
will be memorized multiple times as well. |
||||
""" |
||||
state = ParsingState(args) |
||||
try: |
||||
self._process_args_for_options(state) |
||||
self._process_args_for_args(state) |
||||
except UsageError: |
||||
if self.ctx is None or not self.ctx.resilient_parsing: |
||||
raise |
||||
return state.opts, state.largs, state.order |
||||
|
||||
def _process_args_for_args(self, state): |
||||
pargs, args = _unpack_args(state.largs + state.rargs, |
||||
[x.nargs for x in self._args]) |
||||
|
||||
for idx, arg in enumerate(self._args): |
||||
arg.process(pargs[idx], state) |
||||
|
||||
state.largs = args |
||||
state.rargs = [] |
||||
|
||||
def _process_args_for_options(self, state): |
||||
while state.rargs: |
||||
arg = state.rargs.pop(0) |
||||
arglen = len(arg) |
||||
# Double dashes always handled explicitly regardless of what |
||||
# prefixes are valid. |
||||
if arg == '--': |
||||
return |
||||
elif arg[:1] in self._opt_prefixes and arglen > 1: |
||||
self._process_opts(arg, state) |
||||
elif self.allow_interspersed_args: |
||||
state.largs.append(arg) |
||||
else: |
||||
state.rargs.insert(0, arg) |
||||
return |
||||
|
||||
# Say this is the original argument list: |
||||
# [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] |
||||
# ^ |
||||
# (we are about to process arg(i)). |
||||
# |
||||
# Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of |
||||
# [arg0, ..., arg(i-1)] (any options and their arguments will have |
||||
# been removed from largs). |
||||
# |
||||
# The while loop will usually consume 1 or more arguments per pass. |
||||
# If it consumes 1 (eg. arg is an option that takes no arguments), |
||||
# then after _process_arg() is done the situation is: |
||||
# |
||||
# largs = subset of [arg0, ..., arg(i)] |
||||
# rargs = [arg(i+1), ..., arg(N-1)] |
||||
# |
||||
# If allow_interspersed_args is false, largs will always be |
||||
# *empty* -- still a subset of [arg0, ..., arg(i-1)], but |
||||
# not a very interesting subset! |
||||
|
||||
def _match_long_opt(self, opt, explicit_value, state): |
||||
if opt not in self._long_opt: |
||||
possibilities = [word for word in self._long_opt |
||||
if word.startswith(opt)] |
||||
raise NoSuchOption(opt, possibilities=possibilities) |
||||
|
||||
option = self._long_opt[opt] |
||||
if option.takes_value: |
||||
# At this point it's safe to modify rargs by injecting the |
||||
# explicit value, because no exception is raised in this |
||||
# branch. This means that the inserted value will be fully |
||||
# consumed. |
||||
if explicit_value is not None: |
||||
state.rargs.insert(0, explicit_value) |
||||
|
||||
nargs = option.nargs |
||||
if len(state.rargs) < nargs: |
||||
_error_opt_args(nargs, opt) |
||||
elif nargs == 1: |
||||
value = state.rargs.pop(0) |
||||
else: |
||||
value = tuple(state.rargs[:nargs]) |
||||
del state.rargs[:nargs] |
||||
|
||||
elif explicit_value is not None: |
||||
raise BadOptionUsage('%s option does not take a value' % opt) |
||||
|
||||
else: |
||||
value = None |
||||
|
||||
option.process(value, state) |
||||
|
||||
def _match_short_opt(self, arg, state): |
||||
stop = False |
||||
i = 1 |
||||
prefix = arg[0] |
||||
unknown_options = [] |
||||
|
||||
for ch in arg[1:]: |
||||
opt = normalize_opt(prefix + ch, self.ctx) |
||||
option = self._short_opt.get(opt) |
||||
i += 1 |
||||
|
||||
if not option: |
||||
if self.ignore_unknown_options: |
||||
unknown_options.append(ch) |
||||
continue |
||||
raise NoSuchOption(opt) |
||||
if option.takes_value: |
||||
# Any characters left in arg? Pretend they're the |
||||
# next arg, and stop consuming characters of arg. |
||||
if i < len(arg): |
||||
state.rargs.insert(0, arg[i:]) |
||||
stop = True |
||||
|
||||
nargs = option.nargs |
||||
if len(state.rargs) < nargs: |
||||
_error_opt_args(nargs, opt) |
||||
elif nargs == 1: |
||||
value = state.rargs.pop(0) |
||||
else: |
||||
value = tuple(state.rargs[:nargs]) |
||||
del state.rargs[:nargs] |
||||
|
||||
else: |
||||
value = None |
||||
|
||||
option.process(value, state) |
||||
|
||||
if stop: |
||||
break |
||||
|
||||
# If we got any unknown options, we recombine the string of the |
||||
# remaining options, re-attach the prefix, and report that |
||||
# to the state as a new larg. This way basic option combination |
||||
# still works while unknown options are ignored. |
||||
if self.ignore_unknown_options and unknown_options: |
||||
state.largs.append(prefix + ''.join(unknown_options)) |
||||
|
||||
def _process_opts(self, arg, state): |
||||
explicit_value = None |
||||
# Long option handling happens in two parts. The first part is |
||||
# supporting explicitly attached values. In any case, we will try |
||||
# to long match the option first. |
||||
if '=' in arg: |
||||
long_opt, explicit_value = arg.split('=', 1) |
||||
else: |
||||
long_opt = arg |
||||
norm_long_opt = normalize_opt(long_opt, self.ctx) |
||||
|
||||
# At this point we will match the (assumed) long option through |
||||
# the long option matching code. Note that this allows options |
||||
# like "-foo" to be matched as long options. |
||||
try: |
||||
self._match_long_opt(norm_long_opt, explicit_value, state) |
||||
except NoSuchOption: |
||||
# At this point the long option matching failed, and we need |
||||
# to try with short options. However there is a special rule |
||||
# which says, that if we have a two character options prefix |
||||
# (applies to "--foo" for instance), we do not dispatch to the |
||||
# short option code and will instead raise the no option |
||||
# error. |
||||
if arg[:2] not in self._opt_prefixes: |
||||
return self._match_short_opt(arg, state) |
||||
if not self.ignore_unknown_options: |
||||
raise |
||||
state.largs.append(arg) |
@ -0,0 +1,539 @@ |
||||
import os |
||||
import sys |
||||
import struct |
||||
|
||||
from ._compat import raw_input, text_type, string_types, \ |
||||
isatty, strip_ansi, get_winterm_size, DEFAULT_COLUMNS, WIN |
||||
from .utils import echo |
||||
from .exceptions import Abort, UsageError |
||||
from .types import convert_type |
||||
from .globals import resolve_color_default |
||||
|
||||
|
||||
# The prompt functions to use. The doc tools currently override these |
||||
# functions to customize how they work. |
||||
visible_prompt_func = raw_input |
||||
|
||||
_ansi_colors = ('black', 'red', 'green', 'yellow', 'blue', 'magenta', |
||||
'cyan', 'white', 'reset') |
||||
_ansi_reset_all = '\033[0m' |
||||
|
||||
|
||||
def hidden_prompt_func(prompt): |
||||
import getpass |
||||
return getpass.getpass(prompt) |
||||
|
||||
|
||||
def _build_prompt(text, suffix, show_default=False, default=None): |
||||
prompt = text |
||||
if default is not None and show_default: |
||||
prompt = '%s [%s]' % (prompt, default) |
||||
return prompt + suffix |
||||
|
||||
|
||||
def prompt(text, default=None, hide_input=False, |
||||
confirmation_prompt=False, type=None, |
||||
value_proc=None, prompt_suffix=': ', |
||||
show_default=True, err=False): |
||||
"""Prompts a user for input. This is a convenience function that can |
||||
be used to prompt a user for input later. |
||||
|
||||
If the user aborts the input by sending a interrupt signal, this |
||||
function will catch it and raise a :exc:`Abort` exception. |
||||
|
||||
.. versionadded:: 6.0 |
||||
Added unicode support for cmd.exe on Windows. |
||||
|
||||
.. versionadded:: 4.0 |
||||
Added the `err` parameter. |
||||
|
||||
:param text: the text to show for the prompt. |
||||
:param default: the default value to use if no input happens. If this |
||||
is not given it will prompt until it's aborted. |
||||
:param hide_input: if this is set to true then the input value will |
||||
be hidden. |
||||
:param confirmation_prompt: asks for confirmation for the value. |
||||
:param type: the type to use to check the value against. |
||||
:param value_proc: if this parameter is provided it's a function that |
||||
is invoked instead of the type conversion to |
||||
convert a value. |
||||
:param prompt_suffix: a suffix that should be added to the prompt. |
||||
:param show_default: shows or hides the default value in the prompt. |
||||
:param err: if set to true the file defaults to ``stderr`` instead of |
||||
``stdout``, the same as with echo. |
||||
""" |
||||
result = None |
||||
|
||||
def prompt_func(text): |
||||
f = hide_input and hidden_prompt_func or visible_prompt_func |
||||
try: |
||||
# Write the prompt separately so that we get nice |
||||
# coloring through colorama on Windows |
||||
echo(text, nl=False, err=err) |
||||
return f('') |
||||
except (KeyboardInterrupt, EOFError): |
||||
# getpass doesn't print a newline if the user aborts input with ^C. |
||||
# Allegedly this behavior is inherited from getpass(3). |
||||
# A doc bug has been filed at https://bugs.python.org/issue24711 |
||||
if hide_input: |
||||
echo(None, err=err) |
||||
raise Abort() |
||||
|
||||
if value_proc is None: |
||||
value_proc = convert_type(type, default) |
||||
|
||||
prompt = _build_prompt(text, prompt_suffix, show_default, default) |
||||
|
||||
while 1: |
||||
while 1: |
||||
value = prompt_func(prompt) |
||||
if value: |
||||
break |
||||
# If a default is set and used, then the confirmation |
||||
# prompt is always skipped because that's the only thing |
||||
# that really makes sense. |
||||
elif default is not None: |
||||
return default |
||||
try: |
||||
result = value_proc(value) |
||||
except UsageError as e: |
||||
echo('Error: %s' % e.message, err=err) |
||||
continue |
||||
if not confirmation_prompt: |
||||
return result |
||||
while 1: |
||||
value2 = prompt_func('Repeat for confirmation: ') |
||||
if value2: |
||||
break |
||||
if value == value2: |
||||
return result |
||||
echo('Error: the two entered values do not match', err=err) |
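# A usage sketch (hedged: the prompt text and default are example values):
#
#     value = prompt('Please enter a number', type=int, default=42)
#     # Empty input returns 42; non-integer input re-prompts with an error.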
||||
|
||||
|
||||
def confirm(text, default=False, abort=False, prompt_suffix=': ', |
||||
show_default=True, err=False): |
||||
"""Prompts for confirmation (yes/no question). |
||||
|
||||
If the user aborts the input by sending a interrupt signal this |
||||
function will catch it and raise a :exc:`Abort` exception. |
||||
|
||||
.. versionadded:: 4.0 |
||||
Added the `err` parameter. |
||||
|
||||
:param text: the question to ask. |
||||
:param default: the default for the prompt. |
||||
:param abort: if this is set to `True` a negative answer aborts by |
||||
raising :exc:`Abort`. |
||||
:param prompt_suffix: a suffix that should be added to the prompt. |
||||
:param show_default: shows or hides the default value in the prompt. |
||||
:param err: if set to true the file defaults to ``stderr`` instead of |
||||
``stdout``, the same as with echo. |
||||
""" |
||||
prompt = _build_prompt(text, prompt_suffix, show_default, |
||||
default and 'Y/n' or 'y/N') |
||||
while 1: |
||||
try: |
||||
# Write the prompt separately so that we get nice |
||||
# coloring through colorama on Windows |
||||
echo(prompt, nl=False, err=err) |
||||
value = visible_prompt_func('').lower().strip() |
||||
except (KeyboardInterrupt, EOFError): |
||||
raise Abort() |
||||
if value in ('y', 'yes'): |
||||
rv = True |
||||
elif value in ('n', 'no'): |
||||
rv = False |
||||
elif value == '': |
||||
rv = default |
||||
else: |
||||
echo('Error: invalid input', err=err) |
||||
continue |
||||
break |
||||
if abort and not rv: |
||||
raise Abort() |
||||
return rv |
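# A usage sketch (hedged: the question texts and `do_the_work` are
# placeholder names):
#
#     if confirm('Do you want to continue?'):
#         do_the_work()
#
#     # With abort=True a negative answer raises Abort instead of
#     # returning False:
#     confirm('Really delete everything?', abort=True)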
||||
|
||||
|
||||
def get_terminal_size(): |
||||
"""Returns the current size of the terminal as tuple in the form |
||||
``(width, height)`` in columns and rows. |
||||
""" |
||||
# If shutil has get_terminal_size() (Python 3.3 and later) use that |
||||
if sys.version_info >= (3, 3): |
||||
import shutil |
||||
shutil_get_terminal_size = getattr(shutil, 'get_terminal_size', None) |
||||
if shutil_get_terminal_size: |
||||
sz = shutil_get_terminal_size() |
||||
return sz.columns, sz.lines |
||||
|
||||
if get_winterm_size is not None: |
||||
return get_winterm_size() |
||||
|
||||
def ioctl_gwinsz(fd): |
||||
try: |
||||
import fcntl |
||||
import termios |
||||
cr = struct.unpack( |
||||
'hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')) |
||||
except Exception: |
||||
return |
||||
return cr |
||||
|
||||
cr = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2) |
||||
if not cr: |
||||
try: |
||||
fd = os.open(os.ctermid(), os.O_RDONLY) |
||||
try: |
||||
cr = ioctl_gwinsz(fd) |
||||
finally: |
||||
os.close(fd) |
||||
except Exception: |
||||
pass |
||||
if not cr or not cr[0] or not cr[1]: |
||||
cr = (os.environ.get('LINES', 25), |
||||
os.environ.get('COLUMNS', DEFAULT_COLUMNS)) |
||||
return int(cr[1]), int(cr[0]) |
||||
|
||||
|
||||
def echo_via_pager(text, color=None): |
||||
"""This function takes a text and shows it via an environment specific |
||||
pager on stdout. |
||||
|
||||
.. versionchanged:: 3.0 |
||||
Added the `color` flag. |
||||
|
||||
:param text: the text to page. |
||||
:param color: controls if the pager supports ANSI colors or not. The |
||||
default is autodetection. |
||||
""" |
||||
color = resolve_color_default(color) |
||||
if not isinstance(text, string_types): |
||||
text = text_type(text) |
||||
from ._termui_impl import pager |
||||
return pager(text + '\n', color) |
||||
|
||||
|
||||
def progressbar(iterable=None, length=None, label=None, show_eta=True, |
||||
show_percent=None, show_pos=False, |
||||
item_show_func=None, fill_char='#', empty_char='-', |
||||
bar_template='%(label)s [%(bar)s] %(info)s', |
||||
info_sep=' ', width=36, file=None, color=None): |
||||
"""This function creates an iterable context manager that can be used |
||||
to iterate over something while showing a progress bar. It will |
||||
either iterate over the `iterable` or `length` items (that are counted |
||||
up). While iteration happens, this function will print a rendered |
||||
progress bar to the given `file` (defaults to stdout) and will attempt |
||||
to calculate remaining time and more. By default, this progress bar |
||||
will not be rendered if the file is not a terminal. |
||||
|
||||
The context manager creates the progress bar. When the context |
||||
manager is entered the progress bar is already displayed. With every |
||||
iteration over the progress bar, the iterable passed to the bar is |
||||
advanced and the bar is updated. When the context manager exits, |
||||
a newline is printed and the progress bar is finalized on screen. |
||||
|
||||
No printing must happen or the progress bar will be unintentionally |
||||
destroyed. |
||||
|
||||
Example usage:: |
||||
|
||||
with progressbar(items) as bar: |
||||
for item in bar: |
||||
do_something_with(item) |
||||
|
||||
Alternatively, if no iterable is specified, one can manually update the |
||||
progress bar through the `update()` method instead of directly |
||||
iterating over the progress bar. The update method accepts the number |
||||
of steps to increment the bar with:: |
||||
|
||||
with progressbar(length=chunks.total_bytes) as bar: |
||||
for chunk in chunks: |
||||
process_chunk(chunk) |
||||
bar.update(chunks.bytes) |
||||
|
||||
.. versionadded:: 2.0 |
||||
|
||||
.. versionadded:: 4.0 |
||||
Added the `color` parameter. Added an `update` method to the |
||||
progressbar object. |
||||
|
||||
:param iterable: an iterable to iterate over. If not provided the length |
||||
is required. |
||||
:param length: the number of items to iterate over. By default the |
||||
progressbar will attempt to ask the iterator about its |
||||
length, which might or might not work. If an iterable is |
||||
also provided this parameter can be used to override the |
||||
length. If an iterable is not provided the progress bar |
||||
will iterate over a range of that length. |
||||
:param label: the label to show next to the progress bar. |
||||
:param show_eta: enables or disables the estimated time display. This is |
||||
automatically disabled if the length cannot be |
||||
determined. |
||||
:param show_percent: enables or disables the percentage display. The |
||||
default is `True` if the iterable has a length or |
||||
`False` if not. |
||||
:param show_pos: enables or disables the absolute position display. The |
||||
default is `False`. |
||||
:param item_show_func: a function called with the current item which |
||||
can return a string to show the current item |
||||
next to the progress bar. Note that the current |
||||
item can be `None`! |
||||
:param fill_char: the character to use to show the filled part of the |
||||
progress bar. |
||||
:param empty_char: the character to use to show the non-filled part of |
||||
the progress bar. |
||||
:param bar_template: the format string to use as template for the bar. |
||||
The parameters in it are ``label`` for the label, |
||||
``bar`` for the progress bar and ``info`` for the |
||||
info section. |
||||
:param info_sep: the separator between multiple info items (eta etc.) |
||||
:param width: the width of the progress bar in characters, 0 means full |
||||
terminal width |
||||
:param file: the file to write to. If this is not a terminal then |
||||
only the label is printed. |
||||
:param color: controls if the terminal supports ANSI colors or not. The |
||||
default is autodetection. This is only needed if ANSI |
||||
codes are included anywhere in the progress bar output |
||||
which is not the case by default. |
||||
""" |
||||
from ._termui_impl import ProgressBar |
||||
color = resolve_color_default(color) |
||||
return ProgressBar(iterable=iterable, length=length, show_eta=show_eta, |
||||
show_percent=show_percent, show_pos=show_pos, |
||||
item_show_func=item_show_func, fill_char=fill_char, |
||||
empty_char=empty_char, bar_template=bar_template, |
||||
info_sep=info_sep, file=file, label=label, |
||||
width=width, color=color) |
||||
|
||||
|
||||
def clear(): |
||||
"""Clears the terminal screen. This will have the effect of clearing |
||||
the whole visible space of the terminal and moving the cursor to the |
||||
top left. This does not do anything if not connected to a terminal. |
||||
|
||||
.. versionadded:: 2.0 |
||||
""" |
||||
if not isatty(sys.stdout): |
||||
return |
||||
# If we're on Windows and we don't have colorama available, then we |
||||
# clear the screen by shelling out. Otherwise we can use an escape |
||||
# sequence. |
||||
if WIN: |
||||
os.system('cls') |
||||
else: |
||||
sys.stdout.write('\033[2J\033[1;1H') |
||||
|
||||
|
||||
def style(text, fg=None, bg=None, bold=None, dim=None, underline=None, |
||||
blink=None, reverse=None, reset=True): |
||||
"""Styles a text with ANSI styles and returns the new string. By |
||||
default the styling is self contained which means that at the end |
||||
of the string a reset code is issued. This can be prevented by |
||||
passing ``reset=False``. |
||||
|
||||
Examples:: |
||||
|
||||
click.echo(click.style('Hello World!', fg='green')) |
||||
click.echo(click.style('ATTENTION!', blink=True)) |
||||
click.echo(click.style('Some things', reverse=True, fg='cyan')) |
||||
|
||||
Supported color names: |
||||
|
||||
* ``black`` (might be a gray) |
||||
* ``red`` |
||||
* ``green`` |
||||
* ``yellow`` (might be an orange) |
||||
* ``blue`` |
||||
* ``magenta`` |
||||
* ``cyan`` |
||||
* ``white`` (might be light gray) |
||||
* ``reset`` (reset the color code only) |
||||
|
||||
.. versionadded:: 2.0 |
||||
|
||||
:param text: the string to style with ansi codes. |
||||
:param fg: if provided this will become the foreground color. |
||||
:param bg: if provided this will become the background color. |
||||
:param bold: if provided this will enable or disable bold mode. |
||||
:param dim: if provided this will enable or disable dim mode. This is |
||||
badly supported. |
||||
:param underline: if provided this will enable or disable underline. |
||||
:param blink: if provided this will enable or disable blinking. |
||||
:param reverse: if provided this will enable or disable inverse |
||||
rendering (foreground becomes background and the |
||||
other way round). |
||||
:param reset: by default a reset-all code is added at the end of the |
||||
string which means that styles do not carry over. This |
||||
can be disabled to compose styles. |
||||
""" |
||||
bits = [] |
||||
if fg: |
||||
try: |
||||
bits.append('\033[%dm' % (_ansi_colors.index(fg) + 30)) |
||||
except ValueError: |
||||
raise TypeError('Unknown color %r' % fg) |
||||
if bg: |
||||
try: |
||||
bits.append('\033[%dm' % (_ansi_colors.index(bg) + 40)) |
||||
except ValueError: |
||||
raise TypeError('Unknown color %r' % bg) |
||||
if bold is not None: |
||||
bits.append('\033[%dm' % (1 if bold else 22)) |
||||
if dim is not None: |
||||
bits.append('\033[%dm' % (2 if dim else 22)) |
||||
if underline is not None: |
||||
bits.append('\033[%dm' % (4 if underline else 24)) |
||||
if blink is not None: |
||||
bits.append('\033[%dm' % (5 if blink else 25)) |
||||
if reverse is not None: |
||||
bits.append('\033[%dm' % (7 if reverse else 27)) |
||||
bits.append(text) |
||||
if reset: |
||||
bits.append(_ansi_reset_all) |
||||
return ''.join(bits) |
||||
|
||||
|
||||
def unstyle(text): |
||||
"""Removes ANSI styling information from a string. Usually it's not |
||||
necessary to use this function as Click's echo function will |
||||
automatically remove styling if necessary. |
||||
|
||||
.. versionadded:: 2.0 |
||||
|
||||
:param text: the text to remove style information from. |
||||
""" |
||||
return strip_ansi(text) |
||||
|
||||
|
||||
def secho(text, file=None, nl=True, err=False, color=None, **styles): |
||||
"""This function combines :func:`echo` and :func:`style` into one |
||||
call. As such the following two calls are the same:: |
||||
|
||||
click.secho('Hello World!', fg='green') |
||||
click.echo(click.style('Hello World!', fg='green')) |
||||
|
||||
All keyword arguments are forwarded to the underlying functions |
||||
depending on which one they go with. |
||||
|
||||
.. versionadded:: 2.0 |
||||
""" |
||||
return echo(style(text, **styles), file=file, nl=nl, err=err, color=color) |
||||
|
||||
|
||||
def edit(text=None, editor=None, env=None, require_save=True, |
||||
extension='.txt', filename=None): |
||||
r"""Edits the given text in the defined editor. If an editor is given |
||||
(should be the full path to the executable but the regular operating |
||||
system search path is used for finding the executable) it overrides |
||||
the detected editor. Optionally, some environment variables can be |
||||
used. If the editor is closed without changes, `None` is returned. In |
||||
case a file is edited directly the return value is always `None` and |
||||
`require_save` and `extension` are ignored. |
||||
|
||||
If the editor cannot be opened a :exc:`UsageError` is raised. |
||||
|
||||
Note for Windows: to simplify cross-platform usage, the newlines are |
||||
automatically converted from POSIX to Windows and vice versa. As such, |
||||
the message here will have ``\n`` as newline markers. |
||||
|
||||
:param text: the text to edit. |
||||
:param editor: optionally the editor to use. Defaults to automatic |
||||
detection. |
||||
:param env: environment variables to forward to the editor. |
||||
:param require_save: if this is true, then not saving in the editor |
||||
will make the return value become `None`. |
||||
:param extension: the extension to tell the editor about. This defaults |
||||
to `.txt` but changing this might change syntax |
||||
highlighting. |
||||
:param filename: if provided it will edit this file instead of the |
||||
provided text contents. It will not use a temporary |
||||
file as an indirection in that case. |
||||
""" |
||||
from ._termui_impl import Editor |
||||
editor = Editor(editor=editor, env=env, require_save=require_save, |
||||
extension=extension) |
||||
if filename is None: |
||||
return editor.edit(text) |
||||
editor.edit_file(filename) |
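# A usage sketch (hedged: the initial text and `save_message` are
# placeholder names):
#
#     message = edit('# Describe your change above this line\n')
#     if message is not None:
#         save_message(message)
#
# `message` is None when the editor is closed without saving (given the
# default require_save=True).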
||||
|
||||
|
||||
def launch(url, wait=False, locate=False): |
||||
"""This function launches the given URL (or filename) in the default |
||||
viewer application for this file type. If this is an executable, it |
||||
might launch the executable in a new session. The return value is |
||||
the exit code of the launched application. Usually, ``0`` indicates |
||||
success. |
||||
|
||||
Examples:: |
||||
|
||||
click.launch('http://click.pocoo.org/') |
||||
click.launch('/my/downloaded/file', locate=True) |
||||
|
||||
.. versionadded:: 2.0 |
||||
|
||||
:param url: URL or filename of the thing to launch. |
||||
:param wait: waits for the program to stop. |
||||
:param locate: if this is set to `True` then instead of launching the |
||||
application associated with the URL it will attempt to |
||||
launch a file manager with the file located. This |
||||
might have weird effects if the URL does not point to |
||||
the filesystem. |
||||
""" |
||||
from ._termui_impl import open_url |
||||
return open_url(url, wait=wait, locate=locate) |
||||
|
||||
|
||||
# If this is provided, getchar() calls into this instead. This is used |
||||
# for unittesting purposes. |
||||
_getchar = None |
||||
|
||||
|
||||
def getchar(echo=False): |
||||
"""Fetches a single character from the terminal and returns it. This |
||||
will always return a unicode character and under certain rare |
||||
circumstances this might return more than one character. The |
||||
situations in which more than one character is returned are when, for |
||||
whatever reason, multiple characters end up in the terminal buffer or |
||||
standard input was not actually a terminal. |
||||
|
||||
Note that this will always read from the terminal, even if something |
||||
is piped into the standard input. |
||||
|
||||
.. versionadded:: 2.0 |
||||
|
||||
:param echo: if set to `True`, the character read will also show up on |
||||
the terminal. The default is to not show it. |
||||
""" |
||||
f = _getchar |
||||
if f is None: |
||||
from ._termui_impl import getchar as f |
||||
return f(echo) |
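# A usage sketch of a single-key confirmation:
#
#     echo('Continue? [y/n] ', nl=False)
#     c = getchar()
#     echo()
#     if c == 'y':
#         echo('continuing')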
||||
|
||||
|
||||
def pause(info='Press any key to continue ...', err=False): |
||||
"""This command stops execution and waits for the user to press any |
||||
key to continue. This is similar to the Windows batch "pause" |
||||
command. If the program is not run through a terminal, this command |
||||
will instead do nothing. |
||||
|
||||
.. versionadded:: 2.0 |
||||
|
||||
.. versionadded:: 4.0 |
||||
Added the `err` parameter. |
||||
|
||||
:param info: the info string to print before pausing. |
||||
:param err: if set to true the message goes to ``stderr`` instead of |
||||
``stdout``, the same as with echo. |
||||
""" |
||||
if not isatty(sys.stdin) or not isatty(sys.stdout): |
||||
return |
||||
try: |
||||
if info: |
||||
echo(info, nl=False, err=err) |
||||
try: |
||||
getchar() |
||||
except (KeyboardInterrupt, EOFError): |
||||
pass |
||||
finally: |
||||
if info: |
||||
echo(err=err) |
@ -0,0 +1,322 @@ |
||||
import os |
||||
import sys |
||||
import shutil |
||||
import tempfile |
||||
import contextlib |
||||
|
||||
from ._compat import iteritems, PY2 |
||||
|
||||
|
||||
# If someone wants to vendor click, we want to ensure the |
||||
# correct package is discovered. Ideally we could use a |
||||
# relative import here but unfortunately Python does not |
||||
# support that. |
||||
clickpkg = sys.modules[__name__.rsplit('.', 1)[0]] |
||||
|
||||
|
||||
if PY2: |
||||
from cStringIO import StringIO |
||||
else: |
||||
import io |
||||
from ._compat import _find_binary_reader |
||||
|
||||
|
||||
class EchoingStdin(object): |
||||
|
||||
def __init__(self, input, output): |
||||
self._input = input |
||||
self._output = output |
||||
|
||||
def __getattr__(self, x): |
||||
return getattr(self._input, x) |
||||
|
||||
def _echo(self, rv): |
||||
self._output.write(rv) |
||||
return rv |
||||
|
||||
def read(self, n=-1): |
||||
return self._echo(self._input.read(n)) |
||||
|
||||
def readline(self, n=-1): |
||||
return self._echo(self._input.readline(n)) |
||||
|
||||
def readlines(self): |
||||
return [self._echo(x) for x in self._input.readlines()] |
||||
|
||||
def __iter__(self): |
||||
return iter(self._echo(x) for x in self._input) |
||||
|
||||
def __repr__(self): |
||||
return repr(self._input) |
||||
|
||||
|
||||
def make_input_stream(input, charset): |
||||
# Is already an input stream. |
||||
if hasattr(input, 'read'): |
||||
if PY2: |
||||
return input |
||||
rv = _find_binary_reader(input) |
||||
if rv is not None: |
||||
return rv |
||||
raise TypeError('Could not find binary reader for input stream.') |
||||
|
||||
if input is None: |
||||
input = b'' |
||||
elif not isinstance(input, bytes): |
||||
input = input.encode(charset) |
||||
if PY2: |
||||
return StringIO(input) |
||||
return io.BytesIO(input) |
||||
|
||||
|
||||
class Result(object): |
||||
"""Holds the captured result of an invoked CLI script.""" |
||||
|
||||
def __init__(self, runner, output_bytes, exit_code, exception, |
||||
exc_info=None): |
||||
#: The runner that created the result |
||||
self.runner = runner |
||||
#: The output as bytes. |
||||
self.output_bytes = output_bytes |
||||
#: The exit code as integer. |
||||
self.exit_code = exit_code |
||||
#: The exception that happened if one did. |
||||
self.exception = exception |
||||
#: The traceback |
||||
self.exc_info = exc_info |
||||
|
||||
@property |
||||
def output(self): |
||||
"""The output as unicode string.""" |
||||
return self.output_bytes.decode(self.runner.charset, 'replace') \ |
||||
.replace('\r\n', '\n') |
||||
|
||||
def __repr__(self): |
||||
return '<Result %s>' % ( |
||||
self.exception and repr(self.exception) or 'okay', |
||||
) |
||||
|
||||
|
||||
class CliRunner(object): |
||||
"""The CLI runner provides functionality to invoke a Click command line |
||||
script for unittesting purposes in an isolated environment. This only |
||||
works in single-threaded systems without any concurrency as it changes the |
||||
global interpreter state. |
||||
|
||||
:param charset: the character set for the input and output data. This is |
||||
UTF-8 by default and should not be changed currently as |
||||
the reporting to Click only works properly in Python 2. |
||||
:param env: a dictionary with environment variables for overriding. |
||||
:param echo_stdin: if this is set to `True`, then reading from stdin writes |
||||
to stdout. This is useful for showing examples in |
||||
some circumstances. Note that regular prompts |
||||
will automatically echo the input. |
||||
""" |
||||
|
||||
def __init__(self, charset=None, env=None, echo_stdin=False): |
||||
if charset is None: |
||||
charset = 'utf-8' |
||||
self.charset = charset |
||||
self.env = env or {} |
||||
self.echo_stdin = echo_stdin |
||||
|
||||
def get_default_prog_name(self, cli): |
||||
"""Given a command object it will return the default program name |
||||
for it. The default is the `name` attribute or ``"root"`` if not |
||||
set. |
||||
""" |
||||
return cli.name or 'root' |
||||
|
||||
def make_env(self, overrides=None): |
||||
"""Returns the environment overrides for invoking a script.""" |
||||
rv = dict(self.env) |
||||
if overrides: |
||||
rv.update(overrides) |
||||
return rv |
||||
|
||||
@contextlib.contextmanager |
||||
def isolation(self, input=None, env=None, color=False): |
||||
"""A context manager that sets up the isolation for invoking of a |
||||
command line tool. This sets up stdin with the given input data |
||||
and `os.environ` with the overrides from the given dictionary. |
||||
This also rebinds some internals in Click to be mocked (like the |
||||
prompt functionality). |
||||
|
||||
This is automatically done in the :meth:`invoke` method. |
||||
|
||||
.. versionadded:: 4.0 |
||||
The ``color`` parameter was added. |
||||
|
||||
:param input: the input stream to put into sys.stdin. |
||||
:param env: the environment overrides as dictionary. |
||||
:param color: whether the output should contain color codes. The |
||||
application can still override this explicitly. |
||||
""" |
||||
input = make_input_stream(input, self.charset) |
||||
|
||||
old_stdin = sys.stdin |
||||
old_stdout = sys.stdout |
||||
old_stderr = sys.stderr |
||||
old_forced_width = clickpkg.formatting.FORCED_WIDTH |
||||
clickpkg.formatting.FORCED_WIDTH = 80 |
||||
|
||||
env = self.make_env(env) |
||||
|
||||
if PY2: |
||||
sys.stdout = sys.stderr = bytes_output = StringIO() |
||||
if self.echo_stdin: |
||||
input = EchoingStdin(input, bytes_output) |
||||
else: |
||||
bytes_output = io.BytesIO() |
||||
if self.echo_stdin: |
||||
input = EchoingStdin(input, bytes_output) |
||||
input = io.TextIOWrapper(input, encoding=self.charset) |
||||
sys.stdout = sys.stderr = io.TextIOWrapper( |
||||
bytes_output, encoding=self.charset) |
||||
|
||||
sys.stdin = input |
||||
|
||||
def visible_input(prompt=None): |
||||
sys.stdout.write(prompt or '') |
||||
val = input.readline().rstrip('\r\n') |
||||
sys.stdout.write(val + '\n') |
||||
sys.stdout.flush() |
||||
return val |
||||
|
||||
def hidden_input(prompt=None): |
||||
sys.stdout.write((prompt or '') + '\n') |
||||
sys.stdout.flush() |
||||
return input.readline().rstrip('\r\n') |
||||
|
||||
def _getchar(echo): |
||||
char = sys.stdin.read(1) |
||||
if echo: |
||||
sys.stdout.write(char) |
||||
sys.stdout.flush() |
||||
return char |
||||
|
||||
default_color = color |
||||
def should_strip_ansi(stream=None, color=None): |
||||
if color is None: |
||||
return not default_color |
||||
return not color |
||||
|
||||
old_visible_prompt_func = clickpkg.termui.visible_prompt_func |
||||
old_hidden_prompt_func = clickpkg.termui.hidden_prompt_func |
||||
old__getchar_func = clickpkg.termui._getchar |
||||
old_should_strip_ansi = clickpkg.utils.should_strip_ansi |
||||
clickpkg.termui.visible_prompt_func = visible_input |
||||
clickpkg.termui.hidden_prompt_func = hidden_input |
||||
clickpkg.termui._getchar = _getchar |
||||
clickpkg.utils.should_strip_ansi = should_strip_ansi |
||||
|
||||
old_env = {} |
||||
try: |
||||
for key, value in iteritems(env): |
||||
old_env[key] = os.environ.get(key) |
||||
if value is None: |
||||
try: |
||||
del os.environ[key] |
||||
except Exception: |
||||
pass |
||||
else: |
||||
os.environ[key] = value |
||||
yield bytes_output |
||||
finally: |
||||
for key, value in iteritems(old_env): |
||||
if value is None: |
||||
try: |
||||
del os.environ[key] |
||||
except Exception: |
||||
pass |
||||
else: |
||||
os.environ[key] = value |
||||
sys.stdout = old_stdout |
||||
sys.stderr = old_stderr |
||||
sys.stdin = old_stdin |
||||
clickpkg.termui.visible_prompt_func = old_visible_prompt_func |
||||
clickpkg.termui.hidden_prompt_func = old_hidden_prompt_func |
||||
clickpkg.termui._getchar = old__getchar_func |
||||
clickpkg.utils.should_strip_ansi = old_should_strip_ansi |
||||
clickpkg.formatting.FORCED_WIDTH = old_forced_width |
||||
|
||||
def invoke(self, cli, args=None, input=None, env=None, |
||||
catch_exceptions=True, color=False, **extra): |
||||
"""Invokes a command in an isolated environment. The arguments are |
||||
forwarded directly to the command line script; the `extra` keyword |
||||
arguments are passed to the :meth:`~clickpkg.Command.main` function of |
||||
the command. |
||||
|
||||
This returns a :class:`Result` object. |
||||
|
||||
.. versionadded:: 3.0 |
||||
The ``catch_exceptions`` parameter was added. |
||||
|
||||
.. versionchanged:: 3.0 |
||||
The result object now has an `exc_info` attribute with the |
||||
traceback if available. |
||||
|
||||
.. versionadded:: 4.0 |
||||
The ``color`` parameter was added. |
||||
|
||||
:param cli: the command to invoke |
||||
:param args: the arguments to invoke |
||||
:param input: the input data for `sys.stdin`. |
||||
:param env: the environment overrides. |
||||
:param catch_exceptions: Whether to catch any other exceptions than |
||||
``SystemExit``. |
||||
:param extra: the keyword arguments to pass to :meth:`main`. |
||||
:param color: whether the output should contain color codes. The |
||||
application can still override this explicitly. |
||||
""" |
||||
exc_info = None |
||||
with self.isolation(input=input, env=env, color=color) as out: |
||||
exception = None |
||||
exit_code = 0 |
||||
|
||||
try: |
||||
cli.main(args=args or (), |
||||
prog_name=self.get_default_prog_name(cli), **extra) |
||||
except SystemExit as e: |
||||
if e.code != 0: |
||||
exception = e |
||||
|
||||
exc_info = sys.exc_info() |
||||
|
||||
exit_code = e.code |
||||
if not isinstance(exit_code, int): |
||||
sys.stdout.write(str(exit_code)) |
||||
sys.stdout.write('\n') |
||||
exit_code = 1 |
||||
except Exception as e: |
||||
if not catch_exceptions: |
||||
raise |
||||
exception = e |
||||
exit_code = -1 |
||||
exc_info = sys.exc_info() |
||||
finally: |
||||
sys.stdout.flush() |
||||
output = out.getvalue() |
||||
|
||||
return Result(runner=self, |
||||
output_bytes=output, |
||||
exit_code=exit_code, |
||||
exception=exception, |
||||
exc_info=exc_info) |
||||
|
||||
@contextlib.contextmanager |
||||
def isolated_filesystem(self): |
||||
"""A context manager that creates a temporary folder and changes |
||||
the current working directory to it for isolated filesystem tests. |
||||
""" |
||||
cwd = os.getcwd() |
||||
t = tempfile.mkdtemp() |
||||
os.chdir(t) |
||||
try: |
||||
yield t |
||||
finally: |
||||
os.chdir(cwd) |
||||
try: |
||||
shutil.rmtree(t) |
||||
except (OSError, IOError): |
||||
pass |
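The testing helpers above are easiest to follow from the caller's side. Below is a minimal, hypothetical sketch (the ``hello`` command and its argument are invented for illustration) showing how ``CliRunner.invoke`` captures output into a ``Result`` and how ``isolated_filesystem`` provides a throwaway working directory:

.. code-block:: python

    import click
    from click.testing import CliRunner

    @click.command()
    @click.argument('name')
    def hello(name):
        """Greet NAME on stdout."""
        click.echo('Hello, %s!' % name)

    runner = CliRunner()
    # invoke() redirects stdin/stdout/stderr, catches SystemExit and
    # returns a Result with the captured output and exit code.
    result = runner.invoke(hello, ['World'])
    assert result.exit_code == 0
    assert result.output == 'Hello, World!\n'

    # isolated_filesystem() chdir()s into a temporary directory that is
    # removed again afterwards.
    with runner.isolated_filesystem():
        with open('greeting.txt', 'w') as f:
            f.write(result.output)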
@ -0,0 +1,550 @@ |
||||
import os |
||||
import stat |
||||
|
||||
from ._compat import open_stream, text_type, filename_to_ui, \ |
||||
get_filesystem_encoding, get_streerror, _get_argv_encoding, PY2 |
||||
from .exceptions import BadParameter |
||||
from .utils import safecall, LazyFile |
||||
|
||||
|
||||
class ParamType(object): |
||||
"""Helper for converting values through types. The following is |
||||
necessary for a valid type: |
||||
|
||||
* it needs a name |
||||
* it needs to pass through None unchanged |
||||
* it needs to convert from a string |
||||
* it needs to convert its result type through unchanged |
||||
(eg: needs to be idempotent) |
||||
* it needs to be able to deal with param and context being `None`. |
||||
This can be the case when the object is used with prompt |
||||
inputs. |
||||
""" |
||||
is_composite = False |
||||
|
||||
#: the descriptive name of this type |
||||
name = None |
||||
|
||||
#: if a list of this type is expected and the value is pulled from a |
||||
#: string environment variable, this is what splits it up. `None` |
||||
#: means any whitespace. For all parameters the general rule is that |
||||
#: whitespace splits them up. The exception are paths and files which |
||||
#: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on |
||||
#: Windows). |
||||
envvar_list_splitter = None |
||||
|
||||
def __call__(self, value, param=None, ctx=None): |
||||
if value is not None: |
||||
return self.convert(value, param, ctx) |
||||
|
||||
def get_metavar(self, param): |
||||
"""Returns the metavar default for this param if it provides one.""" |
||||
|
||||
def get_missing_message(self, param): |
||||
"""Optionally might return extra information about a missing |
||||
parameter. |
||||
|
||||
.. versionadded:: 2.0 |
||||
""" |
||||
|
||||
def convert(self, value, param, ctx): |
||||
"""Converts the value. This is not invoked for values that are |
||||
`None` (the missing value). |
||||
""" |
||||
return value |
||||
|
||||
def split_envvar_value(self, rv): |
||||
"""Given a value from an environment variable this splits it up |
||||
into small chunks depending on the defined envvar list splitter. |
||||
|
||||
If the splitter is set to `None`, which means that whitespace splits, |
||||
then leading and trailing whitespace is ignored. Otherwise, leading |
||||
and trailing splitters usually lead to empty items being included. |
||||
""" |
||||
return (rv or '').split(self.envvar_list_splitter) |
||||
|
||||
def fail(self, message, param=None, ctx=None): |
||||
"""Helper method to fail with an invalid value message.""" |
||||
raise BadParameter(message, ctx=ctx, param=param) |
||||
|
||||
|
||||
class CompositeParamType(ParamType): |
||||
is_composite = True |
||||
|
||||
@property |
||||
def arity(self): |
||||
raise NotImplementedError() |
||||
|
||||
|
||||
class FuncParamType(ParamType): |
||||
|
||||
def __init__(self, func): |
||||
self.name = func.__name__ |
||||
self.func = func |
||||
|
||||
def convert(self, value, param, ctx): |
||||
try: |
||||
return self.func(value) |
||||
except ValueError: |
||||
try: |
||||
value = text_type(value) |
||||
except UnicodeError: |
||||
value = str(value).decode('utf-8', 'replace') |
||||
self.fail(value, param, ctx) |
||||
|
||||
|
||||
class UnprocessedParamType(ParamType): |
||||
name = 'text' |
||||
|
||||
def convert(self, value, param, ctx): |
||||
return value |
||||
|
||||
def __repr__(self): |
||||
return 'UNPROCESSED' |
||||
|
||||
|
||||
class StringParamType(ParamType): |
||||
name = 'text' |
||||
|
||||
def convert(self, value, param, ctx): |
||||
if isinstance(value, bytes): |
||||
enc = _get_argv_encoding() |
||||
try: |
||||
value = value.decode(enc) |
||||
except UnicodeError: |
||||
fs_enc = get_filesystem_encoding() |
||||
if fs_enc != enc: |
||||
try: |
||||
value = value.decode(fs_enc) |
||||
except UnicodeError: |
||||
value = value.decode('utf-8', 'replace') |
||||
return value |
||||
return value |
||||
|
||||
def __repr__(self): |
||||
return 'STRING' |
||||
|
||||
|
||||
class Choice(ParamType): |
||||
"""The choice type allows a value to be checked against a fixed set of |
||||
supported values. All of these values have to be strings. |
||||
|
||||
See :ref:`choice-opts` for an example. |
||||
""" |
||||
name = 'choice' |
||||
|
||||
def __init__(self, choices): |
||||
self.choices = choices |
||||
|
||||
def get_metavar(self, param): |
||||
return '[%s]' % '|'.join(self.choices) |
||||
|
||||
def get_missing_message(self, param): |
||||
return 'Choose from %s.' % ', '.join(self.choices) |
||||
|
||||
def convert(self, value, param, ctx): |
||||
# Exact match |
||||
if value in self.choices: |
||||
return value |
||||
|
||||
# Match through normalization |
||||
if ctx is not None and \ |
||||
ctx.token_normalize_func is not None: |
||||
value = ctx.token_normalize_func(value) |
||||
for choice in self.choices: |
||||
if ctx.token_normalize_func(choice) == value: |
||||
return choice |
||||
|
||||
self.fail('invalid choice: %s. (choose from %s)' % |
||||
(value, ', '.join(self.choices)), param, ctx) |
||||
|
||||
def __repr__(self): |
||||
return 'Choice(%r)' % list(self.choices) |
||||
|
||||
|
||||
class IntParamType(ParamType): |
||||
name = 'integer' |
||||
|
||||
def convert(self, value, param, ctx): |
||||
try: |
||||
return int(value) |
||||
except (ValueError, UnicodeError): |
||||
self.fail('%s is not a valid integer' % value, param, ctx) |
||||
|
||||
def __repr__(self): |
||||
return 'INT' |
||||
|
||||
|
||||
class IntRange(IntParamType): |
||||
"""A parameter that works similar to :data:`click.INT` but restricts |
||||
the value to fit into a range. The default behavior is to fail if the |
||||
value falls outside the range, but it can also be silently clamped |
||||
between the two edges. |
||||
|
||||
See :ref:`ranges` for an example. |
||||
""" |
||||
name = 'integer range' |
||||
|
||||
def __init__(self, min=None, max=None, clamp=False): |
||||
self.min = min |
||||
self.max = max |
||||
self.clamp = clamp |
||||
|
||||
def convert(self, value, param, ctx): |
||||
rv = IntParamType.convert(self, value, param, ctx) |
||||
if self.clamp: |
||||
if self.min is not None and rv < self.min: |
||||
return self.min |
||||
if self.max is not None and rv > self.max: |
||||
return self.max |
||||
if self.min is not None and rv < self.min or \ |
||||
self.max is not None and rv > self.max: |
||||
if self.min is None: |
||||
self.fail('%s is bigger than the maximum valid value ' |
||||
'%s.' % (rv, self.max), param, ctx) |
||||
elif self.max is None: |
||||
self.fail('%s is smaller than the minimum valid value ' |
||||
'%s.' % (rv, self.min), param, ctx) |
||||
else: |
||||
self.fail('%s is not in the valid range of %s to %s.' |
||||
% (rv, self.min, self.max), param, ctx) |
||||
return rv |
||||
|
||||
def __repr__(self): |
||||
return 'IntRange(%r, %r)' % (self.min, self.max) |
||||
|
||||
|
||||
class BoolParamType(ParamType): |
||||
name = 'boolean' |
||||
|
||||
def convert(self, value, param, ctx): |
||||
if isinstance(value, bool): |
||||
return bool(value) |
||||
value = value.lower() |
||||
if value in ('true', '1', 'yes', 'y'): |
||||
return True |
||||
elif value in ('false', '0', 'no', 'n'): |
||||
return False |
||||
self.fail('%s is not a valid boolean' % value, param, ctx) |
||||
|
||||
def __repr__(self): |
||||
return 'BOOL' |
||||
|
||||
|
||||
class FloatParamType(ParamType): |
||||
name = 'float' |
||||
|
||||
def convert(self, value, param, ctx): |
||||
try: |
||||
return float(value) |
||||
except (UnicodeError, ValueError): |
||||
self.fail('%s is not a valid floating point value' % |
||||
value, param, ctx) |
||||
|
||||
def __repr__(self): |
||||
return 'FLOAT' |
||||
|
||||
|
||||
class UUIDParameterType(ParamType): |
||||
name = 'uuid' |
||||
|
||||
def convert(self, value, param, ctx): |
||||
import uuid |
||||
try: |
||||
if PY2 and isinstance(value, text_type): |
||||
value = value.encode('ascii') |
||||
return uuid.UUID(value) |
||||
except (UnicodeError, ValueError): |
||||
self.fail('%s is not a valid UUID value' % value, param, ctx) |
||||
|
||||
def __repr__(self): |
||||
return 'UUID' |
||||
|
||||
|
||||
class File(ParamType): |
||||
"""Declares a parameter to be a file for reading or writing. The file |
||||
is automatically closed once the context tears down (after the command |
||||
finished working). |
||||
|
||||
Files can be opened for reading or writing. The special value ``-`` |
||||
indicates stdin or stdout depending on the mode. |
||||
|
||||
By default, the file is opened for reading text data, but it can also be |
||||
opened in binary mode or for writing. The encoding parameter can be used |
||||
to force a specific encoding. |
||||
|
||||
The `lazy` flag controls if the file should be opened immediately or |
||||
upon first IO. The default is to be non-lazy for standard input and |
||||
output streams as well as files opened for reading, lazy otherwise. |
||||
|
||||
Starting with Click 2.0, files can also be opened atomically in which |
||||
case all writes go into a separate file in the same folder and upon |
||||
completion the file will be moved over to the original location. This |
||||
is useful if a file regularly read by other users is modified. |
||||
|
||||
See :ref:`file-args` for more information. |
||||
""" |
||||
name = 'filename' |
||||
envvar_list_splitter = os.path.pathsep |
||||
|
||||
def __init__(self, mode='r', encoding=None, errors='strict', lazy=None, |
||||
atomic=False): |
||||
self.mode = mode |
||||
self.encoding = encoding |
||||
self.errors = errors |
||||
self.lazy = lazy |
||||
self.atomic = atomic |
||||
|
||||
def resolve_lazy_flag(self, value): |
||||
if self.lazy is not None: |
||||
return self.lazy |
||||
if value == '-': |
||||
return False |
||||
elif 'w' in self.mode: |
||||
return True |
||||
return False |
||||
|
||||
def convert(self, value, param, ctx): |
||||
try: |
||||
if hasattr(value, 'read') or hasattr(value, 'write'): |
||||
return value |
||||
|
||||
lazy = self.resolve_lazy_flag(value) |
||||
|
||||
if lazy: |
||||
f = LazyFile(value, self.mode, self.encoding, self.errors, |
||||
atomic=self.atomic) |
||||
if ctx is not None: |
||||
ctx.call_on_close(f.close_intelligently) |
||||
return f |
||||
|
||||
f, should_close = open_stream(value, self.mode, |
||||
self.encoding, self.errors, |
||||
atomic=self.atomic) |
||||
# If a context is provided, we automatically close the file |
||||
# at the end of the context execution (or flush out). If a |
||||
# context does not exist, it's the caller's responsibility to |
||||
# properly close the file. This for instance happens when the |
||||
# type is used with prompts. |
||||
if ctx is not None: |
||||
if should_close: |
||||
ctx.call_on_close(safecall(f.close)) |
||||
else: |
||||
ctx.call_on_close(safecall(f.flush)) |
||||
return f |
||||
except (IOError, OSError) as e: |
||||
self.fail('Could not open file: %s: %s' % ( |
||||
filename_to_ui(value), |
||||
get_streerror(e), |
||||
), param, ctx) |
||||
|
||||
|
||||
class Path(ParamType): |
||||
"""The path type is similar to the :class:`File` type but it performs |
||||
different checks. First of all, instead of returning an open file |
||||
handle it returns just the filename. Secondly, it can perform various |
||||
basic checks about what the file or directory should be. |
||||
|
||||
.. versionchanged:: 6.0 |
||||
`allow_dash` was added. |
||||
|
||||
:param exists: if set to true, the file or directory needs to exist for |
||||
this value to be valid. If this is not required and a |
||||
file does indeed not exist, then all further checks are |
||||
silently skipped. |
||||
:param file_okay: controls if a file is a possible value. |
||||
:param dir_okay: controls if a directory is a possible value. |
||||
:param writable: if true, a writable check is performed. |
||||
:param readable: if true, a readable check is performed. |
||||
:param resolve_path: if this is true, then the path is fully resolved |
||||
before the value is passed onwards. This means |
||||
that it's absolute and symlinks are resolved. |
||||
:param allow_dash: If this is set to `True`, a single dash to indicate |
||||
standard streams is permitted. |
||||
:param type: optionally a string type that should be used to |
||||
represent the path. The default is `None` which |
||||
means the return value will be either bytes or |
||||
unicode depending on what makes most sense given the |
||||
input data Click deals with. |
||||
""" |
||||
envvar_list_splitter = os.path.pathsep |
||||
|
||||
def __init__(self, exists=False, file_okay=True, dir_okay=True, |
||||
writable=False, readable=True, resolve_path=False, |
||||
allow_dash=False, path_type=None): |
||||
self.exists = exists |
||||
self.file_okay = file_okay |
||||
self.dir_okay = dir_okay |
||||
self.writable = writable |
||||
self.readable = readable |
||||
self.resolve_path = resolve_path |
||||
self.allow_dash = allow_dash |
||||
self.type = path_type |
||||
|
||||
if self.file_okay and not self.dir_okay: |
||||
self.name = 'file' |
||||
self.path_type = 'File' |
||||
elif self.dir_okay and not self.file_okay: |
||||
self.name = 'directory' |
||||
self.path_type = 'Directory' |
||||
else: |
||||
self.name = 'path' |
||||
self.path_type = 'Path' |
||||
|
||||
def coerce_path_result(self, rv): |
||||
if self.type is not None and not isinstance(rv, self.type): |
||||
if self.type is text_type: |
||||
rv = rv.decode(get_filesystem_encoding()) |
||||
else: |
||||
rv = rv.encode(get_filesystem_encoding()) |
||||
return rv |
||||
|
||||
def convert(self, value, param, ctx): |
||||
rv = value |
||||
|
||||
is_dash = self.file_okay and self.allow_dash and rv in (b'-', '-') |
||||
|
||||
if not is_dash: |
||||
if self.resolve_path: |
||||
rv = os.path.realpath(rv) |
||||
|
||||
try: |
||||
st = os.stat(rv) |
||||
except OSError: |
||||
if not self.exists: |
||||
return self.coerce_path_result(rv) |
||||
self.fail('%s "%s" does not exist.' % ( |
||||
self.path_type, |
||||
filename_to_ui(value) |
||||
), param, ctx) |
||||
|
||||
if not self.file_okay and stat.S_ISREG(st.st_mode): |
||||
self.fail('%s "%s" is a file.' % ( |
||||
self.path_type, |
||||
filename_to_ui(value) |
||||
), param, ctx) |
||||
if not self.dir_okay and stat.S_ISDIR(st.st_mode): |
||||
self.fail('%s "%s" is a directory.' % ( |
||||
self.path_type, |
||||
filename_to_ui(value) |
||||
), param, ctx) |
||||
if self.writable and not os.access(value, os.W_OK): |
||||
self.fail('%s "%s" is not writable.' % ( |
||||
self.path_type, |
||||
filename_to_ui(value) |
||||
), param, ctx) |
||||
if self.readable and not os.access(value, os.R_OK): |
||||
self.fail('%s "%s" is not readable.' % ( |
||||
self.path_type, |
||||
filename_to_ui(value) |
||||
), param, ctx) |
||||
|
||||
return self.coerce_path_result(rv) |
||||
|
||||
|
||||
class Tuple(CompositeParamType): |
||||
"""The default behavior of Click is to apply a type on a value directly. |
||||
This works well in most cases, except for when `nargs` is set to a fixed |
||||
count and different types should be used for different items. In this |
||||
case the :class:`Tuple` type can be used. This type can only be used |
||||
if `nargs` is set to a fixed number. |
||||
|
||||
For more information see :ref:`tuple-type`. |
||||
|
||||
This can be selected by using a Python tuple literal as a type. |
||||
|
||||
:param types: a list of types that should be used for the tuple items. |
||||
""" |
||||
|
||||
def __init__(self, types): |
||||
self.types = [convert_type(ty) for ty in types] |
||||
|
||||
@property |
||||
def name(self): |
||||
return "<" + " ".join(ty.name for ty in self.types) + ">" |
||||
|
||||
@property |
||||
def arity(self): |
||||
return len(self.types) |
||||
|
||||
def convert(self, value, param, ctx): |
||||
if len(value) != len(self.types): |
||||
raise TypeError('It would appear that nargs is set to conflict ' |
||||
'with the composite type arity.') |
||||
return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value)) |
||||
|
||||
|
||||
def convert_type(ty, default=None): |
||||
"""Converts a callable or python ty into the most appropriate param |
||||
type. |
||||
""" |
||||
guessed_type = False |
||||
if ty is None and default is not None: |
||||
if isinstance(default, tuple): |
||||
ty = tuple(map(type, default)) |
||||
else: |
||||
ty = type(default) |
||||
guessed_type = True |
||||
|
||||
if isinstance(ty, tuple): |
||||
return Tuple(ty) |
||||
if isinstance(ty, ParamType): |
||||
return ty |
||||
if ty is text_type or ty is str or ty is None: |
||||
return STRING |
||||
if ty is int: |
||||
return INT |
||||
# Booleans are only okay if not guessed. This is done because for |
||||
# flags the default value is actually a bit of a lie in that it |
||||
# indicates which of the flags is the one we want. See get_default() |
||||
# for more information. |
||||
if ty is bool and not guessed_type: |
||||
return BOOL |
||||
if ty is float: |
||||
return FLOAT |
||||
if guessed_type: |
||||
return STRING |
||||
|
||||
# Catch a common mistake |
||||
if __debug__: |
||||
try: |
||||
if issubclass(ty, ParamType): |
||||
raise AssertionError('Attempted to use an uninstantiated ' |
||||
'parameter type (%s).' % ty) |
||||
except TypeError: |
||||
pass |
||||
return FuncParamType(ty) |
||||
|
||||
|
||||
#: A dummy parameter type that just does nothing. From a user's |
||||
#: perspective this appears to just be the same as `STRING` but internally |
||||
#: no string conversion takes place. This is necessary to achieve the |
||||
#: same bytes/unicode behavior on Python 2/3 in situations where you want |
||||
#: to not convert argument types. This is usually useful when working |
||||
#: with file paths as they can appear in bytes and unicode. |
||||
#: |
||||
#: For path related uses the :class:`Path` type is a better choice but |
||||
#: there are situations where an unprocessed type is useful which is why |
||||
#: it is provided. |
||||
#: |
||||
#: .. versionadded:: 4.0 |
||||
UNPROCESSED = UnprocessedParamType() |
||||
|
||||
#: A unicode string parameter type which is the implicit default. This |
||||
#: can also be selected by using ``str`` as type. |
||||
STRING = StringParamType() |
||||
|
||||
#: An integer parameter. This can also be selected by using ``int`` as |
||||
#: type. |
||||
INT = IntParamType() |
||||
|
||||
#: A floating point value parameter. This can also be selected by using |
||||
#: ``float`` as type. |
||||
FLOAT = FloatParamType() |
||||
|
||||
#: A boolean parameter. This is the default for boolean flags. This can |
||||
#: also be selected by using ``bool`` as a type. |
||||
BOOL = BoolParamType() |
||||
|
||||
#: A UUID parameter. |
||||
UUID = UUIDParameterType() |
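The constants defined above (``STRING``, ``INT``, ``FLOAT``, ``BOOL``, ``UUID``) and the richer types such as ``IntRange``, ``Choice``, ``Path`` and ``File`` are normally consumed through ``click.option`` and ``click.argument``. A hypothetical sketch (the ``copy`` command and its parameters are invented for illustration):

.. code-block:: python

    import click

    @click.command()
    @click.option('--retries', type=click.IntRange(0, 10, clamp=True), default=3,
                  help='Values outside 0-10 are clamped instead of rejected.')
    @click.option('--level', type=click.Choice(['debug', 'info', 'warning']),
                  default='info')
    @click.argument('src', type=click.Path(exists=True, dir_okay=False))
    @click.argument('dst', type=click.File('w'))
    def copy(retries, level, src, dst):
        """Copy SRC into DST using the built-in parameter types."""
        with open(src) as f:
            dst.write(f.read())   # dst is a (lazily opened) file object from click.File
        click.echo('retries=%d level=%s' % (retries, level))

    if __name__ == '__main__':
        copy()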
@ -0,0 +1,415 @@ |
||||
import os |
||||
import sys |
||||
|
||||
from .globals import resolve_color_default |
||||
|
||||
from ._compat import text_type, open_stream, get_filesystem_encoding, \ |
||||
get_streerror, string_types, PY2, binary_streams, text_streams, \ |
||||
filename_to_ui, auto_wrap_for_ansi, strip_ansi, should_strip_ansi, \ |
||||
_default_text_stdout, _default_text_stderr, is_bytes, WIN |
||||
|
||||
if not PY2: |
||||
from ._compat import _find_binary_writer |
||||
elif WIN: |
||||
from ._winconsole import _get_windows_argv, \ |
||||
_hash_py_argv, _initial_argv_hash |
||||
|
||||
|
||||
echo_native_types = string_types + (bytes, bytearray) |
||||
|
||||
|
||||
def _posixify(name): |
||||
return '-'.join(name.split()).lower() |
||||
|
||||
|
||||
def safecall(func): |
||||
"""Wraps a function so that it swallows exceptions.""" |
||||
def wrapper(*args, **kwargs): |
||||
try: |
||||
return func(*args, **kwargs) |
||||
except Exception: |
||||
pass |
||||
return wrapper |
||||
|
||||
|
||||
def make_str(value): |
||||
"""Converts a value into a valid string.""" |
||||
if isinstance(value, bytes): |
||||
try: |
||||
return value.decode(get_filesystem_encoding()) |
||||
except UnicodeError: |
||||
return value.decode('utf-8', 'replace') |
||||
return text_type(value) |
||||
|
||||
|
||||
def make_default_short_help(help, max_length=45): |
||||
words = help.split() |
||||
total_length = 0 |
||||
result = [] |
||||
done = False |
||||
|
||||
for word in words: |
||||
if word[-1:] == '.': |
||||
done = True |
||||
new_length = result and 1 + len(word) or len(word) |
||||
if total_length + new_length > max_length: |
||||
result.append('...') |
||||
done = True |
||||
else: |
||||
if result: |
||||
result.append(' ') |
||||
result.append(word) |
||||
if done: |
||||
break |
||||
total_length += new_length |
||||
|
||||
return ''.join(result) |
||||
|
||||
|
||||
class LazyFile(object): |
||||
"""A lazy file works like a regular file but it does not fully open |
||||
the file immediately. It does perform some basic checks early to see if the |
||||
filename parameter makes sense. This is useful for safely opening |
||||
files for writing. |
||||
""" |
||||
|
||||
def __init__(self, filename, mode='r', encoding=None, errors='strict', |
||||
atomic=False): |
||||
self.name = filename |
||||
self.mode = mode |
||||
self.encoding = encoding |
||||
self.errors = errors |
||||
self.atomic = atomic |
||||
|
||||
if filename == '-': |
||||
self._f, self.should_close = open_stream(filename, mode, |
||||
encoding, errors) |
||||
else: |
||||
if 'r' in mode: |
||||
# Open and close the file in case we're opening it for |
||||
# reading so that we can catch at least some errors in |
||||
# some cases early. |
||||
open(filename, mode).close() |
||||
self._f = None |
||||
self.should_close = True |
||||
|
||||
def __getattr__(self, name): |
||||
return getattr(self.open(), name) |
||||
|
||||
def __repr__(self): |
||||
if self._f is not None: |
||||
return repr(self._f) |
||||
return '<unopened file %r %s>' % (self.name, self.mode) |
||||
|
||||
def open(self): |
||||
"""Opens the file if it's not yet open. This call might fail with |
||||
a :exc:`FileError`. Not handling this error will produce an error |
||||
that Click shows. |
||||
""" |
||||
if self._f is not None: |
||||
return self._f |
||||
try: |
||||
rv, self.should_close = open_stream(self.name, self.mode, |
||||
self.encoding, |
||||
self.errors, |
||||
atomic=self.atomic) |
||||
except (IOError, OSError) as e: |
||||
from .exceptions import FileError |
||||
raise FileError(self.name, hint=get_streerror(e)) |
||||
self._f = rv |
||||
return rv |
||||
|
||||
def close(self): |
||||
"""Closes the underlying file, no matter what.""" |
||||
if self._f is not None: |
||||
self._f.close() |
||||
|
||||
def close_intelligently(self): |
||||
"""This function only closes the file if it was opened by the lazy |
||||
file wrapper. For instance this will never close stdin. |
||||
""" |
||||
if self.should_close: |
||||
self.close() |
||||
|
||||
def __enter__(self): |
||||
return self |
||||
|
||||
def __exit__(self, exc_type, exc_value, tb): |
||||
self.close_intelligently() |
||||
|
||||
def __iter__(self): |
||||
self.open() |
||||
return iter(self._f) |
||||
|
||||
|
||||
class KeepOpenFile(object): |
||||
|
||||
def __init__(self, file): |
||||
self._file = file |
||||
|
||||
def __getattr__(self, name): |
||||
return getattr(self._file, name) |
||||
|
||||
def __enter__(self): |
||||
return self |
||||
|
||||
def __exit__(self, exc_type, exc_value, tb): |
||||
pass |
||||
|
||||
def __repr__(self): |
||||
return repr(self._file) |
||||
|
||||
def __iter__(self): |
||||
return iter(self._file) |
||||
|
||||
|
||||
def echo(message=None, file=None, nl=True, err=False, color=None): |
||||
"""Prints a message plus a newline to the given file or stdout. On |
||||
first sight, this looks like the print function, but it has improved |
||||
support for handling Unicode and binary data that does not fail no |
||||
matter how badly configured the system is. |
||||
|
||||
Primarily it means that you can print binary data as well as Unicode |
||||
data on both 2.x and 3.x to the given file in the most appropriate way |
||||
possible. This is a very carefree function in that it will try its |
||||
best to not fail. As of Click 6.0 this includes support for unicode |
||||
output on the Windows console. |
||||
|
||||
In addition to that, if `colorama`_ is installed, the echo function will |
||||
also support clever handling of ANSI codes. Essentially it will then |
||||
do the following: |
||||
|
||||
- add transparent handling of ANSI color codes on Windows. |
||||
- hide ANSI codes automatically if the destination file is not a |
||||
terminal. |
||||
|
||||
.. _colorama: http://pypi.python.org/pypi/colorama |
||||
|
||||
.. versionchanged:: 6.0 |
||||
As of Click 6.0 the echo function will properly support unicode |
||||
output on the Windows console. Note that Click does not modify |
||||
the interpreter in any way which means that `sys.stdout` or the |
||||
print statement or function will still not provide unicode support. |
||||
|
||||
.. versionchanged:: 2.0 |
||||
Starting with version 2.0 of Click, the echo function will work |
||||
with colorama if it's installed. |
||||
|
||||
.. versionadded:: 3.0 |
||||
The `err` parameter was added. |
||||
|
||||
.. versionchanged:: 4.0 |
||||
Added the `color` flag. |
||||
|
||||
:param message: the message to print |
||||
:param file: the file to write to (defaults to ``stdout``) |
||||
:param err: if set to true the file defaults to ``stderr`` instead of |
||||
``stdout``. This is faster and easier than calling |
||||
:func:`get_text_stderr` yourself. |
||||
:param nl: if set to `True` (the default) a newline is printed afterwards. |
||||
:param color: controls if the terminal supports ANSI colors or not. The |
||||
default is autodetection. |
||||
""" |
||||
if file is None: |
||||
if err: |
||||
file = _default_text_stderr() |
||||
else: |
||||
file = _default_text_stdout() |
||||
|
||||
# Convert non bytes/text into the native string type. |
||||
if message is not None and not isinstance(message, echo_native_types): |
||||
message = text_type(message) |
||||
|
||||
if nl: |
||||
message = message or u'' |
||||
if isinstance(message, text_type): |
||||
message += u'\n' |
||||
else: |
||||
message += b'\n' |
||||
|
||||
# If there is a message, and we're in Python 3, and the value looks |
||||
# like bytes, we manually need to find the binary stream and write the |
||||
# message in there. This is done separately so that most stream |
||||
# types will work as you would expect. Eg: you can write to StringIO |
||||
# for other cases. |
||||
if message and not PY2 and is_bytes(message): |
||||
binary_file = _find_binary_writer(file) |
||||
if binary_file is not None: |
||||
file.flush() |
||||
binary_file.write(message) |
||||
binary_file.flush() |
||||
return |
||||
|
||||
# ANSI-style support. If there is no message or we are dealing with |
||||
bytes, nothing happens. If we are connected to a file we want |
||||
# to strip colors. If we are on windows we either wrap the stream |
||||
# to strip the color or we use the colorama support to translate the |
||||
# ansi codes to API calls. |
||||
if message and not is_bytes(message): |
||||
color = resolve_color_default(color) |
||||
if should_strip_ansi(file, color): |
||||
message = strip_ansi(message) |
||||
elif WIN: |
||||
if auto_wrap_for_ansi is not None: |
||||
file = auto_wrap_for_ansi(file) |
||||
elif not color: |
||||
message = strip_ansi(message) |
||||
|
||||
if message: |
||||
file.write(message) |
||||
file.flush() |
||||
|
||||
|
||||
def get_binary_stream(name): |
||||
"""Returns a system stream for byte processing. This essentially |
||||
returns the stream from the sys module with the given name but it |
||||
solves some compatibility issues between different Python versions. |
||||
Primarily this function is necessary for getting binary streams on |
||||
Python 3. |
||||
|
||||
:param name: the name of the stream to open. Valid names are ``'stdin'``, |
||||
``'stdout'`` and ``'stderr'`` |
||||
""" |
||||
opener = binary_streams.get(name) |
||||
if opener is None: |
||||
raise TypeError('Unknown standard stream %r' % name) |
||||
return opener() |
||||
|
||||
|
||||
def get_text_stream(name, encoding=None, errors='strict'): |
||||
"""Returns a system stream for text processing. This usually returns |
||||
a wrapped stream around a binary stream returned from |
||||
:func:`get_binary_stream` but it also can take shortcuts on Python 3 |
||||
for already correctly configured streams. |
||||
|
||||
:param name: the name of the stream to open. Valid names are ``'stdin'``, |
||||
``'stdout'`` and ``'stderr'`` |
||||
:param encoding: overrides the detected default encoding. |
||||
:param errors: overrides the default error mode. |
||||
""" |
||||
opener = text_streams.get(name) |
||||
if opener is None: |
||||
raise TypeError('Unknown standard stream %r' % name) |
||||
return opener(encoding, errors) |
||||
|
||||
|
||||
def open_file(filename, mode='r', encoding=None, errors='strict', |
||||
lazy=False, atomic=False): |
||||
"""This is similar to how the :class:`File` works but for manual |
||||
usage. Files are opened non-lazily by default. This can open regular |
||||
files as well as stdin/stdout if ``'-'`` is passed. |
||||
|
||||
If stdin/stdout is returned the stream is wrapped so that the context |
||||
manager will not close the stream accidentally. This makes it possible |
||||
to always use the function like this without having to worry to |
||||
accidentally close a standard stream:: |
||||
|
||||
with open_file(filename) as f: |
||||
... |
||||
|
||||
.. versionadded:: 3.0 |
||||
|
||||
:param filename: the name of the file to open (or ``'-'`` for stdin/stdout). |
||||
:param mode: the mode in which to open the file. |
||||
:param encoding: the encoding to use. |
||||
:param errors: the error handling for this file. |
||||
:param lazy: can be flipped to true to open the file lazily. |
||||
:param atomic: in atomic mode writes go into a temporary file and it's |
||||
moved on close. |
||||
""" |
||||
if lazy: |
||||
return LazyFile(filename, mode, encoding, errors, atomic=atomic) |
||||
f, should_close = open_stream(filename, mode, encoding, errors, |
||||
atomic=atomic) |
||||
if not should_close: |
||||
f = KeepOpenFile(f) |
||||
return f |
||||
|
||||
|
||||
def get_os_args(): |
||||
"""This returns the argument part of sys.argv in the most appropriate |
||||
form for processing. What this means is that this return value is in |
||||
a format that works for Click to process but does not necessarily |
||||
correspond well to what's actually standard for the interpreter. |
||||
|
||||
On most environments the return value is ``sys.argv[1:]`` unchanged. |
||||
However if you are on Windows and running Python 2 the return value |
||||
will actually be a list of unicode strings instead because the |
||||
default behavior on that platform otherwise will not be able to |
||||
carry all possible values that sys.argv can have. |
||||
|
||||
.. versionadded:: 6.0 |
||||
""" |
||||
# We can only extract the unicode argv if sys.argv has not been |
||||
# changed since the startup of the application. |
||||
if PY2 and WIN and _initial_argv_hash == _hash_py_argv(): |
||||
return _get_windows_argv() |
||||
return sys.argv[1:] |
||||
|
||||
|
||||
def format_filename(filename, shorten=False): |
||||
"""Formats a filename for user display. The main purpose of this |
||||
function is to ensure that the filename can be displayed at all. This |
||||
will decode the filename to unicode if necessary in a way that it will |
||||
not fail. Optionally, it can shorten the filename to not include the |
||||
full path to the filename. |
||||
|
||||
:param filename: formats a filename for UI display. This will also convert |
||||
the filename into unicode without failing. |
||||
:param shorten: this optionally shortens the filename to strip off the |
||||
path that leads up to it. |
||||
""" |
||||
if shorten: |
||||
filename = os.path.basename(filename) |
||||
return filename_to_ui(filename) |
||||
|
||||
|
||||
def get_app_dir(app_name, roaming=True, force_posix=False): |
||||
r"""Returns the config folder for the application. The default behavior |
||||
is to return whatever is most appropriate for the operating system. |
||||
|
||||
To give you an idea, for an app called ``"Foo Bar"``, something like |
||||
the following folders could be returned: |
||||
|
||||
Mac OS X: |
||||
``~/Library/Application Support/Foo Bar`` |
||||
Mac OS X (POSIX): |
||||
``~/.foo-bar`` |
||||
Unix: |
||||
``~/.config/foo-bar`` |
||||
Unix (POSIX): |
||||
``~/.foo-bar`` |
||||
Win XP (roaming): |
||||
``C:\Documents and Settings\<user>\Application Data\Foo Bar`` |
||||
Win XP (not roaming): |
||||
``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo Bar`` |
||||
Win 7 (roaming): |
||||
``C:\Users\<user>\AppData\Roaming\Foo Bar`` |
||||
Win 7 (not roaming): |
||||
``C:\Users\<user>\AppData\Local\Foo Bar`` |
||||
|
||||
.. versionadded:: 2.0 |
||||
|
||||
:param app_name: the application name. This should be properly capitalized |
||||
and can contain whitespace. |
||||
:param roaming: controls if the folder should be roaming or not on Windows. |
||||
Has no effect otherwise. |
||||
:param force_posix: if this is set to `True` then on any POSIX system the |
||||
folder will be stored in the home folder with a leading |
||||
dot instead of the XDG config home or darwin's |
||||
application support folder. |
||||
""" |
||||
if WIN: |
||||
key = roaming and 'APPDATA' or 'LOCALAPPDATA' |
||||
folder = os.environ.get(key) |
||||
if folder is None: |
||||
folder = os.path.expanduser('~') |
||||
return os.path.join(folder, app_name) |
||||
if force_posix: |
||||
return os.path.join(os.path.expanduser('~/.' + _posixify(app_name))) |
||||
if sys.platform == 'darwin': |
||||
return os.path.join(os.path.expanduser( |
||||
'~/Library/Application Support'), app_name) |
||||
return os.path.join( |
||||
os.environ.get('XDG_CONFIG_HOME', os.path.expanduser('~/.config')), |
||||
_posixify(app_name)) |
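A short, hypothetical sketch of the helpers above (the application name ``"Foo Bar"`` is only an example): ``echo`` accepts both text and bytes, ``get_app_dir`` resolves the per-platform configuration folder described in its docstring, and ``open_file`` treats ``'-'`` as a standard stream that the context manager will not close:

.. code-block:: python

    import click

    click.echo(u'starting up')                 # text goes to stdout
    click.echo(b'\xde\xad\xbe\xef', err=True)  # raw bytes, routed to stderr

    config_dir = click.get_app_dir('Foo Bar')
    click.echo(click.format_filename(config_dir))

    # '-' maps to stdout in write mode; the returned KeepOpenFile makes sure
    # the stream survives the end of the with-block.
    with click.open_file('-', 'w') as f:
        f.write('hello from open_file\n')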
@ -0,0 +1,49 @@ |
||||
# -*- coding: utf-8 -*- |
||||
""" |
||||
flask |
||||
~~~~~ |
||||
|
||||
A microframework based on Werkzeug. It's extensively documented |
||||
and follows best practice patterns. |
||||
|
||||
:copyright: © 2010 by the Pallets team. |
||||
:license: BSD, see LICENSE for more details. |
||||
""" |
||||
|
||||
__version__ = '1.0.2' |
||||
|
||||
# utilities we import from Werkzeug and Jinja2 that are unused |
||||
# in the module but are exported as public interface. |
||||
from werkzeug.exceptions import abort |
||||
from werkzeug.utils import redirect |
||||
from jinja2 import Markup, escape |
||||
|
||||
from .app import Flask, Request, Response |
||||
from .config import Config |
||||
from .helpers import url_for, flash, send_file, send_from_directory, \ |
||||
get_flashed_messages, get_template_attribute, make_response, safe_join, \ |
||||
stream_with_context |
||||
from .globals import current_app, g, request, session, _request_ctx_stack, \ |
||||
_app_ctx_stack |
||||
from .ctx import has_request_context, has_app_context, \ |
||||
after_this_request, copy_current_request_context |
||||
from .blueprints import Blueprint |
||||
from .templating import render_template, render_template_string |
||||
|
||||
# the signals |
||||
from .signals import signals_available, template_rendered, request_started, \ |
||||
request_finished, got_request_exception, request_tearing_down, \ |
||||
appcontext_tearing_down, appcontext_pushed, \ |
||||
appcontext_popped, message_flashed, before_render_template |
||||
|
||||
# We're not exposing the actual json module but a convenient wrapper around |
||||
# it. |
||||
from . import json |
||||
|
||||
# This was the only thing that Flask used to export at one point and it had |
||||
# a more generic name. |
||||
jsonify = json.jsonify |
||||
|
||||
# backwards compat, goes away in 1.0 |
||||
from .sessions import SecureCookieSession as Session |
||||
json_available = True |
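The re-exports above are what application code typically imports from the ``flask`` package. A brief, hypothetical sketch (the ``/ping`` endpoint is invented for illustration) using the ``jsonify`` wrapper and the ``request`` proxy imported here:

.. code-block:: python

    from flask import Flask, jsonify, request

    app = Flask(__name__)

    @app.route('/ping')
    def ping():
        # jsonify is the convenience wrapper around flask.json exported above
        return jsonify(status='ok', args=request.args.to_dict())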
@ -0,0 +1,14 @@ |
||||
# -*- coding: utf-8 -*- |
||||
""" |
||||
flask.__main__ |
||||
~~~~~~~~~~~~~~ |
||||
|
||||
Alias for flask.run for the command line. |
||||
|
||||
:copyright: © 2010 by the Pallets team. |
||||
:license: BSD, see LICENSE for more details. |
||||
""" |
||||
|
||||
if __name__ == '__main__': |
||||
from .cli import main |
||||
main(as_module=True) |
@ -0,0 +1,99 @@ |
||||
# -*- coding: utf-8 -*- |
||||
""" |
||||
flask._compat |
||||
~~~~~~~~~~~~~ |
||||
|
||||
Some py2/py3 compatibility support based on a stripped down |
||||
version of six so we don't have to depend on a specific version |
||||
of it. |
||||
|
||||
:copyright: © 2010 by the Pallets team. |
||||
:license: BSD, see LICENSE for more details. |
||||
""" |
||||
|
||||
import sys |
||||
|
||||
PY2 = sys.version_info[0] == 2 |
||||
_identity = lambda x: x |
||||
|
||||
|
||||
if not PY2: |
||||
text_type = str |
||||
string_types = (str,) |
||||
integer_types = (int,) |
||||
|
||||
iterkeys = lambda d: iter(d.keys()) |
||||
itervalues = lambda d: iter(d.values()) |
||||
iteritems = lambda d: iter(d.items()) |
||||
|
||||
from inspect import getfullargspec as getargspec |
||||
from io import StringIO |
||||
|
||||
def reraise(tp, value, tb=None): |
||||
if value.__traceback__ is not tb: |
||||
raise value.with_traceback(tb) |
||||
raise value |
||||
|
||||
implements_to_string = _identity |
||||
|
||||
else: |
||||
text_type = unicode |
||||
string_types = (str, unicode) |
||||
integer_types = (int, long) |
||||
|
||||
iterkeys = lambda d: d.iterkeys() |
||||
itervalues = lambda d: d.itervalues() |
||||
iteritems = lambda d: d.iteritems() |
||||
|
||||
from inspect import getargspec |
||||
from cStringIO import StringIO |
||||
|
||||
exec('def reraise(tp, value, tb=None):\n raise tp, value, tb') |
||||
|
||||
def implements_to_string(cls): |
||||
cls.__unicode__ = cls.__str__ |
||||
cls.__str__ = lambda x: x.__unicode__().encode('utf-8') |
||||
return cls |
||||
|
||||
|
||||
def with_metaclass(meta, *bases): |
||||
"""Create a base class with a metaclass.""" |
||||
# This requires a bit of explanation: the basic idea is to make a |
||||
# dummy metaclass for one level of class instantiation that replaces |
||||
# itself with the actual metaclass. |
||||
class metaclass(type): |
||||
def __new__(cls, name, this_bases, d): |
||||
return meta(name, bases, d) |
||||
return type.__new__(metaclass, 'temporary_class', (), {}) |
||||
|
||||
|
||||
# Certain versions of pypy have a bug where clearing the exception stack |
||||
# breaks the __exit__ function in a very peculiar way. The second level of |
||||
# exception blocks is necessary because pypy seems to forget to check if an |
||||
# exception happened until the next bytecode instruction? |
||||
# |
||||
# Relevant PyPy bugfix commit: |
||||
# https://bitbucket.org/pypy/pypy/commits/77ecf91c635a287e88e60d8ddb0f4e9df4003301 |
||||
# According to ronan on #pypy IRC, it is released in PyPy2 2.3 and later |
||||
# versions. |
||||
# |
||||
# Ubuntu 14.04 has PyPy 2.2.1, which does exhibit this bug. |
||||
BROKEN_PYPY_CTXMGR_EXIT = False |
||||
if hasattr(sys, 'pypy_version_info'): |
||||
class _Mgr(object): |
||||
def __enter__(self): |
||||
return self |
||||
def __exit__(self, *args): |
||||
if hasattr(sys, 'exc_clear'): |
||||
# Python 3 (PyPy3) doesn't have exc_clear |
||||
sys.exc_clear() |
||||
try: |
||||
try: |
||||
with _Mgr(): |
||||
raise AssertionError() |
||||
except: |
||||
raise |
||||
except TypeError: |
||||
BROKEN_PYPY_CTXMGR_EXIT = True |
||||
except AssertionError: |
||||
pass |
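Most of the shims above are direct aliases; ``with_metaclass`` is the least obvious. A hypothetical sketch (``RegisteringMeta`` and ``Plugin`` are invented for illustration) of how it lets a single class statement use a metaclass under both Python 2 and Python 3:

.. code-block:: python

    from flask._compat import with_metaclass, iteritems, text_type

    class RegisteringMeta(type):
        registry = {}

        def __new__(mcs, name, bases, d):
            cls = type.__new__(mcs, name, bases, d)
            mcs.registry[name] = cls
            return cls

    # with_metaclass() returns a dummy base class whose metaclass replaces
    # itself with RegisteringMeta when Plugin is created.
    class Plugin(with_metaclass(RegisteringMeta, object)):
        pass

    assert RegisteringMeta.registry['Plugin'] is Plugin
    assert isinstance(u'text', text_type)
    assert dict(iteritems({'answer': 42})) == {'answer': 42}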
File diff suppressed because it is too large
@ -0,0 +1,448 @@ |
||||
# -*- coding: utf-8 -*- |
||||
""" |
||||
flask.blueprints |
||||
~~~~~~~~~~~~~~~~ |
||||
|
||||
Blueprints are the recommended way to implement larger or more |
||||
pluggable applications in Flask 0.7 and later. |
||||
|
||||
:copyright: © 2010 by the Pallets team. |
||||
:license: BSD, see LICENSE for more details. |
||||
""" |
||||
from functools import update_wrapper |
||||
from werkzeug.urls import url_join |
||||
|
||||
from .helpers import _PackageBoundObject, _endpoint_from_view_func |
||||
|
||||
|
||||
class BlueprintSetupState(object): |
||||
"""Temporary holder object for registering a blueprint with the |
||||
application. An instance of this class is created by the |
||||
:meth:`~flask.Blueprint.make_setup_state` method and later passed |
||||
to all register callback functions. |
||||
""" |
||||
|
||||
def __init__(self, blueprint, app, options, first_registration): |
||||
#: a reference to the current application |
||||
self.app = app |
||||
|
||||
#: a reference to the blueprint that created this setup state. |
||||
self.blueprint = blueprint |
||||
|
||||
#: a dictionary with all options that were passed to the |
||||
#: :meth:`~flask.Flask.register_blueprint` method. |
||||
self.options = options |
||||
|
||||
#: as blueprints can be registered multiple times with the |
||||
#: application and not everything wants to be registered |
||||
#: multiple times on it, this attribute can be used to figure |
||||
#: out if the blueprint was registered in the past already. |
||||
self.first_registration = first_registration |
||||
|
||||
subdomain = self.options.get('subdomain') |
||||
if subdomain is None: |
||||
subdomain = self.blueprint.subdomain |
||||
|
||||
#: The subdomain that the blueprint should be active for, ``None`` |
||||
#: otherwise. |
||||
self.subdomain = subdomain |
||||
|
||||
url_prefix = self.options.get('url_prefix') |
||||
if url_prefix is None: |
||||
url_prefix = self.blueprint.url_prefix |
||||
#: The prefix that should be used for all URLs defined on the |
||||
#: blueprint. |
||||
self.url_prefix = url_prefix |
||||
|
||||
#: A dictionary with URL defaults that is added to each and every |
||||
#: URL that was defined with the blueprint. |
||||
self.url_defaults = dict(self.blueprint.url_values_defaults) |
||||
self.url_defaults.update(self.options.get('url_defaults', ())) |
||||
|
||||
def add_url_rule(self, rule, endpoint=None, view_func=None, **options): |
||||
"""A helper method to register a rule (and optionally a view function) |
||||
to the application. The endpoint is automatically prefixed with the |
||||
blueprint's name. |
||||
""" |
||||
if self.url_prefix is not None: |
||||
if rule: |
||||
rule = '/'.join(( |
||||
self.url_prefix.rstrip('/'), rule.lstrip('/'))) |
||||
else: |
||||
rule = self.url_prefix |
||||
options.setdefault('subdomain', self.subdomain) |
||||
if endpoint is None: |
||||
endpoint = _endpoint_from_view_func(view_func) |
||||
defaults = self.url_defaults |
||||
if 'defaults' in options: |
||||
defaults = dict(defaults, **options.pop('defaults')) |
||||
self.app.add_url_rule(rule, '%s.%s' % (self.blueprint.name, endpoint), |
||||
view_func, defaults=defaults, **options) |
||||
|
||||
|
||||
class Blueprint(_PackageBoundObject): |
||||
"""Represents a blueprint. A blueprint is an object that records |
||||
functions that will be called with the |
||||
:class:`~flask.blueprints.BlueprintSetupState` later to register functions |
||||
or other things on the main application. See :ref:`blueprints` for more |
||||
information. |
||||
|
||||
.. versionadded:: 0.7 |
||||
""" |
||||
|
||||
warn_on_modifications = False |
||||
_got_registered_once = False |
||||
|
||||
#: Blueprint local JSON decoder class to use. |
||||
#: Set to ``None`` to use the app's :class:`~flask.app.Flask.json_encoder`. |
||||
json_encoder = None |
||||
#: Blueprint local JSON decoder class to use. |
||||
#: Set to ``None`` to use the app's :class:`~flask.app.Flask.json_decoder`. |
||||
json_decoder = None |
||||
|
||||
# TODO remove the next three attrs when Sphinx :inherited-members: works |
||||
# https://github.com/sphinx-doc/sphinx/issues/741 |
||||
|
||||
#: The name of the package or module that this app belongs to. Do not |
||||
#: change this once it is set by the constructor. |
||||
import_name = None |
||||
|
||||
#: Location of the template files to be added to the template lookup. |
||||
#: ``None`` if templates should not be added. |
||||
template_folder = None |
||||
|
||||
#: Absolute path to the package on the filesystem. Used to look up |
||||
#: resources contained in the package. |
||||
root_path = None |
||||
|
||||
def __init__(self, name, import_name, static_folder=None, |
||||
static_url_path=None, template_folder=None, |
||||
url_prefix=None, subdomain=None, url_defaults=None, |
||||
root_path=None): |
||||
_PackageBoundObject.__init__(self, import_name, template_folder, |
||||
root_path=root_path) |
||||
self.name = name |
||||
self.url_prefix = url_prefix |
||||
self.subdomain = subdomain |
||||
self.static_folder = static_folder |
||||
self.static_url_path = static_url_path |
||||
self.deferred_functions = [] |
||||
if url_defaults is None: |
||||
url_defaults = {} |
||||
self.url_values_defaults = url_defaults |
||||
|
||||
def record(self, func): |
||||
"""Registers a function that is called when the blueprint is |
||||
registered on the application. This function is called with the |
||||
state as argument as returned by the :meth:`make_setup_state` |
||||
method. |
||||
""" |
||||
if self._got_registered_once and self.warn_on_modifications: |
||||
from warnings import warn |
||||
warn(Warning('The blueprint was already registered once ' |
||||
'but is getting modified now. These changes ' |
||||
'will not show up.')) |
||||
self.deferred_functions.append(func) |
||||
|
||||
def record_once(self, func): |
||||
"""Works like :meth:`record` but wraps the function in another |
||||
function that will ensure the function is only called once. If the |
||||
blueprint is registered a second time on the application, the |
||||
function passed is not called. |
||||
""" |
||||
def wrapper(state): |
||||
if state.first_registration: |
||||
func(state) |
||||
return self.record(update_wrapper(wrapper, func)) |
||||
|
||||
def make_setup_state(self, app, options, first_registration=False): |
||||
"""Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState` |
||||
that is later passed to the register callback functions. |
||||
Subclasses can override this to return a subclass of the setup state. |
||||
""" |
||||
return BlueprintSetupState(self, app, options, first_registration) |
||||
|
||||
def register(self, app, options, first_registration=False): |
||||
"""Called by :meth:`Flask.register_blueprint` to register all views |
||||
and callbacks registered on the blueprint with the application. Creates |
||||
a :class:`.BlueprintSetupState` and calls each :meth:`record` callback |
||||
with it. |
||||
|
||||
:param app: The application this blueprint is being registered with. |
||||
:param options: Keyword arguments forwarded from |
||||
:meth:`~Flask.register_blueprint`. |
||||
:param first_registration: Whether this is the first time this |
||||
blueprint has been registered on the application. |
||||
""" |
||||
self._got_registered_once = True |
||||
state = self.make_setup_state(app, options, first_registration) |
||||
|
||||
if self.has_static_folder: |
||||
state.add_url_rule( |
||||
self.static_url_path + '/<path:filename>', |
||||
view_func=self.send_static_file, endpoint='static' |
||||
) |
||||
|
||||
for deferred in self.deferred_functions: |
||||
deferred(state) |
||||
|
||||
def route(self, rule, **options): |
||||
"""Like :meth:`Flask.route` but for a blueprint. The endpoint for the |
||||
:func:`url_for` function is prefixed with the name of the blueprint. |
||||
""" |
||||
def decorator(f): |
||||
endpoint = options.pop("endpoint", f.__name__) |
||||
self.add_url_rule(rule, endpoint, f, **options) |
||||
return f |
||||
return decorator |
||||
|
||||
def add_url_rule(self, rule, endpoint=None, view_func=None, **options): |
||||
"""Like :meth:`Flask.add_url_rule` but for a blueprint. The endpoint for |
||||
the :func:`url_for` function is prefixed with the name of the blueprint. |
||||
""" |
||||
if endpoint: |
||||
assert '.' not in endpoint, "Blueprint endpoints should not contain dots" |
||||
if view_func and hasattr(view_func, '__name__'): |
||||
assert '.' not in view_func.__name__, "Blueprint view function name should not contain dots" |
||||
self.record(lambda s: |
||||
s.add_url_rule(rule, endpoint, view_func, **options)) |
||||
|
||||
def endpoint(self, endpoint): |
||||
"""Like :meth:`Flask.endpoint` but for a blueprint. This does not |
||||
prefix the endpoint with the blueprint name; this has to be done |
||||
explicitly by the user of this method. If the endpoint is prefixed |
||||
with a `.` it will be registered to the current blueprint, otherwise |
||||
it's an application independent endpoint. |
||||
""" |
||||
def decorator(f): |
||||
def register_endpoint(state): |
||||
state.app.view_functions[endpoint] = f |
||||
self.record_once(register_endpoint) |
||||
return f |
||||
return decorator |
||||
|
||||
def app_template_filter(self, name=None): |
||||
"""Register a custom template filter, available application wide. Like |
||||
:meth:`Flask.template_filter` but for a blueprint. |
||||
|
||||
:param name: the optional name of the filter, otherwise the |
||||
function name will be used. |
||||
""" |
||||
def decorator(f): |
||||
self.add_app_template_filter(f, name=name) |
||||
return f |
||||
return decorator |
||||
|
||||
def add_app_template_filter(self, f, name=None): |
||||
"""Register a custom template filter, available application wide. Like |
||||
:meth:`Flask.add_template_filter` but for a blueprint. Works exactly |
||||
like the :meth:`app_template_filter` decorator. |
||||
|
||||
:param name: the optional name of the filter, otherwise the |
||||
function name will be used. |
||||
""" |
||||
def register_template(state): |
||||
state.app.jinja_env.filters[name or f.__name__] = f |
||||
self.record_once(register_template) |
||||
|
||||
def app_template_test(self, name=None): |
||||
"""Register a custom template test, available application wide. Like |
||||
:meth:`Flask.template_test` but for a blueprint. |
||||
|
||||
.. versionadded:: 0.10 |
||||
|
||||
:param name: the optional name of the test, otherwise the |
||||
function name will be used. |
||||
""" |
||||
def decorator(f): |
||||
self.add_app_template_test(f, name=name) |
||||
return f |
||||
return decorator |
||||
|
||||
def add_app_template_test(self, f, name=None): |
||||
"""Register a custom template test, available application wide. Like |
||||
:meth:`Flask.add_template_test` but for a blueprint. Works exactly |
||||
like the :meth:`app_template_test` decorator. |
||||
|
||||
.. versionadded:: 0.10 |
||||
|
||||
:param name: the optional name of the test, otherwise the |
||||
function name will be used. |
||||
""" |
||||
def register_template(state): |
||||
state.app.jinja_env.tests[name or f.__name__] = f |
||||
self.record_once(register_template) |
||||
|
||||
def app_template_global(self, name=None): |
||||
"""Register a custom template global, available application wide. Like |
||||
:meth:`Flask.template_global` but for a blueprint. |
||||
|
||||
.. versionadded:: 0.10 |
||||
|
||||
:param name: the optional name of the global, otherwise the |
||||
function name will be used. |
||||
""" |
||||
def decorator(f): |
||||
self.add_app_template_global(f, name=name) |
||||
return f |
||||
return decorator |
||||
|
||||
def add_app_template_global(self, f, name=None): |
||||
"""Register a custom template global, available application wide. Like |
||||
:meth:`Flask.add_template_global` but for a blueprint. Works exactly |
||||
like the :meth:`app_template_global` decorator. |
||||
|
||||
.. versionadded:: 0.10 |
||||
|
||||
:param name: the optional name of the global, otherwise the |
||||
function name will be used. |
||||
""" |
||||
def register_template(state): |
||||
state.app.jinja_env.globals[name or f.__name__] = f |
||||
self.record_once(register_template) |
||||
|
||||
def before_request(self, f): |
||||
"""Like :meth:`Flask.before_request` but for a blueprint. This function |
||||
is only executed before each request that is handled by a function of |
||||
that blueprint. |
||||
""" |
||||
self.record_once(lambda s: s.app.before_request_funcs |
||||
.setdefault(self.name, []).append(f)) |
||||
return f |
||||
|
||||
def before_app_request(self, f): |
||||
"""Like :meth:`Flask.before_request`. Such a function is executed |
||||
before each request, even if outside of a blueprint. |
||||
""" |
||||
self.record_once(lambda s: s.app.before_request_funcs |
||||
.setdefault(None, []).append(f)) |
||||
return f |
||||
|
||||
def before_app_first_request(self, f): |
||||
"""Like :meth:`Flask.before_first_request`. Such a function is |
||||
executed before the first request to the application. |
||||
""" |
||||
self.record_once(lambda s: s.app.before_first_request_funcs.append(f)) |
||||
return f |
||||
|
||||
def after_request(self, f): |
||||
"""Like :meth:`Flask.after_request` but for a blueprint. This function |
||||
is only executed after each request that is handled by a function of |
||||
that blueprint. |
||||
""" |
||||
self.record_once(lambda s: s.app.after_request_funcs |
||||
.setdefault(self.name, []).append(f)) |
||||
return f |
||||
|
||||
def after_app_request(self, f): |
||||
"""Like :meth:`Flask.after_request` but for a blueprint. Such a function |
||||
is executed after each request, even if outside of the blueprint. |
||||
""" |
||||
self.record_once(lambda s: s.app.after_request_funcs |
||||
.setdefault(None, []).append(f)) |
||||
return f |
||||
|
||||
def teardown_request(self, f): |
||||
"""Like :meth:`Flask.teardown_request` but for a blueprint. This |
||||
function is only executed when tearing down requests handled by a |
||||
function of that blueprint. Teardown request functions are executed |
||||
when the request context is popped, even when no actual request was |
||||
performed. |
||||
""" |
||||
self.record_once(lambda s: s.app.teardown_request_funcs |
||||
.setdefault(self.name, []).append(f)) |
||||
return f |
||||
|
||||
def teardown_app_request(self, f): |
||||
"""Like :meth:`Flask.teardown_request` but for a blueprint. Such a |
||||
function is executed when tearing down each request, even if outside of |
||||
the blueprint. |
||||
""" |
||||
self.record_once(lambda s: s.app.teardown_request_funcs |
||||
.setdefault(None, []).append(f)) |
||||
return f |
||||
|
||||
def context_processor(self, f): |
||||
"""Like :meth:`Flask.context_processor` but for a blueprint. This |
||||
function is only executed for requests handled by a blueprint. |
||||
""" |
||||
self.record_once(lambda s: s.app.template_context_processors |
||||
.setdefault(self.name, []).append(f)) |
||||
return f |
||||
|
||||
def app_context_processor(self, f): |
||||
"""Like :meth:`Flask.context_processor` but for a blueprint. Such a |
||||
function is executed each request, even if outside of the blueprint. |
||||
""" |
||||
self.record_once(lambda s: s.app.template_context_processors |
||||
.setdefault(None, []).append(f)) |
||||
return f |
||||
|
||||
def app_errorhandler(self, code): |
||||
"""Like :meth:`Flask.errorhandler` but for a blueprint. This |
||||
handler is used for all requests, even if outside of the blueprint. |
||||
""" |
||||
def decorator(f): |
||||
self.record_once(lambda s: s.app.errorhandler(code)(f)) |
||||
return f |
||||
return decorator |
||||
|
||||
def url_value_preprocessor(self, f): |
||||
"""Registers a function as URL value preprocessor for this |
||||
blueprint. It's called before the view functions are called and |
||||
can modify the url values provided. |
||||
""" |
||||
self.record_once(lambda s: s.app.url_value_preprocessors |
||||
.setdefault(self.name, []).append(f)) |
||||
return f |
||||
|
||||
def url_defaults(self, f): |
||||
"""Callback function for URL defaults for this blueprint. It's called |
||||
with the endpoint and values and should update the values passed |
||||
in place. |
||||
""" |
||||
self.record_once(lambda s: s.app.url_default_functions |
||||
.setdefault(self.name, []).append(f)) |
||||
return f |
||||
|
||||
def app_url_value_preprocessor(self, f): |
||||
"""Same as :meth:`url_value_preprocessor` but application wide. |
||||
""" |
||||
self.record_once(lambda s: s.app.url_value_preprocessors |
||||
.setdefault(None, []).append(f)) |
||||
return f |
||||
|
||||
def app_url_defaults(self, f): |
||||
"""Same as :meth:`url_defaults` but application wide. |
||||
""" |
||||
self.record_once(lambda s: s.app.url_default_functions |
||||
.setdefault(None, []).append(f)) |
||||
return f |
||||
|
||||
def errorhandler(self, code_or_exception): |
||||
"""Registers an error handler that becomes active for this blueprint |
||||
only. Please be aware that routing does not happen local to a |
||||
blueprint so an error handler for 404 usually is not handled by |
||||
a blueprint unless it is caused inside a view function. Another |
||||
special case is the 500 internal server error which is always looked |
||||
up from the application. |
||||
|
||||
Otherwise works as the :meth:`~flask.Flask.errorhandler` decorator |
||||
of the :class:`~flask.Flask` object. |
||||
""" |
||||
def decorator(f): |
||||
self.record_once(lambda s: s.app._register_error_handler( |
||||
self.name, code_or_exception, f)) |
||||
return f |
||||
return decorator |
||||
|
||||
def register_error_handler(self, code_or_exception, f): |
||||
"""Non-decorator version of the :meth:`errorhandler` error attach |
||||
function, akin to the :meth:`~flask.Flask.register_error_handler` |
||||
application-wide function of the :class:`~flask.Flask` object but |
||||
for error handlers limited to this blueprint. |
||||
|
||||
.. versionadded:: 0.11 |
||||
""" |
||||
self.record_once(lambda s: s.app._register_error_handler( |
||||
self.name, code_or_exception, f)) |
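# --- Illustrative usage sketch, not part of flask/blueprints.py itself ---
# How the Blueprint API documented above is typically used; the names
# "admin_bp" and "create_app" are hypothetical.
from flask import Blueprint, Flask

admin_bp = Blueprint('admin', __name__, url_prefix='/admin')

@admin_bp.route('/')
def admin_index():
    return 'admin home'

@admin_bp.app_template_filter('shout')
def shout(s):
    # registered application wide via record_once() when the blueprint is registered
    return s.upper()

def create_app():
    app = Flask(__name__)
    app.register_blueprint(admin_bp)  # triggers Blueprint.register() above
    return app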
@ -0,0 +1,898 @@ |
||||
# -*- coding: utf-8 -*- |
||||
""" |
||||
flask.cli |
||||
~~~~~~~~~ |
||||
|
||||
A simple command line application to run flask apps. |
||||
|
||||
:copyright: © 2010 by the Pallets team. |
||||
:license: BSD, see LICENSE for more details. |
||||
""" |
||||
|
||||
from __future__ import print_function |
||||
|
||||
import ast |
||||
import inspect |
||||
import os |
||||
import re |
||||
import ssl |
||||
import sys |
||||
import traceback |
||||
from functools import update_wrapper |
||||
from operator import attrgetter |
||||
from threading import Lock, Thread |
||||
|
||||
import click |
||||
from werkzeug.utils import import_string |
||||
|
||||
from . import __version__ |
||||
from ._compat import getargspec, iteritems, reraise, text_type |
||||
from .globals import current_app |
||||
from .helpers import get_debug_flag, get_env, get_load_dotenv |
||||
|
||||
try: |
||||
import dotenv |
||||
except ImportError: |
||||
dotenv = None |
||||
|
||||
|
||||
class NoAppException(click.UsageError): |
||||
"""Raised if an application cannot be found or loaded.""" |
||||
|
||||
|
||||
def find_best_app(script_info, module): |
||||
"""Given a module instance this tries to find the best possible |
||||
application in the module or raises an exception. |
||||
""" |
||||
from . import Flask |
||||
|
||||
# Search for the most common names first. |
||||
for attr_name in ('app', 'application'): |
||||
app = getattr(module, attr_name, None) |
||||
|
||||
if isinstance(app, Flask): |
||||
return app |
||||
|
||||
# Otherwise find the only object that is a Flask instance. |
||||
matches = [ |
||||
v for k, v in iteritems(module.__dict__) if isinstance(v, Flask) |
||||
] |
||||
|
||||
if len(matches) == 1: |
||||
return matches[0] |
||||
elif len(matches) > 1: |
||||
raise NoAppException( |
||||
'Detected multiple Flask applications in module "{module}". Use ' |
||||
'"FLASK_APP={module}:name" to specify the correct ' |
||||
'one.'.format(module=module.__name__) |
||||
) |
||||
|
||||
# Search for app factory functions. |
||||
for attr_name in ('create_app', 'make_app'): |
||||
app_factory = getattr(module, attr_name, None) |
||||
|
||||
if inspect.isfunction(app_factory): |
||||
try: |
||||
app = call_factory(script_info, app_factory) |
||||
|
||||
if isinstance(app, Flask): |
||||
return app |
||||
except TypeError: |
||||
if not _called_with_wrong_args(app_factory): |
||||
raise |
||||
raise NoAppException( |
||||
'Detected factory "{factory}" in module "{module}", but ' |
||||
'could not call it without arguments. Use ' |
||||
'"FLASK_APP=\'{module}:{factory}(args)\'" to specify ' |
||||
'arguments.'.format( |
||||
factory=attr_name, module=module.__name__ |
||||
) |
||||
) |
||||
|
||||
raise NoAppException( |
||||
'Failed to find Flask application or factory in module "{module}". ' |
||||
'Use "FLASK_APP={module}:name to specify one.'.format( |
||||
module=module.__name__ |
||||
) |
||||
) |
||||
|
||||
|
||||
def call_factory(script_info, app_factory, arguments=()): |
||||
"""Takes an app factory, a ``script_info` object and optionally a tuple |
||||
of arguments. Checks for the existence of a script_info argument and calls |
||||
the app_factory depending on that and the arguments provided. |
||||
""" |
||||
args_spec = getargspec(app_factory) |
||||
arg_names = args_spec.args |
||||
arg_defaults = args_spec.defaults |
||||
|
||||
if 'script_info' in arg_names: |
||||
return app_factory(*arguments, script_info=script_info) |
||||
elif arguments: |
||||
return app_factory(*arguments) |
||||
elif not arguments and len(arg_names) == 1 and arg_defaults is None: |
||||
return app_factory(script_info) |
||||
|
||||
return app_factory() |
||||
|
||||
|
||||
def _called_with_wrong_args(factory): |
||||
"""Check whether calling a function raised a ``TypeError`` because |
||||
the call failed or because something in the factory raised the |
||||
error. |
||||
|
||||
:param factory: the factory function that was called |
||||
:return: true if the call failed |
||||
""" |
||||
tb = sys.exc_info()[2] |
||||
|
||||
try: |
||||
while tb is not None: |
||||
if tb.tb_frame.f_code is factory.__code__: |
||||
# in the factory, it was called successfully |
||||
return False |
||||
|
||||
tb = tb.tb_next |
||||
|
||||
# didn't reach the factory |
||||
return True |
||||
finally: |
||||
del tb |
||||
|
||||
|
||||
def find_app_by_string(script_info, module, app_name): |
||||
"""Checks if the given string is a variable name or a function. If it is a |
||||
function, it checks for specified arguments and whether it takes a |
||||
``script_info`` argument and calls the function with the appropriate |
||||
arguments. |
||||
""" |
||||
from flask import Flask |
||||
match = re.match(r'^ *([^ ()]+) *(?:\((.*?) *,? *\))? *$', app_name) |
||||
|
||||
if not match: |
||||
raise NoAppException( |
||||
'"{name}" is not a valid variable name or function ' |
||||
'expression.'.format(name=app_name) |
||||
) |
||||
|
||||
name, args = match.groups() |
||||
|
||||
try: |
||||
attr = getattr(module, name) |
||||
except AttributeError as e: |
||||
raise NoAppException(e.args[0]) |
||||
|
||||
if inspect.isfunction(attr): |
||||
if args: |
||||
try: |
||||
args = ast.literal_eval('({args},)'.format(args=args)) |
||||
except (ValueError, SyntaxError) as e: 
||||
raise NoAppException( |
||||
'Could not parse the arguments in ' |
||||
'"{app_name}".'.format(e=e, app_name=app_name) |
||||
) |
||||
else: |
||||
args = () |
||||
|
||||
try: |
||||
app = call_factory(script_info, attr, args) |
||||
except TypeError as e: |
||||
if not _called_with_wrong_args(attr): |
||||
raise |
||||
|
||||
raise NoAppException( |
||||
'{e}\nThe factory "{app_name}" in module "{module}" could not ' |
||||
'be called with the specified arguments.'.format( |
||||
e=e, app_name=app_name, module=module.__name__ |
||||
) |
||||
) |
||||
else: |
||||
app = attr |
||||
|
||||
if isinstance(app, Flask): |
||||
return app |
||||
|
||||
raise NoAppException( |
||||
'A valid Flask application was not obtained from ' |
||||
'"{module}:{app_name}".'.format( |
||||
module=module.__name__, app_name=app_name |
||||
) |
||||
) |
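# Illustrative sketch (not part of flask.cli): the FLASK_APP forms that the
# loaders above handle; "hello" and "create_app" are hypothetical names.
#
#   FLASK_APP=hello                        -> find_best_app() on module "hello"
#   FLASK_APP=hello:app                    -> find_app_by_string(..., 'app')
#   FLASK_APP="hello:create_app('dev')"    -> arguments parsed with
#                                             ast.literal_eval and the factory
#                                             called via call_factory(..., ('dev',))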
||||
|
||||
|
||||
def prepare_import(path): |
||||
"""Given a filename this will try to calculate the python path, add it |
||||
to the search path and return the actual module name that is expected. |
||||
""" |
||||
path = os.path.realpath(path) |
||||
|
||||
if os.path.splitext(path)[1] == '.py': |
||||
path = os.path.splitext(path)[0] |
||||
|
||||
if os.path.basename(path) == '__init__': |
||||
path = os.path.dirname(path) |
||||
|
||||
module_name = [] |
||||
|
||||
# move up until outside package structure (no __init__.py) |
||||
while True: |
||||
path, name = os.path.split(path) |
||||
module_name.append(name) |
||||
|
||||
if not os.path.exists(os.path.join(path, '__init__.py')): |
||||
break |
||||
|
||||
if sys.path[0] != path: |
||||
sys.path.insert(0, path) |
||||
|
||||
return '.'.join(module_name[::-1]) |
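# Illustrative sketch (hypothetical layout, not part of flask.cli): given
#
#   /project/src/mypkg/__init__.py
#   /project/src/mypkg/app.py
#
# prepare_import('/project/src/mypkg/app.py') strips the '.py' suffix, walks
# up past every directory that contains an __init__.py, inserts '/project/src'
# at the front of sys.path and returns the dotted name 'mypkg.app'.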
||||
|
||||
|
||||
def locate_app(script_info, module_name, app_name, raise_if_not_found=True): |
||||
__traceback_hide__ = True |
||||
|
||||
try: |
||||
__import__(module_name) |
||||
except ImportError: |
||||
# Reraise the ImportError if it occurred within the imported module. |
||||
# Determine this by checking whether the trace has a depth > 1. |
||||
if sys.exc_info()[-1].tb_next: |
||||
raise NoAppException( |
||||
'While importing "{name}", an ImportError was raised:' |
||||
'\n\n{tb}'.format(name=module_name, tb=traceback.format_exc()) |
||||
) |
||||
elif raise_if_not_found: |
||||
raise NoAppException( |
||||
'Could not import "{name}".'.format(name=module_name) |
||||
) |
||||
else: |
||||
return |
||||
|
||||
module = sys.modules[module_name] |
||||
|
||||
if app_name is None: |
||||
return find_best_app(script_info, module) |
||||
else: |
||||
return find_app_by_string(script_info, module, app_name) |
||||
|
||||
|
||||
def get_version(ctx, param, value): |
||||
if not value or ctx.resilient_parsing: |
||||
return |
||||
message = 'Flask %(version)s\nPython %(python_version)s' |
||||
click.echo(message % { |
||||
'version': __version__, |
||||
'python_version': sys.version, |
||||
}, color=ctx.color) |
||||
ctx.exit() |
||||
|
||||
|
||||
version_option = click.Option( |
||||
['--version'], |
||||
help='Show the flask version', |
||||
expose_value=False, |
||||
callback=get_version, |
||||
is_flag=True, |
||||
is_eager=True |
||||
) |
||||
|
||||
|
||||
class DispatchingApp(object): |
||||
"""Special application that dispatches to a Flask application which |
||||
is imported by name in a background thread. If an error happens |
||||
it is recorded and shown as part of the WSGI handling, which in the case 
||||
of the Werkzeug debugger means that it shows up in the browser. 
||||
""" |
||||
|
||||
def __init__(self, loader, use_eager_loading=False): |
||||
self.loader = loader |
||||
self._app = None |
||||
self._lock = Lock() |
||||
self._bg_loading_exc_info = None |
||||
if use_eager_loading: |
||||
self._load_unlocked() |
||||
else: |
||||
self._load_in_background() |
||||
|
||||
def _load_in_background(self): |
||||
def _load_app(): |
||||
__traceback_hide__ = True |
||||
with self._lock: |
||||
try: |
||||
self._load_unlocked() |
||||
except Exception: |
||||
self._bg_loading_exc_info = sys.exc_info() |
||||
t = Thread(target=_load_app, args=()) |
||||
t.start() |
||||
|
||||
def _flush_bg_loading_exception(self): |
||||
__traceback_hide__ = True |
||||
exc_info = self._bg_loading_exc_info |
||||
if exc_info is not None: |
||||
self._bg_loading_exc_info = None |
||||
reraise(*exc_info) |
||||
|
||||
def _load_unlocked(self): |
||||
__traceback_hide__ = True |
||||
self._app = rv = self.loader() |
||||
self._bg_loading_exc_info = None |
||||
return rv |
||||
|
||||
def __call__(self, environ, start_response): |
||||
__traceback_hide__ = True |
||||
if self._app is not None: |
||||
return self._app(environ, start_response) |
||||
self._flush_bg_loading_exception() |
||||
with self._lock: |
||||
if self._app is not None: |
||||
rv = self._app |
||||
else: |
||||
rv = self._load_unlocked() |
||||
return rv(environ, start_response) |
||||
|
||||
|
||||
class ScriptInfo(object): |
||||
"""Help object to deal with Flask applications. This is usually not |
||||
necessary to interface with as it's used internally in the dispatching |
||||
to click. In future versions of Flask this object will most likely play |
||||
a bigger role. Typically it's created automatically by the |
||||
:class:`FlaskGroup` but you can also manually create it and pass it |
||||
onwards as click object. |
||||
""" |
||||
|
||||
def __init__(self, app_import_path=None, create_app=None): |
||||
#: Optionally the import path for the Flask application. |
||||
self.app_import_path = app_import_path or os.environ.get('FLASK_APP') |
||||
#: Optionally a function that is passed the script info to create |
||||
#: the instance of the application. |
||||
self.create_app = create_app |
||||
#: A dictionary with arbitrary data that can be associated with |
||||
#: this script info. |
||||
self.data = {} |
||||
self._loaded_app = None |
||||
|
||||
def load_app(self): |
||||
"""Loads the Flask app (if not yet loaded) and returns it. Calling |
||||
this multiple times will simply return the already loaded 
||||
app. 
||||
""" |
||||
__traceback_hide__ = True |
||||
|
||||
if self._loaded_app is not None: |
||||
return self._loaded_app |
||||
|
||||
app = None |
||||
|
||||
if self.create_app is not None: |
||||
app = call_factory(self, self.create_app) |
||||
else: |
||||
if self.app_import_path: |
||||
path, name = (self.app_import_path.split(':', 1) + [None])[:2] |
||||
import_name = prepare_import(path) |
||||
app = locate_app(self, import_name, name) |
||||
else: |
||||
for path in ('wsgi.py', 'app.py'): |
||||
import_name = prepare_import(path) |
||||
app = locate_app(self, import_name, None, |
||||
raise_if_not_found=False) |
||||
|
||||
if app: |
||||
break |
||||
|
||||
if not app: |
||||
raise NoAppException( |
||||
'Could not locate a Flask application. You did not provide ' |
||||
'the "FLASK_APP" environment variable, and a "wsgi.py" or ' |
||||
'"app.py" module was not found in the current directory.' |
||||
) |
||||
|
||||
debug = get_debug_flag() |
||||
|
||||
# Update the app's debug flag through the descriptor so that other |
||||
# values repopulate as well. |
||||
if debug is not None: |
||||
app.debug = debug |
||||
|
||||
self._loaded_app = app |
||||
return app |
||||
|
||||
|
||||
pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True) |
||||
|
||||
|
||||
def with_appcontext(f): |
||||
"""Wraps a callback so that it's guaranteed to be executed with the |
||||
script's application context. If callbacks are registered directly |
||||
to the ``app.cli`` object then they are wrapped with this function |
||||
by default unless it's disabled. |
||||
""" |
||||
@click.pass_context |
||||
def decorator(__ctx, *args, **kwargs): |
||||
with __ctx.ensure_object(ScriptInfo).load_app().app_context(): |
||||
return __ctx.invoke(f, *args, **kwargs) |
||||
return update_wrapper(decorator, f) |
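# Illustrative sketch (not part of flask.cli): a custom command that needs an
# application context; "report-name" is a hypothetical command name.
#
#   import click
#   from flask import current_app
#   from flask.cli import with_appcontext
#
#   @click.command('report-name')
#   @with_appcontext
#   def report_name():
#       # current_app works because load_app().app_context() was pushed
#       click.echo(current_app.name)
#
#   # app.cli.add_command(report_name)   # register it on the app's AppGroup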
||||
|
||||
|
||||
class AppGroup(click.Group): |
||||
"""This works similar to a regular click :class:`~click.Group` but it |
||||
changes the behavior of the :meth:`command` decorator so that it |
||||
automatically wraps the functions in :func:`with_appcontext`. |
||||
|
||||
Not to be confused with :class:`FlaskGroup`. |
||||
""" |
||||
|
||||
def command(self, *args, **kwargs): |
||||
"""This works exactly like the method of the same name on a regular |
||||
:class:`click.Group` but it wraps callbacks in :func:`with_appcontext` |
||||
unless it's disabled by passing ``with_appcontext=False``. |
||||
""" |
||||
wrap_for_ctx = kwargs.pop('with_appcontext', True) |
||||
def decorator(f): |
||||
if wrap_for_ctx: |
||||
f = with_appcontext(f) |
||||
return click.Group.command(self, *args, **kwargs)(f) |
||||
return decorator |
||||
|
||||
def group(self, *args, **kwargs): |
||||
"""This works exactly like the method of the same name on a regular |
||||
:class:`click.Group` but it defaults the group class to |
||||
:class:`AppGroup`. |
||||
""" |
||||
kwargs.setdefault('cls', AppGroup) |
||||
return click.Group.group(self, *args, **kwargs) |
||||
|
||||
|
||||
class FlaskGroup(AppGroup): |
||||
"""Special subclass of the :class:`AppGroup` group that supports |
||||
loading more commands from the configured Flask app. Normally a |
||||
developer does not have to interface with this class but there are |
||||
some very advanced use cases for which it makes sense to create an |
||||
instance of this. |
||||
|
||||
For information on why this is useful see :ref:`custom-scripts`. 
||||
|
||||
:param add_default_commands: if this is True then the default run and |
||||
shell commands will be added. 
||||
:param add_version_option: adds the ``--version`` option. |
||||
:param create_app: an optional callback that is passed the script info and |
||||
returns the loaded app. |
||||
:param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` |
||||
files to set environment variables. Will also change the working |
||||
directory to the directory containing the first file found. |
||||
|
||||
.. versionchanged:: 1.0 |
||||
If installed, python-dotenv will be used to load environment variables |
||||
from :file:`.env` and :file:`.flaskenv` files. |
||||
""" |
||||
|
||||
def __init__(self, add_default_commands=True, create_app=None, |
||||
add_version_option=True, load_dotenv=True, **extra): |
||||
params = list(extra.pop('params', None) or ()) |
||||
|
||||
if add_version_option: |
||||
params.append(version_option) |
||||
|
||||
AppGroup.__init__(self, params=params, **extra) |
||||
self.create_app = create_app |
||||
self.load_dotenv = load_dotenv |
||||
|
||||
if add_default_commands: |
||||
self.add_command(run_command) |
||||
self.add_command(shell_command) |
||||
self.add_command(routes_command) |
||||
|
||||
self._loaded_plugin_commands = False |
||||
|
||||
def _load_plugin_commands(self): |
||||
if self._loaded_plugin_commands: |
||||
return |
||||
try: |
||||
import pkg_resources |
||||
except ImportError: |
||||
self._loaded_plugin_commands = True |
||||
return |
||||
|
||||
for ep in pkg_resources.iter_entry_points('flask.commands'): |
||||
self.add_command(ep.load(), ep.name) |
||||
self._loaded_plugin_commands = True |
||||
|
||||
def get_command(self, ctx, name): |
||||
self._load_plugin_commands() |
||||
|
||||
# We load built-in commands first as these should always be the |
||||
# same no matter what the app does. If the app does want to |
||||
# override this it needs to make a custom instance of this group |
||||
# and not attach the default commands. |
||||
# |
||||
# This also means that the script stays functional in case the |
||||
# application completely fails. |
||||
rv = AppGroup.get_command(self, ctx, name) |
||||
if rv is not None: |
||||
return rv |
||||
|
||||
info = ctx.ensure_object(ScriptInfo) |
||||
try: |
||||
rv = info.load_app().cli.get_command(ctx, name) |
||||
if rv is not None: |
||||
return rv |
||||
except NoAppException: |
||||
pass |
||||
|
||||
def list_commands(self, ctx): |
||||
self._load_plugin_commands() |
||||
|
||||
# The available commands are the built-in commands plus those of the 
||||
# application (if it is available). 
||||
rv = set(click.Group.list_commands(self, ctx)) |
||||
info = ctx.ensure_object(ScriptInfo) |
||||
try: |
||||
rv.update(info.load_app().cli.list_commands(ctx)) |
||||
except Exception: |
||||
# Here we intentionally swallow all exceptions as we don't |
||||
# want the help page to break if the app does not exist. |
||||
# If someone attempts to use the command we try to create |
||||
# the app again and this will give us the error. |
||||
# However, we will not do so silently because that would confuse |
||||
# users. |
||||
traceback.print_exc() |
||||
return sorted(rv) |
||||
|
||||
def main(self, *args, **kwargs): |
||||
# Set a global flag that indicates that we were invoked from the |
||||
# command line interface. This is detected by Flask.run to make the |
||||
# call into a no-op. This is necessary to avoid ugly errors when the |
||||
# script that is loaded here also attempts to start a server. |
||||
os.environ['FLASK_RUN_FROM_CLI'] = 'true' |
||||
|
||||
if get_load_dotenv(self.load_dotenv): |
||||
load_dotenv() |
||||
|
||||
obj = kwargs.get('obj') |
||||
|
||||
if obj is None: |
||||
obj = ScriptInfo(create_app=self.create_app) |
||||
|
||||
kwargs['obj'] = obj |
||||
kwargs.setdefault('auto_envvar_prefix', 'FLASK') |
||||
return super(FlaskGroup, self).main(*args, **kwargs) |
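# Illustrative sketch (not part of flask.cli): a standalone management script
# built on FlaskGroup; "myapp" and "make_app" are hypothetical.
#
#   import click
#   from flask.cli import FlaskGroup
#
#   def create_app(script_info=None):
#       from myapp import make_app
#       return make_app()
#
#   @click.group(cls=FlaskGroup, create_app=create_app)
#   def cli():
#       """Management script for myapp."""
#
#   if __name__ == '__main__':
#       cli()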
||||
|
||||
|
||||
def _path_is_ancestor(path, other): |
||||
"""Take ``other`` and remove the length of ``path`` from it. Then join it |
||||
to ``path``. If it is the original value, ``path`` is an ancestor of |
||||
``other``.""" |
||||
return os.path.join(path, other[len(path):].lstrip(os.sep)) == other |
||||
|
||||
|
||||
def load_dotenv(path=None): |
||||
"""Load "dotenv" files in order of precedence to set environment variables. |
||||
|
||||
If an env var is already set it is not overwritten, so earlier files in the |
||||
list are preferred over later files. |
||||
|
||||
Changes the current working directory to the location of the first file |
||||
found, with the assumption that it is in the top level project directory |
||||
and will be where the Python path should import local packages from. |
||||
|
||||
This is a no-op if `python-dotenv`_ is not installed. |
||||
|
||||
.. _python-dotenv: https://github.com/theskumar/python-dotenv#readme |
||||
|
||||
:param path: Load the file at this location instead of searching. |
||||
:return: ``True`` if a file was loaded. |
||||
|
||||
.. versionadded:: 1.0 |
||||
""" |
||||
if dotenv is None: |
||||
if path or os.path.exists('.env') or os.path.exists('.flaskenv'): |
||||
click.secho( |
||||
' * Tip: There are .env files present.' |
||||
' Do "pip install python-dotenv" to use them.', |
||||
fg='yellow') |
||||
return |
||||
|
||||
if path is not None: |
||||
return dotenv.load_dotenv(path) |
||||
|
||||
new_dir = None |
||||
|
||||
for name in ('.env', '.flaskenv'): |
||||
path = dotenv.find_dotenv(name, usecwd=True) |
||||
|
||||
if not path: |
||||
continue |
||||
|
||||
if new_dir is None: |
||||
new_dir = os.path.dirname(path) |
||||
|
||||
dotenv.load_dotenv(path) |
||||
|
||||
if new_dir and os.getcwd() != new_dir: |
||||
os.chdir(new_dir) |
||||
|
||||
return new_dir is not None # at least one file was located and loaded |
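# Illustrative sketch (assumes python-dotenv is installed): a ".flaskenv" file
# in the project root might contain, for example,
#
#   FLASK_APP=wsgi.py
#   FLASK_ENV=development
#
# load_dotenv() exports those values before the app is located, does not
# override variables already set in the environment, and changes the working
# directory to the directory of the first file it finds.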
||||
|
||||
|
||||
def show_server_banner(env, debug, app_import_path, eager_loading): |
||||
"""Show extra startup messages the first time the server is run, |
||||
ignoring the reloader. |
||||
""" |
||||
if os.environ.get('WERKZEUG_RUN_MAIN') == 'true': |
||||
return |
||||
|
||||
if app_import_path is not None: |
||||
message = ' * Serving Flask app "{0}"'.format(app_import_path) |
||||
|
||||
if not eager_loading: |
||||
message += ' (lazy loading)' |
||||
|
||||
click.echo(message) |
||||
|
||||
click.echo(' * Environment: {0}'.format(env)) |
||||
|
||||
if env == 'production': |
||||
click.secho( |
||||
' WARNING: Do not use the development server in a production' |
||||
' environment.', fg='red') |
||||
click.secho(' Use a production WSGI server instead.', dim=True) |
||||
|
||||
if debug is not None: |
||||
click.echo(' * Debug mode: {0}'.format('on' if debug else 'off')) |
||||
|
||||
|
||||
class CertParamType(click.ParamType): |
||||
"""Click option type for the ``--cert`` option. Allows either an |
||||
existing file, the string ``'adhoc'``, or an import for a |
||||
:class:`~ssl.SSLContext` object. |
||||
""" |
||||
|
||||
name = 'path' |
||||
|
||||
def __init__(self): |
||||
self.path_type = click.Path( |
||||
exists=True, dir_okay=False, resolve_path=True) |
||||
|
||||
def convert(self, value, param, ctx): |
||||
try: |
||||
return self.path_type(value, param, ctx) |
||||
except click.BadParameter: |
||||
value = click.STRING(value, param, ctx).lower() |
||||
|
||||
if value == 'adhoc': |
||||
try: |
||||
import OpenSSL |
||||
except ImportError: |
||||
raise click.BadParameter( |
||||
'Using ad-hoc certificates requires pyOpenSSL.', |
||||
ctx, param) |
||||
|
||||
return value |
||||
|
||||
obj = import_string(value, silent=True) |
||||
|
||||
if sys.version_info < (2, 7): |
||||
if obj: |
||||
return obj |
||||
else: |
||||
if isinstance(obj, ssl.SSLContext): |
||||
return obj |
||||
|
||||
raise |
||||
|
||||
|
||||
def _validate_key(ctx, param, value): |
||||
"""The ``--key`` option must be specified when ``--cert`` is a file. |
||||
Modifies the ``cert`` param to be a ``(cert, key)`` pair if needed. |
||||
""" |
||||
cert = ctx.params.get('cert') |
||||
is_adhoc = cert == 'adhoc' |
||||
|
||||
if sys.version_info < (2, 7): |
||||
is_context = cert and not isinstance(cert, (text_type, bytes)) |
||||
else: |
||||
is_context = isinstance(cert, ssl.SSLContext) |
||||
|
||||
if value is not None: |
||||
if is_adhoc: |
||||
raise click.BadParameter( |
||||
'When "--cert" is "adhoc", "--key" is not used.', |
||||
ctx, param) |
||||
|
||||
if is_context: |
||||
raise click.BadParameter( |
||||
'When "--cert" is an SSLContext object, "--key is not used.', |
||||
ctx, param) |
||||
|
||||
if not cert: |
||||
raise click.BadParameter( |
||||
'"--cert" must also be specified.', |
||||
ctx, param) |
||||
|
||||
ctx.params['cert'] = cert, value |
||||
|
||||
else: |
||||
if cert and not (is_adhoc or is_context): |
||||
raise click.BadParameter( |
||||
'Required when using "--cert".', |
||||
ctx, param) |
||||
|
||||
return value |
||||
|
||||
|
||||
@click.command('run', short_help='Runs a development server.') |
||||
@click.option('--host', '-h', default='127.0.0.1', |
||||
help='The interface to bind to.') |
||||
@click.option('--port', '-p', default=5000, |
||||
help='The port to bind to.') |
||||
@click.option('--cert', type=CertParamType(), |
||||
help='Specify a certificate file to use HTTPS.') |
||||
@click.option('--key', |
||||
type=click.Path(exists=True, dir_okay=False, resolve_path=True), |
||||
callback=_validate_key, expose_value=False, |
||||
help='The key file to use when specifying a certificate.') |
||||
@click.option('--reload/--no-reload', default=None, |
||||
help='Enable or disable the reloader. By default the reloader ' |
||||
'is active if debug is enabled.') |
||||
@click.option('--debugger/--no-debugger', default=None, |
||||
help='Enable or disable the debugger. By default the debugger ' |
||||
'is active if debug is enabled.') |
||||
@click.option('--eager-loading/--lazy-loader', default=None, |
||||
help='Enable or disable eager loading. By default eager ' |
||||
'loading is enabled if the reloader is disabled.') |
||||
@click.option('--with-threads/--without-threads', default=True, |
||||
help='Enable or disable multithreading.') |
||||
@pass_script_info |
||||
def run_command(info, host, port, reload, debugger, eager_loading, |
||||
with_threads, cert): |
||||
"""Run a local development server. |
||||
|
||||
This server is for development purposes only. It does not provide |
||||
the stability, security, or performance of production WSGI servers. |
||||
|
||||
The reloader and debugger are enabled by default if |
||||
FLASK_ENV=development or FLASK_DEBUG=1. |
||||
""" |
||||
debug = get_debug_flag() |
||||
|
||||
if reload is None: |
||||
reload = debug |
||||
|
||||
if debugger is None: |
||||
debugger = debug |
||||
|
||||
if eager_loading is None: |
||||
eager_loading = not reload |
||||
|
||||
show_server_banner(get_env(), debug, info.app_import_path, eager_loading) |
||||
app = DispatchingApp(info.load_app, use_eager_loading=eager_loading) |
||||
|
||||
from werkzeug.serving import run_simple |
||||
run_simple(host, port, app, use_reloader=reload, use_debugger=debugger, |
||||
threaded=with_threads, ssl_context=cert) |
||||
|
||||
|
||||
@click.command('shell', short_help='Runs a shell in the app context.') |
||||
@with_appcontext |
||||
def shell_command(): |
||||
"""Runs an interactive Python shell in the context of a given |
||||
Flask application. The application will populate the default |
||||
namespace of this shell according to its configuration. 
||||
|
||||
This is useful for executing small snippets of management code |
||||
without having to manually configure the application. |
||||
""" |
||||
import code |
||||
from flask.globals import _app_ctx_stack |
||||
app = _app_ctx_stack.top.app |
||||
banner = 'Python %s on %s\nApp: %s [%s]\nInstance: %s' % ( |
||||
sys.version, |
||||
sys.platform, |
||||
app.import_name, |
||||
app.env, |
||||
app.instance_path, |
||||
) |
||||
ctx = {} |
||||
|
||||
# Support the regular Python interpreter startup script if someone |
||||
# is using it. |
||||
startup = os.environ.get('PYTHONSTARTUP') |
||||
if startup and os.path.isfile(startup): |
||||
with open(startup, 'r') as f: |
||||
eval(compile(f.read(), startup, 'exec'), ctx) |
||||
|
||||
ctx.update(app.make_shell_context()) |
||||
|
||||
code.interact(banner=banner, local=ctx) |
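# Illustrative sketch (not part of flask.cli): extending the namespace that
# "flask shell" exposes. shell_context_processor lives on the Flask application
# object (flask/app.py), not in this module; "db" and "User" are hypothetical.
#
#   @app.shell_context_processor
#   def make_shell_context():
#       return {'db': db, 'User': User}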
||||
|
||||
|
||||
@click.command('routes', short_help='Show the routes for the app.') |
||||
@click.option( |
||||
'--sort', '-s', |
||||
type=click.Choice(('endpoint', 'methods', 'rule', 'match')), |
||||
default='endpoint', |
||||
help=( |
||||
'Method to sort routes by. "match" is the order that Flask will match ' |
||||
'routes when dispatching a request.' |
||||
) |
||||
) |
||||
@click.option( |
||||
'--all-methods', |
||||
is_flag=True, |
||||
help="Show HEAD and OPTIONS methods." |
||||
) |
||||
@with_appcontext |
||||
def routes_command(sort, all_methods): |
||||
"""Show all registered routes with endpoints and methods.""" |
||||
|
||||
rules = list(current_app.url_map.iter_rules()) |
||||
if not rules: |
||||
click.echo('No routes were registered.') |
||||
return |
||||
|
||||
ignored_methods = set(() if all_methods else ('HEAD', 'OPTIONS')) |
||||
|
||||
if sort in ('endpoint', 'rule'): |
||||
rules = sorted(rules, key=attrgetter(sort)) |
||||
elif sort == 'methods': |
||||
rules = sorted(rules, key=lambda rule: sorted(rule.methods)) |
||||
|
||||
rule_methods = [ |
||||
', '.join(sorted(rule.methods - ignored_methods)) for rule in rules |
||||
] |
||||
|
||||
headers = ('Endpoint', 'Methods', 'Rule') |
||||
widths = ( |
||||
max(len(rule.endpoint) for rule in rules), |
||||
max(len(methods) for methods in rule_methods), |
||||
max(len(rule.rule) for rule in rules), |
||||
) |
||||
widths = [max(len(h), w) for h, w in zip(headers, widths)] |
||||
row = '{{0:<{0}}} {{1:<{1}}} {{2:<{2}}}'.format(*widths) |
||||
|
||||
click.echo(row.format(*headers).strip()) |
||||
click.echo(row.format(*('-' * width for width in widths))) |
||||
|
||||
for rule, methods in zip(rules, rule_methods): |
||||
click.echo(row.format(rule.endpoint, methods, rule.rule).rstrip()) |
||||
|
||||
|
||||
cli = FlaskGroup(help="""\ |
||||
A general utility script for Flask applications. |
||||
|
||||
Provides commands from Flask, extensions, and the application. Loads the |
||||
application defined in the FLASK_APP environment variable, or from a wsgi.py |
||||
file. Setting the FLASK_ENV environment variable to 'development' will enable |
||||
debug mode. |
||||
|
||||
\b |
||||
{prefix}{cmd} FLASK_APP=hello.py |
||||
{prefix}{cmd} FLASK_ENV=development |
||||
{prefix}flask run |
||||
""".format( |
||||
cmd='export' if os.name == 'posix' else 'set', |
||||
prefix='$ ' if os.name == 'posix' else '> ' |
||||
)) |
||||
|
||||
|
||||
def main(as_module=False): |
||||
args = sys.argv[1:] |
||||
|
||||
if as_module: |
||||
this_module = 'flask' |
||||
|
||||
if sys.version_info < (2, 7): |
||||
this_module += '.cli' |
||||
|
||||
name = 'python -m ' + this_module |
||||
|
||||
# Python rewrites "python -m flask" to the path to the file in argv. |
||||
# Restore the original command so that the reloader works. |
||||
sys.argv = ['-m', this_module] + args |
||||
else: |
||||
name = None |
||||
|
||||
cli.main(args=args, prog_name=name) |
||||
|
||||
|
||||
if __name__ == '__main__': |
||||
main(as_module=True) |
@ -0,0 +1,265 @@ |
||||
# -*- coding: utf-8 -*- |
||||
""" |
||||
flask.config |
||||
~~~~~~~~~~~~ |
||||
|
||||
Implements the configuration related objects. |
||||
|
||||
:copyright: © 2010 by the Pallets team. |
||||
:license: BSD, see LICENSE for more details. |
||||
""" |
||||
|
||||
import os |
||||
import types |
||||
import errno |
||||
|
||||
from werkzeug.utils import import_string |
||||
from ._compat import string_types, iteritems |
||||
from . import json |
||||
|
||||
|
||||
class ConfigAttribute(object): |
||||
"""Makes an attribute forward to the config""" |
||||
|
||||
def __init__(self, name, get_converter=None): |
||||
self.__name__ = name |
||||
self.get_converter = get_converter |
||||
|
||||
def __get__(self, obj, type=None): |
||||
if obj is None: |
||||
return self |
||||
rv = obj.config[self.__name__] |
||||
if self.get_converter is not None: |
||||
rv = self.get_converter(rv) |
||||
return rv |
||||
|
||||
def __set__(self, obj, value): |
||||
obj.config[self.__name__] = value |
||||
|
||||
|
||||
class Config(dict): |
||||
"""Works exactly like a dict but provides ways to fill it from files |
||||
or special dictionaries. There are two common patterns to populate the |
||||
config. |
||||
|
||||
Either you can fill the config from a config file:: |
||||
|
||||
app.config.from_pyfile('yourconfig.cfg') |
||||
|
||||
Or alternatively you can define the configuration options in the |
||||
module that calls :meth:`from_object` or provide an import path to |
||||
a module that should be loaded. It is also possible to tell it to |
||||
use the same module and with that provide the configuration values |
||||
just before the call:: |
||||
|
||||
DEBUG = True |
||||
SECRET_KEY = 'development key' |
||||
app.config.from_object(__name__) |
||||
|
||||
In both cases (loading from any Python file or loading from modules), |
||||
only uppercase keys are added to the config. This makes it possible to use |
||||
lowercase values in the config file for temporary values that are not added |
||||
to the config or to define the config keys in the same file that implements |
||||
the application. |
||||
|
||||
Probably the most interesting way to load configurations is from an |
||||
environment variable pointing to a file:: |
||||
|
||||
app.config.from_envvar('YOURAPPLICATION_SETTINGS') |
||||
|
||||
In this case before launching the application you have to set this |
||||
environment variable to the file you want to use. On Linux and OS X |
||||
use the export statement:: |
||||
|
||||
export YOURAPPLICATION_SETTINGS='/path/to/config/file' |
||||
|
||||
On Windows use `set` instead. 
||||
|
||||
:param root_path: path to which files are read relative from. When the |
||||
config object is created by the application, this is |
||||
the application's :attr:`~flask.Flask.root_path`. |
||||
:param defaults: an optional dictionary of default values |
||||
""" |
||||
|
||||
def __init__(self, root_path, defaults=None): |
||||
dict.__init__(self, defaults or {}) |
||||
self.root_path = root_path |
||||
|
||||
def from_envvar(self, variable_name, silent=False): |
||||
"""Loads a configuration from an environment variable pointing to |
||||
a configuration file. This is basically just a shortcut with nicer |
||||
error messages for this line of code:: |
||||
|
||||
app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS']) |
||||
|
||||
:param variable_name: name of the environment variable |
||||
:param silent: set to ``True`` if you want silent failure for missing |
||||
files. |
||||
:return: bool. ``True`` if able to load config, ``False`` otherwise. |
||||
""" |
||||
rv = os.environ.get(variable_name) |
||||
if not rv: |
||||
if silent: |
||||
return False |
||||
raise RuntimeError('The environment variable %r is not set ' |
||||
'and as such configuration could not be ' |
||||
'loaded. Set this variable and make it ' |
||||
'point to a configuration file' % |
||||
variable_name) |
||||
return self.from_pyfile(rv, silent=silent) |
||||
|
||||
def from_pyfile(self, filename, silent=False): |
||||
"""Updates the values in the config from a Python file. This function |
||||
behaves as if the file were imported as a module with the 
||||
:meth:`from_object` function. |
||||
|
||||
:param filename: the filename of the config. This can either be an |
||||
absolute filename or a filename relative to the |
||||
root path. |
||||
:param silent: set to ``True`` if you want silent failure for missing |
||||
files. |
||||
|
||||
.. versionadded:: 0.7 |
||||
`silent` parameter. |
||||
""" |
||||
filename = os.path.join(self.root_path, filename) |
||||
d = types.ModuleType('config') |
||||
d.__file__ = filename |
||||
try: |
||||
with open(filename, mode='rb') as config_file: |
||||
exec(compile(config_file.read(), filename, 'exec'), d.__dict__) |
||||
except IOError as e: |
||||
if silent and e.errno in ( |
||||
errno.ENOENT, errno.EISDIR, errno.ENOTDIR |
||||
): |
||||
return False |
||||
e.strerror = 'Unable to load configuration file (%s)' % e.strerror |
||||
raise |
||||
self.from_object(d) |
||||
return True |
||||
|
||||
def from_object(self, obj): |
||||
"""Updates the values from the given object. An object can be of one |
||||
of the following two types: |
||||
|
||||
- a string: in this case the object with that name will be imported |
||||
- an actual object reference: that object is used directly |
||||
|
||||
Objects are usually either modules or classes. :meth:`from_object` |
||||
loads only the uppercase attributes of the module/class. A ``dict`` |
||||
object will not work with :meth:`from_object` because the keys of a |
||||
``dict`` are not attributes of the ``dict`` class. |
||||
|
||||
Example of module-based configuration:: |
||||
|
||||
app.config.from_object('yourapplication.default_config') |
||||
from yourapplication import default_config |
||||
app.config.from_object(default_config) |
||||
|
||||
You should not use this function to load the actual configuration but |
||||
rather configuration defaults. The actual config should be loaded |
||||
with :meth:`from_pyfile` and ideally from a location not within the |
||||
package because the package might be installed system wide. |
||||
|
||||
See :ref:`config-dev-prod` for an example of class-based configuration |
||||
using :meth:`from_object`. |
||||
|
||||
:param obj: an import name or object |
||||
""" |
||||
if isinstance(obj, string_types): |
||||
obj = import_string(obj) |
||||
for key in dir(obj): |
||||
if key.isupper(): |
||||
self[key] = getattr(obj, key) |
||||
|
||||
def from_json(self, filename, silent=False): |
||||
"""Updates the values in the config from a JSON file. This function |
||||
behaves as if the JSON object was a dictionary and passed to the |
||||
:meth:`from_mapping` function. |
||||
|
||||
:param filename: the filename of the JSON file. This can either be an |
||||
absolute filename or a filename relative to the |
||||
root path. |
||||
:param silent: set to ``True`` if you want silent failure for missing |
||||
files. |
||||
|
||||
.. versionadded:: 0.11 |
||||
""" |
||||
filename = os.path.join(self.root_path, filename) |
||||
|
||||
try: |
||||
with open(filename) as json_file: |
||||
obj = json.loads(json_file.read()) |
||||
except IOError as e: |
||||
if silent and e.errno in (errno.ENOENT, errno.EISDIR): |
||||
return False |
||||
e.strerror = 'Unable to load configuration file (%s)' % e.strerror |
||||
raise |
||||
return self.from_mapping(obj) |
||||
|
||||
def from_mapping(self, *mapping, **kwargs): |
||||
"""Updates the config like :meth:`update` ignoring items with non-upper |
||||
keys. |
||||
|
||||
.. versionadded:: 0.11 |
||||
""" |
||||
mappings = [] |
||||
if len(mapping) == 1: |
||||
if hasattr(mapping[0], 'items'): |
||||
mappings.append(mapping[0].items()) |
||||
else: |
||||
mappings.append(mapping[0]) |
||||
elif len(mapping) > 1: |
||||
raise TypeError( |
||||
'expected at most 1 positional argument, got %d' % len(mapping) |
||||
) |
||||
mappings.append(kwargs.items()) |
||||
for mapping in mappings: |
||||
for (key, value) in mapping: |
||||
if key.isupper(): |
||||
self[key] = value |
||||
return True |
||||
|
||||
def get_namespace(self, namespace, lowercase=True, trim_namespace=True): |
||||
"""Returns a dictionary containing a subset of configuration options |
||||
that match the specified namespace/prefix. Example usage:: |
||||
|
||||
app.config['IMAGE_STORE_TYPE'] = 'fs' |
||||
app.config['IMAGE_STORE_PATH'] = '/var/app/images' |
||||
app.config['IMAGE_STORE_BASE_URL'] = 'http://img.website.com' |
||||
image_store_config = app.config.get_namespace('IMAGE_STORE_') |
||||
|
||||
The resulting dictionary `image_store_config` would look like:: |
||||
|
||||
{ |
||||
'type': 'fs', |
||||
'path': '/var/app/images', |
||||
'base_url': 'http://img.website.com' |
||||
} |
||||
|
||||
This is often useful when configuration options map directly to |
||||
keyword arguments in functions or class constructors. |
||||
|
||||
:param namespace: a configuration namespace |
||||
:param lowercase: a flag indicating if the keys of the resulting |
||||
dictionary should be lowercase |
||||
:param trim_namespace: a flag indicating if the keys of the resulting |
||||
dictionary should not include the namespace |
||||
|
||||
.. versionadded:: 0.11 |
||||
""" |
||||
rv = {} |
||||
for k, v in iteritems(self): |
||||
if not k.startswith(namespace): |
||||
continue |
||||
if trim_namespace: |
||||
key = k[len(namespace):] |
||||
else: |
||||
key = k |
||||
if lowercase: |
||||
key = key.lower() |
||||
rv[key] = v |
||||
return rv |
||||
|
||||
def __repr__(self): |
||||
return '<%s %s>' % (self.__class__.__name__, dict.__repr__(self)) |
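# --- Illustrative usage sketch, not part of flask/config.py itself ---
# The configuration patterns documented above; the settings names and the
# YOURAPPLICATION_SETTINGS variable are hypothetical.
from flask import Flask

class DefaultConfig(object):
    DEBUG = False
    IMAGE_STORE_TYPE = 'fs'
    IMAGE_STORE_PATH = '/var/app/images'

app = Flask(__name__)
app.config.from_object(DefaultConfig)                              # class-based defaults
app.config.from_envvar('YOURAPPLICATION_SETTINGS', silent=True)    # optional file overrides
image_store = app.config.get_namespace('IMAGE_STORE_')
# image_store == {'type': 'fs', 'path': '/var/app/images'}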
@ -0,0 +1,457 @@ |
||||
# -*- coding: utf-8 -*- |
||||
""" |
||||
flask.ctx |
||||
~~~~~~~~~ |
||||
|
||||
Implements the objects required to keep the context. |
||||
|
||||
:copyright: © 2010 by the Pallets team. |
||||
:license: BSD, see LICENSE for more details. |
||||
""" |
||||
|
||||
import sys |
||||
from functools import update_wrapper |
||||
|
||||
from werkzeug.exceptions import HTTPException |
||||
|
||||
from .globals import _request_ctx_stack, _app_ctx_stack |
||||
from .signals import appcontext_pushed, appcontext_popped |
||||
from ._compat import BROKEN_PYPY_CTXMGR_EXIT, reraise |
||||
|
||||
|
||||
# a singleton sentinel value for parameter defaults |
||||
_sentinel = object() |
||||
|
||||
|
||||
class _AppCtxGlobals(object): |
||||
"""A plain object. Used as a namespace for storing data during an |
||||
application context. |
||||
|
||||
Creating an app context automatically creates this object, which is |
||||
made available as the :data:`g` proxy. |
||||
|
||||
.. describe:: 'key' in g |
||||
|
||||
Check whether an attribute is present. |
||||
|
||||
.. versionadded:: 0.10 |
||||
|
||||
.. describe:: iter(g) |
||||
|
||||
Return an iterator over the attribute names. |
||||
|
||||
.. versionadded:: 0.10 |
||||
""" |
||||
|
||||
def get(self, name, default=None): |
||||
"""Get an attribute by name, or a default value. Like |
||||
:meth:`dict.get`. |
||||
|
||||
:param name: Name of attribute to get. |
||||
:param default: Value to return if the attribute is not present. |
||||
|
||||
.. versionadded:: 0.10 |
||||
""" |
||||
return self.__dict__.get(name, default) |
||||
|
||||
def pop(self, name, default=_sentinel): |
||||
"""Get and remove an attribute by name. Like :meth:`dict.pop`. |
||||
|
||||
:param name: Name of attribute to pop. |
||||
:param default: Value to return if the attribute is not present, |
||||
instead of raising a ``KeyError``. 
||||
|
||||
.. versionadded:: 0.11 |
||||
""" |
||||
if default is _sentinel: |
||||
return self.__dict__.pop(name) |
||||
else: |
||||
return self.__dict__.pop(name, default) |
||||
|
||||
def setdefault(self, name, default=None): |
||||
"""Get the value of an attribute if it is present, otherwise |
||||
set and return a default value. Like :meth:`dict.setdefault`. |
||||
|
||||
:param name: Name of attribute to get. |
||||
:param default: Value to set and return if the attribute is not 
||||
present. |
||||
|
||||
.. versionadded:: 0.11 |
||||
""" |
||||
return self.__dict__.setdefault(name, default) |
||||
|
||||
def __contains__(self, item): |
||||
return item in self.__dict__ |
||||
|
||||
def __iter__(self): |
||||
return iter(self.__dict__) |
||||
|
||||
def __repr__(self): |
||||
top = _app_ctx_stack.top |
||||
if top is not None: |
||||
return '<flask.g of %r>' % top.app.name |
||||
return object.__repr__(self) |
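# Illustrative sketch (not part of flask.ctx): the class above is what the
# flask.g proxy resolves to while an application context is active.
#
#   from flask import g
#
#   def get_request_cache():
#       # behaves like dict.setdefault, see _AppCtxGlobals.setdefault above
#       return g.setdefault('cache', {})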
||||
|
||||
|
||||
def after_this_request(f): |
||||
"""Executes a function after this request. This is useful to modify |
||||
response objects. The function is passed the response object and has |
||||
to return the same or a new one. |
||||
|
||||
Example:: |
||||
|
||||
@app.route('/') |
||||
def index(): |
||||
@after_this_request |
||||
def add_header(response): |
||||
response.headers['X-Foo'] = 'Parachute' |
||||
return response |
||||
return 'Hello World!' |
||||
|
||||
This is more useful if a function other than the view function wants to |
||||
modify a response. For instance think of a decorator that wants to add |
||||
some headers without converting the return value into a response object. |
||||
|
||||
.. versionadded:: 0.9 |
||||
""" |
||||
_request_ctx_stack.top._after_request_functions.append(f) |
||||
return f |
||||
|
||||
|
||||
def copy_current_request_context(f): |
||||
"""A helper function that decorates a function to retain the current |
||||
request context. This is useful when working with greenlets. The moment |
||||
the function is decorated a copy of the request context is created and |
||||
then pushed when the function is called. |
||||
|
||||
Example:: |
||||
|
||||
import gevent |
||||
from flask import copy_current_request_context |
||||
|
||||
@app.route('/') |
||||
def index(): |
||||
@copy_current_request_context |
||||
def do_some_work(): |
||||
# do some work here, it can access flask.request like you |
||||
# would otherwise in the view function. |
||||
... |
||||
gevent.spawn(do_some_work) |
||||
return 'Regular response' |
||||
|
||||
.. versionadded:: 0.10 |
||||
""" |
||||
top = _request_ctx_stack.top |
||||
if top is None: |
||||
raise RuntimeError('This decorator can only be used at local scopes ' |
||||
'when a request context is on the stack. For instance within ' |
||||
'view functions.') |
||||
reqctx = top.copy() |
||||
def wrapper(*args, **kwargs): |
||||
with reqctx: |
||||
return f(*args, **kwargs) |
||||
return update_wrapper(wrapper, f) |
||||
|
||||
|
||||
def has_request_context(): |
||||
"""If you have code that wants to test if a request context is there or |
||||
not this function can be used. For instance, you may want to take advantage |
||||
of request information if the request object is available, but fail |
||||
silently if it is unavailable. |
||||
|
||||
:: |
||||
|
||||
class User(db.Model): |
||||
|
||||
def __init__(self, username, remote_addr=None): |
||||
self.username = username |
||||
if remote_addr is None and has_request_context(): |
||||
remote_addr = request.remote_addr |
||||
self.remote_addr = remote_addr |
||||
|
||||
Alternatively you can also just test any of the context bound objects |
||||
(such as :class:`request` or :class:`g`) for truthiness:: 
||||
|
||||
class User(db.Model): |
||||
|
||||
def __init__(self, username, remote_addr=None): |
||||
self.username = username |
||||
if remote_addr is None and request: |
||||
remote_addr = request.remote_addr |
||||
self.remote_addr = remote_addr |
||||
|
||||
.. versionadded:: 0.7 |
||||
""" |
||||
return _request_ctx_stack.top is not None |
||||
|
||||
|
||||
def has_app_context(): |
||||
"""Works like :func:`has_request_context` but for the application |
||||
context. You can also just do a boolean check on the |
||||
:data:`current_app` object instead. |
||||
|
||||
.. versionadded:: 0.9 |
||||
""" |
||||
return _app_ctx_stack.top is not None |
||||
|
||||
|
||||
class AppContext(object): |
||||
"""The application context binds an application object implicitly |
||||
to the current thread or greenlet, similar to how the |
||||
:class:`RequestContext` binds request information. The application |
||||
context is also implicitly created if a request context is created |
||||
but the application is not at the top of the application context 
||||
stack. 
||||
""" |
||||
|
||||
def __init__(self, app): |
||||
self.app = app |
||||
self.url_adapter = app.create_url_adapter(None) |
||||
self.g = app.app_ctx_globals_class() |
||||
|
||||
# Like request context, app contexts can be pushed multiple times |
||||
# but there a basic "refcount" is enough to track them. |
||||
self._refcnt = 0 |
||||
|
||||
def push(self): |
||||
"""Binds the app context to the current context.""" |
||||
self._refcnt += 1 |
||||
if hasattr(sys, 'exc_clear'): |
||||
sys.exc_clear() |
||||
_app_ctx_stack.push(self) |
||||
appcontext_pushed.send(self.app) |
||||
|
||||
def pop(self, exc=_sentinel): |
||||
"""Pops the app context.""" |
||||
try: |
||||
self._refcnt -= 1 |
||||
if self._refcnt <= 0: |
||||
if exc is _sentinel: |
||||
exc = sys.exc_info()[1] |
||||
self.app.do_teardown_appcontext(exc) |
||||
finally: |
||||
rv = _app_ctx_stack.pop() |
||||
assert rv is self, 'Popped wrong app context. (%r instead of %r)' \ |
||||
% (rv, self) |
||||
appcontext_popped.send(self.app) |
||||
|
||||
def __enter__(self): |
||||
self.push() |
||||
return self |
||||
|
||||
def __exit__(self, exc_type, exc_value, tb): |
||||
self.pop(exc_value) |
||||
|
||||
if BROKEN_PYPY_CTXMGR_EXIT and exc_type is not None: |
||||
reraise(exc_type, exc_value, tb) |
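# Illustrative sketch (not part of flask.ctx): pushing an application context
# manually, e.g. from a script or a background job.
#
#   from flask import Flask, current_app
#
#   app = Flask(__name__)
#   with app.app_context():          # AppContext.__enter__() -> push()
#       assert current_app.name == app.name
#   # __exit__() -> pop(); teardown_appcontext callbacks have run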
||||
|
||||
|
||||
class RequestContext(object): |
||||
"""The request context contains all request relevant information. It is |
||||
created at the beginning of the request and pushed to the |
||||
`_request_ctx_stack` and removed at the end of it. It will create the |
||||
URL adapter and request object for the WSGI environment provided. |
||||
|
||||
Do not attempt to use this class directly, instead use |
||||
:meth:`~flask.Flask.test_request_context` and |
||||
:meth:`~flask.Flask.request_context` to create this object. |
||||
|
||||
When the request context is popped, it will evaluate all the |
||||
functions registered on the application for teardown execution |
||||
(:meth:`~flask.Flask.teardown_request`). |
||||
|
||||
The request context is automatically popped at the end of the request |
||||
for you. In debug mode the request context is kept around if |
||||
exceptions happen so that interactive debuggers have a chance to |
||||
introspect the data. With 0.4 this can also be forced for requests |
||||
that did not fail and outside of ``DEBUG`` mode. By setting |
||||
``'flask._preserve_context'`` to ``True`` on the WSGI environment the |
||||
context will not pop itself at the end of the request. This is used by |
||||
the :meth:`~flask.Flask.test_client` for example to implement the |
||||
deferred cleanup functionality. |
||||
|
||||
You might find this helpful for unittests where you need the |
||||
information from the context locals to stay around a little longer. |
||||
Make sure to properly :meth:`~werkzeug.LocalStack.pop` the stack |
||||
yourself in that situation, otherwise your unittests will leak memory. |
||||
""" |
||||
|
||||
def __init__(self, app, environ, request=None): |
||||
self.app = app |
||||
if request is None: |
||||
request = app.request_class(environ) |
||||
self.request = request |
||||
self.url_adapter = app.create_url_adapter(self.request) |
||||
self.flashes = None |
||||
self.session = None |
||||
|
||||
# Request contexts can be pushed multiple times and interleaved with |
||||
# other request contexts. They are only discarded once the last level |
||||
# is popped. Additionally, if an application context is missing, one is |
||||
# created implicitly, so for each level we record this information. |
||||
self._implicit_app_ctx_stack = [] |
||||
|
||||
# Indicator whether the context was preserved. The next time another |
||||
# context is pushed, the preserved context is popped. |
||||
self.preserved = False |
||||
|
||||
# Remembers the exception for pop, if there is one, in case context |
||||
# preservation kicks in. |
||||
self._preserved_exc = None |
||||
|
||||
# Functions that should be executed after the request on the response |
||||
# object. These will be called before the regular "after_request" |
||||
# functions. |
||||
self._after_request_functions = [] |
||||
|
||||
self.match_request() |
||||
|
||||
def _get_g(self): |
||||
return _app_ctx_stack.top.g |
||||
def _set_g(self, value): |
||||
_app_ctx_stack.top.g = value |
||||
g = property(_get_g, _set_g) |
||||
del _get_g, _set_g |
||||
|
||||
def copy(self): |
||||
"""Creates a copy of this request context with the same request object. |
||||
This can be used to move a request context to a different greenlet. |
||||
Because the actual request object is the same this cannot be used to |
||||
move a request context to a different thread unless access to the |
||||
request object is locked. |
||||
|
||||
.. versionadded:: 0.10 |
||||
""" |
||||
return self.__class__(self.app, |
||||
environ=self.request.environ, |
||||
request=self.request |
||||
) |
||||
|
||||
def match_request(self): |
||||
"""Can be overridden by a subclass to hook into the matching |
||||
of the request. |
||||
""" |
||||
try: |
||||
url_rule, self.request.view_args = \ |
||||
self.url_adapter.match(return_rule=True) |
||||
self.request.url_rule = url_rule |
||||
except HTTPException as e: |
||||
self.request.routing_exception = e |
||||
|
||||
def push(self): |
||||
"""Binds the request context to the current context.""" |
||||
# If an exception occurs in debug mode or if context preservation is |
||||
# activated under exception situations exactly one context stays |
||||
# on the stack. The rationale is that you want to access that |
||||
# information under debug situations. However if someone forgets to |
||||
# pop that context again we want to make sure that on the next push |
||||
# it's invalidated, otherwise we run the risk that something leaks |
||||
# memory. This is usually only a problem in test suites since this |
||||
# functionality is not active in production environments. |
||||
top = _request_ctx_stack.top |
||||
if top is not None and top.preserved: |
||||
top.pop(top._preserved_exc) |
||||
|
||||
# Before we push the request context we have to ensure that there |
||||
# is an application context. |
||||
app_ctx = _app_ctx_stack.top |
||||
if app_ctx is None or app_ctx.app != self.app: |
||||
app_ctx = self.app.app_context() |
||||
app_ctx.push() |
||||
self._implicit_app_ctx_stack.append(app_ctx) |
||||
else: |
||||
self._implicit_app_ctx_stack.append(None) |
||||
|
||||
if hasattr(sys, 'exc_clear'): |
||||
sys.exc_clear() |
||||
|
||||
_request_ctx_stack.push(self) |
||||
|
||||
# Open the session at the moment that the request context is available. |
||||
# This allows a custom open_session method to use the request context. |
||||
# Only open a new session if this is the first time the request was |
||||
# pushed, otherwise stream_with_context loses the session. |
||||
if self.session is None: |
||||
session_interface = self.app.session_interface |
||||
self.session = session_interface.open_session( |
||||
self.app, self.request |
||||
) |
||||
|
||||
if self.session is None: |
||||
self.session = session_interface.make_null_session(self.app) |
||||
|
||||
def pop(self, exc=_sentinel): |
||||
"""Pops the request context and unbinds it by doing that. This will |
||||
also trigger the execution of functions registered by the |
||||
:meth:`~flask.Flask.teardown_request` decorator. |
||||
|
||||
.. versionchanged:: 0.9 |
||||
Added the `exc` argument. |
||||
""" |
||||
app_ctx = self._implicit_app_ctx_stack.pop() |
||||
|
||||
try: |
||||
clear_request = False |
||||
if not self._implicit_app_ctx_stack: |
||||
self.preserved = False |
||||
self._preserved_exc = None |
||||
if exc is _sentinel: |
||||
exc = sys.exc_info()[1] |
||||
self.app.do_teardown_request(exc) |
||||
|
||||
# If this interpreter supports clearing the exception information |
||||
# we do that now. This will only go into effect on Python 2.x, |
||||
# on 3.x it disappears automatically at the end of the exception |
||||
# stack. |
||||
if hasattr(sys, 'exc_clear'): |
||||
sys.exc_clear() |
||||
|
||||
request_close = getattr(self.request, 'close', None) |
||||
if request_close is not None: |
||||
request_close() |
||||
clear_request = True |
||||
finally: |
||||
rv = _request_ctx_stack.pop() |
||||
|
||||
# get rid of circular dependencies at the end of the request |
||||
# so that we don't require the GC to be active. |
||||
if clear_request: |
||||
rv.request.environ['werkzeug.request'] = None |
||||
|
||||
# Get rid of the app as well if necessary. |
||||
if app_ctx is not None: |
||||
app_ctx.pop(exc) |
||||
|
||||
assert rv is self, 'Popped wrong request context. ' \ |
||||
'(%r instead of %r)' % (rv, self) |
||||
|
||||
def auto_pop(self, exc): |
||||
if self.request.environ.get('flask._preserve_context') or \ |
||||
(exc is not None and self.app.preserve_context_on_exception): |
||||
self.preserved = True |
||||
self._preserved_exc = exc |
||||
else: |
||||
self.pop(exc) |
||||
|
||||
def __enter__(self): |
||||
self.push() |
||||
return self |
||||
|
||||
def __exit__(self, exc_type, exc_value, tb): |
||||
# do not pop the request stack if we are in debug mode and an |
||||
# exception happened. This will allow the debugger to still |
||||
# access the request object in the interactive shell. Furthermore |
||||
# the context can be force kept alive for the test client. |
||||
# See flask.testing for how this works. |
||||
self.auto_pop(exc_value) |
||||
|
||||
if BROKEN_PYPY_CTXMGR_EXIT and exc_type is not None: |
||||
reraise(exc_type, exc_value, tb) |
||||
|
||||
def __repr__(self): |
||||
return '<%s \'%s\' [%s] of %s>' % ( |
||||
self.__class__.__name__, |
||||
self.request.url, |
||||
self.request.method, |
||||
self.app.name, |
||||
) |
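# --- Editor's illustrative sketch; not part of flask/ctx.py. A hedged example
# of how RequestContext is normally created and consumed through Flask's
# public helpers rather than instantiated directly; ``sketch_app`` is made up.

from flask import Flask, request

sketch_app = Flask(__name__)

@sketch_app.route('/hello')
def hello():
    return 'Hello, World!'

# test_request_context() builds a RequestContext for a fake WSGI environ;
# entering it pushes the context (and an app context if needed).
with sketch_app.test_request_context('/hello', method='GET'):
    assert request.path == '/hello'
    assert request.url_rule.rule == '/hello'   # set by match_request()

# The test client drives the full request/response cycle and pops the
# context automatically once the response is consumed.
client = sketch_app.test_client()
assert client.get('/hello').data == b'Hello, World!'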
@ -0,0 +1,168 @@ |
||||
# -*- coding: utf-8 -*- |
||||
""" |
||||
flask.debughelpers |
||||
~~~~~~~~~~~~~~~~~~ |
||||
|
||||
Various helpers to make the development experience better. |
||||
|
||||
:copyright: © 2010 by the Pallets team. |
||||
:license: BSD, see LICENSE for more details. |
||||
""" |
||||
|
||||
import os |
||||
from warnings import warn |
||||
|
||||
from ._compat import implements_to_string, text_type |
||||
from .app import Flask |
||||
from .blueprints import Blueprint |
||||
from .globals import _request_ctx_stack |
||||
|
||||
|
||||
class UnexpectedUnicodeError(AssertionError, UnicodeError): |
||||
"""Raised in places where we want some better error reporting for |
||||
unexpected unicode or binary data. |
||||
""" |
||||
|
||||
|
||||
@implements_to_string |
||||
class DebugFilesKeyError(KeyError, AssertionError): |
||||
"""Raised from request.files during debugging. The idea is that it can |
||||
provide a better error message than just a generic KeyError/BadRequest. |
||||
""" |
||||
|
||||
def __init__(self, request, key): |
||||
form_matches = request.form.getlist(key) |
||||
buf = ['You tried to access the file "%s" in the request.files ' |
||||
'dictionary but it does not exist. The mimetype for the request ' |
||||
'is "%s" instead of "multipart/form-data" which means that no ' |
||||
'file contents were transmitted. To fix this error you should ' |
||||
'provide enctype="multipart/form-data" in your form.' % |
||||
(key, request.mimetype)] |
||||
if form_matches: |
||||
buf.append('\n\nThe browser instead transmitted some file names. ' |
||||
'This was submitted: %s' % ', '.join('"%s"' % x |
||||
for x in form_matches)) |
||||
self.msg = ''.join(buf) |
||||
|
||||
def __str__(self): |
||||
return self.msg |
||||
|
||||
|
||||
class FormDataRoutingRedirect(AssertionError): |
||||
"""This exception is raised by Flask in debug mode if it detects a |
||||
redirect caused by the routing system when the request method is not |
||||
GET, HEAD or OPTIONS. Reasoning: form data will be dropped. |
||||
""" |
||||
|
||||
def __init__(self, request): |
||||
exc = request.routing_exception |
||||
buf = ['A request was sent to this URL (%s) but a redirect was ' |
||||
'issued automatically by the routing system to "%s".' |
||||
% (request.url, exc.new_url)] |
||||
|
||||
# In case just a slash was appended we can be extra helpful |
||||
if request.base_url + '/' == exc.new_url.split('?')[0]: |
||||
buf.append(' The URL was defined with a trailing slash so ' |
||||
'Flask will automatically redirect to the URL ' |
||||
'with the trailing slash if it was accessed ' |
||||
'without one.') |
||||
|
||||
buf.append(' Make sure to directly send your %s-request to this URL ' |
||||
'since we can\'t make browsers or HTTP clients redirect ' |
||||
'with form data reliably or without user interaction.' % |
||||
request.method) |
||||
buf.append('\n\nNote: this exception is only raised in debug mode') |
||||
AssertionError.__init__(self, ''.join(buf).encode('utf-8')) |
||||
|
||||
|
||||
def attach_enctype_error_multidict(request): |
||||
"""Since Flask 0.8 we're monkeypatching the files object in case a |
||||
request is detected that does not use multipart form data but the files |
||||
object is accessed. |
||||
""" |
||||
oldcls = request.files.__class__ |
||||
class newcls(oldcls): |
||||
def __getitem__(self, key): |
||||
try: |
||||
return oldcls.__getitem__(self, key) |
||||
except KeyError: |
||||
if key not in request.form: |
||||
raise |
||||
raise DebugFilesKeyError(request, key) |
||||
newcls.__name__ = oldcls.__name__ |
||||
newcls.__module__ = oldcls.__module__ |
||||
request.files.__class__ = newcls |
||||
|
||||
|
||||
def _dump_loader_info(loader): |
||||
yield 'class: %s.%s' % (type(loader).__module__, type(loader).__name__) |
||||
for key, value in sorted(loader.__dict__.items()): |
||||
if key.startswith('_'): |
||||
continue |
||||
if isinstance(value, (tuple, list)): |
||||
if not all(isinstance(x, (str, text_type)) for x in value): |
||||
continue |
||||
yield '%s:' % key |
||||
for item in value: |
||||
yield ' - %s' % item |
||||
continue |
||||
elif not isinstance(value, (str, text_type, int, float, bool)): |
||||
continue |
||||
yield '%s: %r' % (key, value) |
||||
|
||||
|
||||
def explain_template_loading_attempts(app, template, attempts): |
||||
"""This should help developers understand what failed""" |
||||
info = ['Locating template "%s":' % template] |
||||
total_found = 0 |
||||
blueprint = None |
||||
reqctx = _request_ctx_stack.top |
||||
if reqctx is not None and reqctx.request.blueprint is not None: |
||||
blueprint = reqctx.request.blueprint |
||||
|
||||
for idx, (loader, srcobj, triple) in enumerate(attempts): |
||||
if isinstance(srcobj, Flask): |
||||
src_info = 'application "%s"' % srcobj.import_name |
||||
elif isinstance(srcobj, Blueprint): |
||||
src_info = 'blueprint "%s" (%s)' % (srcobj.name, |
||||
srcobj.import_name) |
||||
else: |
||||
src_info = repr(srcobj) |
||||
|
||||
info.append('% 5d: trying loader of %s' % ( |
||||
idx + 1, src_info)) |
||||
|
||||
for line in _dump_loader_info(loader): |
||||
info.append(' %s' % line) |
||||
|
||||
if triple is None: |
||||
detail = 'no match' |
||||
else: |
||||
detail = 'found (%r)' % (triple[1] or '<string>') |
||||
total_found += 1 |
||||
info.append(' -> %s' % detail) |
||||
|
||||
seems_fishy = False |
||||
if total_found == 0: |
||||
info.append('Error: the template could not be found.') |
||||
seems_fishy = True |
||||
elif total_found > 1: |
||||
info.append('Warning: multiple loaders returned a match for the template.') |
||||
seems_fishy = True |
||||
|
||||
if blueprint is not None and seems_fishy: |
||||
info.append(' The template was looked up from an endpoint that ' |
||||
'belongs to the blueprint "%s".' % blueprint) |
||||
info.append(' Maybe you did not place a template in the right folder?') |
||||
info.append(' See http://flask.pocoo.org/docs/blueprints/#templates') |
||||
|
||||
app.logger.info('\n'.join(info)) |
||||
|
||||
|
||||
def explain_ignored_app_run(): |
||||
if os.environ.get('WERKZEUG_RUN_MAIN') != 'true': |
||||
warn(Warning('Silently ignoring app.run() because the ' |
||||
'application is run from the flask command line ' |
||||
'executable. Consider putting app.run() behind an ' |
||||
'if __name__ == "__main__" guard to silence this ' |
||||
'warning.'), stacklevel=3) |
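# --- Editor's illustrative sketch; not part of flask/debughelpers.py. The
# warning emitted by explain_ignored_app_run() above suggests guarding
# app.run(); a minimal, hedged example of that layout ("hello.py" as the
# module name is hypothetical).

from flask import Flask

app = Flask(__name__)

@app.route('/')
def index():
    return 'Hello, World!'

if __name__ == '__main__':
    # Only used when executing "python hello.py" directly; the ``flask run``
    # CLI ignores this block, so no warning is raised either way.
    app.run(debug=True)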
@ -0,0 +1,61 @@ |
||||
# -*- coding: utf-8 -*- |
||||
""" |
||||
flask.globals |
||||
~~~~~~~~~~~~~ |
||||
|
||||
Defines all the global objects that are proxies to the current |
||||
active context. |
||||
|
||||
:copyright: © 2010 by the Pallets team. |
||||
:license: BSD, see LICENSE for more details. |
||||
""" |
||||
|
||||
from functools import partial |
||||
from werkzeug.local import LocalStack, LocalProxy |
||||
|
||||
|
||||
_request_ctx_err_msg = '''\ |
||||
Working outside of request context. |
||||
|
||||
This typically means that you attempted to use functionality that needed |
||||
an active HTTP request. Consult the documentation on testing for |
||||
information about how to avoid this problem.\ |
||||
''' |
||||
_app_ctx_err_msg = '''\ |
||||
Working outside of application context. |
||||
|
||||
This typically means that you attempted to use functionality that needed |
||||
to interface with the current application object in some way. To solve |
||||
this, set up an application context with app.app_context(). See the |
||||
documentation for more information.\ |
||||
''' |
||||
|
||||
|
||||
def _lookup_req_object(name): |
||||
top = _request_ctx_stack.top |
||||
if top is None: |
||||
raise RuntimeError(_request_ctx_err_msg) |
||||
return getattr(top, name) |
||||
|
||||
|
||||
def _lookup_app_object(name): |
||||
top = _app_ctx_stack.top |
||||
if top is None: |
||||
raise RuntimeError(_app_ctx_err_msg) |
||||
return getattr(top, name) |
||||
|
||||
|
||||
def _find_app(): |
||||
top = _app_ctx_stack.top |
||||
if top is None: |
||||
raise RuntimeError(_app_ctx_err_msg) |
||||
return top.app |
||||
|
||||
|
||||
# context locals |
||||
_request_ctx_stack = LocalStack() |
||||
_app_ctx_stack = LocalStack() |
||||
current_app = LocalProxy(_find_app) |
||||
request = LocalProxy(partial(_lookup_req_object, 'request')) |
||||
session = LocalProxy(partial(_lookup_req_object, 'session')) |
||||
g = LocalProxy(partial(_lookup_app_object, 'g')) |
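# --- Editor's illustrative sketch; not part of flask/globals.py. A hedged
# example of the proxies defined above: outside of any context they raise the
# RuntimeError messages shown, inside a context they resolve lazily.

from flask import Flask, current_app, g

proxy_app = Flask(__name__)

try:
    current_app.name                 # no app context pushed yet
except RuntimeError as err:
    assert 'Working outside of application context' in str(err)

with proxy_app.app_context():
    g.counter = 1                    # stored on the AppContext's ``g`` object
    assert current_app.name == proxy_app.name
    assert g.counter == 1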
File diff suppressed because it is too large
@ -0,0 +1,327 @@ |
||||
# -*- coding: utf-8 -*- |
||||
""" |
||||
flask.json |
||||
~~~~~~~~~~ |
||||
|
||||
:copyright: © 2010 by the Pallets team. |
||||
:license: BSD, see LICENSE for more details. |
||||
""" |
||||
import codecs |
||||
import io |
||||
import uuid |
||||
from datetime import date, datetime |
||||
from flask.globals import current_app, request |
||||
from flask._compat import text_type, PY2 |
||||
|
||||
from werkzeug.http import http_date |
||||
from jinja2 import Markup |
||||
|
||||
# Use the same json implementation as itsdangerous on which we |
||||
# depend anyways. |
||||
from itsdangerous import json as _json |
||||
|
||||
|
||||
# Figure out if simplejson escapes slashes. This behavior was changed |
||||
# from one version to another without reason. |
||||
_slash_escape = '\\/' not in _json.dumps('/') |
||||
|
||||
|
||||
__all__ = ['dump', 'dumps', 'load', 'loads', 'htmlsafe_dump', |
||||
'htmlsafe_dumps', 'JSONDecoder', 'JSONEncoder', |
||||
'jsonify'] |
||||
|
||||
|
||||
def _wrap_reader_for_text(fp, encoding): |
||||
if isinstance(fp.read(0), bytes): |
||||
fp = io.TextIOWrapper(io.BufferedReader(fp), encoding) |
||||
return fp |
||||
|
||||
|
||||
def _wrap_writer_for_text(fp, encoding): |
||||
try: |
||||
fp.write('') |
||||
except TypeError: |
||||
fp = io.TextIOWrapper(fp, encoding) |
||||
return fp |
||||
|
||||
|
||||
class JSONEncoder(_json.JSONEncoder): |
||||
"""The default Flask JSON encoder. This one extends the default simplejson |
||||
encoder by also supporting ``datetime`` objects, ``UUID`` as well as |
||||
``Markup`` objects which are serialized as RFC 822 datetime strings (same |
||||
as the HTTP date format). In order to support more data types override the |
||||
:meth:`default` method. |
||||
""" |
||||
|
||||
def default(self, o): |
||||
"""Implement this method in a subclass such that it returns a |
||||
serializable object for ``o``, or calls the base implementation (to |
||||
raise a :exc:`TypeError`). |
||||
|
||||
For example, to support arbitrary iterators, you could implement |
||||
default like this:: |
||||
|
||||
def default(self, o): |
||||
try: |
||||
iterable = iter(o) |
||||
except TypeError: |
||||
pass |
||||
else: |
||||
return list(iterable) |
||||
return JSONEncoder.default(self, o) |
||||
""" |
||||
if isinstance(o, datetime): |
||||
return http_date(o.utctimetuple()) |
||||
if isinstance(o, date): |
||||
return http_date(o.timetuple()) |
||||
if isinstance(o, uuid.UUID): |
||||
return str(o) |
||||
if hasattr(o, '__html__'): |
||||
return text_type(o.__html__()) |
||||
return _json.JSONEncoder.default(self, o) |
||||
|
||||
|
||||
class JSONDecoder(_json.JSONDecoder): |
||||
"""The default JSON decoder. This one does not change the behavior from |
||||
the default simplejson decoder. Consult the :mod:`json` documentation |
||||
for more information. This decoder is not only used for the load |
||||
functions of this module but also :attr:`~flask.Request`. |
||||
""" |
||||
|
||||
|
||||
def _dump_arg_defaults(kwargs): |
||||
"""Inject default arguments for dump functions.""" |
||||
if current_app: |
||||
bp = current_app.blueprints.get(request.blueprint) if request else None |
||||
kwargs.setdefault( |
||||
'cls', |
||||
bp.json_encoder if bp and bp.json_encoder |
||||
else current_app.json_encoder |
||||
) |
||||
|
||||
if not current_app.config['JSON_AS_ASCII']: |
||||
kwargs.setdefault('ensure_ascii', False) |
||||
|
||||
kwargs.setdefault('sort_keys', current_app.config['JSON_SORT_KEYS']) |
||||
else: |
||||
kwargs.setdefault('sort_keys', True) |
||||
kwargs.setdefault('cls', JSONEncoder) |
||||
|
||||
|
||||
def _load_arg_defaults(kwargs): |
||||
"""Inject default arguments for load functions.""" |
||||
if current_app: |
||||
bp = current_app.blueprints.get(request.blueprint) if request else None |
||||
kwargs.setdefault( |
||||
'cls', |
||||
bp.json_decoder if bp and bp.json_decoder |
||||
else current_app.json_decoder |
||||
) |
||||
else: |
||||
kwargs.setdefault('cls', JSONDecoder) |
||||
|
||||
|
||||
def detect_encoding(data): |
||||
"""Detect which UTF codec was used to encode the given bytes. |
||||
|
||||
The latest JSON standard (:rfc:`8259`) suggests that only UTF-8 is |
||||
accepted. Older documents allowed 8, 16, or 32. 16 and 32 can be big |
||||
or little endian. Some editors or libraries may prepend a BOM. |
||||
|
||||
:param data: Bytes in unknown UTF encoding. |
||||
:return: UTF encoding name |
||||
""" |
||||
head = data[:4] |
||||
|
||||
if head[:3] == codecs.BOM_UTF8: |
||||
return 'utf-8-sig' |
||||
|
||||
if b'\x00' not in head: |
||||
return 'utf-8' |
||||
|
||||
if head in (codecs.BOM_UTF32_BE, codecs.BOM_UTF32_LE): |
||||
return 'utf-32' |
||||
|
||||
if head[:2] in (codecs.BOM_UTF16_BE, codecs.BOM_UTF16_LE): |
||||
return 'utf-16' |
||||
|
||||
if len(head) == 4: |
||||
if head[:3] == b'\x00\x00\x00': |
||||
return 'utf-32-be' |
||||
|
||||
if head[::2] == b'\x00\x00': |
||||
return 'utf-16-be' |
||||
|
||||
if head[1:] == b'\x00\x00\x00': |
||||
return 'utf-32-le' |
||||
|
||||
if head[1::2] == b'\x00\x00': |
||||
return 'utf-16-le' |
||||
|
||||
if len(head) == 2: |
||||
return 'utf-16-be' if head.startswith(b'\x00') else 'utf-16-le' |
||||
|
||||
return 'utf-8' |
||||
|
||||
|
||||
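# --- Editor's illustrative sketch; not part of flask/json. A hedged example
# of the detect_encoding() helper defined just above, on a few byte strings.

import codecs

assert detect_encoding(b'{"a": 1}') == 'utf-8'
assert detect_encoding(codecs.BOM_UTF8 + b'{}') == 'utf-8-sig'
assert detect_encoding('{}'.encode('utf-16-le')) == 'utf-16-le'
assert detect_encoding('{}'.encode('utf-32-be')) == 'utf-32-be'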
def dumps(obj, **kwargs): |
||||
"""Serialize ``obj`` to a JSON formatted ``str`` by using the application's |
||||
configured encoder (:attr:`~flask.Flask.json_encoder`) if there is an |
||||
application on the stack. |
||||
|
||||
By default this function can return ``unicode`` strings or ASCII-only |
||||
bytestrings, which coerce into unicode strings automatically. That |
||||
behavior is controlled by the ``JSON_AS_ASCII`` configuration variable |
||||
and can be overridden by the simplejson ``ensure_ascii`` parameter. |
||||
""" |
||||
_dump_arg_defaults(kwargs) |
||||
encoding = kwargs.pop('encoding', None) |
||||
rv = _json.dumps(obj, **kwargs) |
||||
if encoding is not None and isinstance(rv, text_type): |
||||
rv = rv.encode(encoding) |
||||
return rv |
||||
|
||||
|
||||
def dump(obj, fp, **kwargs): |
||||
"""Like :func:`dumps` but writes into a file object.""" |
||||
_dump_arg_defaults(kwargs) |
||||
encoding = kwargs.pop('encoding', None) |
||||
if encoding is not None: |
||||
fp = _wrap_writer_for_text(fp, encoding) |
||||
_json.dump(obj, fp, **kwargs) |
||||
|
||||
|
||||
def loads(s, **kwargs): |
||||
"""Unserialize a JSON object from a string ``s`` by using the application's |
||||
configured decoder (:attr:`~flask.Flask.json_decoder`) if there is an |
||||
application on the stack. |
||||
""" |
||||
_load_arg_defaults(kwargs) |
||||
if isinstance(s, bytes): |
||||
encoding = kwargs.pop('encoding', None) |
||||
if encoding is None: |
||||
encoding = detect_encoding(s) |
||||
s = s.decode(encoding) |
||||
return _json.loads(s, **kwargs) |
||||
|
||||
|
||||
def load(fp, **kwargs): |
||||
"""Like :func:`loads` but reads from a file object. |
||||
""" |
||||
_load_arg_defaults(kwargs) |
||||
if not PY2: |
||||
fp = _wrap_reader_for_text(fp, kwargs.pop('encoding', None) or 'utf-8') |
||||
return _json.load(fp, **kwargs) |
||||
|
||||
|
||||
def htmlsafe_dumps(obj, **kwargs): |
||||
"""Works exactly like :func:`dumps` but is safe for use in ``<script>`` |
||||
tags. It accepts the same arguments and returns a JSON string. Note that |
||||
this is available in templates through the ``|tojson`` filter which will |
||||
also mark the result as safe. Due to how this function escapes certain |
||||
characters this is safe even if used outside of ``<script>`` tags. |
||||
|
||||
The following characters are escaped in strings: |
||||
|
||||
- ``<`` |
||||
- ``>`` |
||||
- ``&`` |
||||
- ``'`` |
||||
|
||||
This makes it safe to embed such strings in any place in HTML with the |
||||
notable exception of double-quoted attributes. In that case, single-quote |
||||
your attributes or additionally HTML-escape the value. |
||||
|
||||
.. versionchanged:: 0.10 |
||||
This function's return value is now always safe for HTML usage, even |
||||
if outside of script tags or if used in XHTML. This rule does not |
||||
hold true when using this function in HTML attributes that are double |
||||
quoted. Always single quote attributes if you use the ``|tojson`` |
||||
filter. Alternatively use ``|tojson|forceescape``. |
||||
""" |
||||
rv = dumps(obj, **kwargs) \ |
||||
.replace(u'<', u'\\u003c') \ |
||||
.replace(u'>', u'\\u003e') \ |
||||
.replace(u'&', u'\\u0026') \ |
||||
.replace(u"'", u'\\u0027') |
||||
if not _slash_escape: |
||||
rv = rv.replace('\\/', '/') |
||||
return rv |
||||
|
||||
|
||||
def htmlsafe_dump(obj, fp, **kwargs): |
||||
"""Like :func:`htmlsafe_dumps` but writes into a file object.""" |
||||
fp.write(text_type(htmlsafe_dumps(obj, **kwargs))) |
||||
|
||||
|
||||
def jsonify(*args, **kwargs): |
||||
"""This function wraps :func:`dumps` to add a few enhancements that make |
||||
life easier. It turns the JSON output into a :class:`~flask.Response` |
||||
object with the :mimetype:`application/json` mimetype. For convenience, it |
||||
also converts multiple arguments into an array or multiple keyword arguments |
||||
into a dict. This means that both ``jsonify(1,2,3)`` and |
||||
``jsonify([1,2,3])`` serialize to ``[1,2,3]``. |
||||
|
||||
For clarity, the JSON serialization behavior has the following differences |
||||
from :func:`dumps`: |
||||
|
||||
1. Single argument: Passed straight through to :func:`dumps`. |
||||
2. Multiple arguments: Converted to an array before being passed to |
||||
:func:`dumps`. |
||||
3. Multiple keyword arguments: Converted to a dict before being passed to |
||||
:func:`dumps`. |
||||
4. Both args and kwargs: Behavior undefined and will throw an exception. |
||||
|
||||
Example usage:: |
||||
|
||||
from flask import jsonify |
||||
|
||||
@app.route('/_get_current_user') |
||||
def get_current_user(): |
||||
return jsonify(username=g.user.username, |
||||
email=g.user.email, |
||||
id=g.user.id) |
||||
|
||||
This will send a JSON response like this to the browser:: |
||||
|
||||
{ |
||||
"username": "admin", |
||||
"email": "admin@localhost", |
||||
"id": 42 |
||||
} |
||||
|
||||
|
||||
.. versionchanged:: 0.11 |
||||
Added support for serializing top-level arrays. This introduces a |
||||
security risk in ancient browsers. See :ref:`json-security` for details. |
||||
|
||||
This function's response will be pretty printed if the |
||||
``JSONIFY_PRETTYPRINT_REGULAR`` config parameter is set to True or the |
||||
Flask app is running in debug mode. Compressed (not pretty) formatting |
||||
currently means no indents and no spaces after separators. |
||||
|
||||
.. versionadded:: 0.2 |
||||
""" |
||||
|
||||
indent = None |
||||
separators = (',', ':') |
||||
|
||||
if current_app.config['JSONIFY_PRETTYPRINT_REGULAR'] or current_app.debug: |
||||
indent = 2 |
||||
separators = (', ', ': ') |
||||
|
||||
if args and kwargs: |
||||
raise TypeError('jsonify() behavior undefined when passed both args and kwargs') |
||||
elif len(args) == 1: # single args are passed directly to dumps() |
||||
data = args[0] |
||||
else: |
||||
data = args or kwargs |
||||
|
||||
return current_app.response_class( |
||||
dumps(data, indent=indent, separators=separators) + '\n', |
||||
mimetype=current_app.config['JSONIFY_MIMETYPE'] |
||||
) |
||||
|
||||
|
||||
def tojson_filter(obj, **kwargs): |
||||
return Markup(htmlsafe_dumps(obj, **kwargs)) |
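# --- Editor's illustrative sketch; not part of flask/json. A hedged example
# of extending JSONEncoder.default() as its docstring suggests and wiring it
# up via ``app.json_encoder``; ``Money``, ``MoneyJSONEncoder`` and the
# ``/price`` route are made-up names.

from flask import Flask, jsonify
from flask.json import JSONEncoder

class Money(object):
    def __init__(self, amount, currency):
        self.amount = amount
        self.currency = currency

class MoneyJSONEncoder(JSONEncoder):
    def default(self, o):
        if isinstance(o, Money):
            return {'amount': o.amount, 'currency': o.currency}
        return JSONEncoder.default(self, o)   # fall back to the base behaviour

money_app = Flask(__name__)
money_app.json_encoder = MoneyJSONEncoder

@money_app.route('/price')
def price():
    return jsonify(price=Money('19.99', 'EUR'))

with money_app.test_client() as client:
    assert b'EUR' in client.get('/price').data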
@ -0,0 +1,300 @@ |
||||
# -*- coding: utf-8 -*- |
||||
""" |
||||
Tagged JSON |
||||
~~~~~~~~~~~ |
||||
|
||||
A compact representation for lossless serialization of non-standard JSON types. |
||||
:class:`~flask.sessions.SecureCookieSessionInterface` uses this to serialize |
||||
the session data, but it may be useful in other places. It can be extended to |
||||
support other types. |
||||
|
||||
.. autoclass:: TaggedJSONSerializer |
||||
:members: |
||||
|
||||
.. autoclass:: JSONTag |
||||
:members: |
||||
|
||||
Let's see an example that adds support for :class:`~collections.OrderedDict`. |
||||
Dicts don't have an order in Python or JSON, so to handle this we will dump |
||||
the items as a list of ``[key, value]`` pairs. Subclass :class:`JSONTag` and |
||||
give it the new key ``' od'`` to identify the type. The session serializer |
||||
processes dicts first, so insert the new tag at the front of the order since |
||||
``OrderedDict`` must be processed before ``dict``. :: |
||||
|
||||
from flask.json.tag import JSONTag |
||||
|
||||
class TagOrderedDict(JSONTag): |
||||
__slots__ = ('serializer',) |
||||
key = ' od' |
||||
|
||||
def check(self, value): |
||||
return isinstance(value, OrderedDict) |
||||
|
||||
def to_json(self, value): |
||||
return [[k, self.serializer.tag(v)] for k, v in iteritems(value)] |
||||
|
||||
def to_python(self, value): |
||||
return OrderedDict(value) |
||||
|
||||
app.session_interface.serializer.register(TagOrderedDict, index=0) |
||||
|
||||
:copyright: © 2010 by the Pallets team. |
||||
:license: BSD, see LICENSE for more details. |
||||
""" |
||||
|
||||
from base64 import b64decode, b64encode |
||||
from datetime import datetime |
||||
from uuid import UUID |
||||
|
||||
from jinja2 import Markup |
||||
from werkzeug.http import http_date, parse_date |
||||
|
||||
from flask._compat import iteritems, text_type |
||||
from flask.json import dumps, loads |
||||
|
||||
|
||||
class JSONTag(object): |
||||
"""Base class for defining type tags for :class:`TaggedJSONSerializer`.""" |
||||
|
||||
__slots__ = ('serializer',) |
||||
|
||||
#: The tag to mark the serialized object with. If ``None``, this tag is |
||||
#: only used as an intermediate step during tagging. |
||||
key = None |
||||
|
||||
def __init__(self, serializer): |
||||
"""Create a tagger for the given serializer.""" |
||||
self.serializer = serializer |
||||
|
||||
def check(self, value): |
||||
"""Check if the given value should be tagged by this tag.""" |
||||
raise NotImplementedError |
||||
|
||||
def to_json(self, value): |
||||
"""Convert the Python object to an object that is a valid JSON type. |
||||
The tag will be added later.""" |
||||
raise NotImplementedError |
||||
|
||||
def to_python(self, value): |
||||
"""Convert the JSON representation back to the correct type. The tag |
||||
will already be removed.""" |
||||
raise NotImplementedError |
||||
|
||||
def tag(self, value): |
||||
"""Convert the value to a valid JSON type and add the tag structure |
||||
around it.""" |
||||
return {self.key: self.to_json(value)} |
||||
|
||||
|
||||
class TagDict(JSONTag): |
||||
"""Tag for 1-item dicts whose only key matches a registered tag. |
||||
|
||||
Internally, the dict key is suffixed with `__`, and the suffix is removed |
||||
when deserializing. |
||||
""" |
||||
|
||||
__slots__ = () |
||||
key = ' di' |
||||
|
||||
def check(self, value): |
||||
return ( |
||||
isinstance(value, dict) |
||||
and len(value) == 1 |
||||
and next(iter(value)) in self.serializer.tags |
||||
) |
||||
|
||||
def to_json(self, value): |
||||
key = next(iter(value)) |
||||
return {key + '__': self.serializer.tag(value[key])} |
||||
|
||||
def to_python(self, value): |
||||
key = next(iter(value)) |
||||
return {key[:-2]: value[key]} |
||||
|
||||
|
||||
class PassDict(JSONTag): |
||||
__slots__ = () |
||||
|
||||
def check(self, value): |
||||
return isinstance(value, dict) |
||||
|
||||
def to_json(self, value): |
||||
# JSON objects may only have string keys, so don't bother tagging the |
||||
# key here. |
||||
return dict((k, self.serializer.tag(v)) for k, v in iteritems(value)) |
||||
|
||||
tag = to_json |
||||
|
||||
|
||||
class TagTuple(JSONTag): |
||||
__slots__ = () |
||||
key = ' t' |
||||
|
||||
def check(self, value): |
||||
return isinstance(value, tuple) |
||||
|
||||
def to_json(self, value): |
||||
return [self.serializer.tag(item) for item in value] |
||||
|
||||
def to_python(self, value): |
||||
return tuple(value) |
||||
|
||||
|
||||
class PassList(JSONTag): |
||||
__slots__ = () |
||||
|
||||
def check(self, value): |
||||
return isinstance(value, list) |
||||
|
||||
def to_json(self, value): |
||||
return [self.serializer.tag(item) for item in value] |
||||
|
||||
tag = to_json |
||||
|
||||
|
||||
class TagBytes(JSONTag): |
||||
__slots__ = () |
||||
key = ' b' |
||||
|
||||
def check(self, value): |
||||
return isinstance(value, bytes) |
||||
|
||||
def to_json(self, value): |
||||
return b64encode(value).decode('ascii') |
||||
|
||||
def to_python(self, value): |
||||
return b64decode(value) |
||||
|
||||
|
||||
class TagMarkup(JSONTag): |
||||
"""Serialize anything matching the :class:`~flask.Markup` API by |
||||
having a ``__html__`` method to the result of that method. Always |
||||
deserializes to an instance of :class:`~flask.Markup`.""" |
||||
|
||||
__slots__ = () |
||||
key = ' m' |
||||
|
||||
def check(self, value): |
||||
return callable(getattr(value, '__html__', None)) |
||||
|
||||
def to_json(self, value): |
||||
return text_type(value.__html__()) |
||||
|
||||
def to_python(self, value): |
||||
return Markup(value) |
||||
|
||||
|
||||
class TagUUID(JSONTag): |
||||
__slots__ = () |
||||
key = ' u' |
||||
|
||||
def check(self, value): |
||||
return isinstance(value, UUID) |
||||
|
||||
def to_json(self, value): |
||||
return value.hex |
||||
|
||||
def to_python(self, value): |
||||
return UUID(value) |
||||
|
||||
|
||||
class TagDateTime(JSONTag): |
||||
__slots__ = () |
||||
key = ' d' |
||||
|
||||
def check(self, value): |
||||
return isinstance(value, datetime) |
||||
|
||||
def to_json(self, value): |
||||
return http_date(value) |
||||
|
||||
def to_python(self, value): |
||||
return parse_date(value) |
||||
|
||||
|
||||
class TaggedJSONSerializer(object): |
||||
"""Serializer that uses a tag system to compactly represent objects that |
||||
are not JSON types. Passed as the intermediate serializer to |
||||
:class:`itsdangerous.Serializer`. |
||||
|
||||
The following extra types are supported: |
||||
|
||||
* :class:`dict` |
||||
* :class:`tuple` |
||||
* :class:`bytes` |
||||
* :class:`~flask.Markup` |
||||
* :class:`~uuid.UUID` |
||||
* :class:`~datetime.datetime` |
||||
""" |
||||
|
||||
__slots__ = ('tags', 'order') |
||||
|
||||
#: Tag classes to bind when creating the serializer. Other tags can be |
||||
#: added later using :meth:`~register`. |
||||
default_tags = [ |
||||
TagDict, PassDict, TagTuple, PassList, TagBytes, TagMarkup, TagUUID, |
||||
TagDateTime, |
||||
] |
||||
|
||||
def __init__(self): |
||||
self.tags = {} |
||||
self.order = [] |
||||
|
||||
for cls in self.default_tags: |
||||
self.register(cls) |
||||
|
||||
def register(self, tag_class, force=False, index=None): |
||||
"""Register a new tag with this serializer. |
||||
|
||||
:param tag_class: tag class to register. Will be instantiated with this |
||||
serializer instance. |
||||
:param force: overwrite an existing tag. If false (default), a |
||||
:exc:`KeyError` is raised. |
||||
:param index: index to insert the new tag in the tag order. Useful when |
||||
the new tag is a special case of an existing tag. If ``None`` |
||||
(default), the tag is appended to the end of the order. |
||||
|
||||
:raise KeyError: if the tag key is already registered and ``force`` is |
||||
not true. |
||||
""" |
||||
tag = tag_class(self) |
||||
key = tag.key |
||||
|
||||
if key is not None: |
||||
if not force and key in self.tags: |
||||
raise KeyError("Tag '{0}' is already registered.".format(key)) |
||||
|
||||
self.tags[key] = tag |
||||
|
||||
if index is None: |
||||
self.order.append(tag) |
||||
else: |
||||
self.order.insert(index, tag) |
||||
|
||||
def tag(self, value): |
||||
"""Convert a value to a tagged representation if necessary.""" |
||||
for tag in self.order: |
||||
if tag.check(value): |
||||
return tag.tag(value) |
||||
|
||||
return value |
||||
|
||||
def untag(self, value): |
||||
"""Convert a tagged representation back to the original type.""" |
||||
if len(value) != 1: |
||||
return value |
||||
|
||||
key = next(iter(value)) |
||||
|
||||
if key not in self.tags: |
||||
return value |
||||
|
||||
return self.tags[key].to_python(value[key]) |
||||
|
||||
def dumps(self, value): |
||||
"""Tag the value and dump it to a compact JSON string.""" |
||||
return dumps(self.tag(value), separators=(',', ':')) |
||||
|
||||
def loads(self, value): |
||||
"""Load data from a JSON string and deserialized any tagged objects.""" |
||||
return loads(value, object_hook=self.untag) |
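# --- Editor's illustrative sketch; not part of flask/json/tag.py. A hedged
# roundtrip example for the TaggedJSONSerializer defined above with a few of
# the extra types it supports.

from datetime import datetime
from uuid import uuid4

serializer = TaggedJSONSerializer()

data = {
    'pair': (1, 2),                       # tuple    -> ' t' tag
    'blob': b'\x00\x01',                  # bytes    -> ' b' tag
    'uid': uuid4(),                       # UUID     -> ' u' tag
    'when': datetime(2018, 1, 1, 12, 0),  # datetime -> ' d' tag
}

restored = serializer.loads(serializer.dumps(data))
assert restored['pair'] == (1, 2)
assert restored['blob'] == b'\x00\x01'
assert restored['when'].year == 2018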
@ -0,0 +1,78 @@ |
||||
# -*- coding: utf-8 -*- |
||||
""" |
||||
flask.logging |
||||
~~~~~~~~~~~~~ |
||||
|
||||
:copyright: © 2010 by the Pallets team. |
||||
:license: BSD, see LICENSE for more details. |
||||
""" |
||||
|
||||
from __future__ import absolute_import |
||||
|
||||
import logging |
||||
import sys |
||||
|
||||
from werkzeug.local import LocalProxy |
||||
|
||||
from .globals import request |
||||
|
||||
|
||||
@LocalProxy |
||||
def wsgi_errors_stream(): |
||||
"""Find the most appropriate error stream for the application. If a request |
||||
is active, log to ``wsgi.errors``, otherwise use ``sys.stderr``. |
||||
|
||||
If you configure your own :class:`logging.StreamHandler`, you may want to |
||||
use this for the stream. If you are using file or dict configuration and |
||||
can't import this directly, you can refer to it as |
||||
``ext://flask.logging.wsgi_errors_stream``. |
||||
""" |
||||
return request.environ['wsgi.errors'] if request else sys.stderr |
||||
|
||||
|
||||
def has_level_handler(logger): |
||||
"""Check if there is a handler in the logging chain that will handle the |
||||
given logger's :meth:`effective level <~logging.Logger.getEffectiveLevel>`. |
||||
""" |
||||
level = logger.getEffectiveLevel() |
||||
current = logger |
||||
|
||||
while current: |
||||
if any(handler.level <= level for handler in current.handlers): |
||||
return True |
||||
|
||||
if not current.propagate: |
||||
break |
||||
|
||||
current = current.parent |
||||
|
||||
return False |
||||
|
||||
|
||||
#: Log messages to :func:`~flask.logging.wsgi_errors_stream` with the format |
||||
#: ``[%(asctime)s] %(levelname)s in %(module)s: %(message)s``. |
||||
default_handler = logging.StreamHandler(wsgi_errors_stream) |
||||
default_handler.setFormatter(logging.Formatter( |
||||
'[%(asctime)s] %(levelname)s in %(module)s: %(message)s' |
||||
)) |
||||
|
||||
|
||||
def create_logger(app): |
||||
"""Get the ``'flask.app'`` logger and configure it if needed. |
||||
|
||||
When :attr:`~flask.Flask.debug` is enabled, set the logger level to |
||||
:data:`logging.DEBUG` if it is not set. |
||||
|
||||
If there is no handler for the logger's effective level, add a |
||||
:class:`~logging.StreamHandler` for |
||||
:func:`~flask.logging.wsgi_errors_stream` with a basic format. |
||||
""" |
||||
logger = logging.getLogger('flask.app') |
||||
|
||||
if app.debug and logger.level == logging.NOTSET: |
||||
logger.setLevel(logging.DEBUG) |
||||
|
||||
if not has_level_handler(logger): |
||||
logger.addHandler(default_handler) |
||||
|
||||
return logger |
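# --- Editor's illustrative sketch; not part of flask/logging.py. A hedged
# example of pointing an extra handler at wsgi_errors_stream, as its
# docstring suggests; ``log_app`` and the format string are made up.

import logging
from flask import Flask
from flask.logging import default_handler, wsgi_errors_stream

log_app = Flask(__name__)

# app.logger is the 'flask.app' logger configured by create_logger() above.
extra = logging.StreamHandler(wsgi_errors_stream)
extra.setLevel(logging.WARNING)
extra.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
log_app.logger.addHandler(extra)

# The preconfigured default_handler can be removed if the application
# configures logging entirely by itself.
log_app.logger.removeHandler(default_handler)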
@ -0,0 +1,385 @@ |
||||
# -*- coding: utf-8 -*- |
||||
""" |
||||
flask.sessions |
||||
~~~~~~~~~~~~~~ |
||||
|
||||
Implements cookie based sessions based on itsdangerous. |
||||
|
||||
:copyright: © 2010 by the Pallets team. |
||||
:license: BSD, see LICENSE for more details. |
||||
""" |
||||
|
||||
import hashlib |
||||
import warnings |
||||
from collections import MutableMapping |
||||
from datetime import datetime |
||||
|
||||
from itsdangerous import BadSignature, URLSafeTimedSerializer |
||||
from werkzeug.datastructures import CallbackDict |
||||
|
||||
from flask.helpers import is_ip, total_seconds |
||||
from flask.json.tag import TaggedJSONSerializer |
||||
|
||||
|
||||
class SessionMixin(MutableMapping): |
||||
"""Expands a basic dictionary with session attributes.""" |
||||
|
||||
@property |
||||
def permanent(self): |
||||
"""This reflects the ``'_permanent'`` key in the dict.""" |
||||
return self.get('_permanent', False) |
||||
|
||||
@permanent.setter |
||||
def permanent(self, value): |
||||
self['_permanent'] = bool(value) |
||||
|
||||
#: Some implementations can detect whether a session is newly |
||||
#: created, but that is not guaranteed. Use with caution. The mixin |
||||
#: default is hard-coded ``False``. |
||||
new = False |
||||
|
||||
#: Some implementations can detect changes to the session and set |
||||
#: this when that happens. The mixin default is hard coded to |
||||
#: ``True``. |
||||
modified = True |
||||
|
||||
#: Some implementations can detect when session data is read or |
||||
#: written and set this when that happens. The mixin default is hard |
||||
#: coded to ``True``. |
||||
accessed = True |
||||
|
||||
|
||||
class SecureCookieSession(CallbackDict, SessionMixin): |
||||
"""Base class for sessions based on signed cookies. |
||||
|
||||
This session backend will set the :attr:`modified` and |
||||
:attr:`accessed` attributes. It cannot reliably track whether a |
||||
session is new (vs. empty), so :attr:`new` remains hard coded to |
||||
``False``. |
||||
""" |
||||
|
||||
#: When data is changed, this is set to ``True``. Only the session |
||||
#: dictionary itself is tracked; if the session contains mutable |
||||
#: data (for example a nested dict) then this must be set to |
||||
#: ``True`` manually when modifying that data. The session cookie |
||||
#: will only be written to the response if this is ``True``. |
||||
modified = False |
||||
|
||||
#: When data is read or written, this is set to ``True``. Used by |
||||
#: :class:`.SecureCookieSessionInterface` to add a ``Vary: Cookie`` |
||||
#: header, which allows caching proxies to cache different pages for |
||||
#: different users. |
||||
accessed = False |
||||
|
||||
def __init__(self, initial=None): |
||||
def on_update(self): |
||||
self.modified = True |
||||
self.accessed = True |
||||
|
||||
super(SecureCookieSession, self).__init__(initial, on_update) |
||||
|
||||
def __getitem__(self, key): |
||||
self.accessed = True |
||||
return super(SecureCookieSession, self).__getitem__(key) |
||||
|
||||
def get(self, key, default=None): |
||||
self.accessed = True |
||||
return super(SecureCookieSession, self).get(key, default) |
||||
|
||||
def setdefault(self, key, default=None): |
||||
self.accessed = True |
||||
return super(SecureCookieSession, self).setdefault(key, default) |
||||
|
||||
|
||||
class NullSession(SecureCookieSession): |
||||
"""Class used to generate nicer error messages if sessions are not |
||||
available. Will still allow read-only access to the empty session |
||||
but fail on setting. |
||||
""" |
||||
|
||||
def _fail(self, *args, **kwargs): |
||||
raise RuntimeError('The session is unavailable because no secret ' |
||||
'key was set. Set the secret_key on the ' |
||||
'application to something unique and secret.') |
||||
__setitem__ = __delitem__ = clear = pop = popitem = \ |
||||
update = setdefault = _fail |
||||
del _fail |
||||
|
||||
|
||||
class SessionInterface(object): |
||||
"""The basic interface you have to implement in order to replace the |
||||
default session interface, which stores sessions in signed cookies |
||||
via :mod:`itsdangerous`. The only methods you have to implement are |
||||
:meth:`open_session` and :meth:`save_session`, the others have |
||||
useful defaults which you don't need to change. |
||||
|
||||
The session object returned by the :meth:`open_session` method has to |
||||
provide a dictionary like interface plus the properties and methods |
||||
from the :class:`SessionMixin`. We recommend just subclassing a dict |
||||
and adding that mixin:: |
||||
|
||||
class Session(dict, SessionMixin): |
||||
pass |
||||
|
||||
If :meth:`open_session` returns ``None`` Flask will call into |
||||
:meth:`make_null_session` to create a session that acts as replacement |
||||
if the session support cannot work because some requirement is not |
||||
fulfilled. The default :class:`NullSession` class that is created |
||||
will complain that the secret key was not set. |
||||
|
||||
To replace the session interface on an application all you have to do |
||||
is to assign :attr:`flask.Flask.session_interface`:: |
||||
|
||||
app = Flask(__name__) |
||||
app.session_interface = MySessionInterface() |
||||
|
||||
.. versionadded:: 0.8 |
||||
""" |
||||
|
||||
#: :meth:`make_null_session` will look here for the class that should |
||||
#: be created when a null session is requested. Likewise the |
||||
#: :meth:`is_null_session` method will perform a typecheck against |
||||
#: this type. |
||||
null_session_class = NullSession |
||||
|
||||
#: A flag that indicates if the session interface is pickle based. |
||||
#: This can be used by Flask extensions to make a decision in regards |
||||
#: to how to deal with the session object. |
||||
#: |
||||
#: .. versionadded:: 0.10 |
||||
pickle_based = False |
||||
|
||||
def make_null_session(self, app): |
||||
"""Creates a null session which acts as a replacement object if the |
||||
real session support could not be loaded due to a configuration |
||||
error. This mainly aids the user experience because the job of the |
||||
null session is to still support lookups without complaining, while |
||||
modifications are answered with a helpful error message about what |
||||
failed. |
||||
|
||||
This creates an instance of :attr:`null_session_class` by default. |
||||
""" |
||||
return self.null_session_class() |
||||
|
||||
def is_null_session(self, obj): |
||||
"""Checks if a given object is a null session. Null sessions are |
||||
not asked to be saved. |
||||
|
||||
This checks if the object is an instance of :attr:`null_session_class` |
||||
by default. |
||||
""" |
||||
return isinstance(obj, self.null_session_class) |
||||
|
||||
def get_cookie_domain(self, app): |
||||
"""Returns the domain that should be set for the session cookie. |
||||
|
||||
Uses ``SESSION_COOKIE_DOMAIN`` if it is configured, otherwise |
||||
falls back to detecting the domain based on ``SERVER_NAME``. |
||||
|
||||
Once detected (or if not set at all), ``SESSION_COOKIE_DOMAIN`` is |
||||
updated to avoid re-running the logic. |
||||
""" |
||||
|
||||
rv = app.config['SESSION_COOKIE_DOMAIN'] |
||||
|
||||
# set explicitly, or cached from SERVER_NAME detection |
||||
# if False, return None |
||||
if rv is not None: |
||||
return rv if rv else None |
||||
|
||||
rv = app.config['SERVER_NAME'] |
||||
|
||||
# server name not set, cache False to return None next time |
||||
if not rv: |
||||
app.config['SESSION_COOKIE_DOMAIN'] = False |
||||
return None |
||||
|
||||
# chop off the port which is usually not supported by browsers |
||||
# remove any leading '.' since we'll add that later |
||||
rv = rv.rsplit(':', 1)[0].lstrip('.') |
||||
|
||||
if '.' not in rv: |
||||
# Chrome doesn't allow names without a '.' |
||||
# this should only come up with localhost |
||||
# hack around this by not setting the name, and show a warning |
||||
warnings.warn( |
||||
'"{rv}" is not a valid cookie domain, it must contain a ".".' |
||||
' Add an entry to your hosts file, for example' |
||||
' "{rv}.localdomain", and use that instead.'.format(rv=rv) |
||||
) |
||||
app.config['SESSION_COOKIE_DOMAIN'] = False |
||||
return None |
||||
|
||||
ip = is_ip(rv) |
||||
|
||||
if ip: |
||||
warnings.warn( |
||||
'The session cookie domain is an IP address. This may not work' |
||||
' as intended in some browsers. Add an entry to your hosts' |
||||
' file, for example "localhost.localdomain", and use that' |
||||
' instead.' |
||||
) |
||||
|
||||
# if this is not an ip and app is mounted at the root, allow subdomain |
||||
# matching by adding a '.' prefix |
||||
if self.get_cookie_path(app) == '/' and not ip: |
||||
rv = '.' + rv |
||||
|
||||
app.config['SESSION_COOKIE_DOMAIN'] = rv |
||||
return rv |
||||
|
||||
def get_cookie_path(self, app): |
||||
"""Returns the path for which the cookie should be valid. The |
||||
default implementation uses the value from the ``SESSION_COOKIE_PATH`` |
||||
config var if it's set, and falls back to ``APPLICATION_ROOT`` or |
||||
uses ``/`` if it's ``None``. |
||||
""" |
||||
return app.config['SESSION_COOKIE_PATH'] \ |
||||
or app.config['APPLICATION_ROOT'] |
||||
|
||||
def get_cookie_httponly(self, app): |
||||
"""Returns True if the session cookie should be httponly. This |
||||
currently just returns the value of the ``SESSION_COOKIE_HTTPONLY`` |
||||
config var. |
||||
""" |
||||
return app.config['SESSION_COOKIE_HTTPONLY'] |
||||
|
||||
def get_cookie_secure(self, app): |
||||
"""Returns True if the cookie should be secure. This currently |
||||
just returns the value of the ``SESSION_COOKIE_SECURE`` setting. |
||||
""" |
||||
return app.config['SESSION_COOKIE_SECURE'] |
||||
|
||||
def get_cookie_samesite(self, app): |
||||
"""Return ``'Strict'`` or ``'Lax'`` if the cookie should use the |
||||
``SameSite`` attribute. This currently just returns the value of |
||||
the :data:`SESSION_COOKIE_SAMESITE` setting. |
||||
""" |
||||
return app.config['SESSION_COOKIE_SAMESITE'] |
||||
|
||||
def get_expiration_time(self, app, session): |
||||
"""A helper method that returns an expiration date for the session |
||||
or ``None`` if the session is linked to the browser session. The |
||||
default implementation returns now + the permanent session |
||||
lifetime configured on the application. |
||||
""" |
||||
if session.permanent: |
||||
return datetime.utcnow() + app.permanent_session_lifetime |
||||
|
||||
def should_set_cookie(self, app, session): |
||||
"""Used by session backends to determine if a ``Set-Cookie`` header |
||||
should be set for this session cookie for this response. If the session |
||||
has been modified, the cookie is set. If the session is permanent and |
||||
the ``SESSION_REFRESH_EACH_REQUEST`` config is true, the cookie is |
||||
always set. |
||||
|
||||
This check is usually skipped if the session was deleted. |
||||
|
||||
.. versionadded:: 0.11 |
||||
""" |
||||
|
||||
return session.modified or ( |
||||
session.permanent and app.config['SESSION_REFRESH_EACH_REQUEST'] |
||||
) |
||||
|
||||
def open_session(self, app, request): |
||||
"""This method has to be implemented and must either return ``None`` |
||||
in case the loading failed because of a configuration error or an |
||||
instance of a session object which implements a dictionary like |
||||
interface + the methods and attributes on :class:`SessionMixin`. |
||||
""" |
||||
raise NotImplementedError() |
||||
|
||||
def save_session(self, app, session, response): |
||||
"""This is called for actual sessions returned by :meth:`open_session` |
||||
at the end of the request. This is still called during a request |
||||
context so if you absolutely need access to the request you can do |
||||
that. |
||||
""" |
||||
raise NotImplementedError() |
||||
|
||||
|
||||
session_json_serializer = TaggedJSONSerializer() |
||||
|
||||
|
||||
class SecureCookieSessionInterface(SessionInterface): |
||||
"""The default session interface that stores sessions in signed cookies |
||||
through the :mod:`itsdangerous` module. |
||||
""" |
||||
#: the salt that should be applied on top of the secret key for the |
||||
#: signing of cookie based sessions. |
||||
salt = 'cookie-session' |
||||
#: the hash function to use for the signature. The default is sha1 |
||||
digest_method = staticmethod(hashlib.sha1) |
||||
#: the name of the itsdangerous supported key derivation. The default |
||||
#: is hmac. |
||||
key_derivation = 'hmac' |
||||
#: A python serializer for the payload. The default is a compact |
||||
#: JSON derived serializer with support for some extra Python types |
||||
#: such as datetime objects or tuples. |
||||
serializer = session_json_serializer |
||||
session_class = SecureCookieSession |
||||
|
||||
def get_signing_serializer(self, app): |
||||
if not app.secret_key: |
||||
return None |
||||
signer_kwargs = dict( |
||||
key_derivation=self.key_derivation, |
||||
digest_method=self.digest_method |
||||
) |
||||
return URLSafeTimedSerializer(app.secret_key, salt=self.salt, |
||||
serializer=self.serializer, |
||||
signer_kwargs=signer_kwargs) |
||||
|
||||
def open_session(self, app, request): |
||||
s = self.get_signing_serializer(app) |
||||
if s is None: |
||||
return None |
||||
val = request.cookies.get(app.session_cookie_name) |
||||
if not val: |
||||
return self.session_class() |
||||
max_age = total_seconds(app.permanent_session_lifetime) |
||||
try: |
||||
data = s.loads(val, max_age=max_age) |
||||
return self.session_class(data) |
||||
except BadSignature: |
||||
return self.session_class() |
||||
|
||||
def save_session(self, app, session, response): |
||||
domain = self.get_cookie_domain(app) |
||||
path = self.get_cookie_path(app) |
||||
|
||||
# If the session is modified to be empty, remove the cookie. |
||||
# If the session is empty, return without setting the cookie. |
||||
if not session: |
||||
if session.modified: |
||||
response.delete_cookie( |
||||
app.session_cookie_name, |
||||
domain=domain, |
||||
path=path |
||||
) |
||||
|
||||
return |
||||
|
||||
# Add a "Vary: Cookie" header if the session was accessed at all. |
||||
if session.accessed: |
||||
response.vary.add('Cookie') |
||||
|
||||
if not self.should_set_cookie(app, session): |
||||
return |
||||
|
||||
httponly = self.get_cookie_httponly(app) |
||||
secure = self.get_cookie_secure(app) |
||||
samesite = self.get_cookie_samesite(app) |
||||
expires = self.get_expiration_time(app, session) |
||||
val = self.get_signing_serializer(app).dumps(dict(session)) |
||||
response.set_cookie( |
||||
app.session_cookie_name, |
||||
val, |
||||
expires=expires, |
||||
httponly=httponly, |
||||
domain=domain, |
||||
path=path, |
||||
secure=secure, |
||||
samesite=samesite |
||||
) |
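# --- Editor's illustrative sketch; not part of flask/sessions.py. A hedged
# example of the default SecureCookieSessionInterface in action; the secret
# key value and the routes are placeholders.

from flask import Flask, session

session_app = Flask(__name__)
session_app.secret_key = 'change-me'         # required, or a NullSession is used

@session_app.route('/login')
def login():
    session['user'] = 'alice'                # marks the session as modified
    session.permanent = True                 # honours PERMANENT_SESSION_LIFETIME
    return 'ok'

@session_app.route('/whoami')
def whoami():
    return session.get('user', 'anonymous')

client = session_app.test_client()
client.get('/login')                          # response carries the Set-Cookie header
assert client.get('/whoami').data == b'alice'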
@ -0,0 +1,57 @@ |
||||
# -*- coding: utf-8 -*- |
||||
""" |
||||
flask.signals |
||||
~~~~~~~~~~~~~ |
||||
|
||||
Implements signals based on blinker if available, otherwise |
||||
falls silently back to a noop. |
||||
|
||||
:copyright: © 2010 by the Pallets team. |
||||
:license: BSD, see LICENSE for more details. |
||||
""" |
||||
|
||||
signals_available = False |
||||
try: |
||||
from blinker import Namespace |
||||
signals_available = True |
||||
except ImportError: |
||||
class Namespace(object): |
||||
def signal(self, name, doc=None): |
||||
return _FakeSignal(name, doc) |
||||
|
||||
class _FakeSignal(object): |
||||
"""If blinker is unavailable, create a fake class with the same |
||||
interface that allows sending of signals but will fail with an |
||||
error on anything else. Instead of doing anything on send, it |
||||
will just ignore the arguments and do nothing instead. |
||||
""" |
||||
|
||||
def __init__(self, name, doc=None): |
||||
self.name = name |
||||
self.__doc__ = doc |
||||
def _fail(self, *args, **kwargs): |
||||
raise RuntimeError('signalling support is unavailable ' |
||||
'because the blinker library is ' |
||||
'not installed.') |
||||
send = lambda *a, **kw: None |
||||
connect = disconnect = has_receivers_for = receivers_for = \ |
||||
temporarily_connected_to = connected_to = _fail |
||||
del _fail |
||||
|
||||
# The namespace for code signals. If you are not Flask code, do |
||||
# not put signals in here. Create your own namespace instead. |
||||
_signals = Namespace() |
||||
|
||||
|
||||
# Core signals. For usage examples grep the source code or consult |
||||
# the API documentation in docs/api.rst as well as docs/signals.rst |
||||
template_rendered = _signals.signal('template-rendered') |
||||
before_render_template = _signals.signal('before-render-template') |
||||
request_started = _signals.signal('request-started') |
||||
request_finished = _signals.signal('request-finished') |
||||
request_tearing_down = _signals.signal('request-tearing-down') |
||||
got_request_exception = _signals.signal('got-request-exception') |
||||
appcontext_tearing_down = _signals.signal('appcontext-tearing-down') |
||||
appcontext_pushed = _signals.signal('appcontext-pushed') |
||||
appcontext_popped = _signals.signal('appcontext-popped') |
||||
message_flashed = _signals.signal('message-flashed') |
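# --- Editor's illustrative sketch; not part of flask/signals.py. A hedged
# example of subscribing to one of the core signals above; it only does real
# work when blinker is installed (``signals_available``, defined at the top
# of this module, is then True).

from flask import Flask, request_started

signal_app = Flask(__name__)
seen = []

def log_request(sender, **extra):
    # ``sender`` is the application that started handling the request.
    seen.append(sender.name)

request_started.connect(log_request, signal_app)

@signal_app.route('/')
def index():
    return 'ok'

if signals_available:
    signal_app.test_client().get('/')
    assert seen == [signal_app.name]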
@ -0,0 +1,150 @@ |
||||
# -*- coding: utf-8 -*- |
||||
""" |
||||
flask.templating |
||||
~~~~~~~~~~~~~~~~ |
||||
|
||||
Implements the bridge to Jinja2. |
||||
|
||||
:copyright: © 2010 by the Pallets team. |
||||
:license: BSD, see LICENSE for more details. |
||||
""" |
||||
|
||||
from jinja2 import BaseLoader, Environment as BaseEnvironment, \ |
||||
TemplateNotFound |
||||
|
||||
from .globals import _request_ctx_stack, _app_ctx_stack |
||||
from .signals import template_rendered, before_render_template |
||||
|
||||
|
||||
def _default_template_ctx_processor(): |
||||
"""Default template context processor. Injects `request`, |
||||
`session` and `g`. |
||||
""" |
||||
reqctx = _request_ctx_stack.top |
||||
appctx = _app_ctx_stack.top |
||||
rv = {} |
||||
if appctx is not None: |
||||
rv['g'] = appctx.g |
||||
if reqctx is not None: |
||||
rv['request'] = reqctx.request |
||||
rv['session'] = reqctx.session |
||||
return rv |
||||
|
||||
|
||||
class Environment(BaseEnvironment): |
||||
"""Works like a regular Jinja2 environment but has some additional |
||||
knowledge of how Flask's blueprints work so that it can prepend the
||||
name of the blueprint to referenced templates if necessary. |
||||
""" |
||||
|
||||
def __init__(self, app, **options): |
||||
if 'loader' not in options: |
||||
options['loader'] = app.create_global_jinja_loader() |
||||
BaseEnvironment.__init__(self, **options) |
||||
self.app = app |
||||
|
||||
|
||||
class DispatchingJinjaLoader(BaseLoader): |
||||
"""A loader that looks for templates in the application and all |
||||
the blueprint folders. |
||||
""" |
||||
|
||||
def __init__(self, app): |
||||
self.app = app |
||||
|
||||
def get_source(self, environment, template): |
||||
if self.app.config['EXPLAIN_TEMPLATE_LOADING']: |
||||
return self._get_source_explained(environment, template) |
||||
return self._get_source_fast(environment, template) |
||||
|
||||
def _get_source_explained(self, environment, template): |
||||
attempts = [] |
||||
trv = None |
||||
|
||||
for srcobj, loader in self._iter_loaders(template): |
||||
try: |
||||
rv = loader.get_source(environment, template) |
||||
if trv is None: |
||||
trv = rv |
||||
except TemplateNotFound: |
||||
rv = None |
||||
attempts.append((loader, srcobj, rv)) |
||||
|
||||
from .debughelpers import explain_template_loading_attempts |
||||
explain_template_loading_attempts(self.app, template, attempts) |
||||
|
||||
if trv is not None: |
||||
return trv |
||||
raise TemplateNotFound(template) |
||||
|
||||
def _get_source_fast(self, environment, template): |
||||
for srcobj, loader in self._iter_loaders(template): |
||||
try: |
||||
return loader.get_source(environment, template) |
||||
except TemplateNotFound: |
||||
continue |
||||
raise TemplateNotFound(template) |
||||
|
||||
def _iter_loaders(self, template): |
||||
loader = self.app.jinja_loader |
||||
if loader is not None: |
||||
yield self.app, loader |
||||
|
||||
for blueprint in self.app.iter_blueprints(): |
||||
loader = blueprint.jinja_loader |
||||
if loader is not None: |
||||
yield blueprint, loader |
||||
|
||||
def list_templates(self): |
||||
result = set() |
||||
loader = self.app.jinja_loader |
||||
if loader is not None: |
||||
result.update(loader.list_templates()) |
||||
|
||||
for blueprint in self.app.iter_blueprints(): |
||||
loader = blueprint.jinja_loader |
||||
if loader is not None: |
||||
for template in loader.list_templates(): |
||||
result.add(template) |
||||
|
||||
return list(result) |
||||
|
||||
|
||||
def _render(template, context, app): |
||||
"""Renders the template and fires the signal""" |
||||
|
||||
before_render_template.send(app, template=template, context=context) |
||||
rv = template.render(context) |
||||
template_rendered.send(app, template=template, context=context) |
||||
return rv |
||||
|
||||
|
||||
def render_template(template_name_or_list, **context): |
||||
"""Renders a template from the template folder with the given |
||||
context. |
||||
|
||||
:param template_name_or_list: the name of the template to be |
||||
rendered, or an iterable of template names; the
first one that exists will be rendered
||||
:param context: the variables that should be available in the |
||||
context of the template. |
||||
""" |
||||
ctx = _app_ctx_stack.top |
||||
ctx.app.update_template_context(context) |
||||
return _render(ctx.app.jinja_env.get_or_select_template(template_name_or_list), |
||||
context, ctx.app) |
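
# A minimal usage sketch (an illustrative addition, not part of this module):
# inside a view function, keyword arguments passed to render_template() become
# template variables.  'index.html' and 'user' are hypothetical names.
def _example_profile_view(user):
    return render_template('index.html', user=user)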
||||
|
||||
|
||||
def render_template_string(source, **context): |
||||
"""Renders a template from the given template source string |
||||
with the given context. Template variables will be autoescaped. |
||||
|
||||
:param source: the source code of the template to be |
||||
rendered |
||||
:param context: the variables that should be available in the |
||||
context of the template. |
||||
""" |
||||
ctx = _app_ctx_stack.top |
||||
ctx.app.update_template_context(context) |
||||
return _render(ctx.app.jinja_env.from_string(source), |
||||
context, ctx.app) |
@ -0,0 +1,250 @@ |
||||
# -*- coding: utf-8 -*- |
||||
""" |
||||
flask.testing |
||||
~~~~~~~~~~~~~ |
||||
|
||||
Implements test support helpers. This module is lazily imported |
||||
and usually not used in production environments. |
||||
|
||||
:copyright: © 2010 by the Pallets team. |
||||
:license: BSD, see LICENSE for more details. |
||||
""" |
||||
|
||||
import werkzeug |
||||
from contextlib import contextmanager |
||||
|
||||
from click.testing import CliRunner |
||||
from flask.cli import ScriptInfo |
||||
from werkzeug.test import Client, EnvironBuilder |
||||
from flask import _request_ctx_stack |
||||
from flask.json import dumps as json_dumps |
||||
from werkzeug.urls import url_parse |
||||
|
||||
|
||||
def make_test_environ_builder( |
||||
app, path='/', base_url=None, subdomain=None, url_scheme=None, |
||||
*args, **kwargs |
||||
): |
||||
"""Create a :class:`~werkzeug.test.EnvironBuilder`, taking some |
||||
defaults from the application. |
||||
|
||||
:param app: The Flask application to configure the environment from. |
||||
:param path: URL path being requested. |
||||
:param base_url: Base URL where the app is being served, which |
||||
``path`` is relative to. If not given, built from |
||||
:data:`PREFERRED_URL_SCHEME`, ``subdomain``, |
||||
:data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`. |
||||
:param subdomain: Subdomain name to append to :data:`SERVER_NAME`. |
||||
:param url_scheme: Scheme to use instead of |
||||
:data:`PREFERRED_URL_SCHEME`. |
||||
:param json: If given, this is serialized as JSON and passed as |
||||
``data``. Also defaults ``content_type`` to |
||||
``application/json``. |
||||
:param args: other positional arguments passed to |
||||
:class:`~werkzeug.test.EnvironBuilder`. |
||||
:param kwargs: other keyword arguments passed to |
||||
:class:`~werkzeug.test.EnvironBuilder`. |
||||
""" |
||||
|
||||
assert ( |
||||
not (base_url or subdomain or url_scheme) |
||||
or (base_url is not None) != bool(subdomain or url_scheme) |
||||
), 'Cannot pass "subdomain" or "url_scheme" with "base_url".' |
||||
|
||||
if base_url is None: |
||||
http_host = app.config.get('SERVER_NAME') or 'localhost' |
||||
app_root = app.config['APPLICATION_ROOT'] |
||||
|
||||
if subdomain: |
||||
http_host = '{0}.{1}'.format(subdomain, http_host) |
||||
|
||||
if url_scheme is None: |
||||
url_scheme = app.config['PREFERRED_URL_SCHEME'] |
||||
|
||||
url = url_parse(path) |
||||
base_url = '{scheme}://{netloc}/{path}'.format( |
||||
scheme=url.scheme or url_scheme, |
||||
netloc=url.netloc or http_host, |
||||
path=app_root.lstrip('/') |
||||
) |
||||
path = url.path |
||||
|
||||
if url.query: |
||||
sep = b'?' if isinstance(url.query, bytes) else '?' |
||||
path += sep + url.query |
||||
|
||||
if 'json' in kwargs: |
||||
assert 'data' not in kwargs, ( |
||||
"Client cannot provide both 'json' and 'data'." |
||||
) |
||||
|
||||
# push a context so flask.json can use app's json attributes |
||||
with app.app_context(): |
||||
kwargs['data'] = json_dumps(kwargs.pop('json')) |
||||
|
||||
if 'content_type' not in kwargs: |
||||
kwargs['content_type'] = 'application/json' |
||||
|
||||
return EnvironBuilder(path, base_url, *args, **kwargs) |
||||
|
||||
|
||||
class FlaskClient(Client): |
||||
"""Works like a regular Werkzeug test client but has some knowledge about |
||||
how Flask works to defer the cleanup of the request context stack to the |
||||
end of a ``with`` body when used in a ``with`` statement. For general |
||||
information about how to use this class refer to |
||||
:class:`werkzeug.test.Client`. |
||||
|
||||
.. versionchanged:: 0.12 |
||||
The test client returned by `app.test_client()` includes a preset default
environment, which can be customized after instantiation via
`client.environ_base`.
||||
|
||||
Basic usage is outlined in the :ref:`testing` chapter. |
||||
""" |
||||
|
||||
preserve_context = False |
||||
|
||||
def __init__(self, *args, **kwargs): |
||||
super(FlaskClient, self).__init__(*args, **kwargs) |
||||
self.environ_base = { |
||||
"REMOTE_ADDR": "127.0.0.1", |
||||
"HTTP_USER_AGENT": "werkzeug/" + werkzeug.__version__ |
||||
} |
||||
|
||||
@contextmanager |
||||
def session_transaction(self, *args, **kwargs): |
||||
"""When used in combination with a ``with`` statement this opens a |
||||
session transaction. This can be used to modify the session that |
||||
the test client uses. Once the ``with`` block is left the session is |
||||
stored back. |
||||
|
||||
:: |
||||
|
||||
with client.session_transaction() as session: |
||||
session['value'] = 42 |
||||
|
||||
Internally this is implemented by going through a temporary test |
||||
request context and since session handling could depend on |
||||
request variables this function accepts the same arguments as |
||||
:meth:`~flask.Flask.test_request_context` which are directly |
||||
passed through. |
||||
""" |
||||
if self.cookie_jar is None: |
||||
raise RuntimeError('Session transactions only make sense ' |
||||
'with cookies enabled.') |
||||
app = self.application |
||||
environ_overrides = kwargs.setdefault('environ_overrides', {}) |
||||
self.cookie_jar.inject_wsgi(environ_overrides) |
||||
outer_reqctx = _request_ctx_stack.top |
||||
with app.test_request_context(*args, **kwargs) as c: |
||||
session_interface = app.session_interface |
||||
sess = session_interface.open_session(app, c.request) |
||||
if sess is None: |
||||
raise RuntimeError('Session backend did not open a session. ' |
||||
'Check the configuration') |
||||
|
||||
# Since we have to open a new request context for the session |
||||
# handling we want to make sure that we hide our own context
||||
# from the caller. By pushing the original request context |
||||
# (or None) on top of this and popping it we get exactly that |
||||
# behavior. It's important to not use the push and pop |
||||
# methods of the actual request context object since that would |
||||
# mean that cleanup handlers are called |
||||
_request_ctx_stack.push(outer_reqctx) |
||||
try: |
||||
yield sess |
||||
finally: |
||||
_request_ctx_stack.pop() |
||||
|
||||
resp = app.response_class() |
||||
if not session_interface.is_null_session(sess): |
||||
session_interface.save_session(app, sess, resp) |
||||
headers = resp.get_wsgi_headers(c.request.environ) |
||||
self.cookie_jar.extract_wsgi(c.request.environ, headers) |
||||
|
||||
def open(self, *args, **kwargs): |
||||
as_tuple = kwargs.pop('as_tuple', False) |
||||
buffered = kwargs.pop('buffered', False) |
||||
follow_redirects = kwargs.pop('follow_redirects', False) |
||||
|
||||
if ( |
||||
not kwargs and len(args) == 1 |
||||
and isinstance(args[0], (EnvironBuilder, dict)) |
||||
): |
||||
environ = self.environ_base.copy() |
||||
|
||||
if isinstance(args[0], EnvironBuilder): |
||||
environ.update(args[0].get_environ()) |
||||
else: |
||||
environ.update(args[0]) |
||||
|
||||
environ['flask._preserve_context'] = self.preserve_context |
||||
else: |
||||
kwargs.setdefault('environ_overrides', {}) \ |
||||
['flask._preserve_context'] = self.preserve_context |
||||
kwargs.setdefault('environ_base', self.environ_base) |
||||
builder = make_test_environ_builder( |
||||
self.application, *args, **kwargs |
||||
) |
||||
|
||||
try: |
||||
environ = builder.get_environ() |
||||
finally: |
||||
builder.close() |
||||
|
||||
return Client.open( |
||||
self, environ, |
||||
as_tuple=as_tuple, |
||||
buffered=buffered, |
||||
follow_redirects=follow_redirects |
||||
) |
||||
|
||||
def __enter__(self): |
||||
if self.preserve_context: |
||||
raise RuntimeError('Cannot nest client invocations') |
||||
self.preserve_context = True |
||||
return self |
||||
|
||||
def __exit__(self, exc_type, exc_value, tb): |
||||
self.preserve_context = False |
||||
|
||||
# on exit we want to clean up earlier. Normally the request context |
||||
# stays preserved until the next request in the same thread comes |
||||
# in. See RequestGlobals.push() for the general behavior. |
||||
top = _request_ctx_stack.top |
||||
if top is not None and top.preserved: |
||||
top.pop() |
||||
|
||||
|
||||
class FlaskCliRunner(CliRunner): |
||||
"""A :class:`~click.testing.CliRunner` for testing a Flask app's |
||||
CLI commands. Typically created using |
||||
:meth:`~flask.Flask.test_cli_runner`. See :ref:`testing-cli`. |
||||
""" |
||||
def __init__(self, app, **kwargs): |
||||
self.app = app |
||||
super(FlaskCliRunner, self).__init__(**kwargs) |
||||
|
||||
def invoke(self, cli=None, args=None, **kwargs): |
||||
"""Invokes a CLI command in an isolated environment. See |
||||
:meth:`CliRunner.invoke <click.testing.CliRunner.invoke>` for |
||||
full method documentation. See :ref:`testing-cli` for examples. |
||||
|
||||
If the ``obj`` argument is not given, passes an instance of |
||||
:class:`~flask.cli.ScriptInfo` that knows how to load the Flask |
||||
app being tested. |
||||
|
||||
:param cli: Command object to invoke. Default is the app's |
||||
:attr:`~flask.app.Flask.cli` group. |
||||
:param args: List of strings to invoke the command with. |
||||
|
||||
:return: a :class:`~click.testing.Result` object. |
||||
""" |
||||
if cli is None: |
||||
cli = self.app.cli |
||||
|
||||
if 'obj' not in kwargs: |
||||
kwargs['obj'] = ScriptInfo(create_app=lambda: self.app) |
||||
|
||||
return super(FlaskCliRunner, self).invoke(cli, args, **kwargs) |
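
# A minimal usage sketch (an illustrative addition, not part of this module):
# app.test_cli_runner() returns a FlaskCliRunner bound to the app; 'routes'
# is a built-in Flask CLI command used here only for illustration.
def _example_cli_invocation(app):
    runner = app.test_cli_runner()
    result = runner.invoke(args=['routes'])
    return result.exit_code == 0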
@ -0,0 +1,158 @@ |
||||
# -*- coding: utf-8 -*- |
||||
""" |
||||
flask.views |
||||
~~~~~~~~~~~ |
||||
|
||||
This module provides class-based views inspired by the ones in Django. |
||||
|
||||
:copyright: © 2010 by the Pallets team. |
||||
:license: BSD, see LICENSE for more details. |
||||
""" |
||||
|
||||
from .globals import request |
||||
from ._compat import with_metaclass |
||||
|
||||
|
||||
http_method_funcs = frozenset(['get', 'post', 'head', 'options', |
||||
'delete', 'put', 'trace', 'patch']) |
||||
|
||||
|
||||
class View(object): |
||||
"""Alternative way to use view functions. A subclass has to implement |
||||
:meth:`dispatch_request` which is called with the view arguments from |
||||
the URL routing system. If :attr:`methods` is provided the methods |
||||
do not have to be passed to the :meth:`~flask.Flask.add_url_rule` |
||||
method explicitly:: |
||||
|
||||
class MyView(View): |
||||
methods = ['GET'] |
||||
|
||||
def dispatch_request(self, name): |
||||
return 'Hello %s!' % name |
||||
|
||||
app.add_url_rule('/hello/<name>', view_func=MyView.as_view('myview')) |
||||
|
||||
When you want to decorate a pluggable view you will have to either do that |
||||
when the view function is created (by wrapping the return value of |
||||
:meth:`as_view`) or you can use the :attr:`decorators` attribute:: |
||||
|
||||
class SecretView(View): |
||||
methods = ['GET'] |
||||
decorators = [superuser_required] |
||||
|
||||
def dispatch_request(self): |
||||
... |
||||
|
||||
The decorators stored in the decorators list are applied one after another |
||||
when the view function is created. Note that you can *not* use the class |
||||
based decorators since those would decorate the view class and not the |
||||
generated view function! |
||||
""" |
||||
|
||||
#: A list of methods this view can handle. |
||||
methods = None |
||||
|
||||
#: Setting this disables or force-enables the automatic options handling. |
||||
provide_automatic_options = None |
||||
|
||||
#: The canonical way to decorate class-based views is to decorate the |
||||
#: return value of as_view(). However, this moves parts of the
#: logic from the class declaration to the place where it's hooked
#: into the routing system.
||||
#: |
||||
#: You can place one or more decorators in this list and whenever the |
||||
#: view function is created the result is automatically decorated. |
||||
#: |
||||
#: .. versionadded:: 0.8 |
||||
decorators = () |
||||
|
||||
def dispatch_request(self): |
||||
"""Subclasses have to override this method to implement the |
||||
actual view function code. This method is called with all |
||||
the arguments from the URL rule. |
||||
""" |
||||
raise NotImplementedError() |
||||
|
||||
@classmethod |
||||
def as_view(cls, name, *class_args, **class_kwargs): |
||||
"""Converts the class into an actual view function that can be used |
||||
with the routing system. Internally this generates a function on the |
||||
fly which will instantiate the :class:`View` on each request and call |
||||
the :meth:`dispatch_request` method on it. |
||||
|
||||
The arguments passed to :meth:`as_view` are forwarded to the |
||||
constructor of the class. |
||||
""" |
||||
def view(*args, **kwargs): |
||||
self = view.view_class(*class_args, **class_kwargs) |
||||
return self.dispatch_request(*args, **kwargs) |
||||
|
||||
if cls.decorators: |
||||
view.__name__ = name |
||||
view.__module__ = cls.__module__ |
||||
for decorator in cls.decorators: |
||||
view = decorator(view) |
||||
|
||||
# We attach the view class to the view function for two reasons: |
||||
# first of all it allows us to easily figure out what class-based |
||||
# view this thing came from, secondly it's also used for instantiating |
||||
# the view class so you can actually replace it with something else |
||||
# for testing purposes and debugging. |
||||
view.view_class = cls |
||||
view.__name__ = name |
||||
view.__doc__ = cls.__doc__ |
||||
view.__module__ = cls.__module__ |
||||
view.methods = cls.methods |
||||
view.provide_automatic_options = cls.provide_automatic_options |
||||
return view |
||||
|
||||
|
||||
class MethodViewType(type): |
||||
"""Metaclass for :class:`MethodView` that determines what methods the view |
||||
defines. |
||||
""" |
||||
|
||||
def __init__(cls, name, bases, d): |
||||
super(MethodViewType, cls).__init__(name, bases, d) |
||||
|
||||
if 'methods' not in d: |
||||
methods = set() |
||||
|
||||
for key in http_method_funcs: |
||||
if hasattr(cls, key): |
||||
methods.add(key.upper()) |
||||
|
||||
# If we have no method at all in there we don't want to add a |
||||
# method list. This is for instance the case for the base class |
||||
# or another subclass of a base method view that does not introduce |
||||
# new methods. |
||||
if methods: |
||||
cls.methods = methods |
||||
|
||||
|
||||
class MethodView(with_metaclass(MethodViewType, View)): |
||||
"""A class-based view that dispatches request methods to the corresponding |
||||
class methods. For example, if you implement a ``get`` method, it will be |
||||
used to handle ``GET`` requests. :: |
||||
|
||||
class CounterAPI(MethodView): |
||||
def get(self): |
||||
return session.get('counter', 0) |
||||
|
||||
def post(self): |
||||
session['counter'] = session.get('counter', 0) + 1 |
||||
return 'OK' |
||||
|
||||
app.add_url_rule('/counter', view_func=CounterAPI.as_view('counter')) |
||||
""" |
||||
|
||||
def dispatch_request(self, *args, **kwargs): |
||||
meth = getattr(self, request.method.lower(), None) |
||||
|
||||
# If the request method is HEAD and we don't have a handler for it |
||||
# retry with GET. |
||||
if meth is None and request.method == 'HEAD': |
||||
meth = getattr(self, 'get', None) |
||||
|
||||
assert meth is not None, 'Unimplemented method %r' % request.method |
||||
return meth(*args, **kwargs) |
@ -0,0 +1,216 @@ |
||||
# -*- coding: utf-8 -*- |
||||
""" |
||||
flask.wrappers |
||||
~~~~~~~~~~~~~~ |
||||
|
||||
Implements the WSGI wrappers (request and response). |
||||
|
||||
:copyright: © 2010 by the Pallets team. |
||||
:license: BSD, see LICENSE for more details. |
||||
""" |
||||
|
||||
from werkzeug.exceptions import BadRequest |
||||
from werkzeug.wrappers import Request as RequestBase, Response as ResponseBase |
||||
|
||||
from flask import json |
||||
from flask.globals import current_app |
||||
|
||||
|
||||
class JSONMixin(object): |
||||
"""Common mixin for both request and response objects to provide JSON |
||||
parsing capabilities. |
||||
|
||||
.. versionadded:: 1.0 |
||||
""" |
||||
|
||||
_cached_json = (Ellipsis, Ellipsis) |
||||
|
||||
@property |
||||
def is_json(self): |
||||
"""Check if the mimetype indicates JSON data, either |
||||
:mimetype:`application/json` or :mimetype:`application/*+json`. |
||||
|
||||
.. versionadded:: 0.11 |
||||
""" |
||||
mt = self.mimetype |
||||
return ( |
||||
mt == 'application/json' |
||||
or (mt.startswith('application/') and mt.endswith('+json'))
||||
) |
||||
|
||||
@property |
||||
def json(self): |
||||
"""This will contain the parsed JSON data if the mimetype indicates |
||||
JSON (:mimetype:`application/json`, see :meth:`is_json`), otherwise it |
||||
will be ``None``. |
||||
""" |
||||
return self.get_json() |
||||
|
||||
def _get_data_for_json(self, cache): |
||||
return self.get_data(cache=cache) |
||||
|
||||
def get_json(self, force=False, silent=False, cache=True): |
||||
"""Parse and return the data as JSON. If the mimetype does not |
||||
indicate JSON (:mimetype:`application/json`, see |
||||
:meth:`is_json`), this returns ``None`` unless ``force`` is |
||||
true. If parsing fails, :meth:`on_json_loading_failed` is called |
||||
and its return value is used as the return value. |
||||
|
||||
:param force: Ignore the mimetype and always try to parse JSON. |
||||
:param silent: Silence parsing errors and return ``None`` |
||||
instead. |
||||
:param cache: Store the parsed JSON to return for subsequent |
||||
calls. |
||||
""" |
||||
if cache and self._cached_json[silent] is not Ellipsis: |
||||
return self._cached_json[silent] |
||||
|
||||
if not (force or self.is_json): |
||||
return None |
||||
|
||||
data = self._get_data_for_json(cache=cache) |
||||
|
||||
try: |
||||
rv = json.loads(data) |
||||
except ValueError as e: |
||||
if silent: |
||||
rv = None |
||||
if cache: |
||||
normal_rv, _ = self._cached_json |
||||
self._cached_json = (normal_rv, rv) |
||||
else: |
||||
rv = self.on_json_loading_failed(e) |
||||
if cache: |
||||
_, silent_rv = self._cached_json |
||||
self._cached_json = (rv, silent_rv) |
||||
else: |
||||
if cache: |
||||
self._cached_json = (rv, rv) |
||||
|
||||
return rv |
||||
|
||||
def on_json_loading_failed(self, e): |
||||
"""Called if :meth:`get_json` parsing fails and isn't silenced. If |
||||
this method returns a value, it is used as the return value for |
||||
:meth:`get_json`. The default implementation raises a |
||||
:class:`BadRequest` exception. |
||||
|
||||
.. versionchanged:: 0.10 |
||||
Raise a :exc:`BadRequest` error instead of returning an error |
||||
message as JSON. If you want that behavior you can add it by |
||||
subclassing. |
||||
|
||||
.. versionadded:: 0.8 |
||||
""" |
||||
if current_app is not None and current_app.debug: |
||||
raise BadRequest('Failed to decode JSON object: {0}'.format(e)) |
||||
|
||||
raise BadRequest() |
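
# A minimal usage sketch (an illustrative addition, not part of this module):
# inside a request context, get_json(silent=True) returns None instead of
# raising BadRequest when the body is not valid JSON.  The view name is
# hypothetical.
def _example_json_view():
    from flask import jsonify, request
    payload = request.get_json(silent=True)
    if payload is None:
        return jsonify(error='expected a JSON body'), 400
    return jsonify(received=payload)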
||||
|
||||
|
||||
class Request(RequestBase, JSONMixin): |
||||
"""The request object used by default in Flask. Remembers the |
||||
matched endpoint and view arguments. |
||||
|
||||
It is what ends up as :class:`~flask.request`. If you want to replace |
||||
the request object used you can subclass this and set |
||||
:attr:`~flask.Flask.request_class` to your subclass. |
||||
|
||||
The request object is a :class:`~werkzeug.wrappers.Request` subclass and |
||||
provides all of the attributes Werkzeug defines plus a few Flask |
||||
specific ones. |
||||
""" |
||||
|
||||
#: The internal URL rule that matched the request. This can be |
||||
#: useful to inspect which methods are allowed for the URL from |
||||
#: a before/after handler (``request.url_rule.methods``) etc. |
||||
#: Though if the request's method was invalid for the URL rule, |
||||
#: the valid list is available in ``routing_exception.valid_methods`` |
||||
#: instead (an attribute of the Werkzeug exception :exc:`~werkzeug.exceptions.MethodNotAllowed`) |
||||
#: because the request was never internally bound. |
||||
#: |
||||
#: .. versionadded:: 0.6 |
||||
url_rule = None |
||||
|
||||
#: A dict of view arguments that matched the request. If an exception |
||||
#: happened when matching, this will be ``None``. |
||||
view_args = None |
||||
|
||||
#: If matching the URL failed, this is the exception that will be |
||||
#: raised / was raised as part of the request handling. This is |
||||
#: usually a :exc:`~werkzeug.exceptions.NotFound` exception or |
||||
#: something similar. |
||||
routing_exception = None |
||||
|
||||
@property |
||||
def max_content_length(self): |
||||
"""Read-only view of the ``MAX_CONTENT_LENGTH`` config key.""" |
||||
if current_app: |
||||
return current_app.config['MAX_CONTENT_LENGTH'] |
||||
|
||||
@property |
||||
def endpoint(self): |
||||
"""The endpoint that matched the request. This in combination with |
||||
:attr:`view_args` can be used to reconstruct the same or a |
||||
modified URL. If an exception happened when matching, this will |
||||
be ``None``. |
||||
""" |
||||
if self.url_rule is not None: |
||||
return self.url_rule.endpoint |
||||
|
||||
@property |
||||
def blueprint(self): |
||||
"""The name of the current blueprint""" |
||||
if self.url_rule and '.' in self.url_rule.endpoint: |
||||
return self.url_rule.endpoint.rsplit('.', 1)[0] |
||||
|
||||
def _load_form_data(self): |
||||
RequestBase._load_form_data(self) |
||||
|
||||
# In debug mode we're replacing the files multidict with an ad-hoc |
||||
# subclass that raises a different error for key errors. |
||||
if ( |
||||
current_app |
||||
and current_app.debug |
||||
and self.mimetype != 'multipart/form-data' |
||||
and not self.files |
||||
): |
||||
from .debughelpers import attach_enctype_error_multidict |
||||
attach_enctype_error_multidict(self) |
||||
|
||||
|
||||
class Response(ResponseBase, JSONMixin): |
||||
"""The response object that is used by default in Flask. Works like the |
||||
response object from Werkzeug but is set to have an HTML mimetype by |
||||
default. Quite often you don't have to create this object yourself because |
||||
:meth:`~flask.Flask.make_response` will take care of that for you. |
||||
|
||||
If you want to replace the response object used you can subclass this and |
||||
set :attr:`~flask.Flask.response_class` to your subclass. |
||||
|
||||
.. versionchanged:: 1.0 |
||||
JSON support is added to the response, like the request. This is useful |
||||
when testing to get the test client response data as JSON. |
||||
|
||||
.. versionchanged:: 1.0 |
||||
|
||||
Added :attr:`max_cookie_size`. |
||||
""" |
||||
|
||||
default_mimetype = 'text/html' |
||||
|
||||
def _get_data_for_json(self, cache): |
||||
return self.get_data() |
||||
|
||||
@property |
||||
def max_cookie_size(self): |
||||
"""Read-only view of the :data:`MAX_COOKIE_SIZE` config key. |
||||
|
||||
See :attr:`~werkzeug.wrappers.BaseResponse.max_cookie_size` in |
||||
Werkzeug's docs. |
||||
""" |
||||
if current_app: |
||||
return current_app.config['MAX_COOKIE_SIZE'] |
||||
|
||||
# return Werkzeug's default when not in an app context |
||||
return super(Response, self).max_cookie_size |
@ -0,0 +1 @@ |
||||
pip |
@ -0,0 +1,23 @@ |
||||
2009-2018 (c) Benoît Chesneau <benoitc@e-engura.org> |
||||
2009-2015 (c) Paul J. Davis <paul.joseph.davis@gmail.com> |
||||
|
||||
Permission is hereby granted, free of charge, to any person |
||||
obtaining a copy of this software and associated documentation |
||||
files (the "Software"), to deal in the Software without |
||||
restriction, including without limitation the rights to use, |
||||
copy, modify, merge, publish, distribute, sublicense, and/or sell |
||||
copies of the Software, and to permit persons to whom the |
||||
Software is furnished to do so, subject to the following |
||||
conditions: |
||||
|
||||
The above copyright notice and this permission notice shall be |
||||
included in all copies or substantial portions of the Software. |
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, |
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES |
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND |
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT |
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, |
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR |
||||
OTHER DEALINGS IN THE SOFTWARE. |
@ -0,0 +1,111 @@ |
||||
Metadata-Version: 2.1 |
||||
Name: gunicorn |
||||
Version: 19.8.1 |
||||
Summary: WSGI HTTP Server for UNIX |
||||
Home-page: http://gunicorn.org |
||||
Author: Benoit Chesneau |
||||
Author-email: benoitc@e-engura.com |
||||
License: MIT |
||||
Platform: UNKNOWN |
||||
Classifier: Development Status :: 4 - Beta |
||||
Classifier: Environment :: Other Environment |
||||
Classifier: Intended Audience :: Developers |
||||
Classifier: License :: OSI Approved :: MIT License |
||||
Classifier: Operating System :: MacOS :: MacOS X |
||||
Classifier: Operating System :: POSIX |
||||
Classifier: Programming Language :: Python |
||||
Classifier: Programming Language :: Python :: 2 |
||||
Classifier: Programming Language :: Python :: 2.6 |
||||
Classifier: Programming Language :: Python :: 2.7 |
||||
Classifier: Programming Language :: Python :: 3 |
||||
Classifier: Programming Language :: Python :: 3.2 |
||||
Classifier: Programming Language :: Python :: 3.3 |
||||
Classifier: Programming Language :: Python :: 3.4 |
||||
Classifier: Programming Language :: Python :: 3.5 |
||||
Classifier: Programming Language :: Python :: 3.6 |
||||
Classifier: Topic :: Internet |
||||
Classifier: Topic :: Utilities |
||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules |
||||
Classifier: Topic :: Internet :: WWW/HTTP |
||||
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI |
||||
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Server |
||||
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content |
||||
Requires-Python: >=2.6, !=3.0.*, !=3.1.* |
||||
Provides-Extra: gevent |
||||
Provides-Extra: tornado |
||||
Provides-Extra: eventlet |
||||
Provides-Extra: gthread |
||||
Provides-Extra: eventlet |
||||
Requires-Dist: eventlet (>=0.9.7); extra == 'eventlet' |
||||
Provides-Extra: gevent |
||||
Requires-Dist: gevent (>=0.13); extra == 'gevent' |
||||
Provides-Extra: gthread |
||||
Provides-Extra: tornado |
||||
Requires-Dist: tornado (>=0.2); extra == 'tornado' |
||||
|
||||
Gunicorn |
||||
-------- |
||||
|
||||
.. image:: https://img.shields.io/pypi/v/gunicorn.svg?style=flat |
||||
:alt: PyPI version |
||||
:target: https://pypi.python.org/pypi/gunicorn |
||||
|
||||
.. image:: https://img.shields.io/pypi/pyversions/gunicorn.svg |
||||
:alt: Supported Python versions |
||||
:target: https://pypi.python.org/pypi/gunicorn |
||||
|
||||
.. image:: https://travis-ci.org/benoitc/gunicorn.svg?branch=master |
||||
:alt: Build Status |
||||
:target: https://travis-ci.org/benoitc/gunicorn |
||||
|
||||
Gunicorn 'Green Unicorn' is a Python WSGI HTTP Server for UNIX. It's a pre-fork |
||||
worker model ported from Ruby's Unicorn_ project. The Gunicorn server is broadly |
||||
compatible with various web frameworks, simply implemented, light on server |
||||
resource usage, and fairly speedy. |
||||
|
||||
Feel free to join us in `#gunicorn`_ on Freenode_. |
||||
|
||||
Documentation |
||||
------------- |
||||
|
||||
The documentation is hosted at http://docs.gunicorn.org. |
||||
|
||||
Installation |
||||
------------ |
||||
|
||||
Gunicorn requires **Python 2.x >= 2.6** or **Python 3.x >= 3.2**. |
||||
|
||||
Install from PyPI:: |
||||
|
||||
$ pip install gunicorn |
||||
|
||||
|
||||
Usage |
||||
----- |
||||
|
||||
Basic usage:: |
||||
|
||||
$ gunicorn [OPTIONS] APP_MODULE |
||||
|
||||
Where ``APP_MODULE`` is of the pattern ``$(MODULE_NAME):$(VARIABLE_NAME)``. The |
||||
module name can be a full dotted path. The variable name refers to a WSGI |
||||
callable that should be found in the specified module. |
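
For example, a module ``myapp.py`` exposing a WSGI callable named ``app``
(both names are illustrative, not part of this package) could look like::

    def app(environ, start_response):
        data = b"Hello, World!\n"
        start_response("200 OK", [
            ("Content-Type", "text/plain"),
            ("Content-Length", str(len(data))),
        ])
        return iter([data])

and would be served with ``gunicorn myapp:app``.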
||||
|
||||
Example with test app:: |
||||
|
||||
$ cd examples |
||||
$ gunicorn --workers=2 test:app |
||||
|
||||
|
||||
License |
||||
------- |
||||
|
||||
Gunicorn is released under the MIT License. See the LICENSE_ file for more |
||||
details. |
||||
|
||||
.. _Unicorn: https://bogomips.org/unicorn/ |
||||
.. _`#gunicorn`: https://webchat.freenode.net/?channels=gunicorn |
||||
.. _Freenode: https://freenode.net/ |
||||
.. _LICENSE: https://github.com/benoitc/gunicorn/blob/master/LICENSE |
||||
|
||||
|
@ -0,0 +1,88 @@ |
||||
gunicorn/__init__.py,sha256=Y1BlVaRVNjIKXqSzcqTNHo9dq2eG6kgPx5jWJ3h5GRE,255 |
||||
gunicorn/_compat.py,sha256=5cXb6vMfVzInDq-AHNyZfsK-UG5NetDn62nPfqylHSU,9355 |
||||
gunicorn/arbiter.py,sha256=AbJNSFnTmx9Qd-vZAqEH3y5fz8ydPmyli_BERNIwdyE,20158 |
||||
gunicorn/argparse_compat.py,sha256=gsHDGwo4BSJWHdiaEXy0Emr96NKC0LDYmK5nB7PE8Qc,87791 |
||||
gunicorn/config.py,sha256=HDoWZ0JyoPzl2WW6hlDatDevvhfah4oS_j7tOq8Pa7E,53417 |
||||
gunicorn/debug.py,sha256=UUw-eteLEm_OQ98D6K3XtDjx4Dya2H35zdiu8z7F7uc,2289 |
||||
gunicorn/errors.py,sha256=JlDBjag90gMiRwLHG3xzEJzDOntSl1iM32R277-U6j0,919 |
||||
gunicorn/glogging.py,sha256=PMdoe6hdBQWKGlnP4lXdBph6b1ygD0kknxkDsmNIVSU,15554 |
||||
gunicorn/pidfile.py,sha256=_69tsfF1aHklrMrJe2sHERovMduRByVTv99my7yQ874,2357 |
||||
gunicorn/reloader.py,sha256=fh4J7w_DxWaFuFd3G4RyOgDFs1C1lrd0w7jOXItSu5g,3791 |
||||
gunicorn/selectors.py,sha256=14_UESrpE3AQKXWKeeAUG9vBTzJ0yTYDGtEo6xOtlDY,18997 |
||||
gunicorn/six.py,sha256=6N-6RCENPfBtMpN5UmgDfDKmJebbbuPu_Dk3Zf8ngww,27344 |
||||
gunicorn/sock.py,sha256=gX2FsdsOGMCtSHbDXn7lsiYYYRc3roQklIJLip1oZQo,6019 |
||||
gunicorn/systemd.py,sha256=ffhv17cdv-hDeFAJi1eAVtJskkVciV6cQU75Q2oplqg,1362 |
||||
gunicorn/util.py,sha256=Ns_a8Pf7MkaEi0KbV3GsP9aVQ2a_S45EjSE6Iyg2tYU,16229 |
||||
gunicorn/app/__init__.py,sha256=GuqstqdkizeV4HRbd8aGMBn0Q8IDOyRU1wMMNqNe5GY,127 |
||||
gunicorn/app/base.py,sha256=LKxyziLMPNlK3qm6dPMieELBqfLfmwBFnn9SB-KBogE,6652 |
||||
gunicorn/app/pasterapp.py,sha256=AGzZnUpcpw8O8KrizxTgdJBZ4lQdrHgsV0gdx7FVTs8,6046 |
||||
gunicorn/app/wsgiapp.py,sha256=ny71qjegQHl_bGMjNfq_aemPrmGEpH2bMRIdph6bj4Q,1870 |
||||
gunicorn/http/__init__.py,sha256=b4TF3x5F0VYOPTOeNYwRGR1EYHBaPMhZRMoNeuD5-n0,277 |
||||
gunicorn/http/_sendfile.py,sha256=Eqd-s3HlvLuyfGjqaH_Jk72cAtEV8hQv5tb1M1AqcBU,2217 |
||||
gunicorn/http/body.py,sha256=MmlZpj_6oRPj3oPVSMQZr0X3KH6ikntxDnVcLgfekZs,7345 |
||||
gunicorn/http/errors.py,sha256=sNjF2lm4m2qyZ9l95_U33FRxPXpxXzjnZyYqWS-hxd4,2850 |
||||
gunicorn/http/message.py,sha256=G5po0upwbrTyIggb_IEAItIjSi_aDoWYLPQ62o8pOI4,12257 |
||||
gunicorn/http/parser.py,sha256=IRMvp0veP4wL8Z4vgNV72CPydCNPdNNIy9u-DlDvvSo,1294 |
||||
gunicorn/http/unreader.py,sha256=s4kDW5euiJPsDuHzCqFXUtHCApqIxpShb9dtAyjJw9Y,2019 |
||||
gunicorn/http/wsgi.py,sha256=_5zYFl5HGKrPpEMrEnsLyPreOJcTgaYzsEBuJkEOmko,12852 |
||||
gunicorn/instrument/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 |
||||
gunicorn/instrument/statsd.py,sha256=5xueDuTZMFtmS8ayGT4sU_OyB9qkEv4Agk-eJwAmhJM,4434 |
||||
gunicorn/workers/__init__.py,sha256=Z57G1WjnZDCG52C8PgiXF4mKRKqlv81b2GHkhOJiO6A,774 |
||||
gunicorn/workers/_gaiohttp.py,sha256=CFKiyLNqWqemhDvDovb-JqMRTMNz50gZUwwOpIjrpHw,5071 |
||||
gunicorn/workers/async.py,sha256=54VkS3S_wrFD7v3jInhFfkeBhaPnV5UN-cu-i5MoXkc,5575 |
||||
gunicorn/workers/base.py,sha256=eYdcy2EPlydcjyi2CpgeU0tqlJpz3_kt-RbGhoamvQ8,9126 |
||||
gunicorn/workers/gaiohttp.py,sha256=3rhXky6APkhI0D9nwXlogLo_Jd9v98CiEuCy9inzCU4,823 |
||||
gunicorn/workers/geventlet.py,sha256=L7N2bizKQw8VXb02teu1_wYVG5hwt9SSaQn7J0kSKyI,4253 |
||||
gunicorn/workers/ggevent.py,sha256=hzx2kOZP13yVBz-EBthoTgjalOeRKfoUk5XchmRKzDM,7407 |
||||
gunicorn/workers/gthread.py,sha256=HIoWuylHZfH1wlSh4eZ8wxo1kQ5abvdUaFfKfIsgQvI,12009 |
||||
gunicorn/workers/gtornado.py,sha256=LtBWnEX7MNpeGX-YmlBoV1_OOhjkdytFmt1pzOlRPZk,5044 |
||||
gunicorn/workers/sync.py,sha256=_vd1JATNLG4MgJppNJG5KWBIzLGYqRzhEAQVz9H11LI,7153 |
||||
gunicorn/workers/workertmp.py,sha256=6QINPBrriLvezgkC_hclOOeXLi_owMt_SOA5KPEIN-A,1459 |
||||
gunicorn-19.8.1.dist-info/LICENSE.txt,sha256=eJ_hG5Lhyr-890S1_MOSyb1cZ5hgOk6J-SW2M3mE0d8,1136 |
||||
gunicorn-19.8.1.dist-info/METADATA,sha256=w9Od3TWYwIXsx_CAyLy5OZ1h5v9C_T0_mhK9psl1x5Q,3388 |
||||
gunicorn-19.8.1.dist-info/RECORD,, |
||||
gunicorn-19.8.1.dist-info/WHEEL,sha256=J3CsTk7Mf2JNUyhImI-mjX-fmI4oDjyiXgWT4qgZiCE,110 |
||||
gunicorn-19.8.1.dist-info/entry_points.txt,sha256=XeFINKRdSUKwJwaVSolO24PuV_YeO71IMF-rOra5JO8,184 |
||||
gunicorn-19.8.1.dist-info/top_level.txt,sha256=cdMaa2yhxb8do-WioY9qRHUCfwf55YztjwQCncaInoE,9 |
||||
../../../bin/gunicorn,sha256=3n02jiI_1KSuGRVnArdMBsMwXwNk9qYNxest7n5NCtA,226 |
||||
../../../bin/gunicorn_paster,sha256=VpfTHNYpEn2RVEjyLdmq1-_lArYY2joHAgAlGrbIjeg,228 |
||||
gunicorn-19.8.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 |
||||
gunicorn/debug.pyc,, |
||||
gunicorn/workers/sync.pyc,, |
||||
gunicorn/http/wsgi.pyc,, |
||||
gunicorn/workers/async.pyc,, |
||||
gunicorn/workers/workertmp.pyc,, |
||||
gunicorn/http/message.pyc,, |
||||
gunicorn/instrument/__init__.pyc,, |
||||
gunicorn/http/unreader.pyc,, |
||||
gunicorn/app/__init__.pyc,, |
||||
gunicorn/systemd.pyc,, |
||||
gunicorn/config.pyc,, |
||||
gunicorn/workers/base.pyc,, |
||||
gunicorn/app/wsgiapp.pyc,, |
||||
gunicorn/workers/gaiohttp.pyc,, |
||||
gunicorn/app/base.pyc,, |
||||
gunicorn/arbiter.pyc,, |
||||
gunicorn/workers/ggevent.pyc,, |
||||
gunicorn/http/errors.pyc,, |
||||
gunicorn/workers/gtornado.pyc,, |
||||
gunicorn/six.pyc,, |
||||
gunicorn/http/__init__.pyc,, |
||||
gunicorn/pidfile.pyc,, |
||||
gunicorn/workers/gthread.pyc,, |
||||
gunicorn/_compat.pyc,, |
||||
gunicorn/errors.pyc,, |
||||
gunicorn/http/body.pyc,, |
||||
gunicorn/instrument/statsd.pyc,, |
||||
gunicorn/__init__.pyc,, |
||||
gunicorn/http/parser.pyc,, |
||||
gunicorn/selectors.pyc,, |
||||
gunicorn/sock.pyc,, |
||||
gunicorn/reloader.pyc,, |
||||
gunicorn/workers/__init__.pyc,, |
||||
gunicorn/argparse_compat.pyc,, |
||||
gunicorn/glogging.pyc,, |
||||
gunicorn/http/_sendfile.pyc,, |
||||
gunicorn/workers/geventlet.pyc,, |
||||
gunicorn/util.pyc,, |
||||
gunicorn/app/pasterapp.pyc,, |
@ -0,0 +1,6 @@ |
||||
Wheel-Version: 1.0 |
||||
Generator: bdist_wheel (0.31.0) |
||||
Root-Is-Purelib: true |
||||
Tag: py2-none-any |
||||
Tag: py3-none-any |
||||
|
@ -0,0 +1,8 @@ |
||||
|
||||
[console_scripts] |
||||
gunicorn=gunicorn.app.wsgiapp:run |
||||
gunicorn_paster=gunicorn.app.pasterapp:run |
||||
|
||||
[paste.server_runner] |
||||
main=gunicorn.app.pasterapp:paste_server |
||||
|
@ -0,0 +1 @@ |
||||
gunicorn |
@ -0,0 +1,8 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
||||
|
||||
version_info = (19, 8, 1) |
||||
__version__ = ".".join([str(v) for v in version_info]) |
||||
SERVER_SOFTWARE = "gunicorn/%s" % __version__ |
@ -0,0 +1,298 @@ |
||||
import sys |
||||
|
||||
from gunicorn import six |
||||
|
||||
PY26 = (sys.version_info[:2] == (2, 6)) |
||||
PY33 = (sys.version_info >= (3, 3)) |
||||
|
||||
|
||||
def _check_if_pyc(fname): |
||||
"""Return True if the extension is .pyc, False if .py |
||||
and None if otherwise""" |
||||
from imp import find_module |
||||
from os.path import realpath, dirname, basename, splitext |
||||
|
||||
# Normalize the file-path for the find_module() |
||||
filepath = realpath(fname) |
||||
dirpath = dirname(filepath) |
||||
module_name = splitext(basename(filepath))[0] |
||||
|
||||
# Validate and fetch |
||||
try: |
||||
fileobj, fullpath, (_, _, pytype) = find_module(module_name, [dirpath]) |
||||
except ImportError: |
||||
raise IOError("Cannot find config file. " |
||||
"Path maybe incorrect! : {0}".format(filepath)) |
||||
return pytype, fileobj, fullpath |
||||
|
||||
|
||||
def _get_codeobj(pyfile): |
||||
""" Returns the code object, given a python file """ |
||||
from imp import PY_COMPILED, PY_SOURCE |
||||
|
||||
result, fileobj, fullpath = _check_if_pyc(pyfile) |
||||
|
||||
# WARNING: |
||||
# fileobj.read() can blow up if the module is an extremely large file.
# Look out for overflow errors.
||||
try: |
||||
data = fileobj.read() |
||||
finally: |
||||
fileobj.close() |
||||
|
||||
# This is a .pyc file. Treat accordingly. |
||||
if result is PY_COMPILED: |
||||
# .pyc format is as follows: |
||||
# 0 - 4 bytes: magic number, which depends on the Python version that wrote
#              the .pyc file (the last 2 bytes are "\r\n").
# 4 - 8 bytes: timestamp of when the .py source was last changed.
# 8 - EOF: marshalled code object data.
# So to get the code object, read from the 8th byte to EOF and un-marshal it.
||||
import marshal |
||||
code_obj = marshal.loads(data[8:]) |
||||
|
||||
elif result is PY_SOURCE: |
||||
# This is a .py file. |
||||
code_obj = compile(data, fullpath, 'exec') |
||||
|
||||
else: |
||||
# Unsupported extension |
||||
raise Exception("Input file is unknown format: {0}".format(fullpath)) |
||||
|
||||
# Return code object |
||||
return code_obj |
||||
|
||||
if six.PY3: |
||||
def execfile_(fname, *args): |
||||
if fname.endswith(".pyc"): |
||||
code = _get_codeobj(fname) |
||||
else: |
||||
code = compile(open(fname, 'rb').read(), fname, 'exec') |
||||
return six.exec_(code, *args) |
||||
|
||||
def bytes_to_str(b): |
||||
if isinstance(b, six.text_type): |
||||
return b |
||||
return str(b, 'latin1') |
||||
|
||||
import urllib.parse |
||||
|
||||
def unquote_to_wsgi_str(string): |
||||
return _unquote_to_bytes(string).decode('latin-1') |
||||
|
||||
_unquote_to_bytes = urllib.parse.unquote_to_bytes |
||||
|
||||
else: |
||||
def execfile_(fname, *args): |
||||
""" Overriding PY2 execfile() implementation to support .pyc files """ |
||||
if fname.endswith(".pyc"): |
||||
return six.exec_(_get_codeobj(fname), *args) |
||||
return execfile(fname, *args) |
||||
|
||||
def bytes_to_str(s): |
||||
if isinstance(s, unicode): |
||||
return s.encode('utf-8') |
||||
return s |
||||
|
||||
import urllib |
||||
unquote_to_wsgi_str = urllib.unquote |
||||
|
||||
|
||||
# The following code adapted from trollius.py33_exceptions |
||||
def _wrap_error(exc, mapping, key): |
||||
if key not in mapping: |
||||
return |
||||
new_err_cls = mapping[key] |
||||
new_err = new_err_cls(*exc.args) |
||||
|
||||
# raise a new exception with the original traceback |
||||
six.reraise(new_err_cls, new_err, |
||||
exc.__traceback__ if hasattr(exc, '__traceback__') else sys.exc_info()[2]) |
||||
|
||||
if PY33: |
||||
import builtins |
||||
|
||||
BlockingIOError = builtins.BlockingIOError |
||||
BrokenPipeError = builtins.BrokenPipeError |
||||
ChildProcessError = builtins.ChildProcessError |
||||
ConnectionRefusedError = builtins.ConnectionRefusedError |
||||
ConnectionResetError = builtins.ConnectionResetError |
||||
InterruptedError = builtins.InterruptedError |
||||
ConnectionAbortedError = builtins.ConnectionAbortedError |
||||
PermissionError = builtins.PermissionError |
||||
FileNotFoundError = builtins.FileNotFoundError |
||||
ProcessLookupError = builtins.ProcessLookupError |
||||
|
||||
def wrap_error(func, *args, **kw): |
||||
return func(*args, **kw) |
||||
else: |
||||
import errno |
||||
import select |
||||
import socket |
||||
|
||||
class BlockingIOError(OSError): |
||||
pass |
||||
|
||||
class BrokenPipeError(OSError): |
||||
pass |
||||
|
||||
class ChildProcessError(OSError): |
||||
pass |
||||
|
||||
class ConnectionRefusedError(OSError): |
||||
pass |
||||
|
||||
class InterruptedError(OSError): |
||||
pass |
||||
|
||||
class ConnectionResetError(OSError): |
||||
pass |
||||
|
||||
class ConnectionAbortedError(OSError): |
||||
pass |
||||
|
||||
class PermissionError(OSError): |
||||
pass |
||||
|
||||
class FileNotFoundError(OSError): |
||||
pass |
||||
|
||||
class ProcessLookupError(OSError): |
||||
pass |
||||
|
||||
_MAP_ERRNO = { |
||||
errno.EACCES: PermissionError, |
||||
errno.EAGAIN: BlockingIOError, |
||||
errno.EALREADY: BlockingIOError, |
||||
errno.ECHILD: ChildProcessError, |
||||
errno.ECONNABORTED: ConnectionAbortedError, |
||||
errno.ECONNREFUSED: ConnectionRefusedError, |
||||
errno.ECONNRESET: ConnectionResetError, |
||||
errno.EINPROGRESS: BlockingIOError, |
||||
errno.EINTR: InterruptedError, |
||||
errno.ENOENT: FileNotFoundError, |
||||
errno.EPERM: PermissionError, |
||||
errno.EPIPE: BrokenPipeError, |
||||
errno.ESHUTDOWN: BrokenPipeError, |
||||
errno.EWOULDBLOCK: BlockingIOError, |
||||
errno.ESRCH: ProcessLookupError, |
||||
} |
||||
|
||||
def wrap_error(func, *args, **kw): |
||||
""" |
||||
Wrap socket.error, IOError, OSError, select.error to raise new specialized |
||||
exceptions of Python 3.3 like InterruptedError (PEP 3151). |
||||
""" |
||||
try: |
||||
return func(*args, **kw) |
||||
except (socket.error, IOError, OSError) as exc: |
||||
if hasattr(exc, 'winerror'): |
||||
_wrap_error(exc, _MAP_ERRNO, exc.winerror) |
||||
# _MAP_ERRNO does not contain all Windows errors. |
||||
# For some errors like "file not found", exc.errno should |
||||
# be used (ex: ENOENT). |
||||
_wrap_error(exc, _MAP_ERRNO, exc.errno) |
||||
raise |
||||
except select.error as exc: |
||||
if exc.args: |
||||
_wrap_error(exc, _MAP_ERRNO, exc.args[0]) |
||||
raise |
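
# An illustrative sketch (an addition, not part of this module): wrapping a
# blocking socket call so that, on Python < 3.3, errno-based socket.error
# values surface as the PEP 3151 exception classes defined above.  'listener'
# is a hypothetical bound, listening socket.
def _example_accept(listener):
    return wrap_error(listener.accept)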
||||
|
||||
if PY26: |
||||
from urlparse import ( |
||||
_parse_cache, MAX_CACHE_SIZE, clear_cache, _splitnetloc, SplitResult, |
||||
scheme_chars, |
||||
) |
||||
|
||||
def urlsplit(url, scheme='', allow_fragments=True): |
||||
"""Parse a URL into 5 components: |
||||
<scheme>://<netloc>/<path>?<query>#<fragment> |
||||
Return a 5-tuple: (scheme, netloc, path, query, fragment). |
||||
Note that we don't break the components up in smaller bits |
||||
(e.g. netloc is a single string) and we don't expand % escapes.""" |
||||
allow_fragments = bool(allow_fragments) |
||||
key = url, scheme, allow_fragments, type(url), type(scheme) |
||||
cached = _parse_cache.get(key, None) |
||||
if cached: |
||||
return cached |
||||
if len(_parse_cache) >= MAX_CACHE_SIZE: # avoid runaway growth |
||||
clear_cache() |
||||
netloc = query = fragment = '' |
||||
i = url.find(':') |
||||
if i > 0: |
||||
if url[:i] == 'http': # optimize the common case |
||||
scheme = url[:i].lower() |
||||
url = url[i+1:] |
||||
if url[:2] == '//': |
||||
netloc, url = _splitnetloc(url, 2) |
||||
if (('[' in netloc and ']' not in netloc) or |
||||
(']' in netloc and '[' not in netloc)): |
||||
raise ValueError("Invalid IPv6 URL") |
||||
if allow_fragments and '#' in url: |
||||
url, fragment = url.split('#', 1) |
||||
if '?' in url: |
||||
url, query = url.split('?', 1) |
||||
v = SplitResult(scheme, netloc, url, query, fragment) |
||||
_parse_cache[key] = v |
||||
return v |
||||
for c in url[:i]: |
||||
if c not in scheme_chars: |
||||
break |
||||
else: |
||||
# make sure "url" is not actually a port number (in which case |
||||
# "scheme" is really part of the path) |
||||
rest = url[i+1:] |
||||
if not rest or any(c not in '0123456789' for c in rest): |
||||
# not a port number |
||||
scheme, url = url[:i].lower(), rest |
||||
|
||||
if url[:2] == '//': |
||||
netloc, url = _splitnetloc(url, 2) |
||||
if (('[' in netloc and ']' not in netloc) or |
||||
(']' in netloc and '[' not in netloc)): |
||||
raise ValueError("Invalid IPv6 URL") |
||||
if allow_fragments and '#' in url: |
||||
url, fragment = url.split('#', 1) |
||||
if '?' in url: |
||||
url, query = url.split('?', 1) |
||||
v = SplitResult(scheme, netloc, url, query, fragment) |
||||
_parse_cache[key] = v |
||||
return v |
||||
|
||||
else: |
||||
from gunicorn.six.moves.urllib.parse import urlsplit |
||||
|
||||
|
||||
import inspect |
||||
|
||||
if hasattr(inspect, 'signature'): |
||||
positionals = ( |
||||
inspect.Parameter.POSITIONAL_ONLY, |
||||
inspect.Parameter.POSITIONAL_OR_KEYWORD, |
||||
) |
||||
|
||||
def get_arity(f): |
||||
sig = inspect.signature(f) |
||||
arity = 0 |
||||
|
||||
for param in sig.parameters.values(): |
||||
if param.kind in positionals: |
||||
arity += 1 |
||||
|
||||
return arity |
||||
else: |
||||
def get_arity(f): |
||||
return len(inspect.getargspec(f)[0]) |
||||
|
||||
|
||||
try: |
||||
import html |
||||
|
||||
def html_escape(s): |
||||
return html.escape(s) |
||||
except ImportError: |
||||
import cgi |
||||
|
||||
def html_escape(s): |
||||
return cgi.escape(s, quote=True) |
@ -0,0 +1,4 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
@ -0,0 +1,223 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
||||
from __future__ import print_function |
||||
|
||||
import os |
||||
import sys |
||||
import traceback |
||||
|
||||
from gunicorn._compat import execfile_ |
||||
from gunicorn import util |
||||
from gunicorn.arbiter import Arbiter |
||||
from gunicorn.config import Config, get_default_config_file |
||||
from gunicorn import debug |
||||
|
||||
class BaseApplication(object): |
||||
""" |
||||
An application interface for configuring and loading |
||||
the various necessities for any given web framework. |
||||
""" |
||||
def __init__(self, usage=None, prog=None): |
||||
self.usage = usage |
||||
self.cfg = None |
||||
self.callable = None |
||||
self.prog = prog |
||||
self.logger = None |
||||
self.do_load_config() |
||||
|
||||
def do_load_config(self): |
||||
""" |
||||
Loads the configuration |
||||
""" |
||||
try: |
||||
self.load_default_config() |
||||
self.load_config() |
||||
except Exception as e: |
||||
print("\nError: %s" % str(e), file=sys.stderr) |
||||
sys.stderr.flush() |
||||
sys.exit(1) |
||||
|
||||
def load_default_config(self): |
||||
# init configuration |
||||
self.cfg = Config(self.usage, prog=self.prog) |
||||
|
||||
def init(self, parser, opts, args): |
||||
raise NotImplementedError |
||||
|
||||
def load(self): |
||||
raise NotImplementedError |
||||
|
||||
def load_config(self): |
||||
""" |
||||
This method is used to load the configuration from one or several inputs
(e.g. the command line or a configuration file).
You have to override this method in your class.
||||
""" |
||||
raise NotImplementedError |
||||
|
||||
def reload(self): |
||||
self.do_load_config() |
||||
if self.cfg.spew: |
||||
debug.spew() |
||||
|
||||
def wsgi(self): |
||||
if self.callable is None: |
||||
self.callable = self.load() |
||||
return self.callable |
||||
|
||||
def run(self): |
||||
try: |
||||
Arbiter(self).run() |
||||
except RuntimeError as e: |
||||
print("\nError: %s\n" % e, file=sys.stderr) |
||||
sys.stderr.flush() |
||||
sys.exit(1) |
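
# A hedged sketch (an illustrative addition, not part of this file) of the
# documented way to embed gunicorn in a program: subclass BaseApplication,
# feed options into the config from load_config() and return the WSGI
# callable from load().  The option values below are assumptions used only
# for illustration.
class _ExampleStandaloneApplication(BaseApplication):

    def __init__(self, app, options=None):
        self.options = options or {}
        self.application = app
        super(_ExampleStandaloneApplication, self).__init__()

    def load_config(self):
        for key, value in self.options.items():
            if key in self.cfg.settings and value is not None:
                self.cfg.set(key.lower(), value)

    def load(self):
        return self.application

# Usage sketch: _ExampleStandaloneApplication(wsgi_app,
#     {'bind': '127.0.0.1:8000', 'workers': 2}).run()
# where 'wsgi_app' is a hypothetical WSGI callable.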
||||
|
||||
|
||||
class Application(BaseApplication): |
||||
|
||||
# 'init' and 'load' methods are implemented by WSGIApplication. |
||||
# pylint: disable=abstract-method |
||||
|
||||
def chdir(self): |
||||
# chdir to the configured path before loading, |
||||
# default is the current dir |
||||
os.chdir(self.cfg.chdir) |
||||
|
||||
# add the path to sys.path |
||||
if self.cfg.chdir not in sys.path: |
||||
sys.path.insert(0, self.cfg.chdir) |
||||
|
||||
def get_config_from_filename(self, filename): |
||||
|
||||
if not os.path.exists(filename): |
||||
raise RuntimeError("%r doesn't exist" % filename) |
||||
|
||||
cfg = { |
||||
"__builtins__": __builtins__, |
||||
"__name__": "__config__", |
||||
"__file__": filename, |
||||
"__doc__": None, |
||||
"__package__": None |
||||
} |
||||
try: |
||||
execfile_(filename, cfg, cfg) |
||||
except Exception: |
||||
print("Failed to read config file: %s" % filename, file=sys.stderr) |
||||
traceback.print_exc() |
||||
sys.stderr.flush() |
||||
sys.exit(1) |
||||
|
||||
return cfg |
||||
|
||||
def get_config_from_module_name(self, module_name): |
||||
return vars(util.import_module(module_name)) |
||||
|
||||
def load_config_from_module_name_or_filename(self, location): |
||||
""" |
||||
Loads the configuration from a Python module name (``python:`` prefix) or a
Python file; raises a RuntimeError if the file does not exist and stops the
process if the configuration file contains a syntax error.
||||
""" |
||||
|
||||
if location.startswith("python:"): |
||||
module_name = location[len("python:"):] |
||||
cfg = self.get_config_from_module_name(module_name) |
||||
else: |
||||
if location.startswith("file:"): |
||||
filename = location[len("file:"):] |
||||
else: |
||||
filename = location |
||||
cfg = self.get_config_from_filename(filename) |
||||
|
||||
for k, v in cfg.items(): |
||||
# Ignore unknown names |
||||
if k not in self.cfg.settings: |
||||
continue |
||||
try: |
||||
self.cfg.set(k.lower(), v) |
||||
except: |
||||
print("Invalid value for %s: %s\n" % (k, v), file=sys.stderr) |
||||
sys.stderr.flush() |
||||
raise |
||||
|
||||
return cfg |
||||
|
||||
def load_config_from_file(self, filename): |
||||
return self.load_config_from_module_name_or_filename(location=filename) |
||||
|
||||
def load_config(self): |
||||
# parse console args |
||||
parser = self.cfg.parser() |
||||
args = parser.parse_args() |
||||
|
||||
# optional settings from apps |
||||
cfg = self.init(parser, args, args.args) |
||||
|
||||
# set up import paths and follow symlinks |
||||
self.chdir() |
||||
|
||||
# Load up any app-specific configuration
||||
if cfg: |
||||
for k, v in cfg.items(): |
||||
self.cfg.set(k.lower(), v) |
||||
|
||||
env_args = parser.parse_args(self.cfg.get_cmd_args_from_env()) |
||||
|
||||
if args.config: |
||||
self.load_config_from_file(args.config) |
||||
elif env_args.config: |
||||
self.load_config_from_file(env_args.config) |
||||
else: |
||||
default_config = get_default_config_file() |
||||
if default_config is not None: |
||||
self.load_config_from_file(default_config) |
||||
|
||||
# Load up environment configuration |
||||
for k, v in vars(env_args).items(): |
||||
if v is None: |
||||
continue |
||||
if k == "args": |
||||
continue |
||||
self.cfg.set(k.lower(), v) |
||||
|
||||
# Lastly, update the configuration with any command line settings. |
||||
for k, v in vars(args).items(): |
||||
if v is None: |
||||
continue |
||||
if k == "args": |
||||
continue |
||||
self.cfg.set(k.lower(), v) |
||||
|
||||
# current directory might be changed by the config now |
||||
# set up import paths and follow symlinks |
||||
self.chdir() |
||||
|
||||
def run(self): |
||||
if self.cfg.check_config: |
||||
try: |
||||
self.load() |
||||
except: |
||||
msg = "\nError while loading the application:\n" |
||||
print(msg, file=sys.stderr) |
||||
traceback.print_exc() |
||||
sys.stderr.flush() |
||||
sys.exit(1) |
||||
sys.exit(0) |
||||
|
||||
if self.cfg.spew: |
||||
debug.spew() |
||||
|
||||
if self.cfg.daemon: |
||||
util.daemonize(self.cfg.enable_stdio_inheritance) |
||||
|
||||
# set python paths |
||||
if self.cfg.pythonpath: |
||||
paths = self.cfg.pythonpath.split(",") |
||||
for path in paths: |
||||
pythonpath = os.path.abspath(path) |
||||
if pythonpath not in sys.path: |
||||
sys.path.insert(0, pythonpath) |
||||
|
||||
super(Application, self).run() |
@ -0,0 +1,209 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
||||
from __future__ import print_function |
||||
|
||||
# pylint: skip-file |
||||
|
||||
import os |
||||
import pkg_resources |
||||
import sys |
||||
|
||||
try: |
||||
import configparser as ConfigParser |
||||
except ImportError: |
||||
import ConfigParser |
||||
|
||||
from paste.deploy import loadapp, loadwsgi |
||||
SERVER = loadwsgi.SERVER |
||||
|
||||
from gunicorn.app.base import Application |
||||
from gunicorn.config import Config, get_default_config_file |
||||
from gunicorn import util |
||||
|
||||
|
||||
def _has_logging_config(paste_file): |
||||
cfg_parser = ConfigParser.ConfigParser() |
||||
cfg_parser.read([paste_file]) |
||||
return cfg_parser.has_section('loggers') |
||||
|
||||
|
||||
def paste_config(gconfig, config_url, relative_to, global_conf=None): |
||||
# add entry to pkg_resources |
||||
sys.path.insert(0, relative_to) |
||||
pkg_resources.working_set.add_entry(relative_to) |
||||
|
||||
config_url = config_url.split('#')[0] |
||||
cx = loadwsgi.loadcontext(SERVER, config_url, relative_to=relative_to, |
||||
global_conf=global_conf) |
||||
gc, lc = cx.global_conf.copy(), cx.local_conf.copy() |
||||
cfg = {} |
||||
|
||||
host, port = lc.pop('host', ''), lc.pop('port', '') |
||||
if host and port: |
||||
cfg['bind'] = '%s:%s' % (host, port) |
||||
elif host: |
||||
cfg['bind'] = host.split(',') |
||||
|
||||
cfg['default_proc_name'] = gc.get('__file__') |
||||
|
||||
# init logging configuration |
||||
config_file = config_url.split(':')[1] |
||||
if _has_logging_config(config_file): |
||||
cfg.setdefault('logconfig', config_file) |
||||
|
||||
for k, v in gc.items(): |
||||
if k not in gconfig.settings: |
||||
continue |
||||
cfg[k] = v |
||||
|
||||
for k, v in lc.items(): |
||||
if k not in gconfig.settings: |
||||
continue |
||||
cfg[k] = v |
||||
|
||||
return cfg |
||||
|
||||
|
||||
def load_pasteapp(config_url, relative_to, global_conf=None): |
||||
return loadapp(config_url, relative_to=relative_to, |
||||
global_conf=global_conf) |
||||
|
||||
class PasterBaseApplication(Application): |
||||
gcfg = None |
||||
|
||||
def app_config(self): |
||||
return paste_config(self.cfg, self.cfgurl, self.relpath, |
||||
global_conf=self.gcfg) |
||||
|
||||
def load_config(self): |
||||
super(PasterBaseApplication, self).load_config() |
||||
|
||||
# reload logging conf |
||||
if hasattr(self, "cfgfname"): |
||||
parser = ConfigParser.ConfigParser() |
||||
parser.read([self.cfgfname]) |
||||
if parser.has_section('loggers'): |
||||
from logging.config import fileConfig |
||||
config_file = os.path.abspath(self.cfgfname) |
||||
fileConfig(config_file, dict(__file__=config_file, |
||||
here=os.path.dirname(config_file))) |
||||
|
||||
|
||||
class PasterApplication(PasterBaseApplication): |
||||
|
||||
def init(self, parser, opts, args): |
||||
if len(args) != 1: |
||||
parser.error("No application name specified.") |
||||
|
||||
cwd = util.getcwd() |
||||
cfgfname = os.path.normpath(os.path.join(cwd, args[0])) |
||||
cfgfname = os.path.abspath(cfgfname) |
||||
if not os.path.exists(cfgfname): |
||||
parser.error("Config file not found: %s" % cfgfname) |
||||
|
||||
self.cfgurl = 'config:%s' % cfgfname |
||||
self.relpath = os.path.dirname(cfgfname) |
||||
self.cfgfname = cfgfname |
||||
|
||||
sys.path.insert(0, self.relpath) |
||||
pkg_resources.working_set.add_entry(self.relpath) |
||||
|
||||
return self.app_config() |
||||
|
||||
def load(self): |
||||
# chdir to the configured path before loading, |
||||
# default is the current dir |
||||
os.chdir(self.cfg.chdir) |
||||
|
||||
return load_pasteapp(self.cfgurl, self.relpath, global_conf=self.gcfg) |
||||
|
||||
|
||||
class PasterServerApplication(PasterBaseApplication): |
||||
|
||||
def __init__(self, app, gcfg=None, host="127.0.0.1", port=None, **kwargs): |
||||
# pylint: disable=super-init-not-called |
||||
self.cfg = Config() |
||||
self.gcfg = gcfg # need to hold this for app_config |
||||
self.app = app |
||||
self.callable = None |
||||
|
||||
gcfg = gcfg or {} |
||||
cfgfname = gcfg.get("__file__") |
||||
if cfgfname is not None: |
||||
self.cfgurl = 'config:%s' % cfgfname |
||||
self.relpath = os.path.dirname(cfgfname) |
||||
self.cfgfname = cfgfname |
||||
|
||||
cfg = kwargs.copy() |
||||
|
||||
if port and not host.startswith("unix:"): |
||||
bind = "%s:%s" % (host, port) |
||||
else: |
||||
bind = host |
||||
cfg["bind"] = bind.split(',') |
||||
|
||||
if gcfg: |
||||
for k, v in gcfg.items(): |
||||
cfg[k] = v |
||||
cfg["default_proc_name"] = cfg['__file__'] |
||||
|
||||
try: |
||||
for k, v in cfg.items(): |
||||
if k.lower() in self.cfg.settings and v is not None: |
||||
self.cfg.set(k.lower(), v) |
||||
except Exception as e: |
||||
print("\nConfig error: %s" % str(e), file=sys.stderr) |
||||
sys.stderr.flush() |
||||
sys.exit(1) |
||||
|
||||
if cfg.get("config"): |
||||
self.load_config_from_file(cfg["config"]) |
||||
else: |
||||
default_config = get_default_config_file() |
||||
if default_config is not None: |
||||
self.load_config_from_file(default_config) |
||||
|
||||
def load(self): |
||||
return self.app |
||||
|
||||
|
||||
def run(): |
||||
"""\ |
||||
The ``gunicorn_paster`` command for launching Paster-compatible |
||||
applications like Pylons or TurboGears2 |
||||
""" |
||||
util.warn("""This command is deprecated. |
||||
|
||||
You should now use the `--paste` option. Ex.: |
||||
|
||||
gunicorn --paste development.ini |
||||
""") |
||||
|
||||
from gunicorn.app.pasterapp import PasterApplication |
||||
PasterApplication("%(prog)s [OPTIONS] pasteconfig.ini").run() |
||||
|
||||
|
||||
def paste_server(app, gcfg=None, host="127.0.0.1", port=None, **kwargs): |
||||
"""\ |
||||
A paster server. |
||||
|
||||
The entry point in your paster ini file should look like this: |
||||
|
||||
[server:main] |
||||
use = egg:gunicorn#main |
||||
host = 127.0.0.1 |
||||
port = 5000 |
||||
|
||||
""" |
||||
|
||||
util.warn("""This command is deprecated. |
||||
|
||||
You should now use the `--paste` option. Ex.: |
||||
|
||||
gunicorn --paste development.ini |
||||
""") |
||||
|
||||
from gunicorn.app.pasterapp import PasterServerApplication |
||||
PasterServerApplication(app, gcfg=gcfg, host=host, port=port, **kwargs).run() |
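A minimal sketch (the WSGI callable and settings are assumptions, and this snippet is not part of gunicorn itself) of what the `egg:gunicorn#main` entry point shown in the docstring above boils down to when called programmatically:

def app(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'hello from paste_server\n']

if __name__ == '__main__':
    # roughly equivalent to [server:main] use = egg:gunicorn#main with host/port set
    paste_server(app, host='127.0.0.1', port=5000, workers=2)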
@ -0,0 +1,65 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
||||
|
||||
import os |
||||
|
||||
from gunicorn.errors import ConfigError |
||||
from gunicorn.app.base import Application |
||||
from gunicorn import util |
||||
|
||||
|
||||
class WSGIApplication(Application): |
||||
def init(self, parser, opts, args): |
||||
if opts.paste: |
||||
app_name = 'main' |
||||
path = opts.paste |
||||
if '#' in path: |
||||
path, app_name = path.split('#') |
||||
path = os.path.abspath(os.path.normpath( |
||||
os.path.join(util.getcwd(), path))) |
||||
|
||||
if not os.path.exists(path): |
||||
raise ConfigError("%r not found" % path) |
||||
|
||||
# paste application, load the config |
||||
self.cfgurl = 'config:%s#%s' % (path, app_name) |
||||
self.relpath = os.path.dirname(path) |
||||
|
||||
from .pasterapp import paste_config |
||||
return paste_config(self.cfg, self.cfgurl, self.relpath) |
||||
|
||||
if len(args) < 1: |
||||
parser.error("No application module specified.") |
||||
|
||||
self.cfg.set("default_proc_name", args[0]) |
||||
self.app_uri = args[0] |
||||
|
||||
def load_wsgiapp(self): |
||||
# load the app |
||||
return util.import_app(self.app_uri) |
||||
|
||||
def load_pasteapp(self): |
||||
# load the paste app |
||||
from .pasterapp import load_pasteapp |
||||
return load_pasteapp(self.cfgurl, self.relpath, global_conf=self.cfg.paste_global_conf) |
||||
|
||||
def load(self): |
||||
if self.cfg.paste is not None: |
||||
return self.load_pasteapp() |
||||
else: |
||||
return self.load_wsgiapp() |
||||
|
||||
|
||||
def run(): |
||||
"""\ |
||||
The ``gunicorn`` command line runner for launching Gunicorn with |
||||
generic WSGI applications. |
||||
""" |
||||
from gunicorn.app.wsgiapp import WSGIApplication |
||||
WSGIApplication("%(prog)s [OPTIONS] [APP_MODULE]").run() |
||||
|
||||
|
||||
if __name__ == '__main__': |
||||
run() |
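For reference, a tiny sketch (the module and callable names are assumptions) of the APP_MODULE form that `load_wsgiapp()` resolves through `util.import_app()`, e.g. when started as `gunicorn hello:app`:

# hello.py (hypothetical)
def app(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'Hello, World!\n']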
@ -0,0 +1,646 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
||||
from __future__ import print_function |
||||
|
||||
import errno |
||||
import os |
||||
import random |
||||
import select |
||||
import signal |
||||
import sys |
||||
import time |
||||
import traceback |
||||
|
||||
from gunicorn.errors import HaltServer, AppImportError |
||||
from gunicorn.pidfile import Pidfile |
||||
from gunicorn import sock, systemd, util |
||||
|
||||
from gunicorn import __version__, SERVER_SOFTWARE |
||||
|
||||
|
||||
class Arbiter(object): |
||||
""" |
||||
The Arbiter maintains the worker processes. It launches or |
||||
kills them if needed. It also manages application reloading |
||||
via SIGHUP/USR2. |
||||
""" |
||||
|
||||
# A flag indicating if a worker failed |
||||
# to boot. If a worker process exits with |
||||
# this error code, the arbiter will terminate. |
||||
WORKER_BOOT_ERROR = 3 |
||||
|
||||
# A flag indicating if an application failed to be loaded |
||||
APP_LOAD_ERROR = 4 |
||||
|
||||
START_CTX = {} |
||||
|
||||
LISTENERS = [] |
||||
WORKERS = {} |
||||
PIPE = [] |
||||
|
||||
# I love dynamic languages |
||||
SIG_QUEUE = [] |
||||
SIGNALS = [getattr(signal, "SIG%s" % x) |
||||
for x in "HUP QUIT INT TERM TTIN TTOU USR1 USR2 WINCH".split()] |
||||
SIG_NAMES = dict( |
||||
(getattr(signal, name), name[3:].lower()) for name in dir(signal) |
||||
if name[:3] == "SIG" and name[3] != "_" |
||||
) |
||||
|
||||
def __init__(self, app): |
||||
os.environ["SERVER_SOFTWARE"] = SERVER_SOFTWARE |
||||
|
||||
self._num_workers = None |
||||
self._last_logged_active_worker_count = None |
||||
self.log = None |
||||
|
||||
self.setup(app) |
||||
|
||||
self.pidfile = None |
||||
self.systemd = False |
||||
self.worker_age = 0 |
||||
self.reexec_pid = 0 |
||||
self.master_pid = 0 |
||||
self.master_name = "Master" |
||||
|
||||
cwd = util.getcwd() |
||||
|
||||
args = sys.argv[:] |
||||
args.insert(0, sys.executable) |
||||
|
||||
# init start context |
||||
self.START_CTX = { |
||||
"args": args, |
||||
"cwd": cwd, |
||||
0: sys.executable |
||||
} |
||||
|
||||
def _get_num_workers(self): |
||||
return self._num_workers |
||||
|
||||
def _set_num_workers(self, value): |
||||
old_value = self._num_workers |
||||
self._num_workers = value |
||||
self.cfg.nworkers_changed(self, value, old_value) |
||||
num_workers = property(_get_num_workers, _set_num_workers) |
||||
|
||||
def setup(self, app): |
||||
self.app = app |
||||
self.cfg = app.cfg |
||||
|
||||
if self.log is None: |
||||
self.log = self.cfg.logger_class(app.cfg) |
||||
|
||||
# reopen files |
||||
if 'GUNICORN_FD' in os.environ: |
||||
self.log.reopen_files() |
||||
|
||||
self.worker_class = self.cfg.worker_class |
||||
self.address = self.cfg.address |
||||
self.num_workers = self.cfg.workers |
||||
self.timeout = self.cfg.timeout |
||||
self.proc_name = self.cfg.proc_name |
||||
|
||||
self.log.debug('Current configuration:\n{0}'.format( |
||||
'\n'.join( |
||||
' {0}: {1}'.format(config, value.value) |
||||
for config, value |
||||
in sorted(self.cfg.settings.items(), |
||||
key=lambda setting: setting[1])))) |
||||
|
||||
# set environment variables |
||||
if self.cfg.env: |
||||
for k, v in self.cfg.env.items(): |
||||
os.environ[k] = v |
||||
|
||||
if self.cfg.preload_app: |
||||
self.app.wsgi() |
||||
|
||||
def start(self): |
||||
"""\ |
||||
Initialize the arbiter. Start listening and set pidfile if needed. |
||||
""" |
||||
self.log.info("Starting gunicorn %s", __version__) |
||||
|
||||
if 'GUNICORN_PID' in os.environ: |
||||
self.master_pid = int(os.environ.get('GUNICORN_PID')) |
||||
self.proc_name = self.proc_name + ".2" |
||||
self.master_name = "Master.2" |
||||
|
||||
self.pid = os.getpid() |
||||
if self.cfg.pidfile is not None: |
||||
pidname = self.cfg.pidfile |
||||
if self.master_pid != 0: |
||||
pidname += ".2" |
||||
self.pidfile = Pidfile(pidname) |
||||
self.pidfile.create(self.pid) |
||||
self.cfg.on_starting(self) |
||||
|
||||
self.init_signals() |
||||
|
||||
if not self.LISTENERS: |
||||
fds = None |
||||
listen_fds = systemd.listen_fds() |
||||
if listen_fds: |
||||
self.systemd = True |
||||
fds = range(systemd.SD_LISTEN_FDS_START, |
||||
systemd.SD_LISTEN_FDS_START + listen_fds) |
||||
|
||||
elif self.master_pid: |
||||
fds = [] |
||||
for fd in os.environ.pop('GUNICORN_FD').split(','): |
||||
fds.append(int(fd)) |
||||
|
||||
self.LISTENERS = sock.create_sockets(self.cfg, self.log, fds) |
||||
|
||||
listeners_str = ",".join([str(l) for l in self.LISTENERS]) |
||||
self.log.debug("Arbiter booted") |
||||
self.log.info("Listening at: %s (%s)", listeners_str, self.pid) |
||||
self.log.info("Using worker: %s", self.cfg.worker_class_str) |
||||
|
||||
# check worker class requirements |
||||
if hasattr(self.worker_class, "check_config"): |
||||
self.worker_class.check_config(self.cfg, self.log) |
||||
|
||||
self.cfg.when_ready(self) |
||||
|
||||
def init_signals(self): |
||||
"""\ |
||||
Initialize master signal handling. Most of the signals |
||||
are queued. Child signals only wake up the master. |
||||
""" |
||||
# close old PIPE |
||||
for p in self.PIPE: |
||||
os.close(p) |
||||
|
||||
# initialize the pipe |
||||
self.PIPE = pair = os.pipe() |
||||
for p in pair: |
||||
util.set_non_blocking(p) |
||||
util.close_on_exec(p) |
||||
|
||||
self.log.close_on_exec() |
||||
|
||||
# initialize all signals |
||||
for s in self.SIGNALS: |
||||
signal.signal(s, self.signal) |
||||
signal.signal(signal.SIGCHLD, self.handle_chld) |
||||
|
||||
def signal(self, sig, frame): |
||||
if len(self.SIG_QUEUE) < 5: |
||||
self.SIG_QUEUE.append(sig) |
||||
self.wakeup() |
||||
|
||||
def run(self): |
||||
"Main master loop." |
||||
self.start() |
||||
util._setproctitle("master [%s]" % self.proc_name) |
||||
|
||||
try: |
||||
self.manage_workers() |
||||
|
||||
while True: |
||||
self.maybe_promote_master() |
||||
|
||||
sig = self.SIG_QUEUE.pop(0) if self.SIG_QUEUE else None |
||||
if sig is None: |
||||
self.sleep() |
||||
self.murder_workers() |
||||
self.manage_workers() |
||||
continue |
||||
|
||||
if sig not in self.SIG_NAMES: |
||||
self.log.info("Ignoring unknown signal: %s", sig) |
||||
continue |
||||
|
||||
signame = self.SIG_NAMES.get(sig) |
||||
handler = getattr(self, "handle_%s" % signame, None) |
||||
if not handler: |
||||
self.log.error("Unhandled signal: %s", signame) |
||||
continue |
||||
self.log.info("Handling signal: %s", signame) |
||||
handler() |
||||
self.wakeup() |
||||
except StopIteration: |
||||
self.halt() |
||||
except KeyboardInterrupt: |
||||
self.halt() |
||||
except HaltServer as inst: |
||||
self.halt(reason=inst.reason, exit_status=inst.exit_status) |
||||
except SystemExit: |
||||
raise |
||||
except Exception: |
||||
self.log.info("Unhandled exception in main loop", |
||||
exc_info=True) |
||||
self.stop(False) |
||||
if self.pidfile is not None: |
||||
self.pidfile.unlink() |
||||
sys.exit(-1) |
||||
|
||||
def handle_chld(self, sig, frame): |
||||
"SIGCHLD handling" |
||||
self.reap_workers() |
||||
self.wakeup() |
||||
|
||||
def handle_hup(self): |
||||
"""\ |
||||
HUP handling. |
||||
- Reload configuration |
||||
- Start the new worker processes with a new configuration |
||||
- Gracefully shutdown the old worker processes |
||||
""" |
||||
self.log.info("Hang up: %s", self.master_name) |
||||
self.reload() |
||||
|
||||
def handle_term(self): |
||||
"SIGTERM handling" |
||||
raise StopIteration |
||||
|
||||
def handle_int(self): |
||||
"SIGINT handling" |
||||
self.stop(False) |
||||
raise StopIteration |
||||
|
||||
def handle_quit(self): |
||||
"SIGQUIT handling" |
||||
self.stop(False) |
||||
raise StopIteration |
||||
|
||||
def handle_ttin(self): |
||||
"""\ |
||||
SIGTTIN handling. |
||||
Increases the number of workers by one. |
||||
""" |
||||
self.num_workers += 1 |
||||
self.manage_workers() |
||||
|
||||
def handle_ttou(self): |
||||
"""\ |
||||
SIGTTOU handling. |
||||
Decreases the number of workers by one. |
||||
""" |
||||
if self.num_workers <= 1: |
||||
return |
||||
self.num_workers -= 1 |
||||
self.manage_workers() |
||||
|
||||
def handle_usr1(self): |
||||
"""\ |
||||
SIGUSR1 handling. |
||||
Kill all workers by sending them a SIGUSR1 |
||||
""" |
||||
self.log.reopen_files() |
||||
self.kill_workers(signal.SIGUSR1) |
||||
|
||||
def handle_usr2(self): |
||||
"""\ |
||||
SIGUSR2 handling. |
||||
Creates a new master/worker set as a slave of the current |
||||
master without affecting old workers. Use this to do live |
||||
deployment with the ability to back out a change. |
||||
""" |
||||
self.reexec() |
||||
|
||||
def handle_winch(self): |
||||
"""SIGWINCH handling""" |
||||
if self.cfg.daemon: |
||||
self.log.info("graceful stop of workers") |
||||
self.num_workers = 0 |
||||
self.kill_workers(signal.SIGTERM) |
||||
else: |
||||
self.log.debug("SIGWINCH ignored. Not daemonized") |
||||
|
||||
def maybe_promote_master(self): |
||||
if self.master_pid == 0: |
||||
return |
||||
|
||||
if self.master_pid != os.getppid(): |
||||
self.log.info("Master has been promoted.") |
||||
# reset master info |
||||
self.master_name = "Master" |
||||
self.master_pid = 0 |
||||
self.proc_name = self.cfg.proc_name |
||||
del os.environ['GUNICORN_PID'] |
||||
# rename the pidfile |
||||
if self.pidfile is not None: |
||||
self.pidfile.rename(self.cfg.pidfile) |
||||
# reset proctitle |
||||
util._setproctitle("master [%s]" % self.proc_name) |
||||
|
||||
def wakeup(self): |
||||
"""\ |
||||
Wake up the arbiter by writing to the PIPE |
||||
""" |
||||
try: |
||||
os.write(self.PIPE[1], b'.') |
||||
except IOError as e: |
||||
if e.errno not in [errno.EAGAIN, errno.EINTR]: |
||||
raise |
||||
|
||||
def halt(self, reason=None, exit_status=0): |
||||
""" halt arbiter """ |
||||
self.stop() |
||||
self.log.info("Shutting down: %s", self.master_name) |
||||
if reason is not None: |
||||
self.log.info("Reason: %s", reason) |
||||
if self.pidfile is not None: |
||||
self.pidfile.unlink() |
||||
self.cfg.on_exit(self) |
||||
sys.exit(exit_status) |
||||
|
||||
def sleep(self): |
||||
"""\ |
||||
Sleep until PIPE is readable or we timeout. |
||||
A readable PIPE means a signal occurred. |
||||
""" |
||||
try: |
||||
ready = select.select([self.PIPE[0]], [], [], 1.0) |
||||
if not ready[0]: |
||||
return |
||||
while os.read(self.PIPE[0], 1): |
||||
pass |
||||
except (select.error, OSError) as e: |
||||
# TODO: select.error is a subclass of OSError since Python 3.3. |
||||
error_number = getattr(e, 'errno', e.args[0]) |
||||
if error_number not in [errno.EAGAIN, errno.EINTR]: |
||||
raise |
||||
except KeyboardInterrupt: |
||||
sys.exit() |
||||
|
||||
def stop(self, graceful=True): |
||||
"""\ |
||||
Stop workers |
||||
|
||||
:attr graceful: boolean, If True (the default) workers will be |
||||
killed gracefully (i.e. trying to wait for the current connection to finish) |
||||
""" |
||||
|
||||
unlink = self.reexec_pid == self.master_pid == 0 and not self.systemd |
||||
sock.close_sockets(self.LISTENERS, unlink) |
||||
|
||||
self.LISTENERS = [] |
||||
sig = signal.SIGTERM |
||||
if not graceful: |
||||
sig = signal.SIGQUIT |
||||
limit = time.time() + self.cfg.graceful_timeout |
||||
# instruct the workers to exit |
||||
self.kill_workers(sig) |
||||
# wait until the graceful timeout |
||||
while self.WORKERS and time.time() < limit: |
||||
time.sleep(0.1) |
||||
|
||||
self.kill_workers(signal.SIGKILL) |
||||
|
||||
def reexec(self): |
||||
"""\ |
||||
Relaunch the master and workers. |
||||
""" |
||||
if self.reexec_pid != 0: |
||||
self.log.warning("USR2 signal ignored. Child exists.") |
||||
return |
||||
|
||||
if self.master_pid != 0: |
||||
self.log.warning("USR2 signal ignored. Parent exists.") |
||||
return |
||||
|
||||
master_pid = os.getpid() |
||||
self.reexec_pid = os.fork() |
||||
if self.reexec_pid != 0: |
||||
return |
||||
|
||||
self.cfg.pre_exec(self) |
||||
|
||||
environ = self.cfg.env_orig.copy() |
||||
environ['GUNICORN_PID'] = str(master_pid) |
||||
|
||||
if self.systemd: |
||||
environ['LISTEN_PID'] = str(os.getpid()) |
||||
environ['LISTEN_FDS'] = str(len(self.LISTENERS)) |
||||
else: |
||||
environ['GUNICORN_FD'] = ','.join( |
||||
str(l.fileno()) for l in self.LISTENERS) |
||||
|
||||
os.chdir(self.START_CTX['cwd']) |
||||
|
||||
# exec the process using the original environment |
||||
os.execvpe(self.START_CTX[0], self.START_CTX['args'], environ) |
||||
|
||||
def reload(self): |
||||
old_address = self.cfg.address |
||||
|
||||
# reset old environment |
||||
for k in self.cfg.env: |
||||
if k in self.cfg.env_orig: |
||||
# reset the key to the value it had before |
||||
# we launched gunicorn |
||||
os.environ[k] = self.cfg.env_orig[k] |
||||
else: |
||||
# delete the value set by gunicorn |
||||
try: |
||||
del os.environ[k] |
||||
except KeyError: |
||||
pass |
||||
|
||||
# reload conf |
||||
self.app.reload() |
||||
self.setup(self.app) |
||||
|
||||
# reopen log files |
||||
self.log.reopen_files() |
||||
|
||||
# do we need to change the listeners? |
||||
if old_address != self.cfg.address: |
||||
# close all listeners |
||||
for l in self.LISTENERS: |
||||
l.close() |
||||
# init new listeners |
||||
self.LISTENERS = sock.create_sockets(self.cfg, self.log) |
||||
listeners_str = ",".join([str(l) for l in self.LISTENERS]) |
||||
self.log.info("Listening at: %s", listeners_str) |
||||
|
||||
# do some actions on reload |
||||
self.cfg.on_reload(self) |
||||
|
||||
# unlink pidfile |
||||
if self.pidfile is not None: |
||||
self.pidfile.unlink() |
||||
|
||||
# create new pidfile |
||||
if self.cfg.pidfile is not None: |
||||
self.pidfile = Pidfile(self.cfg.pidfile) |
||||
self.pidfile.create(self.pid) |
||||
|
||||
# set new proc_name |
||||
util._setproctitle("master [%s]" % self.proc_name) |
||||
|
||||
# spawn new workers |
||||
for _ in range(self.cfg.workers): |
||||
self.spawn_worker() |
||||
|
||||
# manage workers |
||||
self.manage_workers() |
||||
|
||||
def murder_workers(self): |
||||
"""\ |
||||
Kill unused/idle workers |
||||
""" |
||||
if not self.timeout: |
||||
return |
||||
workers = list(self.WORKERS.items()) |
||||
for (pid, worker) in workers: |
||||
try: |
||||
if time.time() - worker.tmp.last_update() <= self.timeout: |
||||
continue |
||||
except (OSError, ValueError): |
||||
continue |
||||
|
||||
if not worker.aborted: |
||||
self.log.critical("WORKER TIMEOUT (pid:%s)", pid) |
||||
worker.aborted = True |
||||
self.kill_worker(pid, signal.SIGABRT) |
||||
else: |
||||
self.kill_worker(pid, signal.SIGKILL) |
||||
|
||||
def reap_workers(self): |
||||
"""\ |
||||
Reap workers to avoid zombie processes |
||||
""" |
||||
try: |
||||
while True: |
||||
wpid, status = os.waitpid(-1, os.WNOHANG) |
||||
if not wpid: |
||||
break |
||||
if self.reexec_pid == wpid: |
||||
self.reexec_pid = 0 |
||||
else: |
||||
# A worker was terminated. If the termination reason was |
||||
# that it could not boot, we'll shut the arbiter down to avoid |
||||
# infinite start/stop cycles. |
||||
exitcode = status >> 8 |
||||
if exitcode == self.WORKER_BOOT_ERROR: |
||||
reason = "Worker failed to boot." |
||||
raise HaltServer(reason, self.WORKER_BOOT_ERROR) |
||||
if exitcode == self.APP_LOAD_ERROR: |
||||
reason = "App failed to load." |
||||
raise HaltServer(reason, self.APP_LOAD_ERROR) |
||||
|
||||
worker = self.WORKERS.pop(wpid, None) |
||||
if not worker: |
||||
continue |
||||
worker.tmp.close() |
||||
self.cfg.child_exit(self, worker) |
||||
except OSError as e: |
||||
if e.errno != errno.ECHILD: |
||||
raise |
||||
|
||||
def manage_workers(self): |
||||
"""\ |
||||
Maintain the number of workers by spawning or killing |
||||
as required. |
||||
""" |
||||
if len(self.WORKERS.keys()) < self.num_workers: |
||||
self.spawn_workers() |
||||
|
||||
workers = self.WORKERS.items() |
||||
workers = sorted(workers, key=lambda w: w[1].age) |
||||
while len(workers) > self.num_workers: |
||||
(pid, _) = workers.pop(0) |
||||
self.kill_worker(pid, signal.SIGTERM) |
||||
|
||||
active_worker_count = len(workers) |
||||
if self._last_logged_active_worker_count != active_worker_count: |
||||
self._last_logged_active_worker_count = active_worker_count |
||||
self.log.debug("{0} workers".format(active_worker_count), |
||||
extra={"metric": "gunicorn.workers", |
||||
"value": active_worker_count, |
||||
"mtype": "gauge"}) |
||||
|
||||
def spawn_worker(self): |
||||
self.worker_age += 1 |
||||
worker = self.worker_class(self.worker_age, self.pid, self.LISTENERS, |
||||
self.app, self.timeout / 2.0, |
||||
self.cfg, self.log) |
||||
self.cfg.pre_fork(self, worker) |
||||
pid = os.fork() |
||||
if pid != 0: |
||||
worker.pid = pid |
||||
self.WORKERS[pid] = worker |
||||
return pid |
||||
|
||||
# Do not inherit the temporary files of other workers |
||||
for sibling in self.WORKERS.values(): |
||||
sibling.tmp.close() |
||||
|
||||
# Process Child |
||||
worker.pid = os.getpid() |
||||
try: |
||||
util._setproctitle("worker [%s]" % self.proc_name) |
||||
self.log.info("Booting worker with pid: %s", worker.pid) |
||||
self.cfg.post_fork(self, worker) |
||||
worker.init_process() |
||||
sys.exit(0) |
||||
except SystemExit: |
||||
raise |
||||
except AppImportError as e: |
||||
self.log.debug("Exception while loading the application", |
||||
exc_info=True) |
||||
print("%s" % e, file=sys.stderr) |
||||
sys.stderr.flush() |
||||
sys.exit(self.APP_LOAD_ERROR) |
||||
except: |
||||
self.log.exception("Exception in worker process") |
||||
if not worker.booted: |
||||
sys.exit(self.WORKER_BOOT_ERROR) |
||||
sys.exit(-1) |
||||
finally: |
||||
self.log.info("Worker exiting (pid: %s)", worker.pid) |
||||
try: |
||||
worker.tmp.close() |
||||
self.cfg.worker_exit(self, worker) |
||||
except: |
||||
self.log.warning("Exception during worker exit:\n%s", |
||||
traceback.format_exc()) |
||||
|
||||
def spawn_workers(self): |
||||
"""\ |
||||
Spawn new workers as needed. |
||||
|
||||
This is where a worker process leaves the main loop |
||||
of the master process. |
||||
""" |
||||
|
||||
for _ in range(self.num_workers - len(self.WORKERS.keys())): |
||||
self.spawn_worker() |
||||
time.sleep(0.1 * random.random()) |
||||
|
||||
def kill_workers(self, sig): |
||||
"""\ |
||||
Kill all workers with the signal `sig` |
||||
:attr sig: `signal.SIG*` value |
||||
""" |
||||
worker_pids = list(self.WORKERS.keys()) |
||||
for pid in worker_pids: |
||||
self.kill_worker(pid, sig) |
||||
|
||||
def kill_worker(self, pid, sig): |
||||
"""\ |
||||
Kill a worker |
||||
|
||||
:attr pid: int, worker pid |
||||
:attr sig: `signal.SIG*` value |
||||
""" |
||||
try: |
||||
os.kill(pid, sig) |
||||
except OSError as e: |
||||
if e.errno == errno.ESRCH: |
||||
try: |
||||
worker = self.WORKERS.pop(pid) |
||||
worker.tmp.close() |
||||
self.cfg.worker_exit(self, worker) |
||||
return |
||||
except (KeyError, OSError): |
||||
return |
||||
raise |
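As a rough sketch of how the handlers above are exercised from outside (the pid value is an assumption), the master process reacts to plain POSIX signals:

import os
import signal

master_pid = 12345  # assumed pid of a running gunicorn master

os.kill(master_pid, signal.SIGTTIN)  # handle_ttin(): one more worker
os.kill(master_pid, signal.SIGTTOU)  # handle_ttou(): one worker fewer
os.kill(master_pid, signal.SIGHUP)   # handle_hup(): reload config, replace workers
os.kill(master_pid, signal.SIGUSR2)  # handle_usr2(): re-exec a new master for live upgrades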
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -0,0 +1,69 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
||||
|
||||
"""The debug module contains utilities and functions for better |
||||
debugging Gunicorn.""" |
||||
|
||||
import sys |
||||
import linecache |
||||
import re |
||||
import inspect |
||||
|
||||
__all__ = ['spew', 'unspew'] |
||||
|
||||
_token_spliter = re.compile(r'\W+') |
||||
|
||||
|
||||
class Spew(object): |
||||
|
||||
def __init__(self, trace_names=None, show_values=True): |
||||
self.trace_names = trace_names |
||||
self.show_values = show_values |
||||
|
||||
def __call__(self, frame, event, arg): |
||||
if event == 'line': |
||||
lineno = frame.f_lineno |
||||
if '__file__' in frame.f_globals: |
||||
filename = frame.f_globals['__file__'] |
||||
if (filename.endswith('.pyc') or |
||||
filename.endswith('.pyo')): |
||||
filename = filename[:-1] |
||||
name = frame.f_globals['__name__'] |
||||
line = linecache.getline(filename, lineno) |
||||
else: |
||||
name = '[unknown]' |
||||
try: |
||||
src = inspect.getsourcelines(frame) |
||||
line = src[lineno] |
||||
except IOError: |
||||
line = 'Unknown code named [%s]. VM instruction #%d' % ( |
||||
frame.f_code.co_name, frame.f_lasti) |
||||
if self.trace_names is None or name in self.trace_names: |
||||
print('%s:%s: %s' % (name, lineno, line.rstrip())) |
||||
if not self.show_values: |
||||
return self |
||||
details = [] |
||||
tokens = _token_spliter.split(line) |
||||
for tok in tokens: |
||||
if tok in frame.f_globals: |
||||
details.append('%s=%r' % (tok, frame.f_globals[tok])) |
||||
if tok in frame.f_locals: |
||||
details.append('%s=%r' % (tok, frame.f_locals[tok])) |
||||
if details: |
||||
print("\t%s" % ' '.join(details)) |
||||
return self |
||||
|
||||
|
||||
def spew(trace_names=None, show_values=False): |
||||
"""Install a trace hook which writes incredibly detailed logs |
||||
about what code is being executed to stdout. |
||||
""" |
||||
sys.settrace(Spew(trace_names, show_values)) |
||||
|
||||
|
||||
def unspew(): |
||||
"""Remove the trace hook installed by spew. |
||||
""" |
||||
sys.settrace(None) |
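A short usage sketch (the traced function is a stand-in) for the trace hook defined above:

from gunicorn import debug

def suspect():
    # stand-in for the code being investigated
    return sum(range(3))

debug.spew(show_values=True)   # trace every module; pass trace_names to narrow it down
try:
    suspect()
finally:
    debug.unspew()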
@ -0,0 +1,29 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
||||
|
||||
# We don't need to call super() in __init__ methods of our |
||||
# BaseException and Exception classes because we also define |
||||
# our own __str__ methods so there is no need to pass 'message' |
||||
# to the base class to get a meaningful output from 'str(exc)'. |
||||
# pylint: disable=super-init-not-called |
||||
|
||||
|
||||
# we inherit from BaseException here to make sure to not be caught |
||||
# at application level |
||||
class HaltServer(BaseException): |
||||
def __init__(self, reason, exit_status=1): |
||||
self.reason = reason |
||||
self.exit_status = exit_status |
||||
|
||||
def __str__(self): |
||||
return "<HaltServer %r %d>" % (self.reason, self.exit_status) |
||||
|
||||
|
||||
class ConfigError(Exception): |
||||
""" Exception raised on config error """ |
||||
|
||||
|
||||
class AppImportError(Exception): |
||||
""" Exception raised when loading an application """ |
@ -0,0 +1,478 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
||||
|
||||
import base64 |
||||
import binascii |
||||
import time |
||||
import logging |
||||
logging.Logger.manager.emittedNoHandlerWarning = 1 |
||||
from logging.config import fileConfig |
||||
try: |
||||
from logging.config import dictConfig |
||||
except ImportError: |
||||
# python 2.6 |
||||
dictConfig = None |
||||
import os |
||||
import socket |
||||
import sys |
||||
import threading |
||||
import traceback |
||||
|
||||
from gunicorn import util |
||||
from gunicorn.six import PY3, string_types |
||||
|
||||
|
||||
# syslog facility codes |
||||
SYSLOG_FACILITIES = { |
||||
"auth": 4, |
||||
"authpriv": 10, |
||||
"cron": 9, |
||||
"daemon": 3, |
||||
"ftp": 11, |
||||
"kern": 0, |
||||
"lpr": 6, |
||||
"mail": 2, |
||||
"news": 7, |
||||
"security": 4, # DEPRECATED |
||||
"syslog": 5, |
||||
"user": 1, |
||||
"uucp": 8, |
||||
"local0": 16, |
||||
"local1": 17, |
||||
"local2": 18, |
||||
"local3": 19, |
||||
"local4": 20, |
||||
"local5": 21, |
||||
"local6": 22, |
||||
"local7": 23 |
||||
} |
||||
|
||||
|
||||
CONFIG_DEFAULTS = dict( |
||||
version=1, |
||||
disable_existing_loggers=False, |
||||
|
||||
loggers={ |
||||
"root": {"level": "INFO", "handlers": ["console"]}, |
||||
"gunicorn.error": { |
||||
"level": "INFO", |
||||
"handlers": ["error_console"], |
||||
"propagate": True, |
||||
"qualname": "gunicorn.error" |
||||
}, |
||||
|
||||
"gunicorn.access": { |
||||
"level": "INFO", |
||||
"handlers": ["console"], |
||||
"propagate": True, |
||||
"qualname": "gunicorn.access" |
||||
} |
||||
}, |
||||
handlers={ |
||||
"console": { |
||||
"class": "logging.StreamHandler", |
||||
"formatter": "generic", |
||||
"stream": "sys.stdout" |
||||
}, |
||||
"error_console": { |
||||
"class": "logging.StreamHandler", |
||||
"formatter": "generic", |
||||
"stream": "sys.stderr" |
||||
}, |
||||
}, |
||||
formatters={ |
||||
"generic": { |
||||
"format": "%(asctime)s [%(process)d] [%(levelname)s] %(message)s", |
||||
"datefmt": "[%Y-%m-%d %H:%M:%S %z]", |
||||
"class": "logging.Formatter" |
||||
} |
||||
} |
||||
) |
||||
|
||||
|
||||
def loggers(): |
||||
""" get list of all loggers """ |
||||
root = logging.root |
||||
existing = root.manager.loggerDict.keys() |
||||
return [logging.getLogger(name) for name in existing] |
||||
|
||||
|
||||
class SafeAtoms(dict): |
||||
|
||||
def __init__(self, atoms): |
||||
dict.__init__(self) |
||||
for key, value in atoms.items(): |
||||
if isinstance(value, string_types): |
||||
self[key] = value.replace('"', '\\"') |
||||
else: |
||||
self[key] = value |
||||
|
||||
def __getitem__(self, k): |
||||
if k.startswith("{"): |
||||
kl = k.lower() |
||||
if kl in self: |
||||
return super(SafeAtoms, self).__getitem__(kl) |
||||
else: |
||||
return "-" |
||||
if k in self: |
||||
return super(SafeAtoms, self).__getitem__(k) |
||||
else: |
||||
return '-' |
||||
|
||||
|
||||
def parse_syslog_address(addr): |
||||
|
||||
# unix domain socket type depends on backend |
||||
# SysLogHandler will try both when given None |
||||
if addr.startswith("unix://"): |
||||
sock_type = None |
||||
|
||||
# set socket type only if explicitly requested |
||||
parts = addr.split("#", 1) |
||||
if len(parts) == 2: |
||||
addr = parts[0] |
||||
if parts[1] == "dgram": |
||||
sock_type = socket.SOCK_DGRAM |
||||
|
||||
return (sock_type, addr.split("unix://")[1]) |
||||
|
||||
if addr.startswith("udp://"): |
||||
addr = addr.split("udp://")[1] |
||||
socktype = socket.SOCK_DGRAM |
||||
elif addr.startswith("tcp://"): |
||||
addr = addr.split("tcp://")[1] |
||||
socktype = socket.SOCK_STREAM |
||||
else: |
||||
raise RuntimeError("invalid syslog address") |
||||
|
||||
if '[' in addr and ']' in addr: |
||||
host = addr.split(']')[0][1:].lower() |
||||
elif ':' in addr: |
||||
host = addr.split(':')[0].lower() |
||||
elif addr == "": |
||||
host = "localhost" |
||||
else: |
||||
host = addr.lower() |
||||
|
||||
addr = addr.split(']')[-1] |
||||
if ":" in addr: |
||||
port = addr.split(':', 1)[1] |
||||
if not port.isdigit(): |
||||
raise RuntimeError("%r is not a valid port number." % port) |
||||
port = int(port) |
||||
else: |
||||
port = 514 |
||||
|
||||
return (socktype, (host, port)) |
||||
|
||||
|
||||
class Logger(object): |
||||
|
||||
LOG_LEVELS = { |
||||
"critical": logging.CRITICAL, |
||||
"error": logging.ERROR, |
||||
"warning": logging.WARNING, |
||||
"info": logging.INFO, |
||||
"debug": logging.DEBUG |
||||
} |
||||
loglevel = logging.INFO |
||||
|
||||
error_fmt = r"%(asctime)s [%(process)d] [%(levelname)s] %(message)s" |
||||
datefmt = r"[%Y-%m-%d %H:%M:%S %z]" |
||||
|
||||
access_fmt = "%(message)s" |
||||
syslog_fmt = "[%(process)d] %(message)s" |
||||
|
||||
atoms_wrapper_class = SafeAtoms |
||||
|
||||
def __init__(self, cfg): |
||||
self.error_log = logging.getLogger("gunicorn.error") |
||||
self.error_log.propagate = False |
||||
self.access_log = logging.getLogger("gunicorn.access") |
||||
self.access_log.propagate = False |
||||
self.error_handlers = [] |
||||
self.access_handlers = [] |
||||
self.logfile = None |
||||
self.lock = threading.Lock() |
||||
self.cfg = cfg |
||||
self.setup(cfg) |
||||
|
||||
def setup(self, cfg): |
||||
self.loglevel = self.LOG_LEVELS.get(cfg.loglevel.lower(), logging.INFO) |
||||
self.error_log.setLevel(self.loglevel) |
||||
self.access_log.setLevel(logging.INFO) |
||||
|
||||
# set gunicorn.error handler |
||||
if self.cfg.capture_output and cfg.errorlog != "-": |
||||
for stream in sys.stdout, sys.stderr: |
||||
stream.flush() |
||||
|
||||
self.logfile = open(cfg.errorlog, 'a+') |
||||
os.dup2(self.logfile.fileno(), sys.stdout.fileno()) |
||||
os.dup2(self.logfile.fileno(), sys.stderr.fileno()) |
||||
|
||||
self._set_handler(self.error_log, cfg.errorlog, |
||||
logging.Formatter(self.error_fmt, self.datefmt)) |
||||
|
||||
# set gunicorn.access handler |
||||
if cfg.accesslog is not None: |
||||
self._set_handler(self.access_log, cfg.accesslog, |
||||
fmt=logging.Formatter(self.access_fmt), stream=sys.stdout) |
||||
|
||||
# set syslog handler |
||||
if cfg.syslog: |
||||
self._set_syslog_handler( |
||||
self.error_log, cfg, self.syslog_fmt, "error" |
||||
) |
||||
if not cfg.disable_redirect_access_to_syslog: |
||||
self._set_syslog_handler( |
||||
self.access_log, cfg, self.syslog_fmt, "access" |
||||
) |
||||
|
||||
if dictConfig is None and cfg.logconfig_dict: |
||||
util.warn("Dictionary-based log configuration requires " |
||||
"Python 2.7 or above.") |
||||
|
||||
if dictConfig and cfg.logconfig_dict: |
||||
config = CONFIG_DEFAULTS.copy() |
||||
config.update(cfg.logconfig_dict) |
||||
try: |
||||
dictConfig(config) |
||||
except ( |
||||
AttributeError, |
||||
ImportError, |
||||
ValueError, |
||||
TypeError |
||||
) as exc: |
||||
raise RuntimeError(str(exc)) |
||||
elif cfg.logconfig: |
||||
if os.path.exists(cfg.logconfig): |
||||
defaults = CONFIG_DEFAULTS.copy() |
||||
defaults['__file__'] = cfg.logconfig |
||||
defaults['here'] = os.path.dirname(cfg.logconfig) |
||||
fileConfig(cfg.logconfig, defaults=defaults, |
||||
disable_existing_loggers=False) |
||||
else: |
||||
msg = "Error: log config '%s' not found" |
||||
raise RuntimeError(msg % cfg.logconfig) |
||||
|
||||
def critical(self, msg, *args, **kwargs): |
||||
self.error_log.critical(msg, *args, **kwargs) |
||||
|
||||
def error(self, msg, *args, **kwargs): |
||||
self.error_log.error(msg, *args, **kwargs) |
||||
|
||||
def warning(self, msg, *args, **kwargs): |
||||
self.error_log.warning(msg, *args, **kwargs) |
||||
|
||||
def info(self, msg, *args, **kwargs): |
||||
self.error_log.info(msg, *args, **kwargs) |
||||
|
||||
def debug(self, msg, *args, **kwargs): |
||||
self.error_log.debug(msg, *args, **kwargs) |
||||
|
||||
def exception(self, msg, *args, **kwargs): |
||||
self.error_log.exception(msg, *args, **kwargs) |
||||
|
||||
def log(self, lvl, msg, *args, **kwargs): |
||||
if isinstance(lvl, string_types): |
||||
lvl = self.LOG_LEVELS.get(lvl.lower(), logging.INFO) |
||||
self.error_log.log(lvl, msg, *args, **kwargs) |
||||
|
||||
def atoms(self, resp, req, environ, request_time): |
||||
""" Gets atoms for log formatting. |
||||
""" |
||||
status = resp.status |
||||
if isinstance(status, str): |
||||
status = status.split(None, 1)[0] |
||||
atoms = { |
||||
'h': environ.get('REMOTE_ADDR', '-'), |
||||
'l': '-', |
||||
'u': self._get_user(environ) or '-', |
||||
't': self.now(), |
||||
'r': "%s %s %s" % (environ['REQUEST_METHOD'], |
||||
environ['RAW_URI'], environ["SERVER_PROTOCOL"]), |
||||
's': status, |
||||
'm': environ.get('REQUEST_METHOD'), |
||||
'U': environ.get('PATH_INFO'), |
||||
'q': environ.get('QUERY_STRING'), |
||||
'H': environ.get('SERVER_PROTOCOL'), |
||||
'b': getattr(resp, 'sent', None) and str(resp.sent) or '-', |
||||
'B': getattr(resp, 'sent', None), |
||||
'f': environ.get('HTTP_REFERER', '-'), |
||||
'a': environ.get('HTTP_USER_AGENT', '-'), |
||||
'T': request_time.seconds, |
||||
'D': (request_time.seconds*1000000) + request_time.microseconds, |
||||
'L': "%d.%06d" % (request_time.seconds, request_time.microseconds), |
||||
'p': "<%s>" % os.getpid() |
||||
} |
||||
|
||||
# add request headers |
||||
if hasattr(req, 'headers'): |
||||
req_headers = req.headers |
||||
else: |
||||
req_headers = req |
||||
|
||||
if hasattr(req_headers, "items"): |
||||
req_headers = req_headers.items() |
||||
|
||||
atoms.update(dict([("{%s}i" % k.lower(), v) for k, v in req_headers])) |
||||
|
||||
resp_headers = resp.headers |
||||
if hasattr(resp_headers, "items"): |
||||
resp_headers = resp_headers.items() |
||||
|
||||
# add response headers |
||||
atoms.update(dict([("{%s}o" % k.lower(), v) for k, v in resp_headers])) |
||||
|
||||
# add environ variables |
||||
environ_variables = environ.items() |
||||
atoms.update(dict([("{%s}e" % k.lower(), v) for k, v in environ_variables])) |
||||
|
||||
return atoms |
||||
|
||||
def access(self, resp, req, environ, request_time): |
||||
""" See http://httpd.apache.org/docs/2.0/logs.html#combined |
||||
for format details |
||||
""" |
||||
|
||||
if not (self.cfg.accesslog or self.cfg.logconfig or |
||||
self.cfg.logconfig_dict or |
||||
(self.cfg.syslog and not self.cfg.disable_access_log_redirection)): |
||||
return |
||||
|
||||
# wrap atoms: |
||||
# - make sure atoms will be tested case insensitively |
||||
# - if an atom doesn't exist, replace it with '-' |
||||
safe_atoms = self.atoms_wrapper_class(self.atoms(resp, req, environ, |
||||
request_time)) |
||||
|
||||
try: |
||||
self.access_log.info(self.cfg.access_log_format, safe_atoms) |
||||
except: |
||||
self.error(traceback.format_exc()) |
||||
|
||||
def now(self): |
||||
""" return date in Apache Common Log Format """ |
||||
return time.strftime('[%d/%b/%Y:%H:%M:%S %z]') |
||||
|
||||
def reopen_files(self): |
||||
if self.cfg.capture_output and self.cfg.errorlog != "-": |
||||
for stream in sys.stdout, sys.stderr: |
||||
stream.flush() |
||||
|
||||
with self.lock: |
||||
if self.logfile is not None: |
||||
self.logfile.close() |
||||
self.logfile = open(self.cfg.errorlog, 'a+') |
||||
os.dup2(self.logfile.fileno(), sys.stdout.fileno()) |
||||
os.dup2(self.logfile.fileno(), sys.stderr.fileno()) |
||||
|
||||
|
||||
for log in loggers(): |
||||
for handler in log.handlers: |
||||
if isinstance(handler, logging.FileHandler): |
||||
handler.acquire() |
||||
try: |
||||
if handler.stream: |
||||
handler.close() |
||||
handler.stream = handler._open() |
||||
finally: |
||||
handler.release() |
||||
|
||||
def close_on_exec(self): |
||||
for log in loggers(): |
||||
for handler in log.handlers: |
||||
if isinstance(handler, logging.FileHandler): |
||||
handler.acquire() |
||||
try: |
||||
if handler.stream: |
||||
util.close_on_exec(handler.stream.fileno()) |
||||
finally: |
||||
handler.release() |
||||
|
||||
def _get_gunicorn_handler(self, log): |
||||
for h in log.handlers: |
||||
if getattr(h, "_gunicorn", False): |
||||
return h |
||||
|
||||
def _set_handler(self, log, output, fmt, stream=None): |
||||
# remove previous gunicorn log handler |
||||
h = self._get_gunicorn_handler(log) |
||||
if h: |
||||
log.handlers.remove(h) |
||||
|
||||
if output is not None: |
||||
if output == "-": |
||||
h = logging.StreamHandler(stream) |
||||
else: |
||||
util.check_is_writeable(output) |
||||
h = logging.FileHandler(output) |
||||
# make sure the user can reopen the file |
||||
try: |
||||
os.chown(h.baseFilename, self.cfg.user, self.cfg.group) |
||||
except OSError: |
||||
# it's probably OK here; we assume the user has given |
||||
# /dev/null as a parameter. |
||||
pass |
||||
|
||||
h.setFormatter(fmt) |
||||
h._gunicorn = True |
||||
log.addHandler(h) |
||||
|
||||
def _set_syslog_handler(self, log, cfg, fmt, name): |
||||
# setup format |
||||
if not cfg.syslog_prefix: |
||||
prefix = cfg.proc_name.replace(":", ".") |
||||
else: |
||||
prefix = cfg.syslog_prefix |
||||
|
||||
prefix = "gunicorn.%s.%s" % (prefix, name) |
||||
|
||||
# set format |
||||
fmt = logging.Formatter(r"%s: %s" % (prefix, fmt)) |
||||
|
||||
# syslog facility |
||||
try: |
||||
facility = SYSLOG_FACILITIES[cfg.syslog_facility.lower()] |
||||
except KeyError: |
||||
raise RuntimeError("unknown facility name") |
||||
|
||||
# parse syslog address |
||||
socktype, addr = parse_syslog_address(cfg.syslog_addr) |
||||
|
||||
# finally setup the syslog handler |
||||
if sys.version_info >= (2, 7): |
||||
h = logging.handlers.SysLogHandler(address=addr, |
||||
facility=facility, socktype=socktype) |
||||
else: |
||||
# socktype is only supported in Python 2.7 and up |
||||
# fix issue #541 |
||||
h = logging.handlers.SysLogHandler(address=addr, |
||||
facility=facility) |
||||
|
||||
h.setFormatter(fmt) |
||||
h._gunicorn = True |
||||
log.addHandler(h) |
||||
|
||||
def _get_user(self, environ): |
||||
user = None |
||||
http_auth = environ.get("HTTP_AUTHORIZATION") |
||||
if http_auth and http_auth.startswith('Basic'): |
||||
auth = http_auth.split(" ", 1) |
||||
if len(auth) == 2: |
||||
try: |
||||
# b64decode doesn't accept unicode in Python < 3.3 |
||||
# so we need to convert it to a byte string |
||||
auth = base64.b64decode(auth[1].strip().encode('utf-8')) |
||||
if PY3: # b64decode returns a byte string in Python 3 |
||||
auth = auth.decode('utf-8') |
||||
auth = auth.split(":", 1) |
||||
except (TypeError, binascii.Error, UnicodeDecodeError) as exc: |
||||
self.debug("Couldn't get username: %s", exc) |
||||
return user |
||||
if len(auth) == 2: |
||||
user = auth[0] |
||||
return user |
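For illustration, a sketch (the file name and the chosen overrides are assumptions) of the kind of `logconfig_dict` that `setup()` merges over `CONFIG_DEFAULTS` above; note that the merge is a shallow `dict.update()`, so a `loggers` key replaces the default one wholesale:

# gunicorn_conf.py (hypothetical), used as: gunicorn -c gunicorn_conf.py app:app
logconfig_dict = {
    'loggers': {
        'root': {'level': 'INFO', 'handlers': ['console']},
        'gunicorn.error': {
            'level': 'WARNING',
            'handlers': ['error_console'],
            'propagate': True,
            'qualname': 'gunicorn.error',
        },
        'gunicorn.access': {
            'level': 'INFO',
            'handlers': ['console'],
            'propagate': True,
            'qualname': 'gunicorn.access',
        },
    },
}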
@ -0,0 +1,9 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
||||
|
||||
from gunicorn.http.message import Message, Request |
||||
from gunicorn.http.parser import RequestParser |
||||
|
||||
__all__ = ['Message', 'Request', 'RequestParser'] |
@ -0,0 +1,67 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
||||
|
||||
import errno |
||||
import os |
||||
import sys |
||||
|
||||
try: |
||||
import ctypes |
||||
import ctypes.util |
||||
except MemoryError: |
||||
# selinux execmem denial |
||||
# https://bugzilla.redhat.com/show_bug.cgi?id=488396 |
||||
raise ImportError |
||||
|
||||
SUPPORTED_PLATFORMS = ( |
||||
'darwin', |
||||
'freebsd', |
||||
'dragonfly', |
||||
'linux2') |
||||
|
||||
if sys.platform not in SUPPORTED_PLATFORMS: |
||||
raise ImportError("sendfile isn't supported on this platform") |
||||
|
||||
_libc = ctypes.CDLL(ctypes.util.find_library("c"), use_errno=True) |
||||
_sendfile = _libc.sendfile |
||||
|
||||
|
||||
def sendfile(fdout, fdin, offset, nbytes): |
||||
if sys.platform == 'darwin': |
||||
_sendfile.argtypes = [ctypes.c_int, ctypes.c_int, ctypes.c_uint64, |
||||
ctypes.POINTER(ctypes.c_uint64), ctypes.c_voidp, |
||||
ctypes.c_int] |
||||
_nbytes = ctypes.c_uint64(nbytes) |
||||
result = _sendfile(fdin, fdout, offset, _nbytes, None, 0) |
||||
|
||||
if result == -1: |
||||
e = ctypes.get_errno() |
||||
if e == errno.EAGAIN and _nbytes.value is not None: |
||||
return _nbytes.value |
||||
raise OSError(e, os.strerror(e)) |
||||
return _nbytes.value |
||||
elif sys.platform in ('freebsd', 'dragonfly',): |
||||
_sendfile.argtypes = [ctypes.c_int, ctypes.c_int, ctypes.c_uint64, |
||||
ctypes.c_uint64, ctypes.c_voidp, |
||||
ctypes.POINTER(ctypes.c_uint64), ctypes.c_int] |
||||
_sbytes = ctypes.c_uint64() |
||||
result = _sendfile(fdin, fdout, offset, nbytes, None, _sbytes, 0) |
||||
if result == -1: |
||||
e = ctypes.get_errno() |
||||
if e == errno.EAGAIN and _sbytes.value is not None: |
||||
return _sbytes.value |
||||
raise OSError(e, os.strerror(e)) |
||||
return _sbytes.value |
||||
|
||||
else: |
||||
_sendfile.argtypes = [ctypes.c_int, ctypes.c_int, |
||||
ctypes.POINTER(ctypes.c_uint64), ctypes.c_size_t] |
||||
|
||||
_offset = ctypes.c_uint64(offset) |
||||
sent = _sendfile(fdout, fdin, _offset, nbytes) |
||||
if sent == -1: |
||||
e = ctypes.get_errno() |
||||
raise OSError(e, os.strerror(e)) |
||||
return sent |
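A small sketch (the socket and path are assumptions) of how the wrapper above is typically driven until a whole file has been written to a connected socket:

import os

def send_whole_file(sock, path):
    # repeatedly call the ctypes sendfile() wrapper defined above
    fd = os.open(path, os.O_RDONLY)
    try:
        offset = 0
        remaining = os.fstat(fd).st_size
        while remaining > 0:
            sent = sendfile(sock.fileno(), fd, offset, remaining)
            offset += sent
            remaining -= sent
    finally:
        os.close(fd)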
@ -0,0 +1,259 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
||||
|
||||
from gunicorn.http.errors import (NoMoreData, ChunkMissingTerminator, |
||||
InvalidChunkSize) |
||||
from gunicorn import six |
||||
|
||||
|
||||
class ChunkedReader(object): |
||||
def __init__(self, req, unreader): |
||||
self.req = req |
||||
self.parser = self.parse_chunked(unreader) |
||||
self.buf = six.BytesIO() |
||||
|
||||
def read(self, size): |
||||
if not isinstance(size, six.integer_types): |
||||
raise TypeError("size must be an integral type") |
||||
if size < 0: |
||||
raise ValueError("Size must be positive.") |
||||
if size == 0: |
||||
return b"" |
||||
|
||||
if self.parser: |
||||
while self.buf.tell() < size: |
||||
try: |
||||
self.buf.write(six.next(self.parser)) |
||||
except StopIteration: |
||||
self.parser = None |
||||
break |
||||
|
||||
data = self.buf.getvalue() |
||||
ret, rest = data[:size], data[size:] |
||||
self.buf = six.BytesIO() |
||||
self.buf.write(rest) |
||||
return ret |
||||
|
||||
def parse_trailers(self, unreader, data): |
||||
buf = six.BytesIO() |
||||
buf.write(data) |
||||
|
||||
idx = buf.getvalue().find(b"\r\n\r\n") |
||||
done = buf.getvalue()[:2] == b"\r\n" |
||||
while idx < 0 and not done: |
||||
self.get_data(unreader, buf) |
||||
idx = buf.getvalue().find(b"\r\n\r\n") |
||||
done = buf.getvalue()[:2] == b"\r\n" |
||||
if done: |
||||
unreader.unread(buf.getvalue()[2:]) |
||||
return b"" |
||||
self.req.trailers = self.req.parse_headers(buf.getvalue()[:idx]) |
||||
unreader.unread(buf.getvalue()[idx + 4:]) |
||||
|
||||
def parse_chunked(self, unreader): |
||||
(size, rest) = self.parse_chunk_size(unreader) |
||||
while size > 0: |
||||
while size > len(rest): |
||||
size -= len(rest) |
||||
yield rest |
||||
rest = unreader.read() |
||||
if not rest: |
||||
raise NoMoreData() |
||||
yield rest[:size] |
||||
# Remove \r\n after chunk |
||||
rest = rest[size:] |
||||
while len(rest) < 2: |
||||
rest += unreader.read() |
||||
if rest[:2] != b'\r\n': |
||||
raise ChunkMissingTerminator(rest[:2]) |
||||
(size, rest) = self.parse_chunk_size(unreader, data=rest[2:]) |
||||
|
||||
def parse_chunk_size(self, unreader, data=None): |
||||
buf = six.BytesIO() |
||||
if data is not None: |
||||
buf.write(data) |
||||
|
||||
idx = buf.getvalue().find(b"\r\n") |
||||
while idx < 0: |
||||
self.get_data(unreader, buf) |
||||
idx = buf.getvalue().find(b"\r\n") |
||||
|
||||
data = buf.getvalue() |
||||
line, rest_chunk = data[:idx], data[idx + 2:] |
||||
|
||||
chunk_size = line.split(b";", 1)[0].strip() |
||||
try: |
||||
chunk_size = int(chunk_size, 16) |
||||
except ValueError: |
||||
raise InvalidChunkSize(chunk_size) |
||||
|
||||
if chunk_size == 0: |
||||
try: |
||||
self.parse_trailers(unreader, rest_chunk) |
||||
except NoMoreData: |
||||
pass |
||||
return (0, None) |
||||
return (chunk_size, rest_chunk) |
||||
|
||||
def get_data(self, unreader, buf): |
||||
data = unreader.read() |
||||
if not data: |
||||
raise NoMoreData() |
||||
buf.write(data) |
||||
|
||||
|
||||
class LengthReader(object): |
||||
def __init__(self, unreader, length): |
||||
self.unreader = unreader |
||||
self.length = length |
||||
|
||||
def read(self, size): |
||||
if not isinstance(size, six.integer_types): |
||||
raise TypeError("size must be an integral type") |
||||
|
||||
size = min(self.length, size) |
||||
if size < 0: |
||||
raise ValueError("Size must be positive.") |
||||
if size == 0: |
||||
return b"" |
||||
|
||||
buf = six.BytesIO() |
||||
data = self.unreader.read() |
||||
while data: |
||||
buf.write(data) |
||||
if buf.tell() >= size: |
||||
break |
||||
data = self.unreader.read() |
||||
|
||||
buf = buf.getvalue() |
||||
ret, rest = buf[:size], buf[size:] |
||||
self.unreader.unread(rest) |
||||
self.length -= size |
||||
return ret |
||||
|
||||
|
||||
class EOFReader(object): |
||||
def __init__(self, unreader): |
||||
self.unreader = unreader |
||||
self.buf = six.BytesIO() |
||||
self.finished = False |
||||
|
||||
def read(self, size): |
||||
if not isinstance(size, six.integer_types): |
||||
raise TypeError("size must be an integral type") |
||||
if size < 0: |
||||
raise ValueError("Size must be positive.") |
||||
if size == 0: |
||||
return b"" |
||||
|
||||
if self.finished: |
||||
data = self.buf.getvalue() |
||||
ret, rest = data[:size], data[size:] |
||||
self.buf = six.BytesIO() |
||||
self.buf.write(rest) |
||||
return ret |
||||
|
||||
data = self.unreader.read() |
||||
while data: |
||||
self.buf.write(data) |
||||
if self.buf.tell() > size: |
||||
break |
||||
data = self.unreader.read() |
||||
|
||||
if not data: |
||||
self.finished = True |
||||
|
||||
data = self.buf.getvalue() |
||||
ret, rest = data[:size], data[size:] |
||||
self.buf = six.BytesIO() |
||||
self.buf.write(rest) |
||||
return ret |
||||
|
||||
|
||||
class Body(object): |
||||
def __init__(self, reader): |
||||
self.reader = reader |
||||
self.buf = six.BytesIO() |
||||
|
||||
def __iter__(self): |
||||
return self |
||||
|
||||
def __next__(self): |
||||
ret = self.readline() |
||||
if not ret: |
||||
raise StopIteration() |
||||
return ret |
||||
next = __next__ |
||||
|
||||
def getsize(self, size): |
||||
if size is None: |
||||
return six.MAXSIZE |
||||
elif not isinstance(size, six.integer_types): |
||||
raise TypeError("size must be an integral type") |
||||
elif size < 0: |
||||
return six.MAXSIZE |
||||
return size |
||||
|
||||
def read(self, size=None): |
||||
size = self.getsize(size) |
||||
if size == 0: |
||||
return b"" |
||||
|
||||
if size < self.buf.tell(): |
||||
data = self.buf.getvalue() |
||||
ret, rest = data[:size], data[size:] |
||||
self.buf = six.BytesIO() |
||||
self.buf.write(rest) |
||||
return ret |
||||
|
||||
while size > self.buf.tell(): |
||||
data = self.reader.read(1024) |
||||
if not data: |
||||
break |
||||
self.buf.write(data) |
||||
|
||||
data = self.buf.getvalue() |
||||
ret, rest = data[:size], data[size:] |
||||
self.buf = six.BytesIO() |
||||
self.buf.write(rest) |
||||
return ret |
||||
|
||||
def readline(self, size=None): |
||||
size = self.getsize(size) |
||||
if size == 0: |
||||
return b"" |
||||
|
||||
data = self.buf.getvalue() |
||||
self.buf = six.BytesIO() |
||||
|
||||
ret = [] |
||||
while 1: |
||||
idx = data.find(b"\n", 0, size) |
||||
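# advance past the found newline; otherwise, if enough data is already
# buffered, cut at `size`; else leave idx == 0 (falsy) and keep reading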
idx = idx + 1 if idx >= 0 else size if len(data) >= size else 0 |
||||
if idx: |
||||
ret.append(data[:idx]) |
||||
self.buf.write(data[idx:]) |
||||
break |
||||
|
||||
ret.append(data) |
||||
size -= len(data) |
||||
data = self.reader.read(min(1024, size)) |
||||
if not data: |
||||
break |
||||
|
||||
return b"".join(ret) |
||||
|
||||
def readlines(self, size=None): |
||||
ret = [] |
||||
data = self.read() |
||||
while data: |
||||
pos = data.find(b"\n") |
||||
if pos < 0: |
||||
ret.append(data) |
||||
data = b"" |
||||
else: |
||||
line, data = data[:pos + 1], data[pos + 1:] |
||||
ret.append(line) |
||||
return ret |
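As a rough illustration (the application itself is an assumption), this `Body` object is what a WSGI app ultimately reads from as `environ['wsgi.input']`:

def echo_app(environ, start_response):
    # environ['wsgi.input'] is a Body wrapping one of the readers above
    length = int(environ.get('CONTENT_LENGTH') or 0)
    payload = environ['wsgi.input'].read(length)
    start_response('200 OK', [('Content-Type', 'application/octet-stream')])
    return [payload]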
@ -0,0 +1,120 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
||||
|
||||
# We don't need to call super() in __init__ methods of our |
||||
# BaseException and Exception classes because we also define |
||||
# our own __str__ methods so there is no need to pass 'message' |
||||
# to the base class to get a meaningful output from 'str(exc)'. |
||||
# pylint: disable=super-init-not-called |
||||
|
||||
|
||||
class ParseException(Exception): |
||||
pass |
||||
|
||||
|
||||
class NoMoreData(IOError): |
||||
def __init__(self, buf=None): |
||||
self.buf = buf |
||||
|
||||
def __str__(self): |
||||
return "No more data after: %r" % self.buf |
||||
|
||||
|
||||
class InvalidRequestLine(ParseException): |
||||
def __init__(self, req): |
||||
self.req = req |
||||
self.code = 400 |
||||
|
||||
def __str__(self): |
||||
return "Invalid HTTP request line: %r" % self.req |
||||
|
||||
|
||||
class InvalidRequestMethod(ParseException): |
||||
def __init__(self, method): |
||||
self.method = method |
||||
|
||||
def __str__(self): |
||||
return "Invalid HTTP method: %r" % self.method |
||||
|
||||
|
||||
class InvalidHTTPVersion(ParseException): |
||||
def __init__(self, version): |
||||
self.version = version |
||||
|
||||
def __str__(self): |
||||
return "Invalid HTTP Version: %r" % self.version |
||||
|
||||
|
||||
class InvalidHeader(ParseException): |
||||
def __init__(self, hdr, req=None): |
||||
self.hdr = hdr |
||||
self.req = req |
||||
|
||||
def __str__(self): |
||||
return "Invalid HTTP Header: %r" % self.hdr |
||||
|
||||
|
||||
class InvalidHeaderName(ParseException): |
||||
def __init__(self, hdr): |
||||
self.hdr = hdr |
||||
|
||||
def __str__(self): |
||||
return "Invalid HTTP header name: %r" % self.hdr |
||||
|
||||
|
||||
class InvalidChunkSize(IOError): |
||||
def __init__(self, data): |
||||
self.data = data |
||||
|
||||
def __str__(self): |
||||
return "Invalid chunk size: %r" % self.data |
||||
|
||||
|
||||
class ChunkMissingTerminator(IOError): |
||||
def __init__(self, term): |
||||
self.term = term |
||||
|
||||
def __str__(self): |
||||
return "Invalid chunk terminator is not '\\r\\n': %r" % self.term |
||||
|
||||
|
||||
class LimitRequestLine(ParseException): |
||||
def __init__(self, size, max_size): |
||||
self.size = size |
||||
self.max_size = max_size |
||||
|
||||
def __str__(self): |
||||
return "Request Line is too large (%s > %s)" % (self.size, self.max_size) |
||||
|
||||
|
||||
class LimitRequestHeaders(ParseException): |
||||
def __init__(self, msg): |
||||
self.msg = msg |
||||
|
||||
def __str__(self): |
||||
return self.msg |
||||
|
||||
|
||||
class InvalidProxyLine(ParseException): |
||||
def __init__(self, line): |
||||
self.line = line |
||||
self.code = 400 |
||||
|
||||
def __str__(self): |
||||
return "Invalid PROXY line: %r" % self.line |
||||
|
||||
|
||||
class ForbiddenProxyRequest(ParseException): |
||||
def __init__(self, host): |
||||
self.host = host |
||||
self.code = 403 |
||||
|
||||
def __str__(self): |
||||
return "Proxy request from %r not allowed" % self.host |
||||
|
||||
|
||||
class InvalidSchemeHeaders(ParseException): |
||||
def __str__(self): |
||||
return "Contradictory scheme headers" |
@ -0,0 +1,363 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
||||
|
||||
import re |
||||
import socket |
||||
from errno import ENOTCONN |
||||
|
||||
from gunicorn._compat import bytes_to_str |
||||
from gunicorn.http.unreader import SocketUnreader |
||||
from gunicorn.http.body import ChunkedReader, LengthReader, EOFReader, Body |
||||
from gunicorn.http.errors import (InvalidHeader, InvalidHeaderName, NoMoreData, |
||||
InvalidRequestLine, InvalidRequestMethod, InvalidHTTPVersion, |
||||
LimitRequestLine, LimitRequestHeaders) |
||||
from gunicorn.http.errors import InvalidProxyLine, ForbiddenProxyRequest |
||||
from gunicorn.http.errors import InvalidSchemeHeaders |
||||
from gunicorn.six import BytesIO, string_types |
||||
from gunicorn.util import split_request_uri |
||||
|
||||
MAX_REQUEST_LINE = 8190 |
||||
MAX_HEADERS = 32768 |
||||
DEFAULT_MAX_HEADERFIELD_SIZE = 8190 |
||||
|
||||
HEADER_RE = re.compile(r"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\"]") |
||||
METH_RE = re.compile(r"[A-Z0-9$-_.]{3,20}") |
||||
VERSION_RE = re.compile(r"HTTP/(\d+)\.(\d+)") |
||||
|
||||
|
||||
class Message(object): |
||||
def __init__(self, cfg, unreader): |
||||
self.cfg = cfg |
||||
self.unreader = unreader |
||||
self.version = None |
||||
self.headers = [] |
||||
self.trailers = [] |
||||
self.body = None |
||||
self.scheme = "https" if cfg.is_ssl else "http" |
||||
|
||||
# set headers limits |
||||
self.limit_request_fields = cfg.limit_request_fields |
||||
if (self.limit_request_fields <= 0 |
||||
or self.limit_request_fields > MAX_HEADERS): |
||||
self.limit_request_fields = MAX_HEADERS |
||||
self.limit_request_field_size = cfg.limit_request_field_size |
||||
if self.limit_request_field_size < 0: |
||||
self.limit_request_field_size = DEFAULT_MAX_HEADERFIELD_SIZE |
||||
|
||||
# set max header buffer size |
||||
max_header_field_size = self.limit_request_field_size or DEFAULT_MAX_HEADERFIELD_SIZE |
||||
self.max_buffer_headers = self.limit_request_fields * \ |
||||
(max_header_field_size + 2) + 4 |
||||
|
||||
unused = self.parse(self.unreader) |
||||
self.unreader.unread(unused) |
||||
self.set_body_reader() |
||||
|
||||
def parse(self, unreader): |
||||
raise NotImplementedError() |
||||
|
||||
def parse_headers(self, data): |
||||
cfg = self.cfg |
||||
headers = [] |
||||
|
||||
# Split lines on \r\n keeping the \r\n on each line |
||||
lines = [bytes_to_str(line) + "\r\n" for line in data.split(b"\r\n")] |
||||
|
||||
# handle scheme headers |
||||
scheme_header = False |
||||
secure_scheme_headers = {} |
||||
if '*' in cfg.forwarded_allow_ips: |
||||
secure_scheme_headers = cfg.secure_scheme_headers |
||||
elif isinstance(self.unreader, SocketUnreader): |
||||
remote_addr = self.unreader.sock.getpeername() |
||||
if isinstance(remote_addr, tuple): |
||||
remote_host = remote_addr[0] |
||||
if remote_host in cfg.forwarded_allow_ips: |
||||
secure_scheme_headers = cfg.secure_scheme_headers |
||||
elif isinstance(remote_addr, string_types): |
||||
secure_scheme_headers = cfg.secure_scheme_headers |
||||
|
||||
# Parse headers into key/value pairs paying attention |
||||
# to continuation lines. |
||||
while lines: |
||||
if len(headers) >= self.limit_request_fields: |
||||
raise LimitRequestHeaders("limit request headers fields") |
||||
|
||||
# Parse initial header name : value pair. |
||||
curr = lines.pop(0) |
||||
header_length = len(curr) |
||||
if curr.find(":") < 0: |
||||
raise InvalidHeader(curr.strip()) |
||||
name, value = curr.split(":", 1) |
||||
name = name.rstrip(" \t").upper() |
||||
if HEADER_RE.search(name): |
||||
raise InvalidHeaderName(name) |
||||
|
||||
name, value = name.strip(), [value.lstrip()] |
||||
|
||||
# Consume value continuation lines |
||||
while lines and lines[0].startswith((" ", "\t")): |
||||
curr = lines.pop(0) |
||||
header_length += len(curr) |
||||
if header_length > self.limit_request_field_size > 0: |
||||
raise LimitRequestHeaders("limit request headers " |
||||
+ "fields size") |
||||
value.append(curr) |
||||
value = ''.join(value).rstrip() |
||||
|
||||
if header_length > self.limit_request_field_size > 0: |
||||
raise LimitRequestHeaders("limit request headers fields size") |
||||
|
||||
if name in secure_scheme_headers: |
||||
secure = value == secure_scheme_headers[name] |
||||
scheme = "https" if secure else "http" |
||||
if scheme_header: |
||||
if scheme != self.scheme: |
||||
raise InvalidSchemeHeaders() |
||||
else: |
||||
scheme_header = True |
||||
self.scheme = scheme |
||||
|
||||
headers.append((name, value)) |
||||
|
||||
return headers |
||||
|
||||
def set_body_reader(self): |
||||
chunked = False |
||||
content_length = None |
||||
for (name, value) in self.headers: |
||||
if name == "CONTENT-LENGTH": |
||||
content_length = value |
||||
elif name == "TRANSFER-ENCODING": |
||||
chunked = value.lower() == "chunked" |
||||
elif name == "SEC-WEBSOCKET-KEY1": |
||||
content_length = 8 |
||||
|
||||
if chunked: |
||||
self.body = Body(ChunkedReader(self, self.unreader)) |
||||
elif content_length is not None: |
||||
try: |
||||
content_length = int(content_length) |
||||
except ValueError: |
||||
raise InvalidHeader("CONTENT-LENGTH", req=self) |
||||
|
||||
if content_length < 0: |
||||
raise InvalidHeader("CONTENT-LENGTH", req=self) |
||||
|
||||
self.body = Body(LengthReader(self.unreader, content_length)) |
||||
else: |
||||
self.body = Body(EOFReader(self.unreader)) |
||||
|
||||
def should_close(self): |
||||
for (h, v) in self.headers: |
||||
if h == "CONNECTION": |
||||
v = v.lower().strip() |
||||
if v == "close": |
||||
return True |
||||
elif v == "keep-alive": |
||||
return False |
||||
break |
||||
return self.version <= (1, 0) |
||||
|
||||
|
||||
class Request(Message): |
||||
def __init__(self, cfg, unreader, req_number=1): |
||||
self.method = None |
||||
self.uri = None |
||||
self.path = None |
||||
self.query = None |
||||
self.fragment = None |
||||
|
||||
# get max request line size |
||||
self.limit_request_line = cfg.limit_request_line |
||||
if (self.limit_request_line < 0 |
||||
or self.limit_request_line >= MAX_REQUEST_LINE): |
||||
self.limit_request_line = MAX_REQUEST_LINE |
||||
|
||||
self.req_number = req_number |
||||
self.proxy_protocol_info = None |
||||
super(Request, self).__init__(cfg, unreader) |
||||
|
||||
def get_data(self, unreader, buf, stop=False): |
||||
data = unreader.read() |
||||
if not data: |
||||
if stop: |
||||
raise StopIteration() |
||||
raise NoMoreData(buf.getvalue()) |
||||
buf.write(data) |
||||
|
||||
def parse(self, unreader): |
||||
buf = BytesIO() |
||||
self.get_data(unreader, buf, stop=True) |
||||
|
||||
# get request line |
||||
line, rbuf = self.read_line(unreader, buf, self.limit_request_line) |
||||
|
||||
# proxy protocol |
||||
if self.proxy_protocol(bytes_to_str(line)): |
||||
# get next request line |
||||
buf = BytesIO() |
||||
buf.write(rbuf) |
||||
line, rbuf = self.read_line(unreader, buf, self.limit_request_line) |
||||
|
||||
self.parse_request_line(line) |
||||
buf = BytesIO() |
||||
buf.write(rbuf) |
||||
|
||||
# Headers |
||||
data = buf.getvalue() |
||||
idx = data.find(b"\r\n\r\n") |
||||
|
||||
done = data[:2] == b"\r\n" |
||||
while True: |
||||
idx = data.find(b"\r\n\r\n") |
||||
done = data[:2] == b"\r\n" |
||||
|
||||
if idx < 0 and not done: |
||||
self.get_data(unreader, buf) |
||||
data = buf.getvalue() |
||||
if len(data) > self.max_buffer_headers: |
||||
raise LimitRequestHeaders("max buffer headers") |
||||
else: |
||||
break |
||||
|
||||
if done: |
||||
self.unreader.unread(data[2:]) |
||||
return b"" |
||||
|
||||
self.headers = self.parse_headers(data[:idx]) |
||||
|
||||
ret = data[idx + 4:] |
||||
buf = None |
||||
return ret |
||||
|
||||
def read_line(self, unreader, buf, limit=0): |
||||
data = buf.getvalue() |
||||
|
||||
while True: |
||||
idx = data.find(b"\r\n") |
||||
if idx >= 0: |
||||
# check if the request line is too large |
||||
if idx > limit > 0: |
||||
raise LimitRequestLine(idx, limit) |
||||
break |
||||
elif len(data) - 2 > limit > 0: |
||||
raise LimitRequestLine(len(data), limit) |
||||
self.get_data(unreader, buf) |
||||
data = buf.getvalue() |
||||
|
||||
return (data[:idx], # request line, |
||||
data[idx + 2:]) # residue in the buffer, skip \r\n |
||||
|
||||
def proxy_protocol(self, line): |
||||
"""\ |
||||
Detect, check and parse proxy protocol. |
||||
|
||||
:raises: ForbiddenProxyRequest, InvalidProxyLine. |
||||
:return: True for proxy protocol line else False |
||||
""" |
||||
if not self.cfg.proxy_protocol: |
||||
return False |
||||
|
||||
if self.req_number != 1: |
||||
return False |
||||
|
||||
if not line.startswith("PROXY"): |
||||
return False |
||||
|
||||
self.proxy_protocol_access_check() |
||||
self.parse_proxy_protocol(line) |
||||
|
||||
return True |
||||
|
||||
def proxy_protocol_access_check(self): |
||||
# check in allow list |
||||
if isinstance(self.unreader, SocketUnreader): |
||||
try: |
||||
remote_host = self.unreader.sock.getpeername()[0] |
||||
except socket.error as e: |
||||
if e.args[0] == ENOTCONN: |
||||
raise ForbiddenProxyRequest("UNKNOW") |
||||
raise |
||||
if ("*" not in self.cfg.proxy_allow_ips and |
||||
remote_host not in self.cfg.proxy_allow_ips): |
||||
raise ForbiddenProxyRequest(remote_host) |
||||
|
||||
def parse_proxy_protocol(self, line): |
||||
bits = line.split() |
||||
|
||||
if len(bits) != 6: |
||||
raise InvalidProxyLine(line) |
||||
|
||||
# Extract data |
||||
proto = bits[1] |
||||
s_addr = bits[2] |
||||
d_addr = bits[3] |
||||
|
||||
# Validation |
||||
if proto not in ["TCP4", "TCP6"]: |
||||
raise InvalidProxyLine("protocol '%s' not supported" % proto) |
||||
if proto == "TCP4": |
||||
try: |
||||
socket.inet_pton(socket.AF_INET, s_addr) |
||||
socket.inet_pton(socket.AF_INET, d_addr) |
||||
except socket.error: |
||||
raise InvalidProxyLine(line) |
||||
elif proto == "TCP6": |
||||
try: |
||||
socket.inet_pton(socket.AF_INET6, s_addr) |
||||
socket.inet_pton(socket.AF_INET6, d_addr) |
||||
except socket.error: |
||||
raise InvalidProxyLine(line) |
||||
|
||||
try: |
||||
s_port = int(bits[4]) |
||||
d_port = int(bits[5]) |
||||
except ValueError: |
||||
raise InvalidProxyLine("invalid port %s" % line) |
||||
|
||||
if not ((0 <= s_port <= 65535) and (0 <= d_port <= 65535)): |
||||
raise InvalidProxyLine("invalid port %s" % line) |
||||
|
||||
# Set data |
||||
self.proxy_protocol_info = { |
||||
"proxy_protocol": proto, |
||||
"client_addr": s_addr, |
||||
"client_port": s_port, |
||||
"proxy_addr": d_addr, |
||||
"proxy_port": d_port |
||||
} |
||||
|
||||
def parse_request_line(self, line_bytes): |
||||
bits = [bytes_to_str(bit) for bit in line_bytes.split(None, 2)] |
||||
if len(bits) != 3: |
||||
raise InvalidRequestLine(bytes_to_str(line_bytes)) |
||||
|
||||
# Method |
||||
if not METH_RE.match(bits[0]): |
||||
raise InvalidRequestMethod(bits[0]) |
||||
self.method = bits[0].upper() |
||||
|
||||
# URI |
||||
self.uri = bits[1] |
||||
|
||||
try: |
||||
parts = split_request_uri(self.uri) |
||||
except ValueError: |
||||
raise InvalidRequestLine(bytes_to_str(line_bytes)) |
||||
self.path = parts.path or "" |
||||
self.query = parts.query or "" |
||||
self.fragment = parts.fragment or "" |
||||
|
||||
# Version |
||||
match = VERSION_RE.match(bits[2]) |
||||
if match is None: |
||||
raise InvalidHTTPVersion(bits[2]) |
||||
self.version = (int(match.group(1)), int(match.group(2))) |
||||
|
||||
def set_body_reader(self): |
||||
super(Request, self).set_body_reader() |
||||
if isinstance(self.body.reader, EOFReader): |
||||
self.body = Body(LengthReader(self.unreader, 0)) |
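
# Editor's sketch (not part of the diff): the request-line regexes defined at
# the top of this module can be probed on their own; VERSION_RE captures the
# two digits that become the (major, minor) tuple used by should_close() and
# is_chunked().
from gunicorn.http.message import METH_RE, VERSION_RE

print(bool(METH_RE.match("GET")))                    # True
match = VERSION_RE.match("HTTP/1.1")
print((int(match.group(1)), int(match.group(2))))    # (1, 1)
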
@ -0,0 +1,51 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
||||
|
||||
from gunicorn.http.message import Request |
||||
from gunicorn.http.unreader import SocketUnreader, IterUnreader |
||||
|
||||
|
||||
class Parser(object): |
||||
|
||||
mesg_class = None |
||||
|
||||
def __init__(self, cfg, source): |
||||
self.cfg = cfg |
||||
if hasattr(source, "recv"): |
||||
self.unreader = SocketUnreader(source) |
||||
else: |
||||
self.unreader = IterUnreader(source) |
||||
self.mesg = None |
||||
|
||||
# request counter (for keepalive connections) |
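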
||||
self.req_count = 0 |
||||
|
||||
def __iter__(self): |
||||
return self |
||||
|
||||
def __next__(self): |
||||
# Stop if HTTP dictates a stop. |
||||
if self.mesg and self.mesg.should_close(): |
||||
raise StopIteration() |
||||
|
||||
# Discard any unread body of the previous message |
||||
if self.mesg: |
||||
data = self.mesg.body.read(8192) |
||||
while data: |
||||
data = self.mesg.body.read(8192) |
||||
|
||||
# Parse the next request |
||||
self.req_count += 1 |
||||
self.mesg = self.mesg_class(self.cfg, self.unreader, self.req_count) |
||||
if not self.mesg: |
||||
raise StopIteration() |
||||
return self.mesg |
||||
|
||||
next = __next__ |
||||
|
||||
|
||||
class RequestParser(Parser): |
||||
|
||||
mesg_class = Request |
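
# Editor's sketch (not part of the diff): driving RequestParser over an
# in-memory request. "DummyConfig" is a hypothetical stand-in for gunicorn's
# real Config object and only carries the attributes the parser reads here;
# passing an iterable of bytes makes the parser use IterUnreader.
from gunicorn.http.parser import RequestParser

class DummyConfig(object):
    is_ssl = False
    proxy_protocol = False
    limit_request_line = 4094
    limit_request_fields = 100
    limit_request_field_size = 8190
    forwarded_allow_ips = ["127.0.0.1"]
    secure_scheme_headers = {}

raw = [b"GET /ping?x=1 HTTP/1.1\r\nHost: example.org\r\n\r\n"]
parser = RequestParser(DummyConfig(), iter(raw))
req = next(iter(parser))
print(req.method, req.path, req.query, req.version)   # GET /ping x=1 (1, 1)
print(req.headers)                                     # [('HOST', 'example.org')]
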
@ -0,0 +1,80 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
||||
|
||||
import os |
||||
|
||||
from gunicorn import six |
||||
|
||||
# Classes that can undo reading data from |
||||
# a given type of data source. |
||||
|
||||
|
||||
class Unreader(object): |
||||
def __init__(self): |
||||
self.buf = six.BytesIO() |
||||
|
||||
def chunk(self): |
||||
raise NotImplementedError() |
||||
|
||||
def read(self, size=None): |
||||
if size is not None and not isinstance(size, six.integer_types): |
||||
raise TypeError("size parameter must be an int or long.") |
||||
|
||||
if size is not None: |
||||
if size == 0: |
||||
return b"" |
||||
if size < 0: |
||||
size = None |
||||
|
||||
self.buf.seek(0, os.SEEK_END) |
||||
|
||||
if size is None and self.buf.tell(): |
||||
ret = self.buf.getvalue() |
||||
self.buf = six.BytesIO() |
||||
return ret |
||||
if size is None: |
||||
d = self.chunk() |
||||
return d |
||||
|
||||
while self.buf.tell() < size: |
||||
chunk = self.chunk() |
||||
if not chunk: |
||||
ret = self.buf.getvalue() |
||||
self.buf = six.BytesIO() |
||||
return ret |
||||
self.buf.write(chunk) |
||||
data = self.buf.getvalue() |
||||
self.buf = six.BytesIO() |
||||
self.buf.write(data[size:]) |
||||
return data[:size] |
||||
|
||||
def unread(self, data): |
||||
self.buf.seek(0, os.SEEK_END) |
||||
self.buf.write(data) |
||||
|
||||
|
||||
class SocketUnreader(Unreader): |
||||
def __init__(self, sock, max_chunk=8192): |
||||
super(SocketUnreader, self).__init__() |
||||
self.sock = sock |
||||
self.mxchunk = max_chunk |
||||
|
||||
def chunk(self): |
||||
return self.sock.recv(self.mxchunk) |
||||
|
||||
|
||||
class IterUnreader(Unreader): |
||||
def __init__(self, iterable): |
||||
super(IterUnreader, self).__init__() |
||||
self.iter = iter(iterable) |
||||
|
||||
def chunk(self): |
||||
if not self.iter: |
||||
return b"" |
||||
try: |
||||
return six.next(self.iter) |
||||
except StopIteration: |
||||
self.iter = None |
||||
return b"" |
@ -0,0 +1,411 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
||||
|
||||
import io |
||||
import logging |
||||
import os |
||||
import re |
||||
import sys |
||||
|
||||
from gunicorn._compat import unquote_to_wsgi_str |
||||
from gunicorn.http.message import HEADER_RE |
||||
from gunicorn.http.errors import InvalidHeader, InvalidHeaderName |
||||
from gunicorn.six import string_types, binary_type, reraise |
||||
from gunicorn import SERVER_SOFTWARE |
||||
import gunicorn.util as util |
||||
|
||||
try: |
||||
# Python 3.3 has os.sendfile(). |
||||
from os import sendfile |
||||
except ImportError: |
||||
try: |
||||
from ._sendfile import sendfile |
||||
except ImportError: |
||||
sendfile = None |
||||
|
||||
# Send files in at most 1GB blocks as some operating systems can have problems |
||||
# with sending files in blocks over 2GB. |
||||
BLKSIZE = 0x3FFFFFFF |
||||
|
||||
HEADER_VALUE_RE = re.compile(r'[\x00-\x1F\x7F]') |
||||
|
||||
log = logging.getLogger(__name__) |
||||
|
||||
|
||||
class FileWrapper(object): |
||||
|
||||
def __init__(self, filelike, blksize=8192): |
||||
self.filelike = filelike |
||||
self.blksize = blksize |
||||
if hasattr(filelike, 'close'): |
||||
self.close = filelike.close |
||||
|
||||
def __getitem__(self, key): |
||||
data = self.filelike.read(self.blksize) |
||||
if data: |
||||
return data |
||||
raise IndexError |
||||
|
||||
|
||||
class WSGIErrorsWrapper(io.RawIOBase): |
||||
|
||||
def __init__(self, cfg): |
||||
# There is no public __init__ method for RawIOBase so |
||||
# we don't need to call super() in the __init__ method. |
||||
# pylint: disable=super-init-not-called |
||||
errorlog = logging.getLogger("gunicorn.error") |
||||
handlers = errorlog.handlers |
||||
self.streams = [] |
||||
|
||||
if cfg.errorlog == "-": |
||||
self.streams.append(sys.stderr) |
||||
handlers = handlers[1:] |
||||
|
||||
for h in handlers: |
||||
if hasattr(h, "stream"): |
||||
self.streams.append(h.stream) |
||||
|
||||
def write(self, data): |
||||
for stream in self.streams: |
||||
try: |
||||
stream.write(data) |
||||
except UnicodeError: |
||||
stream.write(data.encode("UTF-8")) |
||||
stream.flush() |
||||
|
||||
|
||||
def base_environ(cfg): |
||||
return { |
||||
"wsgi.errors": WSGIErrorsWrapper(cfg), |
||||
"wsgi.version": (1, 0), |
||||
"wsgi.multithread": False, |
||||
"wsgi.multiprocess": (cfg.workers > 1), |
||||
"wsgi.run_once": False, |
||||
"wsgi.file_wrapper": FileWrapper, |
||||
"SERVER_SOFTWARE": SERVER_SOFTWARE, |
||||
} |
||||
|
||||
|
||||
def default_environ(req, sock, cfg): |
||||
env = base_environ(cfg) |
||||
env.update({ |
||||
"wsgi.input": req.body, |
||||
"gunicorn.socket": sock, |
||||
"REQUEST_METHOD": req.method, |
||||
"QUERY_STRING": req.query, |
||||
"RAW_URI": req.uri, |
||||
"SERVER_PROTOCOL": "HTTP/%s" % ".".join([str(v) for v in req.version]) |
||||
}) |
||||
return env |
||||
|
||||
|
||||
def proxy_environ(req): |
||||
info = req.proxy_protocol_info |
||||
|
||||
if not info: |
||||
return {} |
||||
|
||||
return { |
||||
"PROXY_PROTOCOL": info["proxy_protocol"], |
||||
"REMOTE_ADDR": info["client_addr"], |
||||
"REMOTE_PORT": str(info["client_port"]), |
||||
"PROXY_ADDR": info["proxy_addr"], |
||||
"PROXY_PORT": str(info["proxy_port"]), |
||||
} |
||||
|
||||
|
||||
def create(req, sock, client, server, cfg): |
||||
resp = Response(req, sock, cfg) |
||||
|
||||
# set initial environ |
||||
environ = default_environ(req, sock, cfg) |
||||
|
||||
# default variables |
||||
host = None |
||||
script_name = os.environ.get("SCRIPT_NAME", "") |
||||
|
||||
# add the headers to the environ |
||||
for hdr_name, hdr_value in req.headers: |
||||
if hdr_name == "EXPECT": |
||||
# handle expect |
||||
if hdr_value.lower() == "100-continue": |
||||
sock.send(b"HTTP/1.1 100 Continue\r\n\r\n") |
||||
elif hdr_name == 'HOST': |
||||
host = hdr_value |
||||
elif hdr_name == "SCRIPT_NAME": |
||||
script_name = hdr_value |
||||
elif hdr_name == "CONTENT-TYPE": |
||||
environ['CONTENT_TYPE'] = hdr_value |
||||
continue |
||||
elif hdr_name == "CONTENT-LENGTH": |
||||
environ['CONTENT_LENGTH'] = hdr_value |
||||
continue |
||||
|
||||
key = 'HTTP_' + hdr_name.replace('-', '_') |
||||
if key in environ: |
||||
hdr_value = "%s,%s" % (environ[key], hdr_value) |
||||
environ[key] = hdr_value |
||||
|
||||
# set the url scheme |
||||
environ['wsgi.url_scheme'] = req.scheme |
||||
|
||||
# set the REMOTE_* keys in environ |
||||
# authors should be aware that REMOTE_HOST and REMOTE_ADDR |
||||
# may not qualify the remote addr: |
||||
# http://www.ietf.org/rfc/rfc3875 |
||||
if isinstance(client, string_types): |
||||
environ['REMOTE_ADDR'] = client |
||||
elif isinstance(client, binary_type): |
||||
environ['REMOTE_ADDR'] = str(client) |
||||
else: |
||||
environ['REMOTE_ADDR'] = client[0] |
||||
environ['REMOTE_PORT'] = str(client[1]) |
||||
|
||||
# handle the SERVER_* |
||||
# Normally only the application should use the Host header but since the |
||||
# WSGI spec doesn't support unix sockets, we are using it to create |
||||
# viable SERVER_* if possible. |
||||
if isinstance(server, string_types): |
||||
server = server.split(":") |
||||
if len(server) == 1: |
||||
# unix socket |
||||
if host: |
||||
server = host.split(':') |
||||
if len(server) == 1: |
||||
if req.scheme == "http": |
||||
server.append(80) |
||||
elif req.scheme == "https": |
||||
server.append(443) |
||||
else: |
||||
server.append('') |
||||
else: |
||||
# no host header given which means that we are not behind a |
||||
# proxy, so append an empty port. |
||||
server.append('') |
||||
environ['SERVER_NAME'] = server[0] |
||||
environ['SERVER_PORT'] = str(server[1]) |
||||
|
||||
# set the path and script name |
||||
path_info = req.path |
||||
if script_name: |
||||
path_info = path_info.split(script_name, 1)[1] |
||||
environ['PATH_INFO'] = unquote_to_wsgi_str(path_info) |
||||
environ['SCRIPT_NAME'] = script_name |
||||
|
||||
# override the environ with the correct remote and server address if |
||||
# we are behind a proxy using the proxy protocol. |
||||
environ.update(proxy_environ(req)) |
||||
return resp, environ |
||||
|
||||
|
||||
class Response(object): |
||||
|
||||
def __init__(self, req, sock, cfg): |
||||
self.req = req |
||||
self.sock = sock |
||||
self.version = SERVER_SOFTWARE |
||||
self.status = None |
||||
self.chunked = False |
||||
self.must_close = False |
||||
self.headers = [] |
||||
self.headers_sent = False |
||||
self.response_length = None |
||||
self.sent = 0 |
||||
self.upgrade = False |
||||
self.cfg = cfg |
||||
|
||||
def force_close(self): |
||||
self.must_close = True |
||||
|
||||
def should_close(self): |
||||
if self.must_close or self.req.should_close(): |
||||
return True |
||||
if self.response_length is not None or self.chunked: |
||||
return False |
||||
if self.req.method == 'HEAD': |
||||
return False |
||||
if self.status_code < 200 or self.status_code in (204, 304): |
||||
return False |
||||
return True |
||||
|
||||
def start_response(self, status, headers, exc_info=None): |
||||
if exc_info: |
||||
try: |
||||
if self.status and self.headers_sent: |
||||
reraise(exc_info[0], exc_info[1], exc_info[2]) |
||||
finally: |
||||
exc_info = None |
||||
elif self.status is not None: |
||||
raise AssertionError("Response headers already set!") |
||||
|
||||
self.status = status |
||||
|
||||
# get the status code from the response here so we can use it to check |
||||
# the need for the connection header later without parsing the string |
||||
# each time. |
||||
try: |
||||
self.status_code = int(self.status.split()[0]) |
||||
except ValueError: |
||||
self.status_code = None |
||||
|
||||
self.process_headers(headers) |
||||
self.chunked = self.is_chunked() |
||||
return self.write |
||||
|
||||
def process_headers(self, headers): |
||||
for name, value in headers: |
||||
if not isinstance(name, string_types): |
||||
raise TypeError('%r is not a string' % name) |
||||
|
||||
if HEADER_RE.search(name): |
||||
raise InvalidHeaderName('%r' % name) |
||||
|
||||
if HEADER_VALUE_RE.search(value): |
||||
raise InvalidHeader('%r' % value) |
||||
|
||||
value = str(value).strip() |
||||
lname = name.lower().strip() |
||||
if lname == "content-length": |
||||
self.response_length = int(value) |
||||
elif util.is_hoppish(name): |
||||
if lname == "connection": |
||||
# handle websocket |
||||
if value.lower().strip() == "upgrade": |
||||
self.upgrade = True |
||||
elif lname == "upgrade": |
||||
if value.lower().strip() == "websocket": |
||||
self.headers.append((name.strip(), value)) |
||||
|
||||
# ignore hopbyhop headers |
||||
continue |
||||
self.headers.append((name.strip(), value)) |
||||
|
||||
def is_chunked(self): |
||||
# Only use chunked responses when the client is |
||||
# speaking HTTP/1.1 or newer and there was |
||||
# no Content-Length header set. |
||||
if self.response_length is not None: |
||||
return False |
||||
elif self.req.version <= (1, 0): |
||||
return False |
||||
elif self.req.method == 'HEAD': |
||||
# Responses to a HEAD request MUST NOT contain a response body. |
||||
return False |
||||
elif self.status_code in (204, 304): |
||||
# Do not use chunked responses when the response is guaranteed to |
||||
# not have a response body. |
||||
return False |
||||
return True |
||||
|
||||
def default_headers(self): |
||||
# set the connection header |
||||
if self.upgrade: |
||||
connection = "upgrade" |
||||
elif self.should_close(): |
||||
connection = "close" |
||||
else: |
||||
connection = "keep-alive" |
||||
|
||||
headers = [ |
||||
"HTTP/%s.%s %s\r\n" % (self.req.version[0], |
||||
self.req.version[1], self.status), |
||||
"Server: %s\r\n" % self.version, |
||||
"Date: %s\r\n" % util.http_date(), |
||||
"Connection: %s\r\n" % connection |
||||
] |
||||
if self.chunked: |
||||
headers.append("Transfer-Encoding: chunked\r\n") |
||||
return headers |
||||
|
||||
def send_headers(self): |
||||
if self.headers_sent: |
||||
return |
||||
tosend = self.default_headers() |
||||
tosend.extend(["%s: %s\r\n" % (k, v) for k, v in self.headers]) |
||||
|
||||
header_str = "%s\r\n" % "".join(tosend) |
||||
util.write(self.sock, util.to_bytestring(header_str, "ascii")) |
||||
self.headers_sent = True |
||||
|
||||
def write(self, arg): |
||||
self.send_headers() |
||||
if not isinstance(arg, binary_type): |
||||
raise TypeError('%r is not a byte' % arg) |
||||
arglen = len(arg) |
||||
tosend = arglen |
||||
if self.response_length is not None: |
||||
if self.sent >= self.response_length: |
||||
# Never write more than self.response_length bytes |
||||
return |
||||
|
||||
tosend = min(self.response_length - self.sent, tosend) |
||||
if tosend < arglen: |
||||
arg = arg[:tosend] |
||||
|
||||
# Sending an empty chunk signals the end of the |
||||
# response and prematurely closes the response |
||||
if self.chunked and tosend == 0: |
||||
return |
||||
|
||||
self.sent += tosend |
||||
util.write(self.sock, arg, self.chunked) |
||||
|
||||
def can_sendfile(self): |
||||
return self.cfg.sendfile is not False and sendfile is not None |
||||
|
||||
def sendfile(self, respiter): |
||||
if self.cfg.is_ssl or not self.can_sendfile(): |
||||
return False |
||||
|
||||
if not util.has_fileno(respiter.filelike): |
||||
return False |
||||
|
||||
fileno = respiter.filelike.fileno() |
||||
try: |
||||
offset = os.lseek(fileno, 0, os.SEEK_CUR) |
||||
if self.response_length is None: |
||||
filesize = os.fstat(fileno).st_size |
||||
|
||||
# The file may be special and sendfile will fail. |
||||
# It may also be zero-length, but that is okay. |
||||
if filesize == 0: |
||||
return False |
||||
|
||||
nbytes = filesize - offset |
||||
else: |
||||
nbytes = self.response_length |
||||
except (OSError, io.UnsupportedOperation): |
||||
return False |
||||
|
||||
self.send_headers() |
||||
|
||||
if self.is_chunked(): |
||||
chunk_size = "%X\r\n" % nbytes |
||||
self.sock.sendall(chunk_size.encode('utf-8')) |
||||
|
||||
sockno = self.sock.fileno() |
||||
sent = 0 |
||||
|
||||
while sent != nbytes: |
||||
count = min(nbytes - sent, BLKSIZE) |
||||
sent += sendfile(sockno, fileno, offset + sent, count) |
||||
|
||||
if self.is_chunked(): |
||||
self.sock.sendall(b"\r\n") |
||||
|
||||
os.lseek(fileno, offset, os.SEEK_SET) |
||||
|
||||
return True |
||||
|
||||
def write_file(self, respiter): |
||||
if not self.sendfile(respiter): |
||||
for item in respiter: |
||||
self.write(item) |
||||
|
||||
def close(self): |
||||
if not self.headers_sent: |
||||
self.send_headers() |
||||
if self.chunked: |
||||
util.write_chunk(self.sock, b"") |
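
# Editor's sketch (not part of the diff): what proxy_environ() above adds to
# the WSGI environ when the PROXY protocol was parsed. "FakeReq" is a
# hypothetical stand-in carrying only the attribute that function reads.
from gunicorn.http.wsgi import proxy_environ

class FakeReq(object):
    proxy_protocol_info = {
        "proxy_protocol": "TCP4",
        "client_addr": "192.168.0.1",
        "client_port": 56789,
        "proxy_addr": "10.0.0.1",
        "proxy_port": 443,
    }

print(proxy_environ(FakeReq()))
# {'PROXY_PROTOCOL': 'TCP4', 'REMOTE_ADDR': '192.168.0.1', 'REMOTE_PORT': '56789',
#  'PROXY_ADDR': '10.0.0.1', 'PROXY_PORT': '443'}
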
@ -0,0 +1,123 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
||||
|
||||
"Bare-bones implementation of statsD's protocol, client-side" |
||||
|
||||
import socket |
||||
import logging |
||||
from re import sub |
||||
|
||||
from gunicorn.glogging import Logger |
||||
from gunicorn import six |
||||
|
||||
# Instrumentation constants |
||||
METRIC_VAR = "metric" |
||||
VALUE_VAR = "value" |
||||
MTYPE_VAR = "mtype" |
||||
GAUGE_TYPE = "gauge" |
||||
COUNTER_TYPE = "counter" |
||||
HISTOGRAM_TYPE = "histogram" |
||||
|
||||
class Statsd(Logger): |
||||
"""statsD-based instrumentation, that passes as a logger |
||||
""" |
||||
def __init__(self, cfg): |
||||
"""host, port: statsD server |
||||
""" |
||||
Logger.__init__(self, cfg) |
||||
self.prefix = sub(r"^(.+[^.]+)\.*$", "\\g<1>.", cfg.statsd_prefix) |
||||
try: |
||||
host, port = cfg.statsd_host |
||||
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) |
||||
self.sock.connect((host, int(port))) |
||||
except Exception: |
||||
self.sock = None |
||||
|
||||
# Log errors and warnings |
||||
def critical(self, msg, *args, **kwargs): |
||||
Logger.critical(self, msg, *args, **kwargs) |
||||
self.increment("gunicorn.log.critical", 1) |
||||
|
||||
def error(self, msg, *args, **kwargs): |
||||
Logger.error(self, msg, *args, **kwargs) |
||||
self.increment("gunicorn.log.error", 1) |
||||
|
||||
def warning(self, msg, *args, **kwargs): |
||||
Logger.warning(self, msg, *args, **kwargs) |
||||
self.increment("gunicorn.log.warning", 1) |
||||
|
||||
def exception(self, msg, *args, **kwargs): |
||||
Logger.exception(self, msg, *args, **kwargs) |
||||
self.increment("gunicorn.log.exception", 1) |
||||
|
||||
# Special treatment for info, the most common log level |
||||
def info(self, msg, *args, **kwargs): |
||||
self.log(logging.INFO, msg, *args, **kwargs) |
||||
|
||||
# skip the run-of-the-mill logs |
||||
def debug(self, msg, *args, **kwargs): |
||||
self.log(logging.DEBUG, msg, *args, **kwargs) |
||||
|
||||
def log(self, lvl, msg, *args, **kwargs): |
||||
"""Log a given statistic if metric, value and type are present |
||||
""" |
||||
try: |
||||
extra = kwargs.get("extra", None) |
||||
if extra is not None: |
||||
metric = extra.get(METRIC_VAR, None) |
||||
value = extra.get(VALUE_VAR, None) |
||||
typ = extra.get(MTYPE_VAR, None) |
||||
if metric and value and typ: |
||||
if typ == GAUGE_TYPE: |
||||
self.gauge(metric, value) |
||||
elif typ == COUNTER_TYPE: |
||||
self.increment(metric, value) |
||||
elif typ == HISTOGRAM_TYPE: |
||||
self.histogram(metric, value) |
||||
else: |
||||
pass |
||||
|
||||
# Log to parent logger only if there is something to say |
||||
if msg: |
||||
Logger.log(self, lvl, msg, *args, **kwargs) |
||||
except Exception: |
||||
Logger.warning(self, "Failed to log to statsd", exc_info=True) |
||||
|
||||
# access logging |
||||
def access(self, resp, req, environ, request_time): |
||||
"""Measure request duration |
||||
request_time is a datetime.timedelta |
||||
""" |
||||
Logger.access(self, resp, req, environ, request_time) |
||||
duration_in_ms = request_time.seconds * 1000 + float(request_time.microseconds) / 10 ** 3 |
||||
status = resp.status |
||||
if isinstance(status, str): |
||||
status = int(status.split(None, 1)[0]) |
||||
self.histogram("gunicorn.request.duration", duration_in_ms) |
||||
self.increment("gunicorn.requests", 1) |
||||
self.increment("gunicorn.request.status.%d" % status, 1) |
||||
|
||||
# statsD methods |
||||
# you can use those directly if you want |
||||
def gauge(self, name, value): |
||||
self._sock_send("{0}{1}:{2}|g".format(self.prefix, name, value)) |
||||
|
||||
def increment(self, name, value, sampling_rate=1.0): |
||||
self._sock_send("{0}{1}:{2}|c|@{3}".format(self.prefix, name, value, sampling_rate)) |
||||
|
||||
def decrement(self, name, value, sampling_rate=1.0): |
||||
self._sock_send("{0}{1}:-{2}|c|@{3}".format(self.prefix, name, value, sampling_rate)) |
||||
|
||||
def histogram(self, name, value): |
||||
self._sock_send("{0}{1}:{2}|ms".format(self.prefix, name, value)) |
||||
|
||||
def _sock_send(self, msg): |
||||
try: |
||||
if isinstance(msg, six.text_type): |
||||
msg = msg.encode("ascii") |
||||
if self.sock: |
||||
self.sock.send(msg) |
||||
except Exception: |
||||
Logger.warning(self, "Error sending message to statsd", exc_info=True) |
@ -0,0 +1,86 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
||||
|
||||
import errno |
||||
import os |
||||
import tempfile |
||||
|
||||
|
||||
class Pidfile(object): |
||||
"""\ |
||||
Manage a PID file. If a specific name is provided |
||||
it and '"%s.oldpid" % name' will be used. Otherwise |
||||
we create a temp file using os.mkstemp. |
||||
""" |
||||
|
||||
def __init__(self, fname): |
||||
self.fname = fname |
||||
self.pid = None |
||||
|
||||
def create(self, pid): |
||||
oldpid = self.validate() |
||||
if oldpid: |
||||
if oldpid == os.getpid(): |
||||
return |
||||
msg = "Already running on PID %s (or pid file '%s' is stale)" |
||||
raise RuntimeError(msg % (oldpid, self.fname)) |
||||
|
||||
self.pid = pid |
||||
|
||||
# Write pidfile |
||||
fdir = os.path.dirname(self.fname) |
||||
if fdir and not os.path.isdir(fdir): |
||||
raise RuntimeError("%s doesn't exist. Can't create pidfile." % fdir) |
||||
fd, fname = tempfile.mkstemp(dir=fdir) |
||||
os.write(fd, ("%s\n" % self.pid).encode('utf-8')) |
||||
if self.fname: |
||||
os.rename(fname, self.fname) |
||||
else: |
||||
self.fname = fname |
||||
os.close(fd) |
||||
|
||||
# set permissions to -rw-r--r-- |
||||
os.chmod(self.fname, 420) |
||||
|
||||
def rename(self, path): |
||||
self.unlink() |
||||
self.fname = path |
||||
self.create(self.pid) |
||||
|
||||
def unlink(self): |
||||
""" delete pidfile""" |
||||
try: |
||||
with open(self.fname, "r") as f: |
||||
pid1 = int(f.read() or 0) |
||||
|
||||
if pid1 == self.pid: |
||||
os.unlink(self.fname) |
||||
except: |
||||
pass |
||||
|
||||
def validate(self): |
||||
""" Validate pidfile and make it stale if needed""" |
||||
if not self.fname: |
||||
return |
||||
try: |
||||
with open(self.fname, "r") as f: |
||||
try: |
||||
wpid = int(f.read()) |
||||
except ValueError: |
||||
return |
||||
|
||||
try: |
||||
os.kill(wpid, 0) |
||||
return wpid |
||||
except OSError as e: |
||||
if e.args[0] == errno.EPERM: |
||||
return wpid |
||||
if e.args[0] == errno.ESRCH: |
||||
return |
||||
raise |
||||
except IOError as e: |
||||
if e.args[0] == errno.ENOENT: |
||||
return |
||||
raise |
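
# Editor's sketch (not part of the diff): typical Pidfile lifecycle. The path
# is hypothetical; create() refuses to run if the file already names a live
# process, and validate() probes that process with kill(pid, 0).
import os
from gunicorn.pidfile import Pidfile

p = Pidfile("/tmp/example-gunicorn.pid")
p.create(os.getpid())     # writes the pid, mode -rw-r--r--
print(p.validate())       # our own pid, reported as still running
p.unlink()                # removes the file again
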
@ -0,0 +1,130 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
||||
|
||||
import os |
||||
import os.path |
||||
import re |
||||
import sys |
||||
import time |
||||
import threading |
||||
|
||||
|
||||
class Reloader(threading.Thread): |
||||
def __init__(self, extra_files=None, interval=1, callback=None): |
||||
super(Reloader, self).__init__() |
||||
self.setDaemon(True) |
||||
self._extra_files = set(extra_files or ()) |
||||
self._extra_files_lock = threading.RLock() |
||||
self._interval = interval |
||||
self._callback = callback |
||||
|
||||
def add_extra_file(self, filename): |
||||
with self._extra_files_lock: |
||||
self._extra_files.add(filename) |
||||
|
||||
def get_files(self): |
||||
fnames = [ |
||||
re.sub('py[co]$', 'py', module.__file__) |
||||
for module in list(sys.modules.values()) |
||||
if getattr(module, '__file__', None) |
||||
] |
||||
|
||||
with self._extra_files_lock: |
||||
fnames.extend(self._extra_files) |
||||
|
||||
return fnames |
||||
|
||||
def run(self): |
||||
mtimes = {} |
||||
while True: |
||||
for filename in self.get_files(): |
||||
try: |
||||
mtime = os.stat(filename).st_mtime |
||||
except OSError: |
||||
continue |
||||
old_time = mtimes.get(filename) |
||||
if old_time is None: |
||||
mtimes[filename] = mtime |
||||
continue |
||||
elif mtime > old_time: |
||||
if self._callback: |
||||
self._callback(filename) |
||||
time.sleep(self._interval) |
||||
|
||||
has_inotify = False |
||||
if sys.platform.startswith('linux'): |
||||
try: |
||||
from inotify.adapters import Inotify |
||||
import inotify.constants |
||||
has_inotify = True |
||||
except ImportError: |
||||
pass |
||||
|
||||
|
||||
if has_inotify: |
||||
|
||||
class InotifyReloader(threading.Thread): |
||||
event_mask = (inotify.constants.IN_CREATE | inotify.constants.IN_DELETE |
||||
| inotify.constants.IN_DELETE_SELF | inotify.constants.IN_MODIFY |
||||
| inotify.constants.IN_MOVE_SELF | inotify.constants.IN_MOVED_FROM |
||||
| inotify.constants.IN_MOVED_TO) |
||||
|
||||
def __init__(self, extra_files=None, callback=None): |
||||
super(InotifyReloader, self).__init__() |
||||
self.setDaemon(True) |
||||
self._callback = callback |
||||
self._dirs = set() |
||||
self._watcher = Inotify() |
||||
|
||||
for extra_file in extra_files: |
||||
self.add_extra_file(extra_file) |
||||
|
||||
def add_extra_file(self, filename): |
||||
dirname = os.path.dirname(filename) |
||||
|
||||
if dirname in self._dirs: |
||||
return |
||||
|
||||
self._watcher.add_watch(dirname, mask=self.event_mask) |
||||
self._dirs.add(dirname) |
||||
|
||||
def get_dirs(self): |
||||
fnames = [ |
||||
os.path.dirname(re.sub('py[co]$', 'py', module.__file__)) |
||||
for module in list(sys.modules.values()) |
||||
if hasattr(module, '__file__') |
||||
] |
||||
|
||||
return set(fnames) |
||||
|
||||
def run(self): |
||||
self._dirs = self.get_dirs() |
||||
|
||||
for dirname in self._dirs: |
||||
self._watcher.add_watch(dirname, mask=self.event_mask) |
||||
|
||||
for event in self._watcher.event_gen(): |
||||
if event is None: |
||||
continue |
||||
|
||||
filename = event[3] |
||||
|
||||
self._callback(filename) |
||||
|
||||
else: |
||||
|
||||
class InotifyReloader(object): |
||||
def __init__(self, callback=None): |
||||
raise ImportError('You must have the inotify module installed to ' |
||||
'use the inotify reloader') |
||||
|
||||
|
||||
preferred_reloader = InotifyReloader if has_inotify else Reloader |
||||
|
||||
reloader_engines = { |
||||
'auto': preferred_reloader, |
||||
'poll': Reloader, |
||||
'inotify': InotifyReloader, |
||||
} |
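
# Editor's sketch (not part of the diff): wiring the poll-based reloader to a
# callback; gunicorn's workers do this themselves when --reload is enabled.
# 'myapp.cfg' is a made-up extra file to watch.
from gunicorn.reloader import reloader_engines

def on_change(fname):
    print("changed:", fname)

r = reloader_engines['poll'](extra_files=['myapp.cfg'], interval=1, callback=on_change)
r.start()   # daemon thread; polls imported modules plus extra_files for mtime changes
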
@ -0,0 +1,592 @@ |
||||
"""Selectors module. |
||||
|
||||
This module allows high-level and efficient I/O multiplexing, built upon the |
||||
`select` module primitives. |
||||
|
||||
The following code adapted from trollius.selectors. |
||||
""" |
||||
|
||||
|
||||
from abc import ABCMeta, abstractmethod |
||||
from collections import namedtuple, Mapping |
||||
import math |
||||
import select |
||||
import sys |
||||
|
||||
from gunicorn._compat import wrap_error, InterruptedError |
||||
from gunicorn import six |
||||
|
||||
|
||||
# generic events, that must be mapped to implementation-specific ones |
||||
EVENT_READ = (1 << 0) |
||||
EVENT_WRITE = (1 << 1) |
||||
|
||||
|
||||
def _fileobj_to_fd(fileobj): |
||||
"""Return a file descriptor from a file object. |
||||
|
||||
Parameters: |
||||
fileobj -- file object or file descriptor |
||||
|
||||
Returns: |
||||
corresponding file descriptor |
||||
|
||||
Raises: |
||||
ValueError if the object is invalid |
||||
""" |
||||
if isinstance(fileobj, six.integer_types): |
||||
fd = fileobj |
||||
else: |
||||
try: |
||||
fd = int(fileobj.fileno()) |
||||
except (AttributeError, TypeError, ValueError): |
||||
raise ValueError("Invalid file object: " |
||||
"{0!r}".format(fileobj)) |
||||
if fd < 0: |
||||
raise ValueError("Invalid file descriptor: {0}".format(fd)) |
||||
return fd |
||||
|
||||
|
||||
SelectorKey = namedtuple('SelectorKey', ['fileobj', 'fd', 'events', 'data']) |
||||
"""Object used to associate a file object to its backing file descriptor, |
||||
selected event mask and attached data.""" |
||||
|
||||
|
||||
class _SelectorMapping(Mapping): |
||||
"""Mapping of file objects to selector keys.""" |
||||
|
||||
def __init__(self, selector): |
||||
self._selector = selector |
||||
|
||||
def __len__(self): |
||||
return len(self._selector._fd_to_key) |
||||
|
||||
def __getitem__(self, fileobj): |
||||
try: |
||||
fd = self._selector._fileobj_lookup(fileobj) |
||||
return self._selector._fd_to_key[fd] |
||||
except KeyError: |
||||
raise KeyError("{0!r} is not registered".format(fileobj)) |
||||
|
||||
def __iter__(self): |
||||
return iter(self._selector._fd_to_key) |
||||
|
||||
|
||||
class BaseSelector(six.with_metaclass(ABCMeta)): |
||||
"""Selector abstract base class. |
||||
|
||||
A selector supports registering file objects to be monitored for specific |
||||
I/O events. |
||||
|
||||
A file object is a file descriptor or any object with a `fileno()` method. |
||||
An arbitrary object can be attached to the file object, which can be used |
||||
for example to store context information, a callback, etc. |
||||
|
||||
A selector can use various implementations (select(), poll(), epoll()...) |
||||
depending on the platform. The default `Selector` class uses the most |
||||
efficient implementation on the current platform. |
||||
""" |
||||
|
||||
@abstractmethod |
||||
def register(self, fileobj, events, data=None): |
||||
"""Register a file object. |
||||
|
||||
Parameters: |
||||
fileobj -- file object or file descriptor |
||||
events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE) |
||||
data -- attached data |
||||
|
||||
Returns: |
||||
SelectorKey instance |
||||
|
||||
Raises: |
||||
ValueError if events is invalid |
||||
KeyError if fileobj is already registered |
||||
OSError if fileobj is closed or otherwise is unacceptable to |
||||
the underlying system call (if a system call is made) |
||||
|
||||
Note: |
||||
OSError may or may not be raised |
||||
""" |
||||
raise NotImplementedError |
||||
|
||||
@abstractmethod |
||||
def unregister(self, fileobj): |
||||
"""Unregister a file object. |
||||
|
||||
Parameters: |
||||
fileobj -- file object or file descriptor |
||||
|
||||
Returns: |
||||
SelectorKey instance |
||||
|
||||
Raises: |
||||
KeyError if fileobj is not registered |
||||
|
||||
Note: |
||||
If fileobj is registered but has since been closed this does |
||||
*not* raise OSError (even if the wrapped syscall does) |
||||
""" |
||||
raise NotImplementedError |
||||
|
||||
def modify(self, fileobj, events, data=None): |
||||
"""Change a registered file object monitored events or attached data. |
||||
|
||||
Parameters: |
||||
fileobj -- file object or file descriptor |
||||
events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE) |
||||
data -- attached data |
||||
|
||||
Returns: |
||||
SelectorKey instance |
||||
|
||||
Raises: |
||||
Anything that unregister() or register() raises |
||||
""" |
||||
self.unregister(fileobj) |
||||
return self.register(fileobj, events, data) |
||||
|
||||
@abstractmethod |
||||
def select(self, timeout=None): |
||||
"""Perform the actual selection, until some monitored file objects are |
||||
ready or a timeout expires. |
||||
|
||||
Parameters: |
||||
timeout -- if timeout > 0, this specifies the maximum wait time, in |
||||
seconds |
||||
if timeout <= 0, the select() call won't block, and will |
||||
report the currently ready file objects |
||||
if timeout is None, select() will block until a monitored |
||||
file object becomes ready |
||||
|
||||
Returns: |
||||
list of (key, events) for ready file objects |
||||
`events` is a bitwise mask of EVENT_READ|EVENT_WRITE |
||||
""" |
||||
raise NotImplementedError |
||||
|
||||
def close(self): |
||||
"""Close the selector. |
||||
|
||||
This must be called to make sure that any underlying resource is freed. |
||||
""" |
||||
pass |
||||
|
||||
def get_key(self, fileobj): |
||||
"""Return the key associated to a registered file object. |
||||
|
||||
Returns: |
||||
SelectorKey for this file object |
||||
""" |
||||
mapping = self.get_map() |
||||
try: |
||||
return mapping[fileobj] |
||||
except KeyError: |
||||
raise KeyError("{0!r} is not registered".format(fileobj)) |
||||
|
||||
@abstractmethod |
||||
def get_map(self): |
||||
"""Return a mapping of file objects to selector keys.""" |
||||
raise NotImplementedError |
||||
|
||||
def __enter__(self): |
||||
return self |
||||
|
||||
def __exit__(self, *args): |
||||
self.close() |
||||
|
||||
|
||||
class _BaseSelectorImpl(BaseSelector): |
||||
"""Base selector implementation.""" |
||||
|
||||
def __init__(self): |
||||
# this maps file descriptors to keys |
||||
self._fd_to_key = {} |
||||
# read-only mapping returned by get_map() |
||||
self._map = _SelectorMapping(self) |
||||
|
||||
def _fileobj_lookup(self, fileobj): |
||||
"""Return a file descriptor from a file object. |
||||
|
||||
This wraps _fileobj_to_fd() to do an exhaustive search in case |
||||
the object is invalid but we still have it in our map. This |
||||
is used by unregister() so we can unregister an object that |
||||
was previously registered even if it is closed. It is also |
||||
used by _SelectorMapping. |
||||
""" |
||||
try: |
||||
return _fileobj_to_fd(fileobj) |
||||
except ValueError: |
||||
# Do an exhaustive search. |
||||
for key in self._fd_to_key.values(): |
||||
if key.fileobj is fileobj: |
||||
return key.fd |
||||
# Raise ValueError after all. |
||||
raise |
||||
|
||||
def register(self, fileobj, events, data=None): |
||||
if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)): |
||||
raise ValueError("Invalid events: {0!r}".format(events)) |
||||
|
||||
key = SelectorKey(fileobj, self._fileobj_lookup(fileobj), events, data) |
||||
|
||||
if key.fd in self._fd_to_key: |
||||
raise KeyError("{0!r} (FD {1}) is already registered" |
||||
.format(fileobj, key.fd)) |
||||
|
||||
self._fd_to_key[key.fd] = key |
||||
return key |
||||
|
||||
def unregister(self, fileobj): |
||||
try: |
||||
key = self._fd_to_key.pop(self._fileobj_lookup(fileobj)) |
||||
except KeyError: |
||||
raise KeyError("{0!r} is not registered".format(fileobj)) |
||||
return key |
||||
|
||||
def modify(self, fileobj, events, data=None): |
||||
# TODO: Subclasses can probably optimize this even further. |
||||
try: |
||||
key = self._fd_to_key[self._fileobj_lookup(fileobj)] |
||||
except KeyError: |
||||
raise KeyError("{0!r} is not registered".format(fileobj)) |
||||
if events != key.events: |
||||
self.unregister(fileobj) |
||||
key = self.register(fileobj, events, data) |
||||
elif data != key.data: |
||||
# Use a shortcut to update the data. |
||||
key = key._replace(data=data) |
||||
self._fd_to_key[key.fd] = key |
||||
return key |
||||
|
||||
def close(self): |
||||
self._fd_to_key.clear() |
||||
|
||||
def get_map(self): |
||||
return self._map |
||||
|
||||
def _key_from_fd(self, fd): |
||||
"""Return the key associated to a given file descriptor. |
||||
|
||||
Parameters: |
||||
fd -- file descriptor |
||||
|
||||
Returns: |
||||
corresponding key, or None if not found |
||||
""" |
||||
try: |
||||
return self._fd_to_key[fd] |
||||
except KeyError: |
||||
return None |
||||
|
||||
|
||||
class SelectSelector(_BaseSelectorImpl): |
||||
"""Select-based selector.""" |
||||
|
||||
def __init__(self): |
||||
super(SelectSelector, self).__init__() |
||||
self._readers = set() |
||||
self._writers = set() |
||||
|
||||
def register(self, fileobj, events, data=None): |
||||
key = super(SelectSelector, self).register(fileobj, events, data) |
||||
if events & EVENT_READ: |
||||
self._readers.add(key.fd) |
||||
if events & EVENT_WRITE: |
||||
self._writers.add(key.fd) |
||||
return key |
||||
|
||||
def unregister(self, fileobj): |
||||
key = super(SelectSelector, self).unregister(fileobj) |
||||
self._readers.discard(key.fd) |
||||
self._writers.discard(key.fd) |
||||
return key |
||||
|
||||
if sys.platform == 'win32': |
||||
def _select(self, r, w, _, timeout=None): |
||||
r, w, x = select.select(r, w, w, timeout) |
||||
return r, w + x, [] |
||||
else: |
||||
_select = select.select |
||||
|
||||
def select(self, timeout=None): |
||||
timeout = None if timeout is None else max(timeout, 0) |
||||
ready = [] |
||||
try: |
||||
r, w, _ = wrap_error(self._select, |
||||
self._readers, self._writers, [], timeout) |
||||
except InterruptedError: |
||||
return ready |
||||
r = set(r) |
||||
w = set(w) |
||||
for fd in r | w: |
||||
events = 0 |
||||
if fd in r: |
||||
events |= EVENT_READ |
||||
if fd in w: |
||||
events |= EVENT_WRITE |
||||
|
||||
key = self._key_from_fd(fd) |
||||
if key: |
||||
ready.append((key, events & key.events)) |
||||
return ready |
||||
|
||||
|
||||
if hasattr(select, 'poll'): |
||||
|
||||
class PollSelector(_BaseSelectorImpl): |
||||
"""Poll-based selector.""" |
||||
|
||||
def __init__(self): |
||||
super(PollSelector, self).__init__() |
||||
self._poll = select.poll() |
||||
|
||||
def register(self, fileobj, events, data=None): |
||||
key = super(PollSelector, self).register(fileobj, events, data) |
||||
poll_events = 0 |
||||
if events & EVENT_READ: |
||||
poll_events |= select.POLLIN |
||||
if events & EVENT_WRITE: |
||||
poll_events |= select.POLLOUT |
||||
self._poll.register(key.fd, poll_events) |
||||
return key |
||||
|
||||
def unregister(self, fileobj): |
||||
key = super(PollSelector, self).unregister(fileobj) |
||||
self._poll.unregister(key.fd) |
||||
return key |
||||
|
||||
def select(self, timeout=None): |
||||
if timeout is None: |
||||
timeout = None |
||||
elif timeout <= 0: |
||||
timeout = 0 |
||||
else: |
||||
# poll() has a resolution of 1 millisecond, round away from |
||||
# zero to wait *at least* timeout seconds. |
||||
timeout = int(math.ceil(timeout * 1e3)) |
||||
ready = [] |
||||
try: |
||||
fd_event_list = wrap_error(self._poll.poll, timeout) |
||||
except InterruptedError: |
||||
return ready |
||||
for fd, event in fd_event_list: |
||||
events = 0 |
||||
if event & ~select.POLLIN: |
||||
events |= EVENT_WRITE |
||||
if event & ~select.POLLOUT: |
||||
events |= EVENT_READ |
||||
|
||||
key = self._key_from_fd(fd) |
||||
if key: |
||||
ready.append((key, events & key.events)) |
||||
return ready |
||||
|
||||
|
||||
if hasattr(select, 'epoll'): |
||||
|
||||
class EpollSelector(_BaseSelectorImpl): |
||||
"""Epoll-based selector.""" |
||||
|
||||
def __init__(self): |
||||
super(EpollSelector, self).__init__() |
||||
self._epoll = select.epoll() |
||||
|
||||
def fileno(self): |
||||
return self._epoll.fileno() |
||||
|
||||
def register(self, fileobj, events, data=None): |
||||
key = super(EpollSelector, self).register(fileobj, events, data) |
||||
epoll_events = 0 |
||||
if events & EVENT_READ: |
||||
epoll_events |= select.EPOLLIN |
||||
if events & EVENT_WRITE: |
||||
epoll_events |= select.EPOLLOUT |
||||
self._epoll.register(key.fd, epoll_events) |
||||
return key |
||||
|
||||
def unregister(self, fileobj): |
||||
key = super(EpollSelector, self).unregister(fileobj) |
||||
try: |
||||
self._epoll.unregister(key.fd) |
||||
except OSError: |
||||
# This can happen if the FD was closed since it |
||||
# was registered. |
||||
pass |
||||
return key |
||||
|
||||
def select(self, timeout=None): |
||||
if timeout is None: |
||||
timeout = -1 |
||||
elif timeout <= 0: |
||||
timeout = 0 |
||||
else: |
||||
# epoll_wait() has a resolution of 1 millisecond, round away |
||||
# from zero to wait *at least* timeout seconds. |
||||
timeout = math.ceil(timeout * 1e3) * 1e-3 |
||||
max_ev = len(self._fd_to_key) |
||||
ready = [] |
||||
try: |
||||
fd_event_list = wrap_error(self._epoll.poll, timeout, max_ev) |
||||
except InterruptedError: |
||||
return ready |
||||
for fd, event in fd_event_list: |
||||
events = 0 |
||||
if event & ~select.EPOLLIN: |
||||
events |= EVENT_WRITE |
||||
if event & ~select.EPOLLOUT: |
||||
events |= EVENT_READ |
||||
|
||||
key = self._key_from_fd(fd) |
||||
if key: |
||||
ready.append((key, events & key.events)) |
||||
return ready |
||||
|
||||
def close(self): |
||||
self._epoll.close() |
||||
super(EpollSelector, self).close() |
||||
|
||||
|
||||
if hasattr(select, 'devpoll'): |
||||
|
||||
class DevpollSelector(_BaseSelectorImpl): |
||||
"""Solaris /dev/poll selector.""" |
||||
|
||||
def __init__(self): |
||||
super(DevpollSelector, self).__init__() |
||||
self._devpoll = select.devpoll() |
||||
|
||||
def fileno(self): |
||||
return self._devpoll.fileno() |
||||
|
||||
def register(self, fileobj, events, data=None): |
||||
key = super(DevpollSelector, self).register(fileobj, events, data) |
||||
poll_events = 0 |
||||
if events & EVENT_READ: |
||||
poll_events |= select.POLLIN |
||||
if events & EVENT_WRITE: |
||||
poll_events |= select.POLLOUT |
||||
self._devpoll.register(key.fd, poll_events) |
||||
return key |
||||
|
||||
def unregister(self, fileobj): |
||||
key = super(DevpollSelector, self).unregister(fileobj) |
||||
self._devpoll.unregister(key.fd) |
||||
return key |
||||
|
||||
def select(self, timeout=None): |
||||
if timeout is None: |
||||
timeout = None |
||||
elif timeout <= 0: |
||||
timeout = 0 |
||||
else: |
||||
# devpoll() has a resolution of 1 millisecond, round away from |
||||
# zero to wait *at least* timeout seconds. |
||||
timeout = math.ceil(timeout * 1e3) |
||||
ready = [] |
||||
try: |
||||
fd_event_list = self._devpoll.poll(timeout) |
||||
except InterruptedError: |
||||
return ready |
||||
for fd, event in fd_event_list: |
||||
events = 0 |
||||
if event & ~select.POLLIN: |
||||
events |= EVENT_WRITE |
||||
if event & ~select.POLLOUT: |
||||
events |= EVENT_READ |
||||
|
||||
key = self._key_from_fd(fd) |
||||
if key: |
||||
ready.append((key, events & key.events)) |
||||
return ready |
||||
|
||||
def close(self): |
||||
self._devpoll.close() |
||||
super(DevpollSelector, self).close() |
||||
|
||||
|
||||
if hasattr(select, 'kqueue'): |
||||
|
||||
class KqueueSelector(_BaseSelectorImpl): |
||||
"""Kqueue-based selector.""" |
||||
|
||||
def __init__(self): |
||||
super(KqueueSelector, self).__init__() |
||||
self._kqueue = select.kqueue() |
||||
|
||||
def fileno(self): |
||||
return self._kqueue.fileno() |
||||
|
||||
def register(self, fileobj, events, data=None): |
||||
key = super(KqueueSelector, self).register(fileobj, events, data) |
||||
if events & EVENT_READ: |
||||
kev = select.kevent(key.fd, select.KQ_FILTER_READ, |
||||
select.KQ_EV_ADD) |
||||
self._kqueue.control([kev], 0, 0) |
||||
if events & EVENT_WRITE: |
||||
kev = select.kevent(key.fd, select.KQ_FILTER_WRITE, |
||||
select.KQ_EV_ADD) |
||||
self._kqueue.control([kev], 0, 0) |
||||
return key |
||||
|
||||
def unregister(self, fileobj): |
||||
key = super(KqueueSelector, self).unregister(fileobj) |
||||
if key.events & EVENT_READ: |
||||
kev = select.kevent(key.fd, select.KQ_FILTER_READ, |
||||
select.KQ_EV_DELETE) |
||||
try: |
||||
self._kqueue.control([kev], 0, 0) |
||||
except OSError: |
||||
# This can happen if the FD was closed since it |
||||
# was registered. |
||||
pass |
||||
if key.events & EVENT_WRITE: |
||||
kev = select.kevent(key.fd, select.KQ_FILTER_WRITE, |
||||
select.KQ_EV_DELETE) |
||||
try: |
||||
self._kqueue.control([kev], 0, 0) |
||||
except OSError: |
||||
# See comment above. |
||||
pass |
||||
return key |
||||
|
||||
def select(self, timeout=None): |
||||
timeout = None if timeout is None else max(timeout, 0) |
||||
max_ev = len(self._fd_to_key) |
||||
ready = [] |
||||
try: |
||||
kev_list = wrap_error(self._kqueue.control, |
||||
None, max_ev, timeout) |
||||
except InterruptedError: |
||||
return ready |
||||
for kev in kev_list: |
||||
fd = kev.ident |
||||
flag = kev.filter |
||||
events = 0 |
||||
if flag == select.KQ_FILTER_READ: |
||||
events |= EVENT_READ |
||||
if flag == select.KQ_FILTER_WRITE: |
||||
events |= EVENT_WRITE |
||||
|
||||
key = self._key_from_fd(fd) |
||||
if key: |
||||
ready.append((key, events & key.events)) |
||||
return ready |
||||
|
||||
def close(self): |
||||
self._kqueue.close() |
||||
super(KqueueSelector, self).close() |
||||
|
||||
|
||||
# Choose the best implementation: roughly, epoll|kqueue|devpoll > poll > select. |
||||
# select() also can't accept a FD > FD_SETSIZE (usually around 1024) |
||||
if 'KqueueSelector' in globals(): |
||||
DefaultSelector = KqueueSelector |
||||
elif 'EpollSelector' in globals(): |
||||
DefaultSelector = EpollSelector |
||||
elif 'DevpollSelector' in globals(): |
||||
DefaultSelector = DevpollSelector |
||||
elif 'PollSelector' in globals(): |
||||
DefaultSelector = PollSelector |
||||
else: |
||||
DefaultSelector = SelectSelector |
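For context, a minimal usage sketch of the selector API assembled above; the listening socket and the "accept" tag are illustrative, and only DefaultSelector, EVENT_READ, register(), select() and close() come from this module:

import socket

sel = DefaultSelector()
srv = socket.socket()
srv.bind(("127.0.0.1", 0))     # illustrative address, ephemeral port
srv.listen(5)
srv.setblocking(False)
sel.register(srv, EVENT_READ, data="accept")

for key, mask in sel.select(timeout=1.0):
    if key.data == "accept" and mask & EVENT_READ:
        conn, _ = key.fileobj.accept()
        conn.close()
sel.close()
srv.close()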
@ -0,0 +1,762 @@ |
||||
"""Utilities for writing code that runs on Python 2 and 3""" |
||||
|
||||
# Copyright (c) 2010-2014 Benjamin Peterson |
||||
# |
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy |
||||
# of this software and associated documentation files (the "Software"), to deal |
||||
# in the Software without restriction, including without limitation the rights |
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
||||
# copies of the Software, and to permit persons to whom the Software is |
||||
# furnished to do so, subject to the following conditions: |
||||
# |
||||
# The above copyright notice and this permission notice shall be included in all |
||||
# copies or substantial portions of the Software. |
||||
# |
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
||||
# SOFTWARE. |
||||
|
||||
from __future__ import absolute_import |
||||
|
||||
import functools |
||||
import operator |
||||
import sys |
||||
import types |
||||
|
||||
__author__ = "Benjamin Peterson <benjamin@python.org>" |
||||
__version__ = "1.8.0" |
||||
|
||||
|
||||
# Useful for very coarse version differentiation. |
||||
PY2 = sys.version_info[0] == 2 |
||||
PY3 = sys.version_info[0] == 3 |
||||
|
||||
if PY3: |
||||
string_types = str, |
||||
integer_types = int, |
||||
class_types = type, |
||||
text_type = str |
||||
binary_type = bytes |
||||
|
||||
MAXSIZE = sys.maxsize |
||||
else: |
||||
string_types = basestring, |
||||
integer_types = (int, long) |
||||
class_types = (type, types.ClassType) |
||||
text_type = unicode |
||||
binary_type = str |
||||
|
||||
if sys.platform.startswith("java"): |
||||
# Jython always uses 32 bits. |
||||
MAXSIZE = int((1 << 31) - 1) |
||||
else: |
||||
# It's possible to have sizeof(long) != sizeof(Py_ssize_t). |
||||
class X(object): |
||||
def __len__(self): |
||||
return 1 << 31 |
||||
try: |
||||
len(X()) |
||||
except OverflowError: |
||||
# 32-bit |
||||
MAXSIZE = int((1 << 31) - 1) |
||||
else: |
||||
# 64-bit |
||||
MAXSIZE = int((1 << 63) - 1) |
||||
del X |
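As a hedged illustration of how these aliases are meant to be consumed, here is a hypothetical helper (not part of six) that normalizes values to text on either interpreter:

def to_text(value, encoding="utf-8"):
    # str on PY2 / bytes on PY3 -> decode; unicode on PY2 / str on PY3 stays as-is.
    if isinstance(value, binary_type):
        return value.decode(encoding)
    if isinstance(value, text_type):
        return value
    raise TypeError("expected bytes or text, got %r" % type(value))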
||||
|
||||
|
||||
def _add_doc(func, doc): |
||||
"""Add documentation to a function.""" |
||||
func.__doc__ = doc |
||||
|
||||
|
||||
def _import_module(name): |
||||
"""Import module, returning the module after the last dot.""" |
||||
__import__(name) |
||||
return sys.modules[name] |
||||
|
||||
|
||||
class _LazyDescr(object): |
||||
|
||||
def __init__(self, name): |
||||
self.name = name |
||||
|
||||
def __get__(self, obj, tp): |
||||
result = self._resolve() |
||||
setattr(obj, self.name, result) # Invokes __set__. |
||||
# This is a bit ugly, but it avoids running this again. |
||||
delattr(obj.__class__, self.name) |
||||
return result |
||||
|
||||
|
||||
class MovedModule(_LazyDescr): |
||||
|
||||
def __init__(self, name, old, new=None): |
||||
super(MovedModule, self).__init__(name) |
||||
if PY3: |
||||
if new is None: |
||||
new = name |
||||
self.mod = new |
||||
else: |
||||
self.mod = old |
||||
|
||||
def _resolve(self): |
||||
return _import_module(self.mod) |
||||
|
||||
def __getattr__(self, attr): |
||||
_module = self._resolve() |
||||
value = getattr(_module, attr) |
||||
setattr(self, attr, value) |
||||
return value |
||||
|
||||
|
||||
class _LazyModule(types.ModuleType): |
||||
|
||||
def __init__(self, name): |
||||
super(_LazyModule, self).__init__(name) |
||||
self.__doc__ = self.__class__.__doc__ |
||||
|
||||
def __dir__(self): |
||||
attrs = ["__doc__", "__name__"] |
||||
attrs += [attr.name for attr in self._moved_attributes] |
||||
return attrs |
||||
|
||||
# Subclasses should override this |
||||
_moved_attributes = [] |
||||
|
||||
|
||||
class MovedAttribute(_LazyDescr): |
||||
|
||||
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): |
||||
super(MovedAttribute, self).__init__(name) |
||||
if PY3: |
||||
if new_mod is None: |
||||
new_mod = name |
||||
self.mod = new_mod |
||||
if new_attr is None: |
||||
if old_attr is None: |
||||
new_attr = name |
||||
else: |
||||
new_attr = old_attr |
||||
self.attr = new_attr |
||||
else: |
||||
self.mod = old_mod |
||||
if old_attr is None: |
||||
old_attr = name |
||||
self.attr = old_attr |
||||
|
||||
def _resolve(self): |
||||
module = _import_module(self.mod) |
||||
return getattr(module, self.attr) |
||||
|
||||
|
||||
class _SixMetaPathImporter(object): |
||||
""" |
||||
A meta path importer to import six.moves and its submodules. |
||||
|
||||
This class implements a PEP302 finder and loader. It should be compatible |
||||
with Python 2.5 and all existing versions of Python3 |
||||
""" |
||||
def __init__(self, six_module_name): |
||||
self.name = six_module_name |
||||
self.known_modules = {} |
||||
|
||||
def _add_module(self, mod, *fullnames): |
||||
for fullname in fullnames: |
||||
self.known_modules[self.name + "." + fullname] = mod |
||||
|
||||
def _get_module(self, fullname): |
||||
return self.known_modules[self.name + "." + fullname] |
||||
|
||||
def find_module(self, fullname, path=None): |
||||
if fullname in self.known_modules: |
||||
return self |
||||
return None |
||||
|
||||
def __get_module(self, fullname): |
||||
try: |
||||
return self.known_modules[fullname] |
||||
except KeyError: |
||||
raise ImportError("This loader does not know module " + fullname) |
||||
|
||||
def load_module(self, fullname): |
||||
try: |
||||
# in case of a reload |
||||
return sys.modules[fullname] |
||||
except KeyError: |
||||
pass |
||||
mod = self.__get_module(fullname) |
||||
if isinstance(mod, MovedModule): |
||||
mod = mod._resolve() |
||||
else: |
||||
mod.__loader__ = self |
||||
sys.modules[fullname] = mod |
||||
return mod |
||||
|
||||
def is_package(self, fullname): |
||||
""" |
||||
Return true, if the named module is a package. |
||||
|
||||
We need this method to get correct spec objects with |
||||
Python 3.4 (see PEP451) |
||||
""" |
||||
return hasattr(self.__get_module(fullname), "__path__") |
||||
|
||||
def get_code(self, fullname): |
||||
"""Return None |
||||
|
||||
Required, if is_package is implemented""" |
||||
self.__get_module(fullname) # eventually raises ImportError |
||||
return None |
||||
get_source = get_code # same as get_code |
||||
|
||||
_importer = _SixMetaPathImporter(__name__) |
||||
|
||||
|
||||
class _MovedItems(_LazyModule): |
||||
"""Lazy loading of moved objects""" |
||||
__path__ = [] # mark as package |
||||
|
||||
|
||||
_moved_attributes = [ |
||||
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), |
||||
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), |
||||
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), |
||||
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), |
||||
MovedAttribute("intern", "__builtin__", "sys"), |
||||
MovedAttribute("map", "itertools", "builtins", "imap", "map"), |
||||
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), |
||||
MovedAttribute("reload_module", "__builtin__", "imp", "reload"), |
||||
MovedAttribute("reduce", "__builtin__", "functools"), |
||||
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), |
||||
MovedAttribute("StringIO", "StringIO", "io"), |
||||
MovedAttribute("UserDict", "UserDict", "collections"), |
||||
MovedAttribute("UserList", "UserList", "collections"), |
||||
MovedAttribute("UserString", "UserString", "collections"), |
||||
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), |
||||
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), |
||||
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), |
||||
|
||||
MovedModule("builtins", "__builtin__"), |
||||
MovedModule("configparser", "ConfigParser"), |
||||
MovedModule("copyreg", "copy_reg"), |
||||
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), |
||||
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), |
||||
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), |
||||
MovedModule("http_cookies", "Cookie", "http.cookies"), |
||||
MovedModule("html_entities", "htmlentitydefs", "html.entities"), |
||||
MovedModule("html_parser", "HTMLParser", "html.parser"), |
||||
MovedModule("http_client", "httplib", "http.client"), |
||||
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), |
||||
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), |
||||
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), |
||||
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), |
||||
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), |
||||
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), |
||||
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), |
||||
MovedModule("cPickle", "cPickle", "pickle"), |
||||
MovedModule("queue", "Queue"), |
||||
MovedModule("reprlib", "repr"), |
||||
MovedModule("socketserver", "SocketServer"), |
||||
MovedModule("_thread", "thread", "_thread"), |
||||
MovedModule("tkinter", "Tkinter"), |
||||
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), |
||||
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), |
||||
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), |
||||
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), |
||||
MovedModule("tkinter_tix", "Tix", "tkinter.tix"), |
||||
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), |
||||
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), |
||||
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), |
||||
MovedModule("tkinter_colorchooser", "tkColorChooser", |
||||
"tkinter.colorchooser"), |
||||
MovedModule("tkinter_commondialog", "tkCommonDialog", |
||||
"tkinter.commondialog"), |
||||
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), |
||||
MovedModule("tkinter_font", "tkFont", "tkinter.font"), |
||||
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), |
||||
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", |
||||
"tkinter.simpledialog"), |
||||
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), |
||||
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), |
||||
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), |
||||
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), |
||||
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), |
||||
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), |
||||
MovedModule("winreg", "_winreg"), |
||||
] |
||||
for attr in _moved_attributes: |
||||
setattr(_MovedItems, attr.name, attr) |
||||
if isinstance(attr, MovedModule): |
||||
_importer._add_module(attr, "moves." + attr.name) |
||||
del attr |
||||
|
||||
_MovedItems._moved_attributes = _moved_attributes |
||||
|
||||
moves = _MovedItems(__name__ + ".moves") |
||||
_importer._add_module(moves, "moves") |
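A short hedged example of what the registry above enables for callers, assuming the canonical top-level module name six (this vendored copy may live under a different package):

# Intercepted by _SixMetaPathImporter and resolved lazily on first attribute use.
from six.moves import range, configparser           # xrange / ConfigParser on PY2
from six.moves.urllib.parse import urlencode        # urllib namespace registered further below

print(list(range(3)))             # [0, 1, 2] on both interpreters
print(urlencode({"q": "flask"}))  # q=flask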
||||
|
||||
|
||||
class Module_six_moves_urllib_parse(_LazyModule): |
||||
"""Lazy loading of moved objects in six.moves.urllib_parse""" |
||||
|
||||
|
||||
_urllib_parse_moved_attributes = [ |
||||
MovedAttribute("ParseResult", "urlparse", "urllib.parse"), |
||||
MovedAttribute("SplitResult", "urlparse", "urllib.parse"), |
||||
MovedAttribute("parse_qs", "urlparse", "urllib.parse"), |
||||
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), |
||||
MovedAttribute("urldefrag", "urlparse", "urllib.parse"), |
||||
MovedAttribute("urljoin", "urlparse", "urllib.parse"), |
||||
MovedAttribute("urlparse", "urlparse", "urllib.parse"), |
||||
MovedAttribute("urlsplit", "urlparse", "urllib.parse"), |
||||
MovedAttribute("urlunparse", "urlparse", "urllib.parse"), |
||||
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), |
||||
MovedAttribute("quote", "urllib", "urllib.parse"), |
||||
MovedAttribute("quote_plus", "urllib", "urllib.parse"), |
||||
MovedAttribute("unquote", "urllib", "urllib.parse"), |
||||
MovedAttribute("unquote_plus", "urllib", "urllib.parse"), |
||||
MovedAttribute("urlencode", "urllib", "urllib.parse"), |
||||
MovedAttribute("splitquery", "urllib", "urllib.parse"), |
||||
MovedAttribute("splittag", "urllib", "urllib.parse"), |
||||
MovedAttribute("splituser", "urllib", "urllib.parse"), |
||||
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), |
||||
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), |
||||
MovedAttribute("uses_params", "urlparse", "urllib.parse"), |
||||
MovedAttribute("uses_query", "urlparse", "urllib.parse"), |
||||
MovedAttribute("uses_relative", "urlparse", "urllib.parse"), |
||||
] |
||||
for attr in _urllib_parse_moved_attributes: |
||||
setattr(Module_six_moves_urllib_parse, attr.name, attr) |
||||
del attr |
||||
|
||||
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes |
||||
|
||||
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), |
||||
"moves.urllib_parse", "moves.urllib.parse") |
||||
|
||||
|
||||
class Module_six_moves_urllib_error(_LazyModule): |
||||
"""Lazy loading of moved objects in six.moves.urllib_error""" |
||||
|
||||
|
||||
_urllib_error_moved_attributes = [ |
||||
MovedAttribute("URLError", "urllib2", "urllib.error"), |
||||
MovedAttribute("HTTPError", "urllib2", "urllib.error"), |
||||
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), |
||||
] |
||||
for attr in _urllib_error_moved_attributes: |
||||
setattr(Module_six_moves_urllib_error, attr.name, attr) |
||||
del attr |
||||
|
||||
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes |
||||
|
||||
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), |
||||
"moves.urllib_error", "moves.urllib.error") |
||||
|
||||
|
||||
class Module_six_moves_urllib_request(_LazyModule): |
||||
"""Lazy loading of moved objects in six.moves.urllib_request""" |
||||
|
||||
|
||||
_urllib_request_moved_attributes = [ |
||||
MovedAttribute("urlopen", "urllib2", "urllib.request"), |
||||
MovedAttribute("install_opener", "urllib2", "urllib.request"), |
||||
MovedAttribute("build_opener", "urllib2", "urllib.request"), |
||||
MovedAttribute("pathname2url", "urllib", "urllib.request"), |
||||
MovedAttribute("url2pathname", "urllib", "urllib.request"), |
||||
MovedAttribute("getproxies", "urllib", "urllib.request"), |
||||
MovedAttribute("Request", "urllib2", "urllib.request"), |
||||
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), |
||||
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), |
||||
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), |
||||
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), |
||||
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), |
||||
MovedAttribute("BaseHandler", "urllib2", "urllib.request"), |
||||
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), |
||||
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), |
||||
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), |
||||
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), |
||||
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), |
||||
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), |
||||
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), |
||||
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), |
||||
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), |
||||
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), |
||||
MovedAttribute("FileHandler", "urllib2", "urllib.request"), |
||||
MovedAttribute("FTPHandler", "urllib2", "urllib.request"), |
||||
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), |
||||
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), |
||||
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), |
||||
MovedAttribute("urlretrieve", "urllib", "urllib.request"), |
||||
MovedAttribute("urlcleanup", "urllib", "urllib.request"), |
||||
MovedAttribute("URLopener", "urllib", "urllib.request"), |
||||
MovedAttribute("FancyURLopener", "urllib", "urllib.request"), |
||||
MovedAttribute("proxy_bypass", "urllib", "urllib.request"), |
||||
] |
||||
for attr in _urllib_request_moved_attributes: |
||||
setattr(Module_six_moves_urllib_request, attr.name, attr) |
||||
del attr |
||||
|
||||
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes |
||||
|
||||
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), |
||||
"moves.urllib_request", "moves.urllib.request") |
||||
|
||||
|
||||
class Module_six_moves_urllib_response(_LazyModule): |
||||
"""Lazy loading of moved objects in six.moves.urllib_response""" |
||||
|
||||
|
||||
_urllib_response_moved_attributes = [ |
||||
MovedAttribute("addbase", "urllib", "urllib.response"), |
||||
MovedAttribute("addclosehook", "urllib", "urllib.response"), |
||||
MovedAttribute("addinfo", "urllib", "urllib.response"), |
||||
MovedAttribute("addinfourl", "urllib", "urllib.response"), |
||||
] |
||||
for attr in _urllib_response_moved_attributes: |
||||
setattr(Module_six_moves_urllib_response, attr.name, attr) |
||||
del attr |
||||
|
||||
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes |
||||
|
||||
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), |
||||
"moves.urllib_response", "moves.urllib.response") |
||||
|
||||
|
||||
class Module_six_moves_urllib_robotparser(_LazyModule): |
||||
"""Lazy loading of moved objects in six.moves.urllib_robotparser""" |
||||
|
||||
|
||||
_urllib_robotparser_moved_attributes = [ |
||||
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), |
||||
] |
||||
for attr in _urllib_robotparser_moved_attributes: |
||||
setattr(Module_six_moves_urllib_robotparser, attr.name, attr) |
||||
del attr |
||||
|
||||
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes |
||||
|
||||
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), |
||||
"moves.urllib_robotparser", "moves.urllib.robotparser") |
||||
|
||||
|
||||
class Module_six_moves_urllib(types.ModuleType): |
||||
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace""" |
||||
__path__ = [] # mark as package |
||||
parse = _importer._get_module("moves.urllib_parse") |
||||
error = _importer._get_module("moves.urllib_error") |
||||
request = _importer._get_module("moves.urllib_request") |
||||
response = _importer._get_module("moves.urllib_response") |
||||
robotparser = _importer._get_module("moves.urllib_robotparser") |
||||
|
||||
def __dir__(self): |
||||
return ['parse', 'error', 'request', 'response', 'robotparser'] |
||||
|
||||
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), |
||||
"moves.urllib") |
||||
|
||||
|
||||
def add_move(move): |
||||
"""Add an item to six.moves.""" |
||||
setattr(_MovedItems, move.name, move) |
||||
|
||||
|
||||
def remove_move(name): |
||||
"""Remove item from six.moves.""" |
||||
try: |
||||
delattr(_MovedItems, name) |
||||
except AttributeError: |
||||
try: |
||||
del moves.__dict__[name] |
||||
except KeyError: |
||||
raise AttributeError("no such move, %r" % (name,)) |
||||
|
||||
|
||||
if PY3: |
||||
_meth_func = "__func__" |
||||
_meth_self = "__self__" |
||||
|
||||
_func_closure = "__closure__" |
||||
_func_code = "__code__" |
||||
_func_defaults = "__defaults__" |
||||
_func_globals = "__globals__" |
||||
else: |
||||
_meth_func = "im_func" |
||||
_meth_self = "im_self" |
||||
|
||||
_func_closure = "func_closure" |
||||
_func_code = "func_code" |
||||
_func_defaults = "func_defaults" |
||||
_func_globals = "func_globals" |
||||
|
||||
|
||||
try: |
||||
advance_iterator = next |
||||
except NameError: |
||||
def advance_iterator(it): |
||||
return it.next() |
||||
next = advance_iterator |
||||
|
||||
|
||||
try: |
||||
callable = callable |
||||
except NameError: |
||||
def callable(obj): |
||||
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) |
||||
|
||||
|
||||
if PY3: |
||||
def get_unbound_function(unbound): |
||||
return unbound |
||||
|
||||
create_bound_method = types.MethodType |
||||
|
||||
Iterator = object |
||||
else: |
||||
def get_unbound_function(unbound): |
||||
return unbound.im_func |
||||
|
||||
def create_bound_method(func, obj): |
||||
return types.MethodType(func, obj, obj.__class__) |
||||
|
||||
class Iterator(object): |
||||
|
||||
def next(self): |
||||
return type(self).__next__(self) |
||||
|
||||
callable = callable |
||||
_add_doc(get_unbound_function, |
||||
"""Get the function out of a possibly unbound function""") |
||||
|
||||
|
||||
get_method_function = operator.attrgetter(_meth_func) |
||||
get_method_self = operator.attrgetter(_meth_self) |
||||
get_function_closure = operator.attrgetter(_func_closure) |
||||
get_function_code = operator.attrgetter(_func_code) |
||||
get_function_defaults = operator.attrgetter(_func_defaults) |
||||
get_function_globals = operator.attrgetter(_func_globals) |
||||
|
||||
|
||||
if PY3: |
||||
def iterkeys(d, **kw): |
||||
return iter(d.keys(**kw)) |
||||
|
||||
def itervalues(d, **kw): |
||||
return iter(d.values(**kw)) |
||||
|
||||
def iteritems(d, **kw): |
||||
return iter(d.items(**kw)) |
||||
|
||||
def iterlists(d, **kw): |
||||
return iter(d.lists(**kw)) |
||||
else: |
||||
def iterkeys(d, **kw): |
||||
return iter(d.iterkeys(**kw)) |
||||
|
||||
def itervalues(d, **kw): |
||||
return iter(d.itervalues(**kw)) |
||||
|
||||
def iteritems(d, **kw): |
||||
return iter(d.iteritems(**kw)) |
||||
|
||||
def iterlists(d, **kw): |
||||
return iter(d.iterlists(**kw)) |
||||
|
||||
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") |
||||
_add_doc(itervalues, "Return an iterator over the values of a dictionary.") |
||||
_add_doc(iteritems, |
||||
"Return an iterator over the (key, value) pairs of a dictionary.") |
||||
_add_doc(iterlists, |
||||
"Return an iterator over the (key, [values]) pairs of a dictionary.") |
||||
|
||||
|
||||
if PY3: |
||||
def b(s): |
||||
return s.encode("latin-1") |
||||
def u(s): |
||||
return s |
||||
unichr = chr |
||||
if sys.version_info[1] <= 1: |
||||
def int2byte(i): |
||||
return bytes((i,)) |
||||
else: |
||||
# This is about 2x faster than the implementation above on 3.2+ |
||||
int2byte = operator.methodcaller("to_bytes", 1, "big") |
||||
byte2int = operator.itemgetter(0) |
||||
indexbytes = operator.getitem |
||||
iterbytes = iter |
||||
import io |
||||
StringIO = io.StringIO |
||||
BytesIO = io.BytesIO |
||||
else: |
||||
def b(s): |
||||
return s |
||||
# Workaround for standalone backslash |
||||
def u(s): |
||||
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") |
||||
unichr = unichr |
||||
int2byte = chr |
||||
def byte2int(bs): |
||||
return ord(bs[0]) |
||||
def indexbytes(buf, i): |
||||
return ord(buf[i]) |
||||
def iterbytes(buf): |
||||
return (ord(byte) for byte in buf) |
||||
import StringIO |
||||
StringIO = BytesIO = StringIO.StringIO |
||||
_add_doc(b, """Byte literal""") |
||||
_add_doc(u, """Text literal""") |
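A small hedged example of the byte/text literal helpers defined above; note that u() expects escaped (ASCII-only) input:

payload = b("GET / HTTP/1.1\r\n\r\n")   # bytes on both PY2 and PY3
title = u("caf\u00e9")                  # text (unicode) on both interpreters
print(type(payload), type(title))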
||||
|
||||
|
||||
if PY3: |
||||
exec_ = getattr(moves.builtins, "exec") |
||||
|
||||
|
||||
def reraise(tp, value, tb=None): |
||||
if value is None: |
||||
value = tp() |
||||
if value.__traceback__ is not tb: |
||||
raise value.with_traceback(tb) |
||||
raise value |
||||
|
||||
else: |
||||
def exec_(_code_, _globs_=None, _locs_=None): |
||||
"""Execute code in a namespace.""" |
||||
if _globs_ is None: |
||||
frame = sys._getframe(1) |
||||
_globs_ = frame.f_globals |
||||
if _locs_ is None: |
||||
_locs_ = frame.f_locals |
||||
del frame |
||||
elif _locs_ is None: |
||||
_locs_ = _globs_ |
||||
exec("""exec _code_ in _globs_, _locs_""") |
||||
|
||||
|
||||
exec_("""def reraise(tp, value, tb=None): |
||||
raise tp, value, tb |
||||
""") |
||||
|
||||
|
||||
print_ = getattr(moves.builtins, "print", None) |
||||
if print_ is None: |
||||
def print_(*args, **kwargs): |
||||
"""The new-style print function for Python 2.4 and 2.5.""" |
||||
fp = kwargs.pop("file", sys.stdout) |
||||
if fp is None: |
||||
return |
||||
def write(data): |
||||
if not isinstance(data, basestring): |
||||
data = str(data) |
||||
# If the file has an encoding, encode unicode with it. |
||||
if (isinstance(fp, file) and |
||||
isinstance(data, unicode) and |
||||
fp.encoding is not None): |
||||
errors = getattr(fp, "errors", None) |
||||
if errors is None: |
||||
errors = "strict" |
||||
data = data.encode(fp.encoding, errors) |
||||
fp.write(data) |
||||
want_unicode = False |
||||
sep = kwargs.pop("sep", None) |
||||
if sep is not None: |
||||
if isinstance(sep, unicode): |
||||
want_unicode = True |
||||
elif not isinstance(sep, str): |
||||
raise TypeError("sep must be None or a string") |
||||
end = kwargs.pop("end", None) |
||||
if end is not None: |
||||
if isinstance(end, unicode): |
||||
want_unicode = True |
||||
elif not isinstance(end, str): |
||||
raise TypeError("end must be None or a string") |
||||
if kwargs: |
||||
raise TypeError("invalid keyword arguments to print()") |
||||
if not want_unicode: |
||||
for arg in args: |
||||
if isinstance(arg, unicode): |
||||
want_unicode = True |
||||
break |
||||
if want_unicode: |
||||
newline = unicode("\n") |
||||
space = unicode(" ") |
||||
else: |
||||
newline = "\n" |
||||
space = " " |
||||
if sep is None: |
||||
sep = space |
||||
if end is None: |
||||
end = newline |
||||
for i, arg in enumerate(args): |
||||
if i: |
||||
write(sep) |
||||
write(arg) |
||||
write(end) |
||||
|
||||
_add_doc(reraise, """Reraise an exception.""") |
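A hedged sketch of the intended reraise() pattern: re-raise a caught exception with its original traceback after some intermediate handling (ValueError is just an example):

import sys

try:
    int("not a number")
except ValueError:
    exc_type, exc_value, exc_tb = sys.exc_info()
    # ... logging or cleanup would go here ...
    reraise(exc_type, exc_value, exc_tb)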
||||
|
||||
if sys.version_info[0:2] < (3, 4): |
||||
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, |
||||
updated=functools.WRAPPER_UPDATES): |
||||
def wrapper(f): |
||||
f = functools.wraps(wrapped)(f) |
||||
f.__wrapped__ = wrapped |
||||
return f |
||||
return wrapper |
||||
else: |
||||
wraps = functools.wraps |
||||
|
||||
def with_metaclass(meta, *bases): |
||||
"""Create a base class with a metaclass.""" |
||||
# This requires a bit of explanation: the basic idea is to make a dummy |
||||
# metaclass for one level of class instantiation that replaces itself with |
||||
# the actual metaclass. |
||||
class metaclass(meta): |
||||
def __new__(cls, name, this_bases, d): |
||||
return meta(name, bases, d) |
||||
return type.__new__(metaclass, 'temporary_class', (), {}) |
||||
|
||||
|
||||
def add_metaclass(metaclass): |
||||
"""Class decorator for creating a class with a metaclass.""" |
||||
def wrapper(cls): |
||||
orig_vars = cls.__dict__.copy() |
||||
slots = orig_vars.get('__slots__') |
||||
if slots is not None: |
||||
if isinstance(slots, str): |
||||
slots = [slots] |
||||
for slots_var in slots: |
||||
orig_vars.pop(slots_var) |
||||
orig_vars.pop('__dict__', None) |
||||
orig_vars.pop('__weakref__', None) |
||||
return metaclass(cls.__name__, cls.__bases__, orig_vars) |
||||
return wrapper |
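For reference, the two helpers above are typically used as follows; Meta and the class names are illustrative only:

class Meta(type):
    pass

class ViaFunction(with_metaclass(Meta, object)):   # function form, PY2/PY3 safe
    pass

@add_metaclass(Meta)
class ViaDecorator(object):                        # decorator form, equivalent
    pass

assert type(ViaFunction) is Meta and type(ViaDecorator) is Meta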
||||
|
||||
# Complete the moves implementation. |
||||
# This code is at the end of this module to speed up module loading. |
||||
# Turn this module into a package. |
||||
__path__ = [] # required for PEP 302 and PEP 451 |
||||
__package__ = __name__ # see PEP 366 @ReservedAssignment |
||||
if globals().get("__spec__") is not None: |
||||
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable |
||||
# Remove other six meta path importers, since they cause problems. This can |
||||
# happen if six is removed from sys.modules and then reloaded. (Setuptools does |
||||
# this for some reason.) |
||||
if sys.meta_path: |
||||
for i, importer in enumerate(sys.meta_path): |
||||
# Here's some real nastiness: Another "instance" of the six module might |
||||
# be floating around. Therefore, we can't use isinstance() to check for |
||||
# the six meta path importer, since the other six instance will have |
||||
# inserted an importer with different class. |
||||
if (type(importer).__name__ == "_SixMetaPathImporter" and |
||||
importer.name == __name__): |
||||
del sys.meta_path[i] |
||||
break |
||||
del i, importer |
||||
# Finally, add the importer to the meta path import hook. |
||||
sys.meta_path.append(_importer) |
@ -0,0 +1,209 @@ |
||||
# -*- coding: utf-8 - |
||||
# |
||||
# This file is part of gunicorn released under the MIT license. |
||||
# See the NOTICE for more information. |
||||
|
||||
import errno |
||||
import os |
||||
import socket |
||||
import stat |
||||
import sys |
||||
import time |
||||
|
||||
from gunicorn import util |
||||
from gunicorn.six import string_types |
||||
|
||||
|
||||
class BaseSocket(object): |
||||
|
||||
def __init__(self, address, conf, log, fd=None): |
||||
self.log = log |
||||
self.conf = conf |
||||
|
||||
self.cfg_addr = address |
||||
if fd is None: |
||||
sock = socket.socket(self.FAMILY, socket.SOCK_STREAM) |
||||
bound = False |
||||
else: |
||||
sock = socket.fromfd(fd, self.FAMILY, socket.SOCK_STREAM) |
||||
os.close(fd) |
||||
bound = True |
||||
|
||||
self.sock = self.set_options(sock, bound=bound) |
||||
|
||||
def __str__(self): |
||||
return "<socket %d>" % self.sock.fileno() |
||||
|
||||
def __getattr__(self, name): |
||||
return getattr(self.sock, name) |
||||
|
||||
def set_options(self, sock, bound=False): |
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) |
||||
if (self.conf.reuse_port |
||||
and hasattr(socket, 'SO_REUSEPORT')): # pragma: no cover |
||||
try: |
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) |
||||
except socket.error as err: |
||||
if err.errno not in (errno.ENOPROTOOPT, errno.EINVAL): |
||||
raise |
||||
if not bound: |
||||
self.bind(sock) |
||||
sock.setblocking(0) |
||||
|
||||
# make sure that the socket can be inherited |
||||
if hasattr(sock, "set_inheritable"): |
||||
sock.set_inheritable(True) |
||||
|
||||
sock.listen(self.conf.backlog) |
||||
return sock |
||||
|
||||
def bind(self, sock): |
||||
sock.bind(self.cfg_addr) |
||||
|
||||
def close(self): |
||||
if self.sock is None: |
||||
return |
||||
|
||||
try: |
||||
self.sock.close() |
||||
except socket.error as e: |
||||
self.log.info("Error while closing socket %s", str(e)) |
||||
|
||||
self.sock = None |
||||
|
||||
|
||||
class TCPSocket(BaseSocket): |
||||
|
||||
FAMILY = socket.AF_INET |
||||
|
||||
def __str__(self): |
||||
if self.conf.is_ssl: |
||||
scheme = "https" |
||||
else: |
||||
scheme = "http" |
||||
|
||||
addr = self.sock.getsockname() |
||||
return "%s://%s:%d" % (scheme, addr[0], addr[1]) |
||||
|
||||
def set_options(self, sock, bound=False): |
||||
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) |
||||
return super(TCPSocket, self).set_options(sock, bound=bound) |
||||
|
||||
|
||||
class TCP6Socket(TCPSocket): |
||||
|
||||
FAMILY = socket.AF_INET6 |
||||
|
||||
def __str__(self): |
||||
(host, port, _, _) = self.sock.getsockname() |
||||
return "http://[%s]:%d" % (host, port) |
||||
|
||||
|
||||
class UnixSocket(BaseSocket): |
||||
|
||||
FAMILY = socket.AF_UNIX |
||||
|
||||
def __init__(self, addr, conf, log, fd=None): |
||||
if fd is None: |
||||
try: |
||||
st = os.stat(addr) |
||||
except OSError as e: |
||||
if e.args[0] != errno.ENOENT: |
||||
raise |
||||
else: |
||||
if stat.S_ISSOCK(st.st_mode): |
||||
os.remove(addr) |
||||
else: |
||||
raise ValueError("%r is not a socket" % addr) |
||||
super(UnixSocket, self).__init__(addr, conf, log, fd=fd) |
||||
|
||||
def __str__(self): |
||||
return "unix:%s" % self.cfg_addr |
||||
|
||||
def bind(self, sock): |
||||
old_umask = os.umask(self.conf.umask) |
||||
sock.bind(self.cfg_addr) |
||||
util.chown(self.cfg_addr, self.conf.uid, self.conf.gid) |
||||
os.umask(old_umask) |
||||
|
||||
|
||||
def _sock_type(addr): |
||||
if isinstance(addr, tuple): |
||||
if util.is_ipv6(addr[0]): |
||||
sock_type = TCP6Socket |
||||
else: |
||||
sock_type = TCPSocket |
||||
elif isinstance(addr, string_types): |
||||
sock_type = UnixSocket |
||||
else: |
||||
raise TypeError("Unable to create socket from: %r" % addr) |
||||
return sock_type |
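A short hedged illustration of the dispatch performed by _sock_type(); the addresses are examples:

assert _sock_type(("0.0.0.0", 8000)) is TCPSocket          # IPv4 tuple
assert _sock_type(("::1", 8000, 0, 0)) is TCP6Socket       # IPv6 tuple
assert _sock_type("/tmp/gunicorn.sock") is UnixSocket      # path string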
||||
|
||||
|
||||
def create_sockets(conf, log, fds=None): |
||||
""" |
||||
Create a new socket for the configured addresses or file descriptors. |
||||
|
||||
If a configured address is a tuple then a TCP socket is created. |
||||
If it is a string, a Unix socket is created. Otherwise, a TypeError is |
||||
raised. |
||||
""" |
||||
listeners = [] |
||||
|
||||
# get it only once |
||||
laddr = conf.address |
||||
|
||||
# check ssl config early to raise the error on startup |
||||
# only the certfile is needed since it can contain the keyfile |
||||
if conf.certfile and not os.path.exists(conf.certfile): |
||||
raise ValueError('certfile "%s" does not exist' % conf.certfile) |
||||
|
||||
if conf.keyfile and not os.path.exists(conf.keyfile): |
||||
raise ValueError('keyfile "%s" does not exist' % conf.keyfile) |
||||
|
||||
# sockets are already bound |
||||
if fds is not None: |
||||
for fd in fds: |
||||
sock = socket.fromfd(fd, socket.AF_UNIX, socket.SOCK_STREAM) |
||||
sock_name = sock.getsockname() |
||||
sock_type = _sock_type(sock_name) |
||||
listener = sock_type(sock_name, conf, log, fd=fd) |
||||
listeners.append(listener) |
||||
|
||||
return listeners |
||||
|
||||
# no sockets are bound; first initialization of gunicorn in this env. |
||||
for addr in laddr: |
||||
sock_type = _sock_type(addr) |
||||
sock = None |
||||
for i in range(5): |
||||
try: |
||||
sock = sock_type(addr, conf, log) |
||||
except socket.error as e: |
||||
if e.args[0] == errno.EADDRINUSE: |
||||
log.error("Connection in use: %s", str(addr)) |
||||
if e.args[0] == errno.EADDRNOTAVAIL: |
||||
log.error("Invalid address: %s", str(addr)) |
||||
if i < 5: |
||||
msg = "connection to {addr} failed: {error}" |
||||
log.debug(msg.format(addr=str(addr), error=str(e))) |
||||
log.error("Retrying in 1 second.") |
||||
time.sleep(1) |
||||
else: |
||||
break |
||||
|
||||
if sock is None: |
||||
log.error("Can't connect to %s", str(addr)) |
||||
sys.exit(1) |
||||
|
||||
listeners.append(sock) |
||||
|
||||
return listeners |
||||
|
||||
|
||||
def close_sockets(listeners, unlink=True): |
||||
for sock in listeners: |
||||
sock_name = sock.getsockname() |
||||
sock.close() |
||||
if unlink and _sock_type(sock_name) is UnixSocket: |
||||
os.unlink(sock_name) |
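Finally, a hedged sketch of the create/close lifecycle. The config stub below only fakes the attributes this module reads and is not gunicorn's real Config class:

import logging

class FakeConf(object):
    # hypothetical stand-in: one TCP listener on an ephemeral port
    address = [("127.0.0.1", 0)]
    backlog = 64
    reuse_port = False
    certfile = None
    keyfile = None
    is_ssl = False
    umask = 0
    uid = -1
    gid = -1

log = logging.getLogger("demo")
listeners = create_sockets(FakeConf(), log)   # binds, listens, returns socket wrappers
print([str(l) for l in listeners])            # e.g. ['http://127.0.0.1:54321']
close_sockets(listeners, unlink=False)        # closes; unlink only matters for Unix sockets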