Logout"""
- % escape(request.user),
- mimetype="text/html",
- )
-
-
-@AppRequest.application
-def application(request):
- if request.args.get("do") == "logout":
- request.logout()
- response = redirect(".")
- elif request.logged_in:
- response = index(request)
- else:
- response = login_form(request)
- request.session.save_cookie(response)
- return response
-
-
-if __name__ == "__main__":
- run_simple("localhost", 4000, application)
diff --git a/src/werkzeug/contrib/__init__.py b/src/werkzeug/contrib/__init__.py
deleted file mode 100644
index 0e741f07f..000000000
--- a/src/werkzeug/contrib/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- werkzeug.contrib
- ~~~~~~~~~~~~~~~~
-
- Contains user-submitted code that other users may find useful, but which
- is not part of the Werkzeug core. Anyone can write code for inclusion in
- the `contrib` package. All modules in this package are distributed as an
- add-on library and thus are not part of Werkzeug itself.
-
- This file itself is mostly for informational purposes and to tell the
- Python interpreter that `contrib` is a package.
-
- :copyright: 2007 Pallets
- :license: BSD-3-Clause
-"""
diff --git a/src/werkzeug/contrib/atom.py b/src/werkzeug/contrib/atom.py
deleted file mode 100644
index d079d2bf2..000000000
--- a/src/werkzeug/contrib/atom.py
+++ /dev/null
@@ -1,362 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- werkzeug.contrib.atom
- ~~~~~~~~~~~~~~~~~~~~~
-
- This module provides a class called :class:`AtomFeed` which can be
- used to generate feeds in the Atom syndication format (see :rfc:`4287`).
-
- Example::
-
- def atom_feed(request):
- feed = AtomFeed("My Blog", feed_url=request.url,
- url=request.host_url,
- subtitle="My example blog for a feed test.")
- for post in Post.query.limit(10).all():
- feed.add(post.title, post.body, content_type='html',
- author=post.author, url=post.url, id=post.uid,
- updated=post.last_update, published=post.pub_date)
- return feed.get_response()
-
- :copyright: 2007 Pallets
- :license: BSD-3-Clause
-"""
-import warnings
-from datetime import datetime
-
-from .._compat import implements_to_string
-from .._compat import string_types
-from ..utils import escape
-from ..wrappers import BaseResponse
-
-warnings.warn(
- "'werkzeug.contrib.atom' is deprecated as of version 0.15 and will"
- " be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
-)
-
-XHTML_NAMESPACE = "http://www.w3.org/1999/xhtml"
-
-
-def _make_text_block(name, content, content_type=None):
- """Helper function for the builder that creates an XML text block."""
- if content_type == "xhtml":
- return u'<%s type="xhtml">
%s
%s>\n' % (
- name,
- XHTML_NAMESPACE,
- content,
- name,
- )
- if not content_type:
- return u"<%s>%s%s>\n" % (name, escape(content), name)
- return u'<%s type="%s">%s%s>\n' % (name, content_type, escape(content), name)
-
-
-def format_iso8601(obj):
- """Format a datetime object for iso8601"""
- iso8601 = obj.isoformat()
- if obj.tzinfo:
- return iso8601
- return iso8601 + "Z"
-
-
-@implements_to_string
-class AtomFeed(object):
-
- """A helper class that creates Atom feeds.
-
- :param title: the title of the feed. Required.
- :param title_type: the type attribute for the title element. One of
- ``'html'``, ``'text'`` or ``'xhtml'``.
- :param url: the url for the feed (not the url *of* the feed)
- :param id: a globally unique id for the feed. Must be an URI. If
- not present the `feed_url` is used, but one of both is
- required.
- :param updated: the time the feed was modified the last time. Must
- be a :class:`datetime.datetime` object. If not
- present the latest entry's `updated` is used.
- Treated as UTC if naive datetime.
- :param feed_url: the URL to the feed. Should be the URL that was
- requested.
- :param author: the author of the feed. Must be either a string (the
- name) or a dict with name (required) and uri or
- email (both optional). Can be a list of (may be
- mixed, too) strings and dicts, too, if there are
- multiple authors. Required if not every entry has an
- author element.
- :param icon: an icon for the feed.
- :param logo: a logo for the feed.
- :param rights: copyright information for the feed.
- :param rights_type: the type attribute for the rights element. One of
- ``'html'``, ``'text'`` or ``'xhtml'``. Default is
- ``'text'``.
- :param subtitle: a short description of the feed.
- :param subtitle_type: the type attribute for the subtitle element.
- One of ``'text'``, ``'html'``, ``'text'``
- or ``'xhtml'``. Default is ``'text'``.
- :param links: additional links. Must be a list of dictionaries with
- href (required) and rel, type, hreflang, title, length
- (all optional)
- :param generator: the software that generated this feed. This must be
- a tuple in the form ``(name, url, version)``. If
- you don't want to specify one of them, set the item
- to `None`.
- :param entries: a list with the entries for the feed. Entries can also
- be added later with :meth:`add`.
-
- For more information on the elements see
- http://www.atomenabled.org/developers/syndication/
-
- Everywhere where a list is demanded, any iterable can be used.
- """
-
- default_generator = ("Werkzeug", None, None)
-
- def __init__(self, title=None, entries=None, **kwargs):
- self.title = title
- self.title_type = kwargs.get("title_type", "text")
- self.url = kwargs.get("url")
- self.feed_url = kwargs.get("feed_url", self.url)
- self.id = kwargs.get("id", self.feed_url)
- self.updated = kwargs.get("updated")
- self.author = kwargs.get("author", ())
- self.icon = kwargs.get("icon")
- self.logo = kwargs.get("logo")
- self.rights = kwargs.get("rights")
- self.rights_type = kwargs.get("rights_type")
- self.subtitle = kwargs.get("subtitle")
- self.subtitle_type = kwargs.get("subtitle_type", "text")
- self.generator = kwargs.get("generator")
- if self.generator is None:
- self.generator = self.default_generator
- self.links = kwargs.get("links", [])
- self.entries = list(entries) if entries else []
-
- if not hasattr(self.author, "__iter__") or isinstance(
- self.author, string_types + (dict,)
- ):
- self.author = [self.author]
- for i, author in enumerate(self.author):
- if not isinstance(author, dict):
- self.author[i] = {"name": author}
-
- if not self.title:
- raise ValueError("title is required")
- if not self.id:
- raise ValueError("id is required")
- for author in self.author:
- if "name" not in author:
- raise TypeError("author must contain at least a name")
-
- def add(self, *args, **kwargs):
- """Add a new entry to the feed. This function can either be called
- with a :class:`FeedEntry` or some keyword and positional arguments
- that are forwarded to the :class:`FeedEntry` constructor.
- """
- if len(args) == 1 and not kwargs and isinstance(args[0], FeedEntry):
- self.entries.append(args[0])
- else:
- kwargs["feed_url"] = self.feed_url
- self.entries.append(FeedEntry(*args, **kwargs))
-
- def __repr__(self):
- return "<%s %r (%d entries)>" % (
- self.__class__.__name__,
- self.title,
- len(self.entries),
- )
-
- def generate(self):
- """Return a generator that yields pieces of XML."""
- # atom demands either an author element in every entry or a global one
- if not self.author:
- if any(not e.author for e in self.entries):
- self.author = ({"name": "Unknown author"},)
-
- if not self.updated:
- dates = sorted([entry.updated for entry in self.entries])
- self.updated = dates[-1] if dates else datetime.utcnow()
-
- yield u'\n'
- yield u'\n'
- yield " " + _make_text_block("title", self.title, self.title_type)
- yield u" %s\n" % escape(self.id)
- yield u" %s\n" % format_iso8601(self.updated)
- if self.url:
- yield u' \n' % escape(self.url)
- if self.feed_url:
- yield u' \n' % escape(self.feed_url)
- for link in self.links:
- yield u" \n" % "".join(
- '%s="%s" ' % (k, escape(link[k])) for k in link
- )
- for author in self.author:
- yield u" \n"
- yield u" %s\n" % escape(author["name"])
- if "uri" in author:
- yield u" %s\n" % escape(author["uri"])
- if "email" in author:
- yield " %s\n" % escape(author["email"])
- yield " \n"
- if self.subtitle:
- yield " " + _make_text_block("subtitle", self.subtitle, self.subtitle_type)
- if self.icon:
- yield u" %s\n" % escape(self.icon)
- if self.logo:
- yield u" %s\n" % escape(self.logo)
- if self.rights:
- yield " " + _make_text_block("rights", self.rights, self.rights_type)
- generator_name, generator_url, generator_version = self.generator
- if generator_name or generator_url or generator_version:
- tmp = [u" %s\n" % escape(generator_name))
- yield u"".join(tmp)
- for entry in self.entries:
- for line in entry.generate():
- yield u" " + line
- yield u"\n"
-
- def to_string(self):
- """Convert the feed into a string."""
- return u"".join(self.generate())
-
- def get_response(self):
- """Return a response object for the feed."""
- return BaseResponse(self.to_string(), mimetype="application/atom+xml")
-
- def __call__(self, environ, start_response):
- """Use the class as WSGI response object."""
- return self.get_response()(environ, start_response)
-
- def __str__(self):
- return self.to_string()
-
-
-@implements_to_string
-class FeedEntry(object):
-
- """Represents a single entry in a feed.
-
- :param title: the title of the entry. Required.
- :param title_type: the type attribute for the title element. One of
- ``'html'``, ``'text'`` or ``'xhtml'``.
- :param content: the content of the entry.
- :param content_type: the type attribute for the content element. One
- of ``'html'``, ``'text'`` or ``'xhtml'``.
- :param summary: a summary of the entry's content.
- :param summary_type: the type attribute for the summary element. One
- of ``'html'``, ``'text'`` or ``'xhtml'``.
- :param url: the url for the entry.
- :param id: a globally unique id for the entry. Must be an URI. If
- not present the URL is used, but one of both is required.
- :param updated: the time the entry was modified the last time. Must
- be a :class:`datetime.datetime` object. Treated as
- UTC if naive datetime. Required.
- :param author: the author of the entry. Must be either a string (the
- name) or a dict with name (required) and uri or
- email (both optional). Can be a list of (may be
- mixed, too) strings and dicts, too, if there are
- multiple authors. Required if the feed does not have an
- author element.
- :param published: the time the entry was initially published. Must
- be a :class:`datetime.datetime` object. Treated as
- UTC if naive datetime.
- :param rights: copyright information for the entry.
- :param rights_type: the type attribute for the rights element. One of
- ``'html'``, ``'text'`` or ``'xhtml'``. Default is
- ``'text'``.
- :param links: additional links. Must be a list of dictionaries with
- href (required) and rel, type, hreflang, title, length
- (all optional)
- :param categories: categories for the entry. Must be a list of dictionaries
- with term (required), scheme and label (all optional)
- :param xml_base: The xml base (url) for this feed item. If not provided
- it will default to the item url.
-
- For more information on the elements see
- http://www.atomenabled.org/developers/syndication/
-
- Everywhere where a list is demanded, any iterable can be used.
- """
-
- def __init__(self, title=None, content=None, feed_url=None, **kwargs):
- self.title = title
- self.title_type = kwargs.get("title_type", "text")
- self.content = content
- self.content_type = kwargs.get("content_type", "html")
- self.url = kwargs.get("url")
- self.id = kwargs.get("id", self.url)
- self.updated = kwargs.get("updated")
- self.summary = kwargs.get("summary")
- self.summary_type = kwargs.get("summary_type", "html")
- self.author = kwargs.get("author", ())
- self.published = kwargs.get("published")
- self.rights = kwargs.get("rights")
- self.links = kwargs.get("links", [])
- self.categories = kwargs.get("categories", [])
- self.xml_base = kwargs.get("xml_base", feed_url)
-
- if not hasattr(self.author, "__iter__") or isinstance(
- self.author, string_types + (dict,)
- ):
- self.author = [self.author]
- for i, author in enumerate(self.author):
- if not isinstance(author, dict):
- self.author[i] = {"name": author}
-
- if not self.title:
- raise ValueError("title is required")
- if not self.id:
- raise ValueError("id is required")
- if not self.updated:
- raise ValueError("updated is required")
-
- def __repr__(self):
- return "<%s %r>" % (self.__class__.__name__, self.title)
-
- def generate(self):
- """Yields pieces of ATOM XML."""
- base = ""
- if self.xml_base:
- base = ' xml:base="%s"' % escape(self.xml_base)
- yield u"\n" % base
- yield u" " + _make_text_block("title", self.title, self.title_type)
- yield u" %s\n" % escape(self.id)
- yield u" %s\n" % format_iso8601(self.updated)
- if self.published:
- yield u" %s\n" % format_iso8601(self.published)
- if self.url:
- yield u' \n' % escape(self.url)
- for author in self.author:
- yield u" \n"
- yield u" %s\n" % escape(author["name"])
- if "uri" in author:
- yield u" %s\n" % escape(author["uri"])
- if "email" in author:
- yield u" %s\n" % escape(author["email"])
- yield u" \n"
- for link in self.links:
- yield u" \n" % "".join(
- '%s="%s" ' % (k, escape(link[k])) for k in link
- )
- for category in self.categories:
- yield u" \n" % "".join(
- '%s="%s" ' % (k, escape(category[k])) for k in category
- )
- if self.summary:
- yield u" " + _make_text_block("summary", self.summary, self.summary_type)
- if self.content:
- yield u" " + _make_text_block("content", self.content, self.content_type)
- yield u"\n"
-
- def to_string(self):
- """Convert the feed item into a unicode object."""
- return u"".join(self.generate())
-
- def __str__(self):
- return self.to_string()
diff --git a/src/werkzeug/contrib/cache.py b/src/werkzeug/contrib/cache.py
deleted file mode 100644
index 79c749b5e..000000000
--- a/src/werkzeug/contrib/cache.py
+++ /dev/null
@@ -1,933 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- werkzeug.contrib.cache
- ~~~~~~~~~~~~~~~~~~~~~~
-
- The main problem with dynamic Web sites is, well, they're dynamic. Each
- time a user requests a page, the webserver executes a lot of code, queries
- the database, renders templates until the visitor gets the page he sees.
-
- This is a lot more expensive than just loading a file from the file system
- and sending it to the visitor.
-
- For most Web applications, this overhead isn't a big deal but once it
- becomes, you will be glad to have a cache system in place.
-
- How Caching Works
- =================
-
- Caching is pretty simple. Basically you have a cache object lurking around
- somewhere that is connected to a remote cache or the file system or
- something else. When the request comes in you check if the current page
- is already in the cache and if so, you're returning it from the cache.
- Otherwise you generate the page and put it into the cache. (Or a fragment
- of the page, you don't have to cache the full thing)
-
- Here is a simple example of how to cache a sidebar for 5 minutes::
-
- def get_sidebar(user):
- identifier = 'sidebar_for/user%d' % user.id
- value = cache.get(identifier)
- if value is not None:
- return value
- value = generate_sidebar_for(user=user)
- cache.set(identifier, value, timeout=60 * 5)
- return value
-
- Creating a Cache Object
- =======================
-
- To create a cache object you just import the cache system of your choice
- from the cache module and instantiate it. Then you can start working
- with that object:
-
- >>> from werkzeug.contrib.cache import SimpleCache
- >>> c = SimpleCache()
- >>> c.set("foo", "value")
- >>> c.get("foo")
- 'value'
- >>> c.get("missing") is None
- True
-
- Please keep in mind that you have to create the cache and put it somewhere
- you have access to it (either as a module global you can import or you just
- put it into your WSGI application).
-
- :copyright: 2007 Pallets
- :license: BSD-3-Clause
-"""
-import errno
-import os
-import platform
-import re
-import tempfile
-import warnings
-from hashlib import md5
-from time import time
-
-from .._compat import integer_types
-from .._compat import iteritems
-from .._compat import string_types
-from .._compat import text_type
-from .._compat import to_native
-from ..posixemulation import rename
-
-try:
- import cPickle as pickle
-except ImportError: # pragma: no cover
- import pickle
-
-warnings.warn(
- "'werkzeug.contrib.cache' is deprecated as of version 0.15 and will"
- " be removed in version 1.0. It has moved to https://github.com"
- "/pallets/cachelib.",
- DeprecationWarning,
- stacklevel=2,
-)
-
-
-def _items(mappingorseq):
- """Wrapper for efficient iteration over mappings represented by dicts
- or sequences::
-
- >>> for k, v in _items((i, i*i) for i in xrange(5)):
- ... assert k*k == v
-
- >>> for k, v in _items(dict((i, i*i) for i in xrange(5))):
- ... assert k*k == v
-
- """
- if hasattr(mappingorseq, "items"):
- return iteritems(mappingorseq)
- return mappingorseq
-
-
-class BaseCache(object):
- """Baseclass for the cache systems. All the cache systems implement this
- API or a superset of it.
-
- :param default_timeout: the default timeout (in seconds) that is used if
- no timeout is specified on :meth:`set`. A timeout
- of 0 indicates that the cache never expires.
- """
-
- def __init__(self, default_timeout=300):
- self.default_timeout = default_timeout
-
- def _normalize_timeout(self, timeout):
- if timeout is None:
- timeout = self.default_timeout
- return timeout
-
- def get(self, key):
- """Look up key in the cache and return the value for it.
-
- :param key: the key to be looked up.
- :returns: The value if it exists and is readable, else ``None``.
- """
- return None
-
- def delete(self, key):
- """Delete `key` from the cache.
-
- :param key: the key to delete.
- :returns: Whether the key existed and has been deleted.
- :rtype: boolean
- """
- return True
-
- def get_many(self, *keys):
- """Returns a list of values for the given keys.
- For each key an item in the list is created::
-
- foo, bar = cache.get_many("foo", "bar")
-
- Has the same error handling as :meth:`get`.
-
- :param keys: The function accepts multiple keys as positional
- arguments.
- """
- return [self.get(k) for k in keys]
-
- def get_dict(self, *keys):
- """Like :meth:`get_many` but return a dict::
-
- d = cache.get_dict("foo", "bar")
- foo = d["foo"]
- bar = d["bar"]
-
- :param keys: The function accepts multiple keys as positional
- arguments.
- """
- return dict(zip(keys, self.get_many(*keys)))
-
- def set(self, key, value, timeout=None):
- """Add a new key/value to the cache (overwrites value, if key already
- exists in the cache).
-
- :param key: the key to set
- :param value: the value for the key
- :param timeout: the cache timeout for the key in seconds (if not
- specified, it uses the default timeout). A timeout of
- 0 idicates that the cache never expires.
- :returns: ``True`` if key has been updated, ``False`` for backend
- errors. Pickling errors, however, will raise a subclass of
- ``pickle.PickleError``.
- :rtype: boolean
- """
- return True
-
- def add(self, key, value, timeout=None):
- """Works like :meth:`set` but does not overwrite the values of already
- existing keys.
-
- :param key: the key to set
- :param value: the value for the key
- :param timeout: the cache timeout for the key in seconds (if not
- specified, it uses the default timeout). A timeout of
- 0 idicates that the cache never expires.
- :returns: Same as :meth:`set`, but also ``False`` for already
- existing keys.
- :rtype: boolean
- """
- return True
-
- def set_many(self, mapping, timeout=None):
- """Sets multiple keys and values from a mapping.
-
- :param mapping: a mapping with the keys/values to set.
- :param timeout: the cache timeout for the key in seconds (if not
- specified, it uses the default timeout). A timeout of
- 0 idicates that the cache never expires.
- :returns: Whether all given keys have been set.
- :rtype: boolean
- """
- rv = True
- for key, value in _items(mapping):
- if not self.set(key, value, timeout):
- rv = False
- return rv
-
- def delete_many(self, *keys):
- """Deletes multiple keys at once.
-
- :param keys: The function accepts multiple keys as positional
- arguments.
- :returns: Whether all given keys have been deleted.
- :rtype: boolean
- """
- return all(self.delete(key) for key in keys)
-
- def has(self, key):
- """Checks if a key exists in the cache without returning it. This is a
- cheap operation that bypasses loading the actual data on the backend.
-
- This method is optional and may not be implemented on all caches.
-
- :param key: the key to check
- """
- raise NotImplementedError(
- "%s doesn't have an efficient implementation of `has`. That "
- "means it is impossible to check whether a key exists without "
- "fully loading the key's data. Consider using `self.get` "
- "explicitly if you don't care about performance."
- )
-
- def clear(self):
- """Clears the cache. Keep in mind that not all caches support
- completely clearing the cache.
-
- :returns: Whether the cache has been cleared.
- :rtype: boolean
- """
- return True
-
- def inc(self, key, delta=1):
- """Increments the value of a key by `delta`. If the key does
- not yet exist it is initialized with `delta`.
-
- For supporting caches this is an atomic operation.
-
- :param key: the key to increment.
- :param delta: the delta to add.
- :returns: The new value or ``None`` for backend errors.
- """
- value = (self.get(key) or 0) + delta
- return value if self.set(key, value) else None
-
- def dec(self, key, delta=1):
- """Decrements the value of a key by `delta`. If the key does
- not yet exist it is initialized with `-delta`.
-
- For supporting caches this is an atomic operation.
-
- :param key: the key to increment.
- :param delta: the delta to subtract.
- :returns: The new value or `None` for backend errors.
- """
- value = (self.get(key) or 0) - delta
- return value if self.set(key, value) else None
-
-
-class NullCache(BaseCache):
- """A cache that doesn't cache. This can be useful for unit testing.
-
- :param default_timeout: a dummy parameter that is ignored but exists
- for API compatibility with other caches.
- """
-
- def has(self, key):
- return False
-
-
-class SimpleCache(BaseCache):
- """Simple memory cache for single process environments. This class exists
- mainly for the development server and is not 100% thread safe. It tries
- to use as many atomic operations as possible and no locks for simplicity
- but it could happen under heavy load that keys are added multiple times.
-
- :param threshold: the maximum number of items the cache stores before
- it starts deleting some.
- :param default_timeout: the default timeout that is used if no timeout is
- specified on :meth:`~BaseCache.set`. A timeout of
- 0 indicates that the cache never expires.
- """
-
- def __init__(self, threshold=500, default_timeout=300):
- BaseCache.__init__(self, default_timeout)
- self._cache = {}
- self.clear = self._cache.clear
- self._threshold = threshold
-
- def _prune(self):
- if len(self._cache) > self._threshold:
- now = time()
- toremove = []
- for idx, (key, (expires, _)) in enumerate(self._cache.items()):
- if (expires != 0 and expires <= now) or idx % 3 == 0:
- toremove.append(key)
- for key in toremove:
- self._cache.pop(key, None)
-
- def _normalize_timeout(self, timeout):
- timeout = BaseCache._normalize_timeout(self, timeout)
- if timeout > 0:
- timeout = time() + timeout
- return timeout
-
- def get(self, key):
- try:
- expires, value = self._cache[key]
- if expires == 0 or expires > time():
- return pickle.loads(value)
- except (KeyError, pickle.PickleError):
- return None
-
- def set(self, key, value, timeout=None):
- expires = self._normalize_timeout(timeout)
- self._prune()
- self._cache[key] = (expires, pickle.dumps(value, pickle.HIGHEST_PROTOCOL))
- return True
-
- def add(self, key, value, timeout=None):
- expires = self._normalize_timeout(timeout)
- self._prune()
- item = (expires, pickle.dumps(value, pickle.HIGHEST_PROTOCOL))
- if key in self._cache:
- return False
- self._cache.setdefault(key, item)
- return True
-
- def delete(self, key):
- return self._cache.pop(key, None) is not None
-
- def has(self, key):
- try:
- expires, value = self._cache[key]
- return expires == 0 or expires > time()
- except KeyError:
- return False
-
-
-_test_memcached_key = re.compile(r"[^\x00-\x21\xff]{1,250}$").match
-
-
-class MemcachedCache(BaseCache):
- """A cache that uses memcached as backend.
-
- The first argument can either be an object that resembles the API of a
- :class:`memcache.Client` or a tuple/list of server addresses. In the
- event that a tuple/list is passed, Werkzeug tries to import the best
- available memcache library.
-
- This cache looks into the following packages/modules to find bindings for
- memcached:
-
- - ``pylibmc``
- - ``google.appengine.api.memcached``
- - ``memcached``
- - ``libmc``
-
- Implementation notes: This cache backend works around some limitations in
- memcached to simplify the interface. For example unicode keys are encoded
- to utf-8 on the fly. Methods such as :meth:`~BaseCache.get_dict` return
- the keys in the same format as passed. Furthermore all get methods
- silently ignore key errors to not cause problems when untrusted user data
- is passed to the get methods which is often the case in web applications.
-
- :param servers: a list or tuple of server addresses or alternatively
- a :class:`memcache.Client` or a compatible client.
- :param default_timeout: the default timeout that is used if no timeout is
- specified on :meth:`~BaseCache.set`. A timeout of
- 0 indicates that the cache never expires.
- :param key_prefix: a prefix that is added before all keys. This makes it
- possible to use the same memcached server for different
- applications. Keep in mind that
- :meth:`~BaseCache.clear` will also clear keys with a
- different prefix.
- """
-
- def __init__(self, servers=None, default_timeout=300, key_prefix=None):
- BaseCache.__init__(self, default_timeout)
- if servers is None or isinstance(servers, (list, tuple)):
- if servers is None:
- servers = ["127.0.0.1:11211"]
- self._client = self.import_preferred_memcache_lib(servers)
- if self._client is None:
- raise RuntimeError("no memcache module found")
- else:
- # NOTE: servers is actually an already initialized memcache
- # client.
- self._client = servers
-
- self.key_prefix = to_native(key_prefix)
-
- def _normalize_key(self, key):
- key = to_native(key, "utf-8")
- if self.key_prefix:
- key = self.key_prefix + key
- return key
-
- def _normalize_timeout(self, timeout):
- timeout = BaseCache._normalize_timeout(self, timeout)
- if timeout > 0:
- timeout = int(time()) + timeout
- return timeout
-
- def get(self, key):
- key = self._normalize_key(key)
- # memcached doesn't support keys longer than that. Because often
- # checks for so long keys can occur because it's tested from user
- # submitted data etc we fail silently for getting.
- if _test_memcached_key(key):
- return self._client.get(key)
-
- def get_dict(self, *keys):
- key_mapping = {}
- have_encoded_keys = False
- for key in keys:
- encoded_key = self._normalize_key(key)
- if not isinstance(key, str):
- have_encoded_keys = True
- if _test_memcached_key(key):
- key_mapping[encoded_key] = key
- _keys = list(key_mapping)
- d = rv = self._client.get_multi(_keys)
- if have_encoded_keys or self.key_prefix:
- rv = {}
- for key, value in iteritems(d):
- rv[key_mapping[key]] = value
- if len(rv) < len(keys):
- for key in keys:
- if key not in rv:
- rv[key] = None
- return rv
-
- def add(self, key, value, timeout=None):
- key = self._normalize_key(key)
- timeout = self._normalize_timeout(timeout)
- return self._client.add(key, value, timeout)
-
- def set(self, key, value, timeout=None):
- key = self._normalize_key(key)
- timeout = self._normalize_timeout(timeout)
- return self._client.set(key, value, timeout)
-
- def get_many(self, *keys):
- d = self.get_dict(*keys)
- return [d[key] for key in keys]
-
- def set_many(self, mapping, timeout=None):
- new_mapping = {}
- for key, value in _items(mapping):
- key = self._normalize_key(key)
- new_mapping[key] = value
-
- timeout = self._normalize_timeout(timeout)
- failed_keys = self._client.set_multi(new_mapping, timeout)
- return not failed_keys
-
- def delete(self, key):
- key = self._normalize_key(key)
- if _test_memcached_key(key):
- return self._client.delete(key)
-
- def delete_many(self, *keys):
- new_keys = []
- for key in keys:
- key = self._normalize_key(key)
- if _test_memcached_key(key):
- new_keys.append(key)
- return self._client.delete_multi(new_keys)
-
- def has(self, key):
- key = self._normalize_key(key)
- if _test_memcached_key(key):
- return self._client.append(key, "")
- return False
-
- def clear(self):
- return self._client.flush_all()
-
- def inc(self, key, delta=1):
- key = self._normalize_key(key)
- return self._client.incr(key, delta)
-
- def dec(self, key, delta=1):
- key = self._normalize_key(key)
- return self._client.decr(key, delta)
-
- def import_preferred_memcache_lib(self, servers):
- """Returns an initialized memcache client. Used by the constructor."""
- try:
- import pylibmc
- except ImportError:
- pass
- else:
- return pylibmc.Client(servers)
-
- try:
- from google.appengine.api import memcache
- except ImportError:
- pass
- else:
- return memcache.Client()
-
- try:
- import memcache
- except ImportError:
- pass
- else:
- return memcache.Client(servers)
-
- try:
- import libmc
- except ImportError:
- pass
- else:
- return libmc.Client(servers)
-
-
-# backwards compatibility
-GAEMemcachedCache = MemcachedCache
-
-
-class RedisCache(BaseCache):
- """Uses the Redis key-value store as a cache backend.
-
- The first argument can be either a string denoting address of the Redis
- server or an object resembling an instance of a redis.Redis class.
-
- Note: Python Redis API already takes care of encoding unicode strings on
- the fly.
-
- .. versionadded:: 0.7
-
- .. versionadded:: 0.8
- `key_prefix` was added.
-
- .. versionchanged:: 0.8
- This cache backend now properly serializes objects.
-
- .. versionchanged:: 0.8.3
- This cache backend now supports password authentication.
-
- .. versionchanged:: 0.10
- ``**kwargs`` is now passed to the redis object.
-
- :param host: address of the Redis server or an object which API is
- compatible with the official Python Redis client (redis-py).
- :param port: port number on which Redis server listens for connections.
- :param password: password authentication for the Redis server.
- :param db: db (zero-based numeric index) on Redis Server to connect.
- :param default_timeout: the default timeout that is used if no timeout is
- specified on :meth:`~BaseCache.set`. A timeout of
- 0 indicates that the cache never expires.
- :param key_prefix: A prefix that should be added to all keys.
-
- Any additional keyword arguments will be passed to ``redis.Redis``.
- """
-
- def __init__(
- self,
- host="localhost",
- port=6379,
- password=None,
- db=0,
- default_timeout=300,
- key_prefix=None,
- **kwargs
- ):
- BaseCache.__init__(self, default_timeout)
- if host is None:
- raise ValueError("RedisCache host parameter may not be None")
- if isinstance(host, string_types):
- try:
- import redis
- except ImportError:
- raise RuntimeError("no redis module found")
- if kwargs.get("decode_responses", None):
- raise ValueError("decode_responses is not supported by RedisCache.")
- self._client = redis.Redis(
- host=host, port=port, password=password, db=db, **kwargs
- )
- else:
- self._client = host
- self.key_prefix = key_prefix or ""
-
- def _normalize_timeout(self, timeout):
- timeout = BaseCache._normalize_timeout(self, timeout)
- if timeout == 0:
- timeout = -1
- return timeout
-
- def dump_object(self, value):
- """Dumps an object into a string for redis. By default it serializes
- integers as regular string and pickle dumps everything else.
- """
- t = type(value)
- if t in integer_types:
- return str(value).encode("ascii")
- return b"!" + pickle.dumps(value)
-
- def load_object(self, value):
- """The reversal of :meth:`dump_object`. This might be called with
- None.
- """
- if value is None:
- return None
- if value.startswith(b"!"):
- try:
- return pickle.loads(value[1:])
- except pickle.PickleError:
- return None
- try:
- return int(value)
- except ValueError:
- # before 0.8 we did not have serialization. Still support that.
- return value
-
- def get(self, key):
- return self.load_object(self._client.get(self.key_prefix + key))
-
- def get_many(self, *keys):
- if self.key_prefix:
- keys = [self.key_prefix + key for key in keys]
- return [self.load_object(x) for x in self._client.mget(keys)]
-
- def set(self, key, value, timeout=None):
- timeout = self._normalize_timeout(timeout)
- dump = self.dump_object(value)
- if timeout == -1:
- result = self._client.set(name=self.key_prefix + key, value=dump)
- else:
- result = self._client.setex(
- name=self.key_prefix + key, value=dump, time=timeout
- )
- return result
-
- def add(self, key, value, timeout=None):
- timeout = self._normalize_timeout(timeout)
- dump = self.dump_object(value)
- return self._client.setnx(
- name=self.key_prefix + key, value=dump
- ) and self._client.expire(name=self.key_prefix + key, time=timeout)
-
- def set_many(self, mapping, timeout=None):
- timeout = self._normalize_timeout(timeout)
- # Use transaction=False to batch without calling redis MULTI
- # which is not supported by twemproxy
- pipe = self._client.pipeline(transaction=False)
-
- for key, value in _items(mapping):
- dump = self.dump_object(value)
- if timeout == -1:
- pipe.set(name=self.key_prefix + key, value=dump)
- else:
- pipe.setex(name=self.key_prefix + key, value=dump, time=timeout)
- return pipe.execute()
-
- def delete(self, key):
- return self._client.delete(self.key_prefix + key)
-
- def delete_many(self, *keys):
- if not keys:
- return
- if self.key_prefix:
- keys = [self.key_prefix + key for key in keys]
- return self._client.delete(*keys)
-
- def has(self, key):
- return self._client.exists(self.key_prefix + key)
-
- def clear(self):
- status = False
- if self.key_prefix:
- keys = self._client.keys(self.key_prefix + "*")
- if keys:
- status = self._client.delete(*keys)
- else:
- status = self._client.flushdb()
- return status
-
- def inc(self, key, delta=1):
- return self._client.incr(name=self.key_prefix + key, amount=delta)
-
- def dec(self, key, delta=1):
- return self._client.decr(name=self.key_prefix + key, amount=delta)
-
-
-class FileSystemCache(BaseCache):
- """A cache that stores the items on the file system. This cache depends
- on being the only user of the `cache_dir`. Make absolutely sure that
- nobody but this cache stores files there or otherwise the cache will
- randomly delete files therein.
-
- :param cache_dir: the directory where cache files are stored.
- :param threshold: the maximum number of items the cache stores before
- it starts deleting some. A threshold value of 0
- indicates no threshold.
- :param default_timeout: the default timeout that is used if no timeout is
- specified on :meth:`~BaseCache.set`. A timeout of
- 0 indicates that the cache never expires.
- :param mode: the file mode wanted for the cache files, default 0600
- """
-
- #: used for temporary files by the FileSystemCache
- _fs_transaction_suffix = ".__wz_cache"
- #: keep amount of files in a cache element
- _fs_count_file = "__wz_cache_count"
-
- def __init__(self, cache_dir, threshold=500, default_timeout=300, mode=0o600):
- BaseCache.__init__(self, default_timeout)
- self._path = cache_dir
- self._threshold = threshold
- self._mode = mode
-
- try:
- os.makedirs(self._path)
- except OSError as ex:
- if ex.errno != errno.EEXIST:
- raise
-
- self._update_count(value=len(self._list_dir()))
-
- @property
- def _file_count(self):
- return self.get(self._fs_count_file) or 0
-
- def _update_count(self, delta=None, value=None):
- # If we have no threshold, don't count files
- if self._threshold == 0:
- return
-
- if delta:
- new_count = self._file_count + delta
- else:
- new_count = value or 0
- self.set(self._fs_count_file, new_count, mgmt_element=True)
-
- def _normalize_timeout(self, timeout):
- timeout = BaseCache._normalize_timeout(self, timeout)
- if timeout != 0:
- timeout = time() + timeout
- return int(timeout)
-
- def _list_dir(self):
- """return a list of (fully qualified) cache filenames
- """
- mgmt_files = [
- self._get_filename(name).split("/")[-1] for name in (self._fs_count_file,)
- ]
- return [
- os.path.join(self._path, fn)
- for fn in os.listdir(self._path)
- if not fn.endswith(self._fs_transaction_suffix) and fn not in mgmt_files
- ]
-
- def _prune(self):
- if self._threshold == 0 or not self._file_count > self._threshold:
- return
-
- entries = self._list_dir()
- now = time()
- for idx, fname in enumerate(entries):
- try:
- remove = False
- with open(fname, "rb") as f:
- expires = pickle.load(f)
- remove = (expires != 0 and expires <= now) or idx % 3 == 0
-
- if remove:
- os.remove(fname)
- except (IOError, OSError):
- pass
- self._update_count(value=len(self._list_dir()))
-
- def clear(self):
- for fname in self._list_dir():
- try:
- os.remove(fname)
- except (IOError, OSError):
- self._update_count(value=len(self._list_dir()))
- return False
- self._update_count(value=0)
- return True
-
- def _get_filename(self, key):
- if isinstance(key, text_type):
- key = key.encode("utf-8") # XXX unicode review
- hash = md5(key).hexdigest()
- return os.path.join(self._path, hash)
-
- def get(self, key):
- filename = self._get_filename(key)
- try:
- with open(filename, "rb") as f:
- pickle_time = pickle.load(f)
- if pickle_time == 0 or pickle_time >= time():
- return pickle.load(f)
- else:
- os.remove(filename)
- return None
- except (IOError, OSError, pickle.PickleError):
- return None
-
- def add(self, key, value, timeout=None):
- filename = self._get_filename(key)
- if not os.path.exists(filename):
- return self.set(key, value, timeout)
- return False
-
- def set(self, key, value, timeout=None, mgmt_element=False):
- # Management elements have no timeout
- if mgmt_element:
- timeout = 0
-
- # Don't prune on management element update, to avoid loop
- else:
- self._prune()
-
- timeout = self._normalize_timeout(timeout)
- filename = self._get_filename(key)
- try:
- fd, tmp = tempfile.mkstemp(
- suffix=self._fs_transaction_suffix, dir=self._path
- )
- with os.fdopen(fd, "wb") as f:
- pickle.dump(timeout, f, 1)
- pickle.dump(value, f, pickle.HIGHEST_PROTOCOL)
- rename(tmp, filename)
- os.chmod(filename, self._mode)
- except (IOError, OSError):
- return False
- else:
- # Management elements should not count towards threshold
- if not mgmt_element:
- self._update_count(delta=1)
- return True
-
- def delete(self, key, mgmt_element=False):
- try:
- os.remove(self._get_filename(key))
- except (IOError, OSError):
- return False
- else:
- # Management elements should not count towards threshold
- if not mgmt_element:
- self._update_count(delta=-1)
- return True
-
- def has(self, key):
- filename = self._get_filename(key)
- try:
- with open(filename, "rb") as f:
- pickle_time = pickle.load(f)
- if pickle_time == 0 or pickle_time >= time():
- return True
- else:
- os.remove(filename)
- return False
- except (IOError, OSError, pickle.PickleError):
- return False
-
-
-class UWSGICache(BaseCache):
- """Implements the cache using uWSGI's caching framework.
-
- .. note::
- This class cannot be used when running under PyPy, because the uWSGI
- API implementation for PyPy is lacking the needed functionality.
-
- :param default_timeout: The default timeout in seconds.
- :param cache: The name of the caching instance to connect to, for
- example: mycache@localhost:3031, defaults to an empty string, which
- means uWSGI will cache in the local instance. If the cache is in the
- same instance as the werkzeug app, you only have to provide the name of
- the cache.
- """
-
- def __init__(self, default_timeout=300, cache=""):
- BaseCache.__init__(self, default_timeout)
-
- if platform.python_implementation() == "PyPy":
- raise RuntimeError(
- "uWSGI caching does not work under PyPy, see "
- "the docs for more details."
- )
-
- try:
- import uwsgi
-
- self._uwsgi = uwsgi
- except ImportError:
- raise RuntimeError(
- "uWSGI could not be imported, are you running under uWSGI?"
- )
-
- self.cache = cache
-
- def get(self, key):
- rv = self._uwsgi.cache_get(key, self.cache)
- if rv is None:
- return
- return pickle.loads(rv)
-
- def delete(self, key):
- return self._uwsgi.cache_del(key, self.cache)
-
- def set(self, key, value, timeout=None):
- return self._uwsgi.cache_update(
- key, pickle.dumps(value), self._normalize_timeout(timeout), self.cache
- )
-
- def add(self, key, value, timeout=None):
- return self._uwsgi.cache_set(
- key, pickle.dumps(value), self._normalize_timeout(timeout), self.cache
- )
-
- def clear(self):
- return self._uwsgi.cache_clear(self.cache)
-
- def has(self, key):
- return self._uwsgi.cache_exists(key, self.cache) is not None
diff --git a/src/werkzeug/contrib/fixers.py b/src/werkzeug/contrib/fixers.py
deleted file mode 100644
index 8df0afdab..000000000
--- a/src/werkzeug/contrib/fixers.py
+++ /dev/null
@@ -1,262 +0,0 @@
-"""
-Fixers
-======
-
-.. warning::
- .. deprecated:: 0.15
- ``ProxyFix`` has moved to :mod:`werkzeug.middleware.proxy_fix`.
- All other code in this module is deprecated and will be removed
- in version 1.0.
-
-.. versionadded:: 0.5
-
-This module includes various helpers that fix web server behavior.
-
-.. autoclass:: ProxyFix
- :members:
-
-.. autoclass:: CGIRootFix
-
-.. autoclass:: PathInfoFromRequestUriFix
-
-.. autoclass:: HeaderRewriterFix
-
-.. autoclass:: InternetExplorerFix
-
-:copyright: 2007 Pallets
-:license: BSD-3-Clause
-"""
-import warnings
-
-from ..datastructures import Headers
-from ..datastructures import ResponseCacheControl
-from ..http import parse_cache_control_header
-from ..http import parse_options_header
-from ..http import parse_set_header
-from ..middleware.proxy_fix import ProxyFix as _ProxyFix
-from ..useragents import UserAgent
-
-try:
- from urllib.parse import unquote
-except ImportError:
- from urllib import unquote
-
-
-class CGIRootFix(object):
- """Wrap the application in this middleware if you are using FastCGI
- or CGI and you have problems with your app root being set to the CGI
- script's path instead of the path users are going to visit.
-
- :param app: the WSGI application
- :param app_root: Defaulting to ``'/'``, you can set this to
- something else if your app is mounted somewhere else.
-
- .. deprecated:: 0.15
- This middleware will be removed in version 1.0.
-
- .. versionchanged:: 0.9
- Added `app_root` parameter and renamed from
- ``LighttpdCGIRootFix``.
- """
-
- def __init__(self, app, app_root="/"):
- warnings.warn(
- "'CGIRootFix' is deprecated as of version 0.15 and will be"
- " removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- self.app = app
- self.app_root = app_root.strip("/")
-
- def __call__(self, environ, start_response):
- environ["SCRIPT_NAME"] = self.app_root
- return self.app(environ, start_response)
-
-
-class LighttpdCGIRootFix(CGIRootFix):
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'LighttpdCGIRootFix' is renamed 'CGIRootFix'. Both will be"
- " removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(LighttpdCGIRootFix, self).__init__(*args, **kwargs)
-
-
-class PathInfoFromRequestUriFix(object):
- """On windows environment variables are limited to the system charset
- which makes it impossible to store the `PATH_INFO` variable in the
- environment without loss of information on some systems.
-
- This is for example a problem for CGI scripts on a Windows Apache.
-
- This fixer works by recreating the `PATH_INFO` from `REQUEST_URI`,
- `REQUEST_URL`, or `UNENCODED_URL` (whatever is available). Thus the
- fix can only be applied if the webserver supports either of these
- variables.
-
- :param app: the WSGI application
-
- .. deprecated:: 0.15
- This middleware will be removed in version 1.0.
- """
-
- def __init__(self, app):
- warnings.warn(
- "'PathInfoFromRequestUriFix' is deprecated as of version"
- " 0.15 and will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- self.app = app
-
- def __call__(self, environ, start_response):
- for key in "REQUEST_URL", "REQUEST_URI", "UNENCODED_URL":
- if key not in environ:
- continue
- request_uri = unquote(environ[key])
- script_name = unquote(environ.get("SCRIPT_NAME", ""))
- if request_uri.startswith(script_name):
- environ["PATH_INFO"] = request_uri[len(script_name) :].split("?", 1)[0]
- break
- return self.app(environ, start_response)
-
-
-class ProxyFix(_ProxyFix):
- """
- .. deprecated:: 0.15
- ``werkzeug.contrib.fixers.ProxyFix`` has moved to
- :mod:`werkzeug.middleware.proxy_fix`. This import will be
- removed in 1.0.
- """
-
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.contrib.fixers.ProxyFix' has moved to 'werkzeug"
- ".middleware.proxy_fix.ProxyFix'. This import is deprecated"
- " as of version 0.15 and will be removed in 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(ProxyFix, self).__init__(*args, **kwargs)
-
-
-class HeaderRewriterFix(object):
- """This middleware can remove response headers and add others. This
- is for example useful to remove the `Date` header from responses if you
- are using a server that adds that header, no matter if it's present or
- not or to add `X-Powered-By` headers::
-
- app = HeaderRewriterFix(app, remove_headers=['Date'],
- add_headers=[('X-Powered-By', 'WSGI')])
-
- :param app: the WSGI application
- :param remove_headers: a sequence of header keys that should be
- removed.
- :param add_headers: a sequence of ``(key, value)`` tuples that should
- be added.
-
- .. deprecated:: 0.15
- This middleware will be removed in 1.0.
- """
-
- def __init__(self, app, remove_headers=None, add_headers=None):
- warnings.warn(
- "'HeaderRewriterFix' is deprecated as of version 0.15 and"
- " will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- self.app = app
- self.remove_headers = set(x.lower() for x in (remove_headers or ()))
- self.add_headers = list(add_headers or ())
-
- def __call__(self, environ, start_response):
- def rewriting_start_response(status, headers, exc_info=None):
- new_headers = []
- for key, value in headers:
- if key.lower() not in self.remove_headers:
- new_headers.append((key, value))
- new_headers += self.add_headers
- return start_response(status, new_headers, exc_info)
-
- return self.app(environ, rewriting_start_response)
-
-
-class InternetExplorerFix(object):
- """This middleware fixes a couple of bugs with Microsoft Internet
- Explorer. Currently the following fixes are applied:
-
- - removing of `Vary` headers for unsupported mimetypes which
- causes troubles with caching. Can be disabled by passing
- ``fix_vary=False`` to the constructor.
- see: https://support.microsoft.com/en-us/help/824847
-
- - removes offending headers to work around caching bugs in
- Internet Explorer if `Content-Disposition` is set. Can be
- disabled by passing ``fix_attach=False`` to the constructor.
-
- If it does not detect affected Internet Explorer versions it won't touch
- the request / response.
-
- .. deprecated:: 0.15
- This middleware will be removed in 1.0.
- """
-
- # This code was inspired by Django fixers for the same bugs. The
- # fix_vary and fix_attach fixers were originally implemented in Django
- # by Michael Axiak and is available as part of the Django project:
- # https://code.djangoproject.com/ticket/4148
-
- def __init__(self, app, fix_vary=True, fix_attach=True):
- warnings.warn(
- "'InternetExplorerFix' is deprecated as of version 0.15 and"
- " will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- self.app = app
- self.fix_vary = fix_vary
- self.fix_attach = fix_attach
-
- def fix_headers(self, environ, headers, status=None):
- if self.fix_vary:
- header = headers.get("content-type", "")
- mimetype, options = parse_options_header(header)
- if mimetype not in ("text/html", "text/plain", "text/sgml"):
- headers.pop("vary", None)
-
- if self.fix_attach and "content-disposition" in headers:
- pragma = parse_set_header(headers.get("pragma", ""))
- pragma.discard("no-cache")
- header = pragma.to_header()
- if not header:
- headers.pop("pragma", "")
- else:
- headers["Pragma"] = header
- header = headers.get("cache-control", "")
- if header:
- cc = parse_cache_control_header(header, cls=ResponseCacheControl)
- cc.no_cache = None
- cc.no_store = False
- header = cc.to_header()
- if not header:
- headers.pop("cache-control", "")
- else:
- headers["Cache-Control"] = header
-
- def run_fixed(self, environ, start_response):
- def fixing_start_response(status, headers, exc_info=None):
- headers = Headers(headers)
- self.fix_headers(environ, headers, status)
- return start_response(status, headers.to_wsgi_list(), exc_info)
-
- return self.app(environ, fixing_start_response)
-
- def __call__(self, environ, start_response):
- ua = UserAgent(environ)
- if ua.browser != "msie":
- return self.app(environ, start_response)
- return self.run_fixed(environ, start_response)
diff --git a/src/werkzeug/contrib/iterio.py b/src/werkzeug/contrib/iterio.py
deleted file mode 100644
index b67245409..000000000
--- a/src/werkzeug/contrib/iterio.py
+++ /dev/null
@@ -1,358 +0,0 @@
-# -*- coding: utf-8 -*-
-r"""
- werkzeug.contrib.iterio
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- This module implements a :class:`IterIO` that converts an iterator into
- a stream object and the other way round. Converting streams into
- iterators requires the `greenlet`_ module.
-
- To convert an iterator into a stream all you have to do is to pass it
- directly to the :class:`IterIO` constructor. In this example we pass it
- a newly created generator::
-
- def foo():
- yield "something\n"
- yield "otherthings"
- stream = IterIO(foo())
- print stream.read() # read the whole iterator
-
- The other way round works a bit different because we have to ensure that
- the code execution doesn't take place yet. An :class:`IterIO` call with a
- callable as first argument does two things. The function itself is passed
- an :class:`IterIO` stream it can feed. The object returned by the
- :class:`IterIO` constructor on the other hand is not an stream object but
- an iterator::
-
- def foo(stream):
- stream.write("some")
- stream.write("thing")
- stream.flush()
- stream.write("otherthing")
- iterator = IterIO(foo)
- print iterator.next() # prints something
- print iterator.next() # prints otherthing
- iterator.next() # raises StopIteration
-
- .. _greenlet: https://github.com/python-greenlet/greenlet
-
- :copyright: 2007 Pallets
- :license: BSD-3-Clause
-"""
-import warnings
-
-from .._compat import implements_iterator
-
-try:
- import greenlet
-except ImportError:
- greenlet = None
-
-warnings.warn(
- "'werkzeug.contrib.iterio' is deprecated as of version 0.15 and"
- " will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
-)
-
-
-def _mixed_join(iterable, sentinel):
- """concatenate any string type in an intelligent way."""
- iterator = iter(iterable)
- first_item = next(iterator, sentinel)
- if isinstance(first_item, bytes):
- return first_item + b"".join(iterator)
- return first_item + u"".join(iterator)
-
-
-def _newline(reference_string):
- if isinstance(reference_string, bytes):
- return b"\n"
- return u"\n"
-
-
-@implements_iterator
-class IterIO(object):
- """Instances of this object implement an interface compatible with the
- standard Python :class:`file` object. Streams are either read-only or
- write-only depending on how the object is created.
-
- If the first argument is an iterable a file like object is returned that
- returns the contents of the iterable. In case the iterable is empty
- read operations will return the sentinel value.
-
- If the first argument is a callable then the stream object will be
- created and passed to that function. The caller itself however will
- not receive a stream but an iterable. The function will be executed
- step by step as something iterates over the returned iterable. Each
- call to :meth:`flush` will create an item for the iterable. If
- :meth:`flush` is called without any writes in-between the sentinel
- value will be yielded.
-
- Note for Python 3: due to the incompatible interface of bytes and
- streams you should set the sentinel value explicitly to an empty
- bytestring (``b''``) if you are expecting to deal with bytes as
- otherwise the end of the stream is marked with the wrong sentinel
- value.
-
- .. versionadded:: 0.9
- `sentinel` parameter was added.
- """
-
- def __new__(cls, obj, sentinel=""):
- try:
- iterator = iter(obj)
- except TypeError:
- return IterI(obj, sentinel)
- return IterO(iterator, sentinel)
-
- def __iter__(self):
- return self
-
- def tell(self):
- if self.closed:
- raise ValueError("I/O operation on closed file")
- return self.pos
-
- def isatty(self):
- if self.closed:
- raise ValueError("I/O operation on closed file")
- return False
-
- def seek(self, pos, mode=0):
- if self.closed:
- raise ValueError("I/O operation on closed file")
- raise IOError(9, "Bad file descriptor")
-
- def truncate(self, size=None):
- if self.closed:
- raise ValueError("I/O operation on closed file")
- raise IOError(9, "Bad file descriptor")
-
- def write(self, s):
- if self.closed:
- raise ValueError("I/O operation on closed file")
- raise IOError(9, "Bad file descriptor")
-
- def writelines(self, list):
- if self.closed:
- raise ValueError("I/O operation on closed file")
- raise IOError(9, "Bad file descriptor")
-
- def read(self, n=-1):
- if self.closed:
- raise ValueError("I/O operation on closed file")
- raise IOError(9, "Bad file descriptor")
-
- def readlines(self, sizehint=0):
- if self.closed:
- raise ValueError("I/O operation on closed file")
- raise IOError(9, "Bad file descriptor")
-
- def readline(self, length=None):
- if self.closed:
- raise ValueError("I/O operation on closed file")
- raise IOError(9, "Bad file descriptor")
-
- def flush(self):
- if self.closed:
- raise ValueError("I/O operation on closed file")
- raise IOError(9, "Bad file descriptor")
-
- def __next__(self):
- if self.closed:
- raise StopIteration()
- line = self.readline()
- if not line:
- raise StopIteration()
- return line
-
-
-class IterI(IterIO):
- """Convert an stream into an iterator."""
-
- def __new__(cls, func, sentinel=""):
- if greenlet is None:
- raise RuntimeError("IterI requires greenlet support")
- stream = object.__new__(cls)
- stream._parent = greenlet.getcurrent()
- stream._buffer = []
- stream.closed = False
- stream.sentinel = sentinel
- stream.pos = 0
-
- def run():
- func(stream)
- stream.close()
-
- g = greenlet.greenlet(run, stream._parent)
- while 1:
- rv = g.switch()
- if not rv:
- return
- yield rv[0]
-
- def close(self):
- if not self.closed:
- self.closed = True
- self._flush_impl()
-
- def write(self, s):
- if self.closed:
- raise ValueError("I/O operation on closed file")
- if s:
- self.pos += len(s)
- self._buffer.append(s)
-
- def writelines(self, list):
- for item in list:
- self.write(item)
-
- def flush(self):
- if self.closed:
- raise ValueError("I/O operation on closed file")
- self._flush_impl()
-
- def _flush_impl(self):
- data = _mixed_join(self._buffer, self.sentinel)
- self._buffer = []
- if not data and self.closed:
- self._parent.switch()
- else:
- self._parent.switch((data,))
-
-
-class IterO(IterIO):
- """Iter output. Wrap an iterator and give it a stream like interface."""
-
- def __new__(cls, gen, sentinel=""):
- self = object.__new__(cls)
- self._gen = gen
- self._buf = None
- self.sentinel = sentinel
- self.closed = False
- self.pos = 0
- return self
-
- def __iter__(self):
- return self
-
- def _buf_append(self, string):
- """Replace string directly without appending to an empty string,
- avoiding type issues."""
- if not self._buf:
- self._buf = string
- else:
- self._buf += string
-
- def close(self):
- if not self.closed:
- self.closed = True
- if hasattr(self._gen, "close"):
- self._gen.close()
-
- def seek(self, pos, mode=0):
- if self.closed:
- raise ValueError("I/O operation on closed file")
- if mode == 1:
- pos += self.pos
- elif mode == 2:
- self.read()
- self.pos = min(self.pos, self.pos + pos)
- return
- elif mode != 0:
- raise IOError("Invalid argument")
- buf = []
- try:
- tmp_end_pos = len(self._buf or "")
- while pos > tmp_end_pos:
- item = next(self._gen)
- tmp_end_pos += len(item)
- buf.append(item)
- except StopIteration:
- pass
- if buf:
- self._buf_append(_mixed_join(buf, self.sentinel))
- self.pos = max(0, pos)
-
- def read(self, n=-1):
- if self.closed:
- raise ValueError("I/O operation on closed file")
- if n < 0:
- self._buf_append(_mixed_join(self._gen, self.sentinel))
- result = self._buf[self.pos :]
- self.pos += len(result)
- return result
- new_pos = self.pos + n
- buf = []
- try:
- tmp_end_pos = 0 if self._buf is None else len(self._buf)
- while new_pos > tmp_end_pos or (self._buf is None and not buf):
- item = next(self._gen)
- tmp_end_pos += len(item)
- buf.append(item)
- except StopIteration:
- pass
- if buf:
- self._buf_append(_mixed_join(buf, self.sentinel))
-
- if self._buf is None:
- return self.sentinel
-
- new_pos = max(0, new_pos)
- try:
- return self._buf[self.pos : new_pos]
- finally:
- self.pos = min(new_pos, len(self._buf))
-
- def readline(self, length=None):
- if self.closed:
- raise ValueError("I/O operation on closed file")
-
- nl_pos = -1
- if self._buf:
- nl_pos = self._buf.find(_newline(self._buf), self.pos)
- buf = []
- try:
- if self._buf is None:
- pos = self.pos
- else:
- pos = len(self._buf)
- while nl_pos < 0:
- item = next(self._gen)
- local_pos = item.find(_newline(item))
- buf.append(item)
- if local_pos >= 0:
- nl_pos = pos + local_pos
- break
- pos += len(item)
- except StopIteration:
- pass
- if buf:
- self._buf_append(_mixed_join(buf, self.sentinel))
-
- if self._buf is None:
- return self.sentinel
-
- if nl_pos < 0:
- new_pos = len(self._buf)
- else:
- new_pos = nl_pos + 1
- if length is not None and self.pos + length < new_pos:
- new_pos = self.pos + length
- try:
- return self._buf[self.pos : new_pos]
- finally:
- self.pos = min(new_pos, len(self._buf))
-
- def readlines(self, sizehint=0):
- total = 0
- lines = []
- line = self.readline()
- while line:
- lines.append(line)
- total += len(line)
- if 0 < sizehint <= total:
- break
- line = self.readline()
- return lines
diff --git a/src/werkzeug/contrib/lint.py b/src/werkzeug/contrib/lint.py
deleted file mode 100644
index 8bd8b8ab2..000000000
--- a/src/werkzeug/contrib/lint.py
+++ /dev/null
@@ -1,11 +0,0 @@
-import warnings
-
-from ..middleware.lint import * # noqa: F401, F403
-
-warnings.warn(
- "'werkzeug.contrib.lint' has moved to 'werkzeug.middleware.lint'."
- " This import is deprecated as of version 0.15 and will be removed"
- " in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
-)
diff --git a/src/werkzeug/contrib/profiler.py b/src/werkzeug/contrib/profiler.py
deleted file mode 100644
index b79fe567f..000000000
--- a/src/werkzeug/contrib/profiler.py
+++ /dev/null
@@ -1,42 +0,0 @@
-import warnings
-
-from ..middleware.profiler import * # noqa: F401, F403
-
-warnings.warn(
- "'werkzeug.contrib.profiler' has moved to"
- "'werkzeug.middleware.profiler'. This import is deprecated as of"
- "version 0.15 and will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
-)
-
-
-class MergeStream(object):
- """An object that redirects ``write`` calls to multiple streams.
- Use this to log to both ``sys.stdout`` and a file::
-
- f = open('profiler.log', 'w')
- stream = MergeStream(sys.stdout, f)
- profiler = ProfilerMiddleware(app, stream)
-
- .. deprecated:: 0.15
- Use the ``tee`` command in your terminal instead. This class
- will be removed in 1.0.
- """
-
- def __init__(self, *streams):
- warnings.warn(
- "'MergeStream' is deprecated as of version 0.15 and will be removed in"
- " version 1.0. Use your terminal's 'tee' command instead.",
- DeprecationWarning,
- stacklevel=2,
- )
-
- if not streams:
- raise TypeError("At least one stream must be given.")
-
- self.streams = streams
-
- def write(self, data):
- for stream in self.streams:
- stream.write(data)
diff --git a/src/werkzeug/contrib/securecookie.py b/src/werkzeug/contrib/securecookie.py
deleted file mode 100644
index c4c9eee25..000000000
--- a/src/werkzeug/contrib/securecookie.py
+++ /dev/null
@@ -1,362 +0,0 @@
-# -*- coding: utf-8 -*-
-r"""
- werkzeug.contrib.securecookie
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This module implements a cookie that is not alterable from the client
- because it adds a checksum the server checks for. You can use it as
- session replacement if all you have is a user id or something to mark
- a logged in user.
-
- Keep in mind that the data is still readable from the client as a
- normal cookie is. However you don't have to store and flush the
- sessions you have at the server.
-
- Example usage:
-
- >>> from werkzeug.contrib.securecookie import SecureCookie
- >>> x = SecureCookie({"foo": 42, "baz": (1, 2, 3)}, "deadbeef")
-
- Dumping into a string so that one can store it in a cookie:
-
- >>> value = x.serialize()
-
- Loading from that string again:
-
- >>> x = SecureCookie.unserialize(value, "deadbeef")
- >>> x["baz"]
- (1, 2, 3)
-
- If someone modifies the cookie and the checksum is wrong the unserialize
- method will fail silently and return a new empty `SecureCookie` object.
-
- Keep in mind that the values will be visible in the cookie so do not
- store data in a cookie you don't want the user to see.
-
- Application Integration
- =======================
-
- If you are using the werkzeug request objects you could integrate the
- secure cookie into your application like this::
-
- from werkzeug.utils import cached_property
- from werkzeug.wrappers import BaseRequest
- from werkzeug.contrib.securecookie import SecureCookie
-
- # don't use this key but a different one; you could just use
- # os.urandom(20) to get something random
- SECRET_KEY = '\xfa\xdd\xb8z\xae\xe0}4\x8b\xea'
-
- class Request(BaseRequest):
-
- @cached_property
- def client_session(self):
- data = self.cookies.get('session_data')
- if not data:
- return SecureCookie(secret_key=SECRET_KEY)
- return SecureCookie.unserialize(data, SECRET_KEY)
-
- def application(environ, start_response):
- request = Request(environ)
-
- # get a response object here
- response = ...
-
- if request.client_session.should_save:
- session_data = request.client_session.serialize()
- response.set_cookie('session_data', session_data,
- httponly=True)
- return response(environ, start_response)
-
- A less verbose integration can be achieved by using shorthand methods::
-
- class Request(BaseRequest):
-
- @cached_property
- def client_session(self):
- return SecureCookie.load_cookie(self, secret_key=COOKIE_SECRET)
-
- def application(environ, start_response):
- request = Request(environ)
-
- # get a response object here
- response = ...
-
- request.client_session.save_cookie(response)
- return response(environ, start_response)
-
- :copyright: 2007 Pallets
- :license: BSD-3-Clause
-"""
-import base64
-import pickle
-import warnings
-from hashlib import sha1 as _default_hash
-from hmac import new as hmac
-from time import time
-
-from .._compat import iteritems
-from .._compat import text_type
-from .._compat import to_bytes
-from .._compat import to_native
-from .._internal import _date_to_unix
-from ..contrib.sessions import ModificationTrackingDict
-from ..security import safe_str_cmp
-from ..urls import url_quote_plus
-from ..urls import url_unquote_plus
-
-warnings.warn(
- "'werkzeug.contrib.securecookie' is deprecated as of version 0.15"
- " and will be removed in version 1.0. It has moved to"
- " https://github.com/pallets/secure-cookie.",
- DeprecationWarning,
- stacklevel=2,
-)
-
-
-class UnquoteError(Exception):
- """Internal exception used to signal failures on quoting."""
-
-
-class SecureCookie(ModificationTrackingDict):
- """Represents a secure cookie. You can subclass this class and provide
- an alternative mac method. The import thing is that the mac method
- is a function with a similar interface to the hashlib. Required
- methods are update() and digest().
-
- Example usage:
-
- >>> x = SecureCookie({"foo": 42, "baz": (1, 2, 3)}, "deadbeef")
- >>> x["foo"]
- 42
- >>> x["baz"]
- (1, 2, 3)
- >>> x["blafasel"] = 23
- >>> x.should_save
- True
-
- :param data: the initial data. Either a dict, list of tuples or `None`.
- :param secret_key: the secret key. If not set `None` or not specified
- it has to be set before :meth:`serialize` is called.
- :param new: The initial value of the `new` flag.
- """
-
- #: The hash method to use. This has to be a module with a new function
- #: or a function that creates a hashlib object. Such as `hashlib.md5`
- #: Subclasses can override this attribute. The default hash is sha1.
- #: Make sure to wrap this in staticmethod() if you store an arbitrary
- #: function there such as hashlib.sha1 which might be implemented
- #: as a function.
- hash_method = staticmethod(_default_hash)
-
- #: The module used for serialization. Should have a ``dumps`` and a
- #: ``loads`` method that takes bytes. The default is :mod:`pickle`.
- #:
- #: .. versionchanged:: 0.15
- #: The default of ``pickle`` will change to :mod:`json` in 1.0.
- serialization_method = pickle
-
- #: if the contents should be base64 quoted. This can be disabled if the
- #: serialization process returns cookie safe strings only.
- quote_base64 = True
-
- def __init__(self, data=None, secret_key=None, new=True):
- ModificationTrackingDict.__init__(self, data or ())
- # explicitly convert it into a bytestring because python 2.6
- # no longer performs an implicit string conversion on hmac
- if secret_key is not None:
- secret_key = to_bytes(secret_key, "utf-8")
- self.secret_key = secret_key
- self.new = new
-
- if self.serialization_method is pickle:
- warnings.warn(
- "The default 'SecureCookie.serialization_method' will"
- " change from pickle to json in version 1.0. To upgrade"
- " existing tokens, override 'unquote' to try pickle if"
- " json fails.",
- stacklevel=2,
- )
-
- def __repr__(self):
- return "<%s %s%s>" % (
- self.__class__.__name__,
- dict.__repr__(self),
- "*" if self.should_save else "",
- )
-
- @property
- def should_save(self):
- """True if the session should be saved. By default this is only true
- for :attr:`modified` cookies, not :attr:`new`.
- """
- return self.modified
-
- @classmethod
- def quote(cls, value):
- """Quote the value for the cookie. This can be any object supported
- by :attr:`serialization_method`.
-
- :param value: the value to quote.
- """
- if cls.serialization_method is not None:
- value = cls.serialization_method.dumps(value)
- if cls.quote_base64:
- value = b"".join(
- base64.b64encode(to_bytes(value, "utf8")).splitlines()
- ).strip()
- return value
-
- @classmethod
- def unquote(cls, value):
- """Unquote the value for the cookie. If unquoting does not work a
- :exc:`UnquoteError` is raised.
-
- :param value: the value to unquote.
- """
- try:
- if cls.quote_base64:
- value = base64.b64decode(value)
- if cls.serialization_method is not None:
- value = cls.serialization_method.loads(value)
- return value
- except Exception:
- # unfortunately pickle and other serialization modules can
- # cause pretty every error here. if we get one we catch it
- # and convert it into an UnquoteError
- raise UnquoteError()
-
- def serialize(self, expires=None):
- """Serialize the secure cookie into a string.
-
- If expires is provided, the session will be automatically invalidated
- after expiration when you unseralize it. This provides better
- protection against session cookie theft.
-
- :param expires: an optional expiration date for the cookie (a
- :class:`datetime.datetime` object)
- """
- if self.secret_key is None:
- raise RuntimeError("no secret key defined")
- if expires:
- self["_expires"] = _date_to_unix(expires)
- result = []
- mac = hmac(self.secret_key, None, self.hash_method)
- for key, value in sorted(self.items()):
- result.append(
- (
- "%s=%s" % (url_quote_plus(key), self.quote(value).decode("ascii"))
- ).encode("ascii")
- )
- mac.update(b"|" + result[-1])
- return b"?".join([base64.b64encode(mac.digest()).strip(), b"&".join(result)])
-
- @classmethod
- def unserialize(cls, string, secret_key):
- """Load the secure cookie from a serialized string.
-
- :param string: the cookie value to unserialize.
- :param secret_key: the secret key used to serialize the cookie.
- :return: a new :class:`SecureCookie`.
- """
- if isinstance(string, text_type):
- string = string.encode("utf-8", "replace")
- if isinstance(secret_key, text_type):
- secret_key = secret_key.encode("utf-8", "replace")
- try:
- base64_hash, data = string.split(b"?", 1)
- except (ValueError, IndexError):
- items = ()
- else:
- items = {}
- mac = hmac(secret_key, None, cls.hash_method)
- for item in data.split(b"&"):
- mac.update(b"|" + item)
- if b"=" not in item:
- items = None
- break
- key, value = item.split(b"=", 1)
- # try to make the key a string
- key = url_unquote_plus(key.decode("ascii"))
- try:
- key = to_native(key)
- except UnicodeError:
- pass
- items[key] = value
-
- # no parsing error and the mac looks okay, we can now
- # sercurely unpickle our cookie.
- try:
- client_hash = base64.b64decode(base64_hash)
- except TypeError:
- items = client_hash = None
- if items is not None and safe_str_cmp(client_hash, mac.digest()):
- try:
- for key, value in iteritems(items):
- items[key] = cls.unquote(value)
- except UnquoteError:
- items = ()
- else:
- if "_expires" in items:
- if time() > items["_expires"]:
- items = ()
- else:
- del items["_expires"]
- else:
- items = ()
- return cls(items, secret_key, False)
-
- @classmethod
- def load_cookie(cls, request, key="session", secret_key=None):
- """Loads a :class:`SecureCookie` from a cookie in request. If the
- cookie is not set, a new :class:`SecureCookie` instanced is
- returned.
-
- :param request: a request object that has a `cookies` attribute
- which is a dict of all cookie values.
- :param key: the name of the cookie.
- :param secret_key: the secret key used to unquote the cookie.
- Always provide the value even though it has
- no default!
- """
- data = request.cookies.get(key)
- if not data:
- return cls(secret_key=secret_key)
- return cls.unserialize(data, secret_key)
-
- def save_cookie(
- self,
- response,
- key="session",
- expires=None,
- session_expires=None,
- max_age=None,
- path="/",
- domain=None,
- secure=None,
- httponly=False,
- force=False,
- ):
- """Saves the SecureCookie in a cookie on response object. All
- parameters that are not described here are forwarded directly
- to :meth:`~BaseResponse.set_cookie`.
-
- :param response: a response object that has a
- :meth:`~BaseResponse.set_cookie` method.
- :param key: the name of the cookie.
- :param session_expires: the expiration date of the secure cookie
- stored information. If this is not provided
- the cookie `expires` date is used instead.
- """
- if force or self.should_save:
- data = self.serialize(session_expires or expires)
- response.set_cookie(
- key,
- data,
- expires=expires,
- max_age=max_age,
- path=path,
- domain=domain,
- secure=secure,
- httponly=httponly,
- )
diff --git a/src/werkzeug/contrib/sessions.py b/src/werkzeug/contrib/sessions.py
deleted file mode 100644
index 866e827c1..000000000
--- a/src/werkzeug/contrib/sessions.py
+++ /dev/null
@@ -1,389 +0,0 @@
-# -*- coding: utf-8 -*-
-r"""
- werkzeug.contrib.sessions
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This module contains some helper classes that help one to add session
- support to a python WSGI application. For full client-side session
- storage see :mod:`~werkzeug.contrib.securecookie` which implements a
- secure, client-side session storage.
-
-
- Application Integration
- =======================
-
- ::
-
- from werkzeug.contrib.sessions import SessionMiddleware, \
- FilesystemSessionStore
-
- app = SessionMiddleware(app, FilesystemSessionStore())
-
- The current session will then appear in the WSGI environment as
- `werkzeug.session`. However it's recommended to not use the middleware
- but the stores directly in the application. However for very simple
- scripts a middleware for sessions could be sufficient.
-
- This module does not implement methods or ways to check if a session is
- expired. That should be done by a cronjob and storage specific. For
- example to prune unused filesystem sessions one could check the modified
- time of the files. If sessions are stored in the database the new()
- method should add an expiration timestamp for the session.
-
- For better flexibility it's recommended to not use the middleware but the
- store and session object directly in the application dispatching::
-
- session_store = FilesystemSessionStore()
-
- def application(environ, start_response):
- request = Request(environ)
- sid = request.cookies.get('cookie_name')
- if sid is None:
- request.session = session_store.new()
- else:
- request.session = session_store.get(sid)
- response = get_the_response_object(request)
- if request.session.should_save:
- session_store.save(request.session)
- response.set_cookie('cookie_name', request.session.sid)
- return response(environ, start_response)
-
- :copyright: 2007 Pallets
- :license: BSD-3-Clause
-"""
-import os
-import re
-import tempfile
-import warnings
-from hashlib import sha1
-from os import path
-from pickle import dump
-from pickle import HIGHEST_PROTOCOL
-from pickle import load
-from random import random
-from time import time
-
-from .._compat import PY2
-from .._compat import text_type
-from ..datastructures import CallbackDict
-from ..filesystem import get_filesystem_encoding
-from ..posixemulation import rename
-from ..utils import dump_cookie
-from ..utils import parse_cookie
-from ..wsgi import ClosingIterator
-
-warnings.warn(
- "'werkzeug.contrib.sessions' is deprecated as of version 0.15 and"
- " will be removed in version 1.0. It has moved to"
- " https://github.com/pallets/secure-cookie.",
- DeprecationWarning,
- stacklevel=2,
-)
-
-_sha1_re = re.compile(r"^[a-f0-9]{40}$")
-
-
-def _urandom():
- if hasattr(os, "urandom"):
- return os.urandom(30)
- return text_type(random()).encode("ascii")
-
-
-def generate_key(salt=None):
- if salt is None:
- salt = repr(salt).encode("ascii")
- return sha1(b"".join([salt, str(time()).encode("ascii"), _urandom()])).hexdigest()
-
-
-class ModificationTrackingDict(CallbackDict):
- __slots__ = ("modified",)
-
- def __init__(self, *args, **kwargs):
- def on_update(self):
- self.modified = True
-
- self.modified = False
- CallbackDict.__init__(self, on_update=on_update)
- dict.update(self, *args, **kwargs)
-
- def copy(self):
- """Create a flat copy of the dict."""
- missing = object()
- result = object.__new__(self.__class__)
- for name in self.__slots__:
- val = getattr(self, name, missing)
- if val is not missing:
- setattr(result, name, val)
- return result
-
- def __copy__(self):
- return self.copy()
-
-
-class Session(ModificationTrackingDict):
- """Subclass of a dict that keeps track of direct object changes. Changes
- in mutable structures are not tracked, for those you have to set
- `modified` to `True` by hand.
- """
-
- __slots__ = ModificationTrackingDict.__slots__ + ("sid", "new")
-
- def __init__(self, data, sid, new=False):
- ModificationTrackingDict.__init__(self, data)
- self.sid = sid
- self.new = new
-
- def __repr__(self):
- return "<%s %s%s>" % (
- self.__class__.__name__,
- dict.__repr__(self),
- "*" if self.should_save else "",
- )
-
- @property
- def should_save(self):
- """True if the session should be saved.
-
- .. versionchanged:: 0.6
- By default the session is now only saved if the session is
- modified, not if it is new like it was before.
- """
- return self.modified
-
-
-class SessionStore(object):
- """Baseclass for all session stores. The Werkzeug contrib module does not
- implement any useful stores besides the filesystem store, application
- developers are encouraged to create their own stores.
-
- :param session_class: The session class to use. Defaults to
- :class:`Session`.
- """
-
- def __init__(self, session_class=None):
- if session_class is None:
- session_class = Session
- self.session_class = session_class
-
- def is_valid_key(self, key):
- """Check if a key has the correct format."""
- return _sha1_re.match(key) is not None
-
- def generate_key(self, salt=None):
- """Simple function that generates a new session key."""
- return generate_key(salt)
-
- def new(self):
- """Generate a new session."""
- return self.session_class({}, self.generate_key(), True)
-
- def save(self, session):
- """Save a session."""
-
- def save_if_modified(self, session):
- """Save if a session class wants an update."""
- if session.should_save:
- self.save(session)
-
- def delete(self, session):
- """Delete a session."""
-
- def get(self, sid):
- """Get a session for this sid or a new session object. This method
- has to check if the session key is valid and create a new session if
- that wasn't the case.
- """
- return self.session_class({}, sid, True)
-
-
-#: used for temporary files by the filesystem session store
-_fs_transaction_suffix = ".__wz_sess"
-
-
-class FilesystemSessionStore(SessionStore):
- """Simple example session store that saves sessions on the filesystem.
- This store works best on POSIX systems and Windows Vista / Windows
- Server 2008 and newer.
-
- .. versionchanged:: 0.6
- `renew_missing` was added. Previously this was considered `True`,
- now the default changed to `False` and it can be explicitly
- deactivated.
-
- :param path: the path to the folder used for storing the sessions.
- If not provided the default temporary directory is used.
- :param filename_template: a string template used to give the session
- a filename. ``%s`` is replaced with the
- session id.
- :param session_class: The session class to use. Defaults to
- :class:`Session`.
- :param renew_missing: set to `True` if you want the store to
- give the user a new sid if the session was
- not yet saved.
- """
-
- def __init__(
- self,
- path=None,
- filename_template="werkzeug_%s.sess",
- session_class=None,
- renew_missing=False,
- mode=0o644,
- ):
- SessionStore.__init__(self, session_class)
- if path is None:
- path = tempfile.gettempdir()
- self.path = path
- if isinstance(filename_template, text_type) and PY2:
- filename_template = filename_template.encode(get_filesystem_encoding())
- assert not filename_template.endswith(_fs_transaction_suffix), (
- "filename templates may not end with %s" % _fs_transaction_suffix
- )
- self.filename_template = filename_template
- self.renew_missing = renew_missing
- self.mode = mode
-
- def get_session_filename(self, sid):
- # out of the box, this should be a strict ASCII subset but
- # you might reconfigure the session object to have a more
- # arbitrary string.
- if isinstance(sid, text_type) and PY2:
- sid = sid.encode(get_filesystem_encoding())
- return path.join(self.path, self.filename_template % sid)
-
- def save(self, session):
- fn = self.get_session_filename(session.sid)
- fd, tmp = tempfile.mkstemp(suffix=_fs_transaction_suffix, dir=self.path)
- f = os.fdopen(fd, "wb")
- try:
- dump(dict(session), f, HIGHEST_PROTOCOL)
- finally:
- f.close()
- try:
- rename(tmp, fn)
- os.chmod(fn, self.mode)
- except (IOError, OSError):
- pass
-
- def delete(self, session):
- fn = self.get_session_filename(session.sid)
- try:
- os.unlink(fn)
- except OSError:
- pass
-
- def get(self, sid):
- if not self.is_valid_key(sid):
- return self.new()
- try:
- f = open(self.get_session_filename(sid), "rb")
- except IOError:
- if self.renew_missing:
- return self.new()
- data = {}
- else:
- try:
- try:
- data = load(f)
- except Exception:
- data = {}
- finally:
- f.close()
- return self.session_class(data, sid, False)
-
- def list(self):
- """Lists all sessions in the store.
-
- .. versionadded:: 0.6
- """
- before, after = self.filename_template.split("%s", 1)
- filename_re = re.compile(
- r"%s(.{5,})%s$" % (re.escape(before), re.escape(after))
- )
- result = []
- for filename in os.listdir(self.path):
- #: this is a session that is still being saved.
- if filename.endswith(_fs_transaction_suffix):
- continue
- match = filename_re.match(filename)
- if match is not None:
- result.append(match.group(1))
- return result
-
-
-class SessionMiddleware(object):
- """A simple middleware that puts the session object of a store provided
- into the WSGI environ. It automatically sets cookies and restores
- sessions.
-
- However a middleware is not the preferred solution because it won't be as
- fast as sessions managed by the application itself and will put a key into
- the WSGI environment only relevant for the application which is against
- the concept of WSGI.
-
- The cookie parameters are the same as for the :func:`~dump_cookie`
- function just prefixed with ``cookie_``. Additionally `max_age` is
- called `cookie_age` and not `cookie_max_age` because of backwards
- compatibility.
- """
-
- def __init__(
- self,
- app,
- store,
- cookie_name="session_id",
- cookie_age=None,
- cookie_expires=None,
- cookie_path="/",
- cookie_domain=None,
- cookie_secure=None,
- cookie_httponly=False,
- cookie_samesite="Lax",
- environ_key="werkzeug.session",
- ):
- self.app = app
- self.store = store
- self.cookie_name = cookie_name
- self.cookie_age = cookie_age
- self.cookie_expires = cookie_expires
- self.cookie_path = cookie_path
- self.cookie_domain = cookie_domain
- self.cookie_secure = cookie_secure
- self.cookie_httponly = cookie_httponly
- self.cookie_samesite = cookie_samesite
- self.environ_key = environ_key
-
- def __call__(self, environ, start_response):
- cookie = parse_cookie(environ.get("HTTP_COOKIE", ""))
- sid = cookie.get(self.cookie_name, None)
- if sid is None:
- session = self.store.new()
- else:
- session = self.store.get(sid)
- environ[self.environ_key] = session
-
- def injecting_start_response(status, headers, exc_info=None):
- if session.should_save:
- self.store.save(session)
- headers.append(
- (
- "Set-Cookie",
- dump_cookie(
- self.cookie_name,
- session.sid,
- self.cookie_age,
- self.cookie_expires,
- self.cookie_path,
- self.cookie_domain,
- self.cookie_secure,
- self.cookie_httponly,
- samesite=self.cookie_samesite,
- ),
- )
- )
- return start_response(status, headers, exc_info)
-
- return ClosingIterator(
- self.app(environ, injecting_start_response),
- lambda: self.store.save_if_modified(session),
- )
diff --git a/src/werkzeug/contrib/wrappers.py b/src/werkzeug/contrib/wrappers.py
deleted file mode 100644
index 49b82a71e..000000000
--- a/src/werkzeug/contrib/wrappers.py
+++ /dev/null
@@ -1,385 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- werkzeug.contrib.wrappers
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Extra wrappers or mixins contributed by the community. These wrappers can
- be mixed in into request objects to add extra functionality.
-
- Example::
-
- from werkzeug.wrappers import Request as RequestBase
- from werkzeug.contrib.wrappers import JSONRequestMixin
-
- class Request(RequestBase, JSONRequestMixin):
- pass
-
- Afterwards this request object provides the extra functionality of the
- :class:`JSONRequestMixin`.
-
- :copyright: 2007 Pallets
- :license: BSD-3-Clause
-"""
-import codecs
-import warnings
-
-from .._compat import wsgi_decoding_dance
-from ..exceptions import BadRequest
-from ..http import dump_options_header
-from ..http import parse_options_header
-from ..utils import cached_property
-from ..wrappers.json import JSONMixin as _JSONMixin
-
-
-def is_known_charset(charset):
- """Checks if the given charset is known to Python."""
- try:
- codecs.lookup(charset)
- except LookupError:
- return False
- return True
-
-
-class JSONRequestMixin(_JSONMixin):
- """
- .. deprecated:: 0.15
- Moved to :class:`werkzeug.wrappers.json.JSONMixin`. This old
- import will be removed in version 1.0.
- """
-
- @property
- def json(self):
- warnings.warn(
- "'werkzeug.contrib.wrappers.JSONRequestMixin' has moved to"
- " 'werkzeug.wrappers.json.JSONMixin'. This old import will"
- " be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- return super(JSONRequestMixin, self).json
-
-
-class ProtobufRequestMixin(object):
-
- """Add protobuf parsing method to a request object. This will parse the
- input data through `protobuf`_ if possible.
-
- :exc:`~werkzeug.exceptions.BadRequest` will be raised if the content-type
- is not protobuf or if the data itself cannot be parsed property.
-
- .. _protobuf: https://github.com/protocolbuffers/protobuf
-
- .. deprecated:: 0.15
- This mixin will be removed in version 1.0.
- """
-
- #: by default the :class:`ProtobufRequestMixin` will raise a
- #: :exc:`~werkzeug.exceptions.BadRequest` if the object is not
- #: initialized. You can bypass that check by setting this
- #: attribute to `False`.
- protobuf_check_initialization = True
-
- def parse_protobuf(self, proto_type):
- """Parse the data into an instance of proto_type."""
- warnings.warn(
- "'werkzeug.contrib.wrappers.ProtobufRequestMixin' is"
- " deprecated as of version 0.15 and will be removed in"
- " version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- if "protobuf" not in self.environ.get("CONTENT_TYPE", ""):
- raise BadRequest("Not a Protobuf request")
-
- obj = proto_type()
- try:
- obj.ParseFromString(self.data)
- except Exception:
- raise BadRequest("Unable to parse Protobuf request")
-
- # Fail if not all required fields are set
- if self.protobuf_check_initialization and not obj.IsInitialized():
- raise BadRequest("Partial Protobuf request")
-
- return obj
-
-
-class RoutingArgsRequestMixin(object):
-
- """This request mixin adds support for the wsgiorg routing args
- `specification`_.
-
- .. _specification: https://wsgi.readthedocs.io/en/latest/
- specifications/routing_args.html
-
- .. deprecated:: 0.15
- This mixin will be removed in version 1.0.
- """
-
- def _get_routing_args(self):
- warnings.warn(
- "'werkzeug.contrib.wrappers.RoutingArgsRequestMixin' is"
- " deprecated as of version 0.15 and will be removed in"
- " version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- return self.environ.get("wsgiorg.routing_args", (()))[0]
-
- def _set_routing_args(self, value):
- warnings.warn(
- "'werkzeug.contrib.wrappers.RoutingArgsRequestMixin' is"
- " deprecated as of version 0.15 and will be removed in"
- " version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- if self.shallow:
- raise RuntimeError(
- "A shallow request tried to modify the WSGI "
- "environment. If you really want to do that, "
- "set `shallow` to False."
- )
- self.environ["wsgiorg.routing_args"] = (value, self.routing_vars)
-
- routing_args = property(
- _get_routing_args,
- _set_routing_args,
- doc="""
- The positional URL arguments as `tuple`.""",
- )
- del _get_routing_args, _set_routing_args
-
- def _get_routing_vars(self):
- warnings.warn(
- "'werkzeug.contrib.wrappers.RoutingArgsRequestMixin' is"
- " deprecated as of version 0.15 and will be removed in"
- " version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- rv = self.environ.get("wsgiorg.routing_args")
- if rv is not None:
- return rv[1]
- rv = {}
- if not self.shallow:
- self.routing_vars = rv
- return rv
-
- def _set_routing_vars(self, value):
- warnings.warn(
- "'werkzeug.contrib.wrappers.RoutingArgsRequestMixin' is"
- " deprecated as of version 0.15 and will be removed in"
- " version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- if self.shallow:
- raise RuntimeError(
- "A shallow request tried to modify the WSGI "
- "environment. If you really want to do that, "
- "set `shallow` to False."
- )
- self.environ["wsgiorg.routing_args"] = (self.routing_args, value)
-
- routing_vars = property(
- _get_routing_vars,
- _set_routing_vars,
- doc="""
- The keyword URL arguments as `dict`.""",
- )
- del _get_routing_vars, _set_routing_vars
-
-
-class ReverseSlashBehaviorRequestMixin(object):
-
- """This mixin reverses the trailing slash behavior of :attr:`script_root`
- and :attr:`path`. This makes it possible to use :func:`~urlparse.urljoin`
- directly on the paths.
-
- Because it changes the behavior or :class:`Request` this class has to be
- mixed in *before* the actual request class::
-
- class MyRequest(ReverseSlashBehaviorRequestMixin, Request):
- pass
-
- This example shows the differences (for an application mounted on
- `/application` and the request going to `/application/foo/bar`):
-
- +---------------+-------------------+---------------------+
- | | normal behavior | reverse behavior |
- +===============+===================+=====================+
- | `script_root` | ``/application`` | ``/application/`` |
- +---------------+-------------------+---------------------+
- | `path` | ``/foo/bar`` | ``foo/bar`` |
- +---------------+-------------------+---------------------+
-
- .. deprecated:: 0.15
- This mixin will be removed in version 1.0.
- """
-
- @cached_property
- def path(self):
- """Requested path as unicode. This works a bit like the regular path
- info in the WSGI environment but will not include a leading slash.
- """
- warnings.warn(
- "'werkzeug.contrib.wrappers.ReverseSlashBehaviorRequestMixin'"
- " is deprecated as of version 0.15 and will be removed in"
- " version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- path = wsgi_decoding_dance(
- self.environ.get("PATH_INFO") or "", self.charset, self.encoding_errors
- )
- return path.lstrip("/")
-
- @cached_property
- def script_root(self):
- """The root path of the script includling a trailing slash."""
- warnings.warn(
- "'werkzeug.contrib.wrappers.ReverseSlashBehaviorRequestMixin'"
- " is deprecated as of version 0.15 and will be removed in"
- " version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- path = wsgi_decoding_dance(
- self.environ.get("SCRIPT_NAME") or "", self.charset, self.encoding_errors
- )
- return path.rstrip("/") + "/"
-
-
-class DynamicCharsetRequestMixin(object):
-
- """"If this mixin is mixed into a request class it will provide
- a dynamic `charset` attribute. This means that if the charset is
- transmitted in the content type headers it's used from there.
-
- Because it changes the behavior or :class:`Request` this class has
- to be mixed in *before* the actual request class::
-
- class MyRequest(DynamicCharsetRequestMixin, Request):
- pass
-
- By default the request object assumes that the URL charset is the
- same as the data charset. If the charset varies on each request
- based on the transmitted data it's not a good idea to let the URLs
- change based on that. Most browsers assume either utf-8 or latin1
- for the URLs if they have troubles figuring out. It's strongly
- recommended to set the URL charset to utf-8::
-
- class MyRequest(DynamicCharsetRequestMixin, Request):
- url_charset = 'utf-8'
-
- .. deprecated:: 0.15
- This mixin will be removed in version 1.0.
-
- .. versionadded:: 0.6
- """
-
- #: the default charset that is assumed if the content type header
- #: is missing or does not contain a charset parameter. The default
- #: is latin1 which is what HTTP specifies as default charset.
- #: You may however want to set this to utf-8 to better support
- #: browsers that do not transmit a charset for incoming data.
- default_charset = "latin1"
-
- def unknown_charset(self, charset):
- """Called if a charset was provided but is not supported by
- the Python codecs module. By default latin1 is assumed then
- to not lose any information, you may override this method to
- change the behavior.
-
- :param charset: the charset that was not found.
- :return: the replacement charset.
- """
- return "latin1"
-
- @cached_property
- def charset(self):
- """The charset from the content type."""
- warnings.warn(
- "'werkzeug.contrib.wrappers.DynamicCharsetRequestMixin'"
- " is deprecated as of version 0.15 and will be removed in"
- " version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- header = self.environ.get("CONTENT_TYPE")
- if header:
- ct, options = parse_options_header(header)
- charset = options.get("charset")
- if charset:
- if is_known_charset(charset):
- return charset
- return self.unknown_charset(charset)
- return self.default_charset
-
-
-class DynamicCharsetResponseMixin(object):
-
- """If this mixin is mixed into a response class it will provide
- a dynamic `charset` attribute. This means that if the charset is
- looked up and stored in the `Content-Type` header and updates
- itself automatically. This also means a small performance hit but
- can be useful if you're working with different charsets on
- responses.
-
- Because the charset attribute is no a property at class-level, the
- default value is stored in `default_charset`.
-
- Because it changes the behavior or :class:`Response` this class has
- to be mixed in *before* the actual response class::
-
- class MyResponse(DynamicCharsetResponseMixin, Response):
- pass
-
- .. deprecated:: 0.15
- This mixin will be removed in version 1.0.
-
- .. versionadded:: 0.6
- """
-
- #: the default charset.
- default_charset = "utf-8"
-
- def _get_charset(self):
- warnings.warn(
- "'werkzeug.contrib.wrappers.DynamicCharsetResponseMixin'"
- " is deprecated as of version 0.15 and will be removed in"
- " version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- header = self.headers.get("content-type")
- if header:
- charset = parse_options_header(header)[1].get("charset")
- if charset:
- return charset
- return self.default_charset
-
- def _set_charset(self, charset):
- warnings.warn(
- "'werkzeug.contrib.wrappers.DynamicCharsetResponseMixin'"
- " is deprecated as of version 0.15 and will be removed in"
- " version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- header = self.headers.get("content-type")
- ct, options = parse_options_header(header)
- if not ct:
- raise TypeError("Cannot set charset if Content-Type header is missing.")
- options["charset"] = charset
- self.headers["Content-Type"] = dump_options_header(ct, options)
-
- charset = property(
- _get_charset,
- _set_charset,
- doc="""
- The charset for the response. It's stored inside the
- Content-Type header as a parameter.""",
- )
- del _get_charset, _set_charset
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index 9643db96c..c2b4c021b 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -1260,21 +1260,6 @@ def __setitem__(self, key, value):
else:
self.set(key, value)
- def to_list(self, charset="iso-8859-1"):
- """Convert the headers into a list suitable for WSGI.
-
- .. deprecated:: 0.9
- """
- from warnings import warn
-
- warn(
- "'to_list' deprecated as of version 0.9 and will be removed"
- " in version 1.0. Use 'to_wsgi_list' instead.",
- DeprecationWarning,
- stacklevel=2,
- )
- return self.to_wsgi_list()
-
def to_wsgi_list(self):
"""Convert the headers into a list suitable for WSGI.
diff --git a/src/werkzeug/debug/__init__.py b/src/werkzeug/debug/__init__.py
index 1bb374eaf..e6034b52b 100644
--- a/src/werkzeug/debug/__init__.py
+++ b/src/werkzeug/debug/__init__.py
@@ -29,24 +29,9 @@
from ..wrappers import BaseRequest as Request
from ..wrappers import BaseResponse as Response
from .console import Console
-from .repr import debug_repr as _debug_repr
from .tbtools import get_current_traceback
from .tbtools import render_console_html
-
-def debug_repr(*args, **kwargs):
- import warnings
-
- warnings.warn(
- "'debug_repr' has moved to 'werkzeug.debug.repr.debug_repr'"
- " as of version 0.7. This old import will be removed in version"
- " 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- return _debug_repr(*args, **kwargs)
-
-
# A week
PIN_TIME = 60 * 60 * 24 * 7
@@ -225,9 +210,6 @@ class DebuggedApplication(object):
The `evalex` keyword argument allows evaluating expressions in a
traceback's frame context.
- .. versionadded:: 0.9
- The `lodgeit_url` parameter was deprecated.
-
:param app: the WSGI application to run debugged.
:param evalex: enable exception evaluation feature (interactive
debugging). This requires a non-forking server.
@@ -253,20 +235,9 @@ def __init__(
console_path="/console",
console_init_func=None,
show_hidden_frames=False,
- lodgeit_url=None,
pin_security=True,
pin_logging=True,
):
- if lodgeit_url is not None:
- from warnings import warn
-
- warn(
- "'lodgeit_url' is no longer used as of version 0.9 and"
- " will be removed in version 1.0. Werkzeug uses"
- " https://gist.github.com/ instead.",
- DeprecationWarning,
- stacklevel=2,
- )
if not console_init_func:
console_init_func = None
self.app = app
diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py
index af3200750..3f40b3080 100644
--- a/src/werkzeug/http.py
+++ b/src/werkzeug/http.py
@@ -1247,57 +1247,3 @@ def is_byte_range_valid(start, stop, length):
from .datastructures import TypeConversionDict
from .datastructures import WWWAuthenticate
from .urls import iri_to_uri
-
-# DEPRECATED
-from .datastructures import CharsetAccept as _CharsetAccept
-from .datastructures import Headers as _Headers
-from .datastructures import LanguageAccept as _LanguageAccept
-from .datastructures import MIMEAccept as _MIMEAccept
-
-
-class MIMEAccept(_MIMEAccept):
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.http.MIMEAccept' has moved to 'werkzeug"
- ".datastructures.MIMEAccept' as of version 0.5. This old"
- " import will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(MIMEAccept, self).__init__(*args, **kwargs)
-
-
-class CharsetAccept(_CharsetAccept):
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.http.CharsetAccept' has moved to 'werkzeug"
- ".datastructures.CharsetAccept' as of version 0.5. This old"
- " import will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(CharsetAccept, self).__init__(*args, **kwargs)
-
-
-class LanguageAccept(_LanguageAccept):
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.http.LanguageAccept' has moved to 'werkzeug"
- ".datastructures.LanguageAccept' as of version 0.5. This"
- " old import will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(LanguageAccept, self).__init__(*args, **kwargs)
-
-
-class Headers(_Headers):
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.http.Headers' has moved to 'werkzeug"
- ".datastructures.Headers' as of version 0.5. This old"
- " import will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(Headers, self).__init__(*args, **kwargs)
diff --git a/src/werkzeug/middleware/proxy_fix.py b/src/werkzeug/middleware/proxy_fix.py
index dc1dacc8c..0046799c8 100644
--- a/src/werkzeug/middleware/proxy_fix.py
+++ b/src/werkzeug/middleware/proxy_fix.py
@@ -21,7 +21,6 @@
:copyright: 2007 Pallets
:license: BSD-3-Clause
"""
-import warnings
class ProxyFix(object):
@@ -49,7 +48,6 @@ class ProxyFix(object):
:param x_port: Number of values to trust for ``X-Forwarded-Port``.
:param x_prefix: Number of values to trust for
``X-Forwarded-Prefix``.
- :param num_proxies: Deprecated, use ``x_for`` instead.
.. code-block:: python
@@ -57,6 +55,14 @@ class ProxyFix(object):
# App is behind one proxy that sets the -For and -Host headers.
app = ProxyFix(app, x_for=1, x_host=1)
+ .. versionchanged:: 1.0
+ Deprecated code has been removed:
+
+ * The ``num_proxies`` argument and attribute.
+ * The ``get_remote_addr`` method.
+ * The environ keys ``orig_remote_addr``,
+ ``orig_wsgi_url_scheme``, and ``orig_http_host``.
+
.. versionchanged:: 0.15
All headers support multiple values. The ``num_proxies``
argument is deprecated. Each header is configured with a
@@ -76,74 +82,13 @@ class ProxyFix(object):
``SERVER_NAME`` and ``SERVER_PORT``.
"""
- def __init__(
- self, app, num_proxies=None, x_for=1, x_proto=0, x_host=0, x_port=0, x_prefix=0
- ):
+ def __init__(self, app, x_for=1, x_proto=0, x_host=0, x_port=0, x_prefix=0):
self.app = app
self.x_for = x_for
self.x_proto = x_proto
self.x_host = x_host
self.x_port = x_port
self.x_prefix = x_prefix
- self.num_proxies = num_proxies
-
- @property
- def num_proxies(self):
- """The number of proxies setting ``X-Forwarded-For`` in front
- of the application.
-
- .. deprecated:: 0.15
- A separate number of trusted proxies is configured for each
- header. ``num_proxies`` maps to ``x_for``. This method will
- be removed in 1.0.
-
- :internal:
- """
- warnings.warn(
- "'num_proxies' is deprecated as of version 0.15 and will be"
- " removed in version 1.0. Use 'x_for' instead.",
- DeprecationWarning,
- stacklevel=2,
- )
- return self.x_for
-
- @num_proxies.setter
- def num_proxies(self, value):
- if value is not None:
- warnings.warn(
- "'num_proxies' is deprecated as of version 0.15 and"
- " will be removed in version 1.0. Use 'x_for' instead.",
- DeprecationWarning,
- stacklevel=2,
- )
- self.x_for = value
-
- def get_remote_addr(self, forwarded_for):
- """Get the real ``remote_addr`` by looking backwards ``x_for``
- number of values in the ``X-Forwarded-For`` header.
-
- :param forwarded_for: List of values parsed from the
- ``X-Forwarded-For`` header.
- :return: The real ``remote_addr``, or ``None`` if there were not
- at least ``x_for`` values.
-
- .. deprecated:: 0.15
- This is handled internally for each header. This method will
- be removed in 1.0.
-
- .. versionchanged:: 0.9
- Use ``num_proxies`` instead of always picking the first
- value.
-
- .. versionadded:: 0.8
- """
- warnings.warn(
- "'get_remote_addr' is deprecated as of version 0.15 and"
- " will be removed in version 1.0. It is now handled"
- " internally for each header.",
- DeprecationWarning,
- )
- return self._get_trusted_comma(self.x_for, ",".join(forwarded_for))
def _get_trusted_comma(self, trusted, value):
"""Get the real value from a comma-separated header based on the
@@ -180,11 +125,7 @@ def __call__(self, environ, start_response):
"SERVER_NAME": environ_get("SERVER_NAME"),
"SERVER_PORT": environ_get("SERVER_PORT"),
"SCRIPT_NAME": environ_get("SCRIPT_NAME"),
- },
- # todo: remove deprecated keys
- "werkzeug.proxy_fix.orig_remote_addr": orig_remote_addr,
- "werkzeug.proxy_fix.orig_wsgi_url_scheme": orig_wsgi_url_scheme,
- "werkzeug.proxy_fix.orig_http_host": orig_http_host,
+ }
}
)
diff --git a/src/werkzeug/test.py b/src/werkzeug/test.py
index 9aeb4127c..26fa95877 100644
--- a/src/werkzeug/test.py
+++ b/src/werkzeug/test.py
@@ -30,7 +30,6 @@
from .datastructures import CombinedMultiDict
from .datastructures import EnvironHeaders
from .datastructures import FileMultiDict
-from .datastructures import FileStorage
from .datastructures import Headers
from .datastructures import MultiDict
from .http import dump_cookie
@@ -144,23 +143,6 @@ def encode_multipart(values, boundary=None, charset="utf-8"):
return boundary, stream.read()
-def File(fd, filename=None, mimetype=None):
- """Backwards compat.
-
- .. deprecated:: 0.5
- """
- from warnings import warn
-
- warn(
- "'werkzeug.test.File' is deprecated as of version 0.5 and will"
- " be removed in version 1.0. Use 'EnvironBuilder' or"
- " 'FileStorage' instead.",
- DeprecationWarning,
- stacklevel=2,
- )
- return FileStorage(fd, filename=filename, content_type=mimetype)
-
-
class _TestCookieHeaders(object):
"""A headers adapter for cookielib
@@ -456,21 +438,6 @@ def _add_file_from_data(self, key, value):
"""Called in the EnvironBuilder to add files from the data dict."""
if isinstance(value, tuple):
self.files.add_file(key, *value)
- elif isinstance(value, dict):
- from warnings import warn
-
- warn(
- "Passing a dict as file data is deprecated as of"
- " version 0.5 and will be removed in version 1.0. Use"
- " a tuple or 'FileStorage' object instead.",
- DeprecationWarning,
- stacklevel=2,
- )
- value = dict(value)
- mimetype = value.pop("mimetype", None)
- if mimetype is not None:
- value["content_type"] = mimetype
- self.files.add_file(key, **value)
else:
self.files.add_file(key, value)
diff --git a/src/werkzeug/useragents.py b/src/werkzeug/useragents.py
index e265e0939..9416b0d7a 100644
--- a/src/werkzeug/useragents.py
+++ b/src/werkzeug/useragents.py
@@ -12,7 +12,6 @@
:license: BSD-3-Clause
"""
import re
-import warnings
class UserAgentParser(object):
@@ -201,20 +200,3 @@ def __nonzero__(self):
def __repr__(self):
return "<%s %r/%s>" % (self.__class__.__name__, self.browser, self.version)
-
-
-# DEPRECATED
-from .wrappers import UserAgentMixin as _UserAgentMixin
-
-
-class UserAgentMixin(_UserAgentMixin):
- @property
- def user_agent(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.useragents.UserAgentMixin' should be imported"
- " from 'werkzeug.wrappers.UserAgentMixin'. This old import"
- " will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- return super(_UserAgentMixin, self).user_agent
diff --git a/src/werkzeug/utils.py b/src/werkzeug/utils.py
index 20620572c..2504380e9 100644
--- a/src/werkzeug/utils.py
+++ b/src/werkzeug/utils.py
@@ -15,7 +15,6 @@
import pkgutil
import re
import sys
-import warnings
from ._compat import iteritems
from ._compat import PY2
@@ -408,7 +407,7 @@ def secure_filename(filename):
return filename
-def escape(s, quote=None):
+def escape(s):
"""Replace special characters "&", "<", ">" and (") to HTML-safe sequences.
There is a special handling for `None` which escapes to an empty string.
@@ -423,24 +422,16 @@ def escape(s, quote=None):
return ""
elif hasattr(s, "__html__"):
return text_type(s.__html__())
- elif not isinstance(s, string_types):
+
+ if not isinstance(s, string_types):
s = text_type(s)
- if quote is not None:
- from warnings import warn
-
- warn(
- "The 'quote' parameter is no longer used as of version 0.9"
- " and will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- s = (
+
+ return (
s.replace("&", "&")
.replace("<", "<")
.replace(">", ">")
.replace('"', """)
)
- return s
def unescape(s):
@@ -755,82 +746,3 @@ def __repr__(self):
self.import_name,
self.exception,
)
-
-
-# DEPRECATED
-from .datastructures import CombinedMultiDict as _CombinedMultiDict
-from .datastructures import EnvironHeaders as _EnvironHeaders
-from .datastructures import Headers as _Headers
-from .datastructures import MultiDict as _MultiDict
-from .http import dump_cookie as _dump_cookie
-from .http import parse_cookie as _parse_cookie
-
-
-class MultiDict(_MultiDict):
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.utils.MultiDict' has moved to 'werkzeug"
- ".datastructures.MultiDict' as of version 0.5. This old"
- " import will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(MultiDict, self).__init__(*args, **kwargs)
-
-
-class CombinedMultiDict(_CombinedMultiDict):
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.utils.CombinedMultiDict' has moved to 'werkzeug"
- ".datastructures.CombinedMultiDict' as of version 0.5. This"
- " old import will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(CombinedMultiDict, self).__init__(*args, **kwargs)
-
-
-class Headers(_Headers):
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.utils.Headers' has moved to 'werkzeug"
- ".datastructures.Headers' as of version 0.5. This old"
- " import will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(Headers, self).__init__(*args, **kwargs)
-
-
-class EnvironHeaders(_EnvironHeaders):
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.utils.EnvironHeaders' has moved to 'werkzeug"
- ".datastructures.EnvironHeaders' as of version 0.5. This"
- " old import will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(EnvironHeaders, self).__init__(*args, **kwargs)
-
-
-def parse_cookie(*args, **kwargs):
- warnings.warn(
- "'werkzeug.utils.parse_cookie' as moved to 'werkzeug.http"
- ".parse_cookie' as of version 0.5. This old import will be"
- " removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- return _parse_cookie(*args, **kwargs)
-
-
-def dump_cookie(*args, **kwargs):
- warnings.warn(
- "'werkzeug.utils.dump_cookie' as moved to 'werkzeug.http"
- ".dump_cookie' as of version 0.5. This old import will be"
- " removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- return _dump_cookie(*args, **kwargs)
diff --git a/src/werkzeug/wrappers/base_request.py b/src/werkzeug/wrappers/base_request.py
index 41e8aadb3..01c476316 100644
--- a/src/werkzeug/wrappers/base_request.py
+++ b/src/werkzeug/wrappers/base_request.py
@@ -1,4 +1,3 @@
-import warnings
from functools import update_wrapper
from io import BytesIO
@@ -631,7 +630,6 @@ def remote_addr(self):
script is protected, this attribute contains the username the
user has authenticated as.""",
)
-
scheme = environ_property(
"wsgi.url_scheme",
doc="""
@@ -639,29 +637,6 @@ def remote_addr(self):
.. versionadded:: 0.7""",
)
-
- @property
- def is_xhr(self):
- """True if the request was triggered via a JavaScript XMLHttpRequest.
- This only works with libraries that support the ``X-Requested-With``
- header and set it to "XMLHttpRequest". Libraries that do that are
- prototype, jQuery and Mochikit and probably some more.
-
- .. deprecated:: 0.13
- ``X-Requested-With`` is not standard and is unreliable. You
- may be able to use :attr:`AcceptMixin.accept_mimetypes`
- instead.
- """
- warnings.warn(
- "'Request.is_xhr' is deprecated as of version 0.13 and will"
- " be removed in version 1.0. The 'X-Requested-With' header"
- " is not standard and is unreliable. You may be able to use"
- " 'accept_mimetypes' instead.",
- DeprecationWarning,
- stacklevel=2,
- )
- return self.environ.get("HTTP_X_REQUESTED_WITH", "").lower() == "xmlhttprequest"
-
is_secure = property(
lambda self: self.environ["wsgi.url_scheme"] == "https",
doc="`True` if the request is secure.",
diff --git a/src/werkzeug/wsgi.py b/src/werkzeug/wsgi.py
index f069f2d86..807b462ad 100644
--- a/src/werkzeug/wsgi.py
+++ b/src/werkzeug/wsgi.py
@@ -10,7 +10,6 @@
"""
import io
import re
-import warnings
from functools import partial
from functools import update_wrapper
from itertools import chain
@@ -999,69 +998,3 @@ def __next__(self):
def readable(self):
return True
-
-
-# DEPRECATED
-from .middleware.dispatcher import DispatcherMiddleware as _DispatcherMiddleware
-from .middleware.http_proxy import ProxyMiddleware as _ProxyMiddleware
-from .middleware.shared_data import SharedDataMiddleware as _SharedDataMiddleware
-
-
-class ProxyMiddleware(_ProxyMiddleware):
- """
- .. deprecated:: 0.15
- ``werkzeug.wsgi.ProxyMiddleware`` has moved to
- :mod:`werkzeug.middleware.http_proxy`. This import will be
- removed in 1.0.
- """
-
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.wsgi.ProxyMiddleware' has moved to 'werkzeug"
- ".middleware.http_proxy.ProxyMiddleware'. This import is"
- " deprecated as of version 0.15 and will be removed in"
- " version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(ProxyMiddleware, self).__init__(*args, **kwargs)
-
-
-class SharedDataMiddleware(_SharedDataMiddleware):
- """
- .. deprecated:: 0.15
- ``werkzeug.wsgi.SharedDataMiddleware`` has moved to
- :mod:`werkzeug.middleware.shared_data`. This import will be
- removed in 1.0.
- """
-
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.wsgi.SharedDataMiddleware' has moved to"
- " 'werkzeug.middleware.shared_data.SharedDataMiddleware'."
- " This import is deprecated as of version 0.15 and will be"
- " removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(SharedDataMiddleware, self).__init__(*args, **kwargs)
-
-
-class DispatcherMiddleware(_DispatcherMiddleware):
- """
- .. deprecated:: 0.15
- ``werkzeug.wsgi.DispatcherMiddleware`` has moved to
- :mod:`werkzeug.middleware.dispatcher`. This import will be
- removed in 1.0.
- """
-
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.wsgi.DispatcherMiddleware' has moved to"
- " 'werkzeug.middleware.dispatcher.DispatcherMiddleware'."
- " This import is deprecated as of version 0.15 and will be"
- " removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(DispatcherMiddleware, self).__init__(*args, **kwargs)
diff --git a/tests/contrib/__init__.py b/tests/contrib/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/tests/contrib/cache/conftest.py b/tests/contrib/cache/conftest.py
deleted file mode 100644
index 655f0fa63..000000000
--- a/tests/contrib/cache/conftest.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import os
-
-import pytest
-
-# build the path to the uwsgi marker file
-# when running in tox, this will be relative to the tox env
-filename = os.path.join(os.environ.get("TOX_ENVTMPDIR", ""), "test_uwsgi_failed")
-
-
-@pytest.hookimpl(tryfirst=True, hookwrapper=True)
-def pytest_runtest_makereport(item, call):
- """``uwsgi --pyrun`` doesn't pass on the exit code when ``pytest`` fails,
- so Tox thinks the tests passed. For UWSGI tests, create a file to mark what
- tests fail. The uwsgi Tox env has a command to read this file and exit
- appropriately.
- """
- outcome = yield
- report = outcome.get_result()
-
- if item.cls.__name__ != "TestUWSGICache":
- return
-
- if report.failed:
- with open(filename, "a") as f:
- f.write(item.name + "\n")
diff --git a/tests/contrib/cache/test_cache.py b/tests/contrib/cache/test_cache.py
deleted file mode 100644
index c13227e3a..000000000
--- a/tests/contrib/cache/test_cache.py
+++ /dev/null
@@ -1,342 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- tests.cache
- ~~~~~~~~~~~
-
- Tests the cache system
-
- :copyright: 2007 Pallets
- :license: BSD-3-Clause
-"""
-import errno
-
-import pytest
-
-from werkzeug._compat import text_type
-from werkzeug.contrib import cache
-
-try:
- import redis
-except ImportError:
- redis = None
-
-try:
- import pylibmc as memcache
-except ImportError:
- try:
- from google.appengine.api import memcache
- except ImportError:
- try:
- import memcache
- except ImportError:
- memcache = None
-
-pytestmark = pytest.mark.skip("werkzeug.contrib.cache moved to cachelib")
-
-
-class CacheTestsBase(object):
- _can_use_fast_sleep = True
- _guaranteed_deletes = True
-
- @pytest.fixture
- def fast_sleep(self, monkeypatch):
- if self._can_use_fast_sleep:
-
- def sleep(delta):
- orig_time = cache.time
- monkeypatch.setattr(cache, "time", lambda: orig_time() + delta)
-
- return sleep
- else:
- import time
-
- return time.sleep
-
- @pytest.fixture
- def make_cache(self):
- """Return a cache class or factory."""
- raise NotImplementedError()
-
- @pytest.fixture
- def c(self, make_cache):
- """Return a cache instance."""
- return make_cache()
-
-
-class GenericCacheTests(CacheTestsBase):
- def test_generic_get_dict(self, c):
- assert c.set("a", "a")
- assert c.set("b", "b")
- d = c.get_dict("a", "b")
- assert "a" in d
- assert "a" == d["a"]
- assert "b" in d
- assert "b" == d["b"]
-
- def test_generic_set_get(self, c):
- for i in range(3):
- assert c.set(str(i), i * i)
-
- for i in range(3):
- result = c.get(str(i))
- assert result == i * i, result
-
- def test_generic_get_set(self, c):
- assert c.set("foo", ["bar"])
- assert c.get("foo") == ["bar"]
-
- def test_generic_get_many(self, c):
- assert c.set("foo", ["bar"])
- assert c.set("spam", "eggs")
- assert c.get_many("foo", "spam") == [["bar"], "eggs"]
-
- def test_generic_set_many(self, c):
- assert c.set_many({"foo": "bar", "spam": ["eggs"]})
- assert c.get("foo") == "bar"
- assert c.get("spam") == ["eggs"]
-
- def test_generic_add(self, c):
- # sanity check that add() works like set()
- assert c.add("foo", "bar")
- assert c.get("foo") == "bar"
- assert not c.add("foo", "qux")
- assert c.get("foo") == "bar"
-
- def test_generic_delete(self, c):
- assert c.add("foo", "bar")
- assert c.get("foo") == "bar"
- assert c.delete("foo")
- assert c.get("foo") is None
-
- def test_generic_delete_many(self, c):
- assert c.add("foo", "bar")
- assert c.add("spam", "eggs")
- assert c.delete_many("foo", "spam")
- assert c.get("foo") is None
- assert c.get("spam") is None
-
- def test_generic_inc_dec(self, c):
- assert c.set("foo", 1)
- assert c.inc("foo") == c.get("foo") == 2
- assert c.dec("foo") == c.get("foo") == 1
- assert c.delete("foo")
-
- def test_generic_true_false(self, c):
- assert c.set("foo", True)
- assert c.get("foo") in (True, 1)
- assert c.set("bar", False)
- assert c.get("bar") in (False, 0)
-
- def test_generic_timeout(self, c, fast_sleep):
- c.set("foo", "bar", 0)
- assert c.get("foo") == "bar"
- c.set("baz", "qux", 1)
- assert c.get("baz") == "qux"
- fast_sleep(3)
- # timeout of zero means no timeout
- assert c.get("foo") == "bar"
- if self._guaranteed_deletes:
- assert c.get("baz") is None
-
- def test_generic_has(self, c):
- assert c.has("foo") in (False, 0)
- assert c.has("spam") in (False, 0)
- assert c.set("foo", "bar")
- assert c.has("foo") in (True, 1)
- assert c.has("spam") in (False, 0)
- c.delete("foo")
- assert c.has("foo") in (False, 0)
- assert c.has("spam") in (False, 0)
-
-
-class TestSimpleCache(GenericCacheTests):
- @pytest.fixture
- def make_cache(self):
- return cache.SimpleCache
-
- def test_purge(self):
- c = cache.SimpleCache(threshold=2)
- c.set("a", "a")
- c.set("b", "b")
- c.set("c", "c")
- c.set("d", "d")
- # Cache purges old items *before* it sets new ones.
- assert len(c._cache) == 3
-
-
-class TestFileSystemCache(GenericCacheTests):
- @pytest.fixture
- def make_cache(self, tmpdir):
- return lambda **kw: cache.FileSystemCache(cache_dir=str(tmpdir), **kw)
-
- def test_filesystemcache_prune(self, make_cache):
- THRESHOLD = 13
- c = make_cache(threshold=THRESHOLD)
-
- for i in range(2 * THRESHOLD):
- assert c.set(str(i), i)
-
- nof_cache_files = c.get(c._fs_count_file)
- assert nof_cache_files <= THRESHOLD
-
- def test_filesystemcache_clear(self, c):
- assert c.set("foo", "bar")
- nof_cache_files = c.get(c._fs_count_file)
- assert nof_cache_files == 1
- assert c.clear()
- nof_cache_files = c.get(c._fs_count_file)
- assert nof_cache_files == 0
- cache_files = c._list_dir()
- assert len(cache_files) == 0
-
- def test_no_threshold(self, make_cache):
- THRESHOLD = 0
- c = make_cache(threshold=THRESHOLD)
-
- for i in range(10):
- assert c.set(str(i), i)
-
- cache_files = c._list_dir()
- assert len(cache_files) == 10
-
- # File count is not maintained with threshold = 0
- nof_cache_files = c.get(c._fs_count_file)
- assert nof_cache_files is None
-
- def test_count_file_accuracy(self, c):
- assert c.set("foo", "bar")
- assert c.set("moo", "car")
- c.add("moo", "tar")
- assert c.get(c._fs_count_file) == 2
- assert c.add("too", "far")
- assert c.get(c._fs_count_file) == 3
- assert c.delete("moo")
- assert c.get(c._fs_count_file) == 2
- assert c.clear()
- assert c.get(c._fs_count_file) == 0
-
-
-# don't use pytest.mark.skipif on subclasses
-# https://bitbucket.org/hpk42/pytest/issue/568
-# skip happens in requirements fixture instead
-class TestRedisCache(GenericCacheTests):
- _can_use_fast_sleep = False
-
- @pytest.fixture(scope="class", autouse=True)
- def requirements(self, xprocess):
- if redis is None:
- pytest.skip('Python package "redis" is not installed.')
-
- def prepare(cwd):
- return "[Rr]eady to accept connections", ["redis-server"]
-
- try:
- xprocess.ensure("redis_server", prepare)
- except IOError as e:
- # xprocess raises FileNotFoundError
- if e.errno == errno.ENOENT:
- pytest.skip("Redis is not installed.")
- else:
- raise
-
- yield
- xprocess.getinfo("redis_server").terminate()
-
- @pytest.fixture(params=(None, False, True))
- def make_cache(self, request):
- if request.param is None:
- host = "localhost"
- elif request.param:
- host = redis.StrictRedis()
- else:
- host = redis.Redis()
-
- c = cache.RedisCache(host=host, key_prefix="werkzeug-test-case:")
- yield lambda: c
- c.clear()
-
- def test_compat(self, c):
- assert c._client.set(c.key_prefix + "foo", "Awesome")
- assert c.get("foo") == b"Awesome"
- assert c._client.set(c.key_prefix + "foo", "42")
- assert c.get("foo") == 42
-
- def test_empty_host(self):
- with pytest.raises(ValueError) as exc_info:
- cache.RedisCache(host=None)
- assert text_type(exc_info.value) == "RedisCache host parameter may not be None"
-
-
-class TestMemcachedCache(GenericCacheTests):
- _can_use_fast_sleep = False
- _guaranteed_deletes = False
-
- @pytest.fixture(scope="class", autouse=True)
- def requirements(self, xprocess):
- if memcache is None:
- pytest.skip(
- "Python package for memcache is not installed. Need one of "
- '"pylibmc", "google.appengine", or "memcache".'
- )
-
- def prepare(cwd):
- return "", ["memcached"]
-
- try:
- xprocess.ensure("memcached", prepare)
- except IOError as e:
- # xprocess raises FileNotFoundError
- if e.errno == errno.ENOENT:
- pytest.skip("Memcached is not installed.")
- else:
- raise
-
- yield
- xprocess.getinfo("memcached").terminate()
-
- @pytest.fixture
- def make_cache(self):
- c = cache.MemcachedCache(key_prefix="werkzeug-test-case:")
- yield lambda: c
- c.clear()
-
- def test_compat(self, c):
- assert c._client.set(c.key_prefix + "foo", "bar")
- assert c.get("foo") == "bar"
-
- def test_huge_timeouts(self, c):
- # Timeouts greater than epoch are interpreted as POSIX timestamps
- # (i.e. not relative to now, but relative to epoch)
- epoch = 2592000
- c.set("foo", "bar", epoch + 100)
- assert c.get("foo") == "bar"
-
-
-class TestUWSGICache(GenericCacheTests):
- _can_use_fast_sleep = False
- _guaranteed_deletes = False
-
- @pytest.fixture(scope="class", autouse=True)
- def requirements(self):
- try:
- import uwsgi # NOQA
- except ImportError:
- pytest.skip(
- 'Python "uwsgi" package is only avaialable when running '
- "inside uWSGI."
- )
-
- @pytest.fixture
- def make_cache(self):
- c = cache.UWSGICache(cache="werkzeugtest")
- yield lambda: c
- c.clear()
-
-
-class TestNullCache(CacheTestsBase):
- @pytest.fixture(scope="class", autouse=True)
- def make_cache(self):
- return cache.NullCache
-
- def test_has(self, c):
- assert not c.has("foo")
diff --git a/tests/contrib/test_atom.py b/tests/contrib/test_atom.py
deleted file mode 100644
index 5a10556ec..000000000
--- a/tests/contrib/test_atom.py
+++ /dev/null
@@ -1,120 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- tests.atom
- ~~~~~~~~~~
-
- Tests the cache system
-
- :copyright: 2007 Pallets
- :license: BSD-3-Clause
-"""
-import datetime
-
-import pytest
-
-from werkzeug.contrib.atom import AtomFeed
-from werkzeug.contrib.atom import FeedEntry
-from werkzeug.contrib.atom import format_iso8601
-
-
-class TestAtomFeed(object):
- """
- Testcase for the `AtomFeed` class
- """
-
- def test_atom_no_args(self):
- with pytest.raises(ValueError):
- AtomFeed()
-
- def test_atom_title_no_id(self):
- with pytest.raises(ValueError):
- AtomFeed(title="test_title")
-
- def test_atom_add_one(self):
- a = AtomFeed(title="test_title", id=1)
- f = FeedEntry(title="test_title", id=1, updated=datetime.datetime.now())
- assert len(a.entries) == 0
- a.add(f)
- assert len(a.entries) == 1
-
- def test_atom_add_one_kwargs(self):
- a = AtomFeed(title="test_title", id=1)
- assert len(a.entries) == 0
- a.add(title="test_title", id=1, updated=datetime.datetime.now())
- assert len(a.entries) == 1
- assert isinstance(a.entries[0], FeedEntry)
-
- def test_atom_to_str(self):
- updated_time = datetime.datetime.now()
- expected_repr = """
-
-
- test_title
- 1
- %s
- Werkzeug
-
- """ % format_iso8601(
- updated_time
- )
- a = AtomFeed(title="test_title", id=1, updated=updated_time)
- assert str(a).strip().replace(" ", "") == expected_repr.strip().replace(" ", "")
-
-
-class TestFeedEntry(object):
- """
- Test case for the `FeedEntry` object
- """
-
- def test_feed_entry_no_args(self):
- with pytest.raises(ValueError):
- FeedEntry()
-
- def test_feed_entry_no_id(self):
- with pytest.raises(ValueError):
- FeedEntry(title="test_title")
-
- def test_feed_entry_no_updated(self):
- with pytest.raises(ValueError):
- FeedEntry(title="test_title", id=1)
-
- def test_feed_entry_to_str(self):
- updated_time = datetime.datetime.now()
- expected_feed_entry_str = """
-
- test_title
- 1
- %s
-
- """ % format_iso8601(
- updated_time
- )
-
- f = FeedEntry(title="test_title", id=1, updated=updated_time)
- assert str(f).strip().replace(
- " ", ""
- ) == expected_feed_entry_str.strip().replace(" ", "")
-
-
-def test_format_iso8601():
- # naive datetime should be treated as utc
- dt = datetime.datetime(2014, 8, 31, 2, 5, 6)
- assert format_iso8601(dt) == "2014-08-31T02:05:06Z"
-
- # tz-aware datetime
- dt = datetime.datetime(2014, 8, 31, 11, 5, 6, tzinfo=KST())
- assert format_iso8601(dt) == "2014-08-31T11:05:06+09:00"
-
-
-class KST(datetime.tzinfo):
-
- """KST implementation for test_format_iso8601()."""
-
- def utcoffset(self, dt):
- return datetime.timedelta(hours=9)
-
- def tzname(self, dt):
- return "KST"
-
- def dst(self, dt):
- return datetime.timedelta(0)
diff --git a/tests/contrib/test_fixers.py b/tests/contrib/test_fixers.py
deleted file mode 100644
index 2777f6117..000000000
--- a/tests/contrib/test_fixers.py
+++ /dev/null
@@ -1,134 +0,0 @@
-from werkzeug.contrib import fixers
-from werkzeug.datastructures import ResponseCacheControl
-from werkzeug.http import parse_cache_control_header
-from werkzeug.test import Client
-from werkzeug.test import create_environ
-from werkzeug.wrappers import Request
-from werkzeug.wrappers import Response
-
-
-@Request.application
-def path_check_app(request):
- return Response(
- "PATH_INFO: %s\nSCRIPT_NAME: %s"
- % (request.environ.get("PATH_INFO", ""), request.environ.get("SCRIPT_NAME", ""))
- )
-
-
-class TestServerFixer(object):
- def test_cgi_root_fix(self):
- app = fixers.CGIRootFix(path_check_app)
- response = Response.from_app(
- app, dict(create_environ(), SCRIPT_NAME="/foo", PATH_INFO="/bar")
- )
- assert response.get_data() == b"PATH_INFO: /bar\nSCRIPT_NAME: "
-
- def test_cgi_root_fix_custom_app_root(self):
- app = fixers.CGIRootFix(path_check_app, app_root="/baz/")
- response = Response.from_app(
- app, dict(create_environ(), SCRIPT_NAME="/foo", PATH_INFO="/bar")
- )
- assert response.get_data() == b"PATH_INFO: /bar\nSCRIPT_NAME: baz"
-
- def test_path_info_from_request_uri_fix(self):
- app = fixers.PathInfoFromRequestUriFix(path_check_app)
- for key in "REQUEST_URI", "REQUEST_URL", "UNENCODED_URL":
- env = dict(create_environ(), SCRIPT_NAME="/test", PATH_INFO="/?????")
- env[key] = "/test/foo%25bar?drop=this"
- response = Response.from_app(app, env)
- assert response.get_data() == b"PATH_INFO: /foo%bar\nSCRIPT_NAME: /test"
-
- def test_header_rewriter_fix(self):
- @Request.application
- def application(request):
- return Response("", headers=[("X-Foo", "bar")])
-
- application = fixers.HeaderRewriterFix(
- application, ("X-Foo",), (("X-Bar", "42"),)
- )
- response = Response.from_app(application, create_environ())
- assert response.headers["Content-Type"] == "text/plain; charset=utf-8"
- assert "X-Foo" not in response.headers
- assert response.headers["X-Bar"] == "42"
-
-
-class TestBrowserFixer(object):
- def test_ie_fixes(self):
- @fixers.InternetExplorerFix
- @Request.application
- def application(request):
- response = Response("binary data here", mimetype="application/vnd.ms-excel")
- response.headers["Vary"] = "Cookie"
- response.headers["Content-Disposition"] = "attachment; filename=foo.xls"
- return response
-
- c = Client(application, Response)
- response = c.get(
- "/",
- headers=[
- ("User-Agent", "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)")
- ],
- )
-
- # IE gets no vary
- assert response.get_data() == b"binary data here"
- assert "vary" not in response.headers
- assert response.headers["content-disposition"] == "attachment; filename=foo.xls"
- assert response.headers["content-type"] == "application/vnd.ms-excel"
-
- # other browsers do
- c = Client(application, Response)
- response = c.get("/")
- assert response.get_data() == b"binary data here"
- assert "vary" in response.headers
-
- cc = ResponseCacheControl()
- cc.no_cache = True
-
- @fixers.InternetExplorerFix
- @Request.application
- def application(request):
- response = Response("binary data here", mimetype="application/vnd.ms-excel")
- response.headers["Pragma"] = ", ".join(pragma)
- response.headers["Cache-Control"] = cc.to_header()
- response.headers["Content-Disposition"] = "attachment; filename=foo.xls"
- return response
-
- # IE has no pragma or cache control
- pragma = ("no-cache",)
- c = Client(application, Response)
- response = c.get(
- "/",
- headers=[
- ("User-Agent", "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)")
- ],
- )
- assert response.get_data() == b"binary data here"
- assert "pragma" not in response.headers
- assert "cache-control" not in response.headers
- assert response.headers["content-disposition"] == "attachment; filename=foo.xls"
-
- # IE has simplified pragma
- pragma = ("no-cache", "x-foo")
- cc.proxy_revalidate = True
- response = c.get(
- "/",
- headers=[
- ("User-Agent", "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)")
- ],
- )
- assert response.get_data() == b"binary data here"
- assert response.headers["pragma"] == "x-foo"
- assert response.headers["cache-control"] == "proxy-revalidate"
- assert response.headers["content-disposition"] == "attachment; filename=foo.xls"
-
- # regular browsers get everything
- response = c.get("/")
- assert response.get_data() == b"binary data here"
- assert response.headers["pragma"] == "no-cache, x-foo"
- cc = parse_cache_control_header(
- response.headers["cache-control"], cls=ResponseCacheControl
- )
- assert cc.no_cache
- assert cc.proxy_revalidate
- assert response.headers["content-disposition"] == "attachment; filename=foo.xls"
diff --git a/tests/contrib/test_iterio.py b/tests/contrib/test_iterio.py
deleted file mode 100644
index e2c481309..000000000
--- a/tests/contrib/test_iterio.py
+++ /dev/null
@@ -1,186 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- tests.iterio
- ~~~~~~~~~~~~
-
- Tests the iterio object.
-
- :copyright: 2007 Pallets
- :license: BSD-3-Clause
-"""
-import pytest
-
-from .. import strict_eq
-from werkzeug.contrib.iterio import greenlet
-from werkzeug.contrib.iterio import IterIO
-
-
-class TestIterO(object):
- def test_basic_native(self):
- io = IterIO(["Hello", "World", "1", "2", "3"])
- io.seek(0)
- assert io.tell() == 0
- assert io.read(2) == "He"
- assert io.tell() == 2
- assert io.read(3) == "llo"
- assert io.tell() == 5
- io.seek(0)
- assert io.read(5) == "Hello"
- assert io.tell() == 5
- assert io._buf == "Hello"
- assert io.read() == "World123"
- assert io.tell() == 13
- io.close()
- assert io.closed
-
- io = IterIO(["Hello\n", "World!"])
- assert io.readline() == "Hello\n"
- assert io._buf == "Hello\n"
- assert io.read() == "World!"
- assert io._buf == "Hello\nWorld!"
- assert io.tell() == 12
- io.seek(0)
- assert io.readlines() == ["Hello\n", "World!"]
-
- io = IterIO(["Line one\nLine ", "two\nLine three"])
- assert list(io) == ["Line one\n", "Line two\n", "Line three"]
- io = IterIO(iter("Line one\nLine two\nLine three"))
- assert list(io) == ["Line one\n", "Line two\n", "Line three"]
- io = IterIO(["Line one\nL", "ine", " two", "\nLine three"])
- assert list(io) == ["Line one\n", "Line two\n", "Line three"]
-
- io = IterIO(["foo\n", "bar"])
- io.seek(-4, 2)
- assert io.read(4) == "\nbar"
-
- pytest.raises(IOError, io.seek, 2, 100)
- io.close()
- pytest.raises(ValueError, io.read)
-
- def test_basic_bytes(self):
- io = IterIO([b"Hello", b"World", b"1", b"2", b"3"])
- assert io.tell() == 0
- assert io.read(2) == b"He"
- assert io.tell() == 2
- assert io.read(3) == b"llo"
- assert io.tell() == 5
- io.seek(0)
- assert io.read(5) == b"Hello"
- assert io.tell() == 5
- assert io._buf == b"Hello"
- assert io.read() == b"World123"
- assert io.tell() == 13
- io.close()
- assert io.closed
-
- io = IterIO([b"Hello\n", b"World!"])
- assert io.readline() == b"Hello\n"
- assert io._buf == b"Hello\n"
- assert io.read() == b"World!"
- assert io._buf == b"Hello\nWorld!"
- assert io.tell() == 12
- io.seek(0)
- assert io.readlines() == [b"Hello\n", b"World!"]
-
- io = IterIO([b"foo\n", b"bar"])
- io.seek(-4, 2)
- assert io.read(4) == b"\nbar"
-
- pytest.raises(IOError, io.seek, 2, 100)
- io.close()
- pytest.raises(ValueError, io.read)
-
- def test_basic_unicode(self):
- io = IterIO([u"Hello", u"World", u"1", u"2", u"3"])
- assert io.tell() == 0
- assert io.read(2) == u"He"
- assert io.tell() == 2
- assert io.read(3) == u"llo"
- assert io.tell() == 5
- io.seek(0)
- assert io.read(5) == u"Hello"
- assert io.tell() == 5
- assert io._buf == u"Hello"
- assert io.read() == u"World123"
- assert io.tell() == 13
- io.close()
- assert io.closed
-
- io = IterIO([u"Hello\n", u"World!"])
- assert io.readline() == u"Hello\n"
- assert io._buf == u"Hello\n"
- assert io.read() == u"World!"
- assert io._buf == u"Hello\nWorld!"
- assert io.tell() == 12
- io.seek(0)
- assert io.readlines() == [u"Hello\n", u"World!"]
-
- io = IterIO([u"foo\n", u"bar"])
- io.seek(-4, 2)
- assert io.read(4) == u"\nbar"
-
- pytest.raises(IOError, io.seek, 2, 100)
- io.close()
- pytest.raises(ValueError, io.read)
-
- def test_sentinel_cases(self):
- io = IterIO([])
- strict_eq(io.read(), "")
- io = IterIO([], b"")
- strict_eq(io.read(), b"")
- io = IterIO([], u"")
- strict_eq(io.read(), u"")
-
- io = IterIO([])
- strict_eq(io.read(), "")
- io = IterIO([b""])
- strict_eq(io.read(), b"")
- io = IterIO([u""])
- strict_eq(io.read(), u"")
-
- io = IterIO([])
- strict_eq(io.readline(), "")
- io = IterIO([], b"")
- strict_eq(io.readline(), b"")
- io = IterIO([], u"")
- strict_eq(io.readline(), u"")
-
- io = IterIO([])
- strict_eq(io.readline(), "")
- io = IterIO([b""])
- strict_eq(io.readline(), b"")
- io = IterIO([u""])
- strict_eq(io.readline(), u"")
-
-
-@pytest.mark.skipif(greenlet is None, reason="Greenlet is not installed.")
-class TestIterI(object):
- def test_basic(self):
- def producer(out):
- out.write("1\n")
- out.write("2\n")
- out.flush()
- out.write("3\n")
-
- iterable = IterIO(producer)
- assert next(iterable) == "1\n2\n"
- assert next(iterable) == "3\n"
- pytest.raises(StopIteration, next, iterable)
-
- def test_sentinel_cases(self):
- def producer_dummy_flush(out):
- out.flush()
-
- iterable = IterIO(producer_dummy_flush)
- strict_eq(next(iterable), "")
-
- def producer_empty(out):
- pass
-
- iterable = IterIO(producer_empty)
- pytest.raises(StopIteration, next, iterable)
-
- iterable = IterIO(producer_dummy_flush, b"")
- strict_eq(next(iterable), b"")
- iterable = IterIO(producer_dummy_flush, u"")
- strict_eq(next(iterable), u"")
diff --git a/tests/contrib/test_securecookie.py b/tests/contrib/test_securecookie.py
deleted file mode 100644
index 7231ac889..000000000
--- a/tests/contrib/test_securecookie.py
+++ /dev/null
@@ -1,88 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- tests.securecookie
- ~~~~~~~~~~~~~~~~~~
-
- Tests the secure cookie.
-
- :copyright: 2007 Pallets
- :license: BSD-3-Clause
-"""
-import json
-
-import pytest
-
-from werkzeug._compat import to_native
-from werkzeug.contrib.securecookie import SecureCookie
-from werkzeug.utils import parse_cookie
-from werkzeug.wrappers import Request
-from werkzeug.wrappers import Response
-
-
-def test_basic_support():
- c = SecureCookie(secret_key=b"foo")
- assert c.new
- assert not c.modified
- assert not c.should_save
- c["x"] = 42
- assert c.modified
- assert c.should_save
- s = c.serialize()
-
- c2 = SecureCookie.unserialize(s, b"foo")
- assert c is not c2
- assert not c2.new
- assert not c2.modified
- assert not c2.should_save
- assert c2 == c
-
- c3 = SecureCookie.unserialize(s, b"wrong foo")
- assert not c3.modified
- assert not c3.new
- assert c3 == {}
-
- c4 = SecureCookie({"x": 42}, "foo")
- c4_serialized = c4.serialize()
- assert SecureCookie.unserialize(c4_serialized, "foo") == c4
-
-
-def test_wrapper_support():
- req = Request.from_values()
- resp = Response()
- c = SecureCookie.load_cookie(req, secret_key=b"foo")
- assert c.new
- c["foo"] = 42
- assert c.secret_key == b"foo"
- c.save_cookie(resp)
-
- req = Request.from_values(
- headers={
- "Cookie": 'session="%s"'
- % parse_cookie(resp.headers["set-cookie"])["session"]
- }
- )
- c2 = SecureCookie.load_cookie(req, secret_key=b"foo")
- assert not c2.new
- assert c2 == c
-
-
-def test_pickle_deprecated():
- with pytest.warns(UserWarning):
- SecureCookie({"foo": "bar"}, "secret").serialize()
-
-
-def test_json():
- class JSONCompat(object):
- dumps = staticmethod(json.dumps)
-
- @staticmethod
- def loads(s):
- # json on Python < 3.6 fails on bytes
- return json.loads(to_native(s, "utf8"))
-
- class JSONSecureCookie(SecureCookie):
- serialization_method = JSONCompat
-
- secure = JSONSecureCookie({"foo": "bar"}, "secret").serialize()
- data = JSONSecureCookie.unserialize(secure, "secret")
- assert data == {"foo": "bar"}
diff --git a/tests/contrib/test_sessions.py b/tests/contrib/test_sessions.py
deleted file mode 100644
index cab0ae567..000000000
--- a/tests/contrib/test_sessions.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- tests.sessions
- ~~~~~~~~~~~~~~
-
- Added tests for the sessions.
-
- :copyright: 2007 Pallets
- :license: BSD-3-Clause
-"""
-import os
-from tempfile import gettempdir
-
-from werkzeug.contrib.sessions import FilesystemSessionStore
-
-
-def test_default_tempdir():
- store = FilesystemSessionStore()
- assert store.path == gettempdir()
-
-
-def test_basic_fs_sessions(tmpdir):
- store = FilesystemSessionStore(str(tmpdir))
- x = store.new()
- assert x.new
- assert not x.modified
- x["foo"] = [1, 2, 3]
- assert x.modified
- store.save(x)
-
- x2 = store.get(x.sid)
- assert not x2.new
- assert not x2.modified
- assert x2 is not x
- assert x2 == x
- x2["test"] = 3
- assert x2.modified
- assert not x2.new
- store.save(x2)
-
- x = store.get(x.sid)
- store.delete(x)
- x2 = store.get(x.sid)
- # the session is not new when it was used previously.
- assert not x2.new
-
-
-def test_non_urandom(tmpdir):
- urandom = os.urandom
- del os.urandom
- try:
- store = FilesystemSessionStore(str(tmpdir))
- store.new()
- finally:
- os.urandom = urandom
-
-
-def test_renewing_fs_session(tmpdir):
- store = FilesystemSessionStore(str(tmpdir), renew_missing=True)
- x = store.new()
- store.save(x)
- store.delete(x)
- x2 = store.get(x.sid)
- assert x2.new
-
-
-def test_fs_session_lising(tmpdir):
- store = FilesystemSessionStore(str(tmpdir), renew_missing=True)
- sessions = set()
- for _ in range(10):
- sess = store.new()
- store.save(sess)
- sessions.add(sess.sid)
-
- listed_sessions = set(store.list())
- assert sessions == listed_sessions
diff --git a/tests/contrib/test_wrappers.py b/tests/contrib/test_wrappers.py
deleted file mode 100644
index fb49337f7..000000000
--- a/tests/contrib/test_wrappers.py
+++ /dev/null
@@ -1,88 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- tests.contrib.wrappers
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Added tests for the sessions.
-
- :copyright: 2007 Pallets
- :license: BSD-3-Clause
-"""
-from werkzeug import routing
-from werkzeug.contrib import wrappers
-from werkzeug.wrappers import Request
-from werkzeug.wrappers import Response
-
-
-def test_reverse_slash_behavior():
- class MyRequest(wrappers.ReverseSlashBehaviorRequestMixin, Request):
- pass
-
- req = MyRequest.from_values("/foo/bar", "http://example.com/test")
- assert req.url == "http://example.com/test/foo/bar"
- assert req.path == "foo/bar"
- assert req.script_root == "/test/"
-
- # make sure the routing system works with the slashes in
- # reverse order as well.
- map = routing.Map([routing.Rule("/foo/bar", endpoint="foo")])
- adapter = map.bind_to_environ(req.environ)
- assert adapter.match() == ("foo", {})
- adapter = map.bind(req.host, req.script_root)
- assert adapter.match(req.path) == ("foo", {})
-
-
-def test_dynamic_charset_request_mixin():
- class MyRequest(wrappers.DynamicCharsetRequestMixin, Request):
- pass
-
- env = {"CONTENT_TYPE": "text/html"}
- req = MyRequest(env)
- assert req.charset == "latin1"
-
- env = {"CONTENT_TYPE": "text/html; charset=utf-8"}
- req = MyRequest(env)
- assert req.charset == "utf-8"
-
- env = {"CONTENT_TYPE": "application/octet-stream"}
- req = MyRequest(env)
- assert req.charset == "latin1"
- assert req.url_charset == "latin1"
-
- MyRequest.url_charset = "utf-8"
- env = {"CONTENT_TYPE": "application/octet-stream"}
- req = MyRequest(env)
- assert req.charset == "latin1"
- assert req.url_charset == "utf-8"
-
- def return_ascii(x):
- return "ascii"
-
- env = {"CONTENT_TYPE": "text/plain; charset=x-weird-charset"}
- req = MyRequest(env)
- req.unknown_charset = return_ascii
- assert req.charset == "ascii"
- assert req.url_charset == "utf-8"
-
-
-def test_dynamic_charset_response_mixin():
- class MyResponse(wrappers.DynamicCharsetResponseMixin, Response):
- default_charset = "utf-7"
-
- resp = MyResponse(mimetype="text/html")
- assert resp.charset == "utf-7"
- resp.charset = "utf-8"
- assert resp.charset == "utf-8"
- assert resp.mimetype == "text/html"
- assert resp.mimetype_params == {"charset": "utf-8"}
- resp.mimetype_params["charset"] = "iso-8859-15"
- assert resp.charset == "iso-8859-15"
- resp.set_data(u"Hällo Wörld")
- assert b"".join(resp.iter_encoded()) == u"Hällo Wörld".encode("iso-8859-15")
- del resp.headers["content-type"]
- try:
- resp.charset = "utf-8"
- except TypeError:
- pass
- else:
- assert False, "expected type error on charset setting without ct"
diff --git a/tests/hypothesis/__init__.py b/tests/hypothesis/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/tests/hypothesis/test_urls.py b/tests/hypothesis/test_urls.py
deleted file mode 100644
index 61829b3c3..000000000
--- a/tests/hypothesis/test_urls.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import hypothesis
-from hypothesis.strategies import dictionaries
-from hypothesis.strategies import integers
-from hypothesis.strategies import lists
-from hypothesis.strategies import text
-
-from werkzeug import urls
-from werkzeug.datastructures import OrderedMultiDict
-
-
-@hypothesis.given(text())
-def test_quote_unquote_text(t):
- assert t == urls.url_unquote(urls.url_quote(t))
-
-
-@hypothesis.given(dictionaries(text(), text()))
-def test_url_encoding_dict_str_str(d):
- assert OrderedMultiDict(d) == urls.url_decode(urls.url_encode(d))
-
-
-@hypothesis.given(dictionaries(text(), lists(elements=text())))
-def test_url_encoding_dict_str_list(d):
- assert OrderedMultiDict(d) == urls.url_decode(urls.url_encode(d))
-
-
-@hypothesis.given(dictionaries(text(), integers()))
-def test_url_encoding_dict_str_int(d):
- assert OrderedMultiDict({k: str(v) for k, v in d.items()}) == urls.url_decode(
- urls.url_encode(d)
- )
-
-
-@hypothesis.given(text(), text())
-def test_multidict_encode_decode_text(t1, t2):
- d = OrderedMultiDict()
- d.add(t1, t2)
- assert d == urls.url_decode(urls.url_encode(d))
diff --git a/tests/middleware/test_proxy_fix.py b/tests/middleware/test_proxy_fix.py
index f13d10f4b..1dd39971c 100644
--- a/tests/middleware/test_proxy_fix.py
+++ b/tests/middleware/test_proxy_fix.py
@@ -173,14 +173,3 @@ def app(request):
response = Response.from_app(redirect_app, environ)
location = response.headers["Location"]
assert location == url_root + "parrot"
-
-
-def test_proxy_fix_deprecations():
- app = pytest.deprecated_call(ProxyFix, None, 2)
- assert app.x_for == 2
-
- with pytest.deprecated_call():
- assert app.num_proxies == 2
-
- with pytest.deprecated_call():
- assert app.get_remote_addr(["spam", "eggs"]) == "spam"
diff --git a/tests/test_compat.py b/tests/test_compat.py
deleted file mode 100644
index 98851ba28..000000000
--- a/tests/test_compat.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# -*- coding: utf-8 -*-
-# flake8: noqa
-"""
- tests.compat
- ~~~~~~~~~~~~
-
- Ensure that old stuff does not break on update.
-
- :copyright: 2007 Pallets
- :license: BSD-3-Clause
-"""
-from werkzeug.test import create_environ
-from werkzeug.wrappers import Response
-
-
-def test_old_imports():
- from werkzeug.utils import (
- Headers,
- MultiDict,
- CombinedMultiDict,
- Headers,
- EnvironHeaders,
- )
- from werkzeug.http import (
- Accept,
- MIMEAccept,
- CharsetAccept,
- LanguageAccept,
- ETags,
- HeaderSet,
- WWWAuthenticate,
- Authorization,
- )
-
-
-def test_exposed_werkzeug_mod():
- import werkzeug
-
- for key in werkzeug.__all__:
- getattr(werkzeug, key)
diff --git a/tests/test_wrappers.py b/tests/test_wrappers.py
index 511d66de6..b4a865677 100644
--- a/tests/test_wrappers.py
+++ b/tests/test_wrappers.py
@@ -1374,13 +1374,6 @@ def test_request_method_case_sensitivity():
assert req.method == "GET"
-def test_is_xhr_warning():
- req = wrappers.Request.from_values()
-
- with pytest.warns(DeprecationWarning):
- req.is_xhr
-
-
def test_write_length():
response = wrappers.Response()
length = response.stream.write(b"bar")
From c6dfc4880328fe9c4558d2b2e2325761f4e17f58 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Tue, 19 Mar 2019 12:38:06 -0700
Subject: [PATCH 003/733] drop support for Python 3.4
---
.travis.yml | 1 -
CHANGES.rst | 1 +
CONTRIBUTING.rst | 5 ++---
docs/installation.rst | 2 +-
setup.py | 1 -
tox.ini | 2 +-
6 files changed, 5 insertions(+), 7 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index a62f11da6..5677b16f5 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -5,7 +5,6 @@ python:
- "3.7"
- "3.6"
- "3.5"
- - "3.4"
- "2.7"
- "nightly"
- "pypy3.5-6.0"
diff --git a/CHANGES.rst b/CHANGES.rst
index fe4d67011..e79d34bb1 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -5,6 +5,7 @@ Version 1.0.0
Unreleased
+- Drop support for Python 3.4. (:issue:`1478`)
- Remove code that issued deprecation warnings in version 0.15.
(:issue:`1477`)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index cf88893b7..1d89ae60b 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -98,9 +98,8 @@ relevant depends on which part of Werkzeug you're working on. Travis-CI
will run the full suite when you submit your pull request.
The full test suite takes a long time to run because it tests multiple
-combinations of Python and dependencies. You need to have Python 2.7,
-3.4, 3.5, 3.6, and PyPy 2.7, as well as Redis and memcached installed to
-run all of the environments. Then run::
+combinations of Python and dependencies. You need to have all supported
+CPython and PyPy versions installed to run all of the environments. ::
tox
diff --git a/docs/installation.rst b/docs/installation.rst
index 583accf77..2d4acbabd 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -8,7 +8,7 @@ Python Version
--------------
We recommend using the latest version of Python 3. Werkzeug supports
-Python 3.4 and newer and Python 2.7.
+Python 3.5 and newer and Python 2.7.
Dependencies
diff --git a/setup.py b/setup.py
index 47a655bd5..02b460301 100644
--- a/setup.py
+++ b/setup.py
@@ -36,7 +36,6 @@
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
diff --git a/tox.ini b/tox.ini
index 64a6ec57d..e03afad69 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,6 +1,6 @@
[tox]
envlist =
- py{37,36,35,34,27,py3,py}
+ py{37,36,35,27,py3,py}
stylecheck
docs-html
coverage-report
From 3dfd29269e408db5d9a7361112599c835e0c1d84 Mon Sep 17 00:00:00 2001
From: David Baumgold
Date: Fri, 15 Mar 2019 10:32:24 +0100
Subject: [PATCH 004/733] Add utils.invalidate_cached_property()
---
CHANGES.rst | 2 ++
docs/utils.rst | 2 ++
src/werkzeug/utils.py | 25 +++++++++++++++++++++++++
tests/test_utils.py | 26 ++++++++++++++++++++++++++
4 files changed, 55 insertions(+)
diff --git a/CHANGES.rst b/CHANGES.rst
index e79d34bb1..ad52ffcde 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -8,6 +8,8 @@ Unreleased
- Drop support for Python 3.4. (:issue:`1478`)
- Remove code that issued deprecation warnings in version 0.15.
(:issue:`1477`)
+- Added ``utils.invalidate_cached_property()`` to invalidate cached
+ properties. (:pr:`1474`)
Version 0.15.0
diff --git a/docs/utils.rst b/docs/utils.rst
index 689c48821..65072fb49 100644
--- a/docs/utils.rst
+++ b/docs/utils.rst
@@ -23,6 +23,8 @@ General Helpers
.. autoclass:: cached_property
:members:
+.. autofunction:: invalidate_cached_property
+
.. autoclass:: environ_property
.. autoclass:: header_property
diff --git a/src/werkzeug/utils.py b/src/werkzeug/utils.py
index 2504380e9..9a002c6fe 100644
--- a/src/werkzeug/utils.py
+++ b/src/werkzeug/utils.py
@@ -92,6 +92,31 @@ def __get__(self, obj, type=None):
return value
+def invalidate_cached_property(obj, name):
+ """Invalidates the cache for a :class:`cached_property`:
+
+ >>> class Test(object):
+ ... @cached_property
+ ... def magic_number(self):
+ ... print("recalculating...")
+ ... return 42
+ ...
+ >>> var = Test()
+ >>> var.magic_number
+ recalculating...
+ 42
+ >>> var.magic_number
+ 42
+ >>> invalidate_cached_property(var, "magic_number")
+ >>> var.magic_number
+ recalculating...
+ 42
+
+ You must pass the name of the cached property as the second argument.
+ """
+ obj.__dict__[name] = _missing
+
+
class environ_property(_DictAccessorProperty):
"""Maps request attributes to environment variables. This works not only
for the Werzeug request object, but also any other class with an
diff --git a/tests/test_utils.py b/tests/test_utils.py
index f288edead..a1c3f874e 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -105,6 +105,32 @@ def _prop(self):
assert a._prop == "value"
+def test_can_invalidate_cached_property():
+ foo = []
+
+ class A(object):
+ def prop(self):
+ foo.append(42)
+ return 42
+
+ prop = utils.cached_property(prop)
+
+ a = A()
+ p = a.prop
+ q = a.prop
+ assert p == q == 42
+ assert foo == [42]
+
+ utils.invalidate_cached_property(a, "prop")
+ r = a.prop
+ assert r == 42
+ assert foo == [42, 42]
+
+ s = a.prop
+ assert s == 42
+ assert foo == [42, 42]
+
+
def test_inspect_treats_cached_property_as_property():
class A(object):
@utils.cached_property
From 056d879c86c8062766d9deeeb2d29c1945c71e09 Mon Sep 17 00:00:00 2001
From: Jiayuan
Date: Fri, 29 Mar 2019 23:54:09 +0800
Subject: [PATCH 005/733] don't ignore set-cookie keys in cookie header
---
CHANGES.rst | 3 +++
src/werkzeug/_internal.py | 13 +------------
tests/test_http.py | 7 +++++--
3 files changed, 9 insertions(+), 14 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index d885a344b..fd80c3d4a 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -10,6 +10,9 @@ Unreleased
(:issue:`1477`)
- Added ``utils.invalidate_cached_property()`` to invalidate cached
properties. (:pr:`1474`)
+- Directive keys for the ``Set-Cookie`` response header are not
+ ignored when parsing the ``Cookie`` request header. This allows
+ cookies with names such as "expires" and "version". (:issue:`1495`)
Version 0.15.1
diff --git a/src/werkzeug/_internal.py b/src/werkzeug/_internal.py
index d8b833639..fc3770793 100644
--- a/src/werkzeug/_internal.py
+++ b/src/werkzeug/_internal.py
@@ -26,15 +26,6 @@
_logger = None
_signature_cache = WeakKeyDictionary()
_epoch_ord = date(1970, 1, 1).toordinal()
-_cookie_params = {
- b"expires",
- b"path",
- b"comment",
- b"max-age",
- b"secure",
- b"httponly",
- b"version",
-}
_legal_cookie_chars = (
string.ascii_letters + string.digits + u"/=!#$%&'*+-.^_`|~:"
).encode("ascii")
@@ -310,9 +301,7 @@ def _cookie_parse_impl(b):
value = match.group("val") or b""
i = match.end(0)
- # Ignore parameters. We have no interest in them.
- if key.lower() not in _cookie_params:
- yield _cookie_unquote(key), _cookie_unquote(value)
+ yield _cookie_unquote(key), _cookie_unquote(value)
def _encode_idna(domain):
diff --git a/tests/test_http.py b/tests/test_http.py
index c6e8309a9..52275c654 100644
--- a/tests/test_http.py
+++ b/tests/test_http.py
@@ -483,10 +483,13 @@ def test_empty_keys_are_ignored(self):
def test_cookie_quoting(self):
val = http.dump_cookie("foo", "?foo")
strict_eq(val, 'foo="?foo"; Path=/')
- strict_eq(dict(http.parse_cookie(val)), {"foo": u"?foo"})
-
+ strict_eq(dict(http.parse_cookie(val)), {"foo": u"?foo", "Path": u"/"})
strict_eq(dict(http.parse_cookie(r'foo="foo\054bar"')), {"foo": u"foo,bar"})
+ def test_parse_set_cookie_directive(self):
+ val = 'foo="?foo"; version="0.1";'
+ strict_eq(dict(http.parse_cookie(val)), {"foo": u"?foo", "version": u"0.1"})
+
def test_cookie_domain_resolving(self):
val = http.dump_cookie("foo", "bar", domain=u"\N{SNOWMAN}.com")
strict_eq(val, "foo=bar; Domain=xn--n3h.com; Path=/")
From 2af1b368de3ffaff34ed89168df51698578350f6 Mon Sep 17 00:00:00 2001
From: Lindsay Young
Date: Mon, 6 May 2019 14:03:23 -0400
Subject: [PATCH 006/733] Create CODE_OF_CONDUCT.md
Ticket is referenced in [#Issue 1](https://github.com/pallets/meta/issues/1) in the Meta repo.
I spoke with @davidism and the decision was to use Contributor Covenant. It has easy GitHub integration and quality content.
---
CODE_OF_CONDUCT.md | 76 ++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 76 insertions(+)
create mode 100644 CODE_OF_CONDUCT.md
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..f4ba197de
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,76 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, sex characteristics, gender identity and expression,
+level of experience, education, socio-economic status, nationality, personal
+appearance, race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+ advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at report@palletsprojects.com. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
+
+[homepage]: https://www.contributor-covenant.org
+
+For answers to common questions about this code of conduct, see
+https://www.contributor-covenant.org/faq
From a68c1dfbbaed9c16a5d68e8eeeea9d17f1058b51 Mon Sep 17 00:00:00 2001
From: Yang Wang
Date: Fri, 3 May 2019 12:33:25 -0400
Subject: [PATCH 007/733] Added charset to HTTPException header content-type
---
CHANGES.rst | 2 ++
src/werkzeug/exceptions.py | 2 +-
tests/test_exceptions.py | 6 ++++++
3 files changed, 9 insertions(+), 1 deletion(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 9b09e19aa..024c98da1 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -13,6 +13,8 @@ Unreleased
- Directive keys for the ``Set-Cookie`` response header are not
ignored when parsing the ``Cookie`` request header. This allows
cookies with names such as "expires" and "version". (:issue:`1495`)
+- Add ``charset=utf-8`` to an HTTP exception response's
+ ``CONTENT_TYPE`` header. (:pr:`1526`)
Version 0.15.2
diff --git a/src/werkzeug/exceptions.py b/src/werkzeug/exceptions.py
index 5642b99d2..227b41b8f 100644
--- a/src/werkzeug/exceptions.py
+++ b/src/werkzeug/exceptions.py
@@ -153,7 +153,7 @@ def get_body(self, environ=None):
def get_headers(self, environ=None):
"""Get a list of headers."""
- return [("Content-Type", "text/html")]
+ return [("Content-Type", "text/html; charset=utf-8")]
def get_response(self, environ=None):
"""Get a response object. If one was passed to the exception
diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py
index 616b39c8e..969f7c7cd 100644
--- a/tests/test_exceptions.py
+++ b/tests/test_exceptions.py
@@ -110,3 +110,9 @@ def test_unauthorized_www_authenticate():
exc = exceptions.Unauthorized(www_authenticate=[digest, basic])
h = dict(exc.get_headers({}))
assert h["WWW-Authenticate"] == ", ".join((str(digest), str(basic)))
+
+
+def test_response_header_content_type_should_contain_charset():
+ exc = exceptions.HTTPException("An error message")
+ h = exc.get_response({})
+ assert h.headers["Content-Type"] == "text/html; charset=utf-8"
From 2ceab5849ee32016a4cdffb959be5c0e829d8db3 Mon Sep 17 00:00:00 2001
From: Anthony Sottile
Date: Mon, 6 May 2019 10:53:48 -0700
Subject: [PATCH 008/733] Fix interactive debugger while accessing cell vars
---
src/werkzeug/debug/console.py | 11 ++++++-----
src/werkzeug/debug/tbtools.py | 4 ++--
tests/test_debug.py | 12 ++++++++++++
3 files changed, 20 insertions(+), 7 deletions(-)
diff --git a/src/werkzeug/debug/console.py b/src/werkzeug/debug/console.py
index adbd170b7..5b2f1e608 100644
--- a/src/werkzeug/debug/console.py
+++ b/src/werkzeug/debug/console.py
@@ -148,11 +148,12 @@ def func(source, filename, symbol):
class _InteractiveConsole(code.InteractiveInterpreter):
def __init__(self, globals, locals):
+ locals = dict(globals)
+ locals.update(locals)
+ locals["dump"] = dump
+ locals["help"] = helper
+ locals["__loader__"] = self.loader = _ConsoleLoader()
code.InteractiveInterpreter.__init__(self, locals)
- self.globals = dict(globals)
- self.globals["dump"] = dump
- self.globals["help"] = helper
- self.globals["__loader__"] = self.loader = _ConsoleLoader()
self.more = False
self.buffer = []
_wrap_compiler(self)
@@ -177,7 +178,7 @@ def runsource(self, source):
def runcode(self, code):
try:
- eval(code, self.globals, self.locals)
+ exec(code, self.locals)
except Exception:
self.showtraceback()
diff --git a/src/werkzeug/debug/tbtools.py b/src/werkzeug/debug/tbtools.py
index c8358882d..70a5bef47 100644
--- a/src/werkzeug/debug/tbtools.py
+++ b/src/werkzeug/debug/tbtools.py
@@ -464,8 +464,8 @@ def __init__(self, exc_type, exc_value, tb):
if os.path.isfile(fn):
fn = os.path.realpath(fn)
self.filename = to_unicode(fn, get_filesystem_encoding())
- self.module = self.globals.get("__name__")
- self.loader = self.globals.get("__loader__")
+ self.module = self.globals.get("__name__", self.locals.get("__name__"))
+ self.loader = self.globals.get("__loader__", self.locals.get("__loader__"))
self.code = tb.tb_frame.f_code
# support for paste's traceback extensions
diff --git a/tests/test_debug.py b/tests/test_debug.py
index 84720d666..ac795c785 100644
--- a/tests/test_debug.py
+++ b/tests/test_debug.py
@@ -16,6 +16,7 @@
import requests
from werkzeug._compat import PY2
+from werkzeug.debug import console
from werkzeug.debug import DebuggedApplication
from werkzeug.debug import get_machine_id
from werkzeug.debug.console import HTMLStringO
@@ -356,6 +357,17 @@ def app(environ, start_response):
assert r.text == "hello"
+def test_console_closure_variables(monkeypatch):
+ # restore the original display hook
+ monkeypatch.setattr(sys, "displayhook", console._displayhook)
+ c = console.Console()
+ c.eval("y = 5")
+ c.eval("x = lambda: y")
+ ret = c.eval("x()")
+ expected = ">>> x()\n5" if PY2 else ">>> x()\n5\n"
+ assert ret == expected
+
+
@pytest.mark.skipif(PY2, reason="Python 2 doesn't have chained exceptions.")
@pytest.mark.timeout(2)
def test_chained_exception_cycle():
From 9c4d5e0fd7e5a5659636b1ed544f8ca875826172 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Mon, 20 May 2019 07:15:39 -0700
Subject: [PATCH 009/733] update changelog
---
CHANGES.rst | 3 +++
1 file changed, 3 insertions(+)
diff --git a/CHANGES.rst b/CHANGES.rst
index 5e11d7e4a..2633fc832 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -15,6 +15,9 @@ Unreleased
cookies with names such as "expires" and "version". (:issue:`1495`)
- Add ``charset=utf-8`` to an HTTP exception response's
``CONTENT_TYPE`` header. (:pr:`1526`)
+- The interactive debugger handles outer variables in nested scopes
+ such as lambdas and comprehensions. :issue:`913`, :issue:`1037`,
+ :pr:`1532`
Version 0.15.5
From 8cd2d9078e79fc06a4a3f1eb661ced2e85f64435 Mon Sep 17 00:00:00 2001
From: Brian Cristante <33549821+brcrista@users.noreply.github.com>
Date: Mon, 6 May 2019 13:52:10 -0400
Subject: [PATCH 010/733] prototype AzP configuration
[skip ci]
---
azure-pipelines.yml | 43 +++++++++++++++++++++++++++++++++++++++++++
1 file changed, 43 insertions(+)
create mode 100644 azure-pipelines.yml
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
new file mode 100644
index 000000000..331f7134e
--- /dev/null
+++ b/azure-pipelines.yml
@@ -0,0 +1,43 @@
+trigger:
+- master
+
+strategy:
+ matrix:
+ Python37Linux:
+ python.version: '3.7'
+ vmImage: 'ubuntu-latest'
+ Python37Windows:
+ python.version: '3.7'
+ vmImage: 'windows-latest'
+ Python37Mac:
+ python.version: '3.7'
+ vmImage: 'macos-latest'
+ # Pypy3Linux:
+ # python.version: 'pypy3'
+ # vmImage: 'ubuntu-latest'
+ # Python36Linux:
+ # python.version: '3.6'
+ # vmImage: 'ubuntu-latest'
+ # Python35Linux:
+ # python.version: '3.5'
+ # vmImage: 'ubuntu-latest'
+ # Python27Linux:
+ # python.version: '2.7'
+ # vmImage: 'ubuntu-latest'
+ # Python27Windows:
+ # python.version: '2.7'
+ # vmImage: 'windows-latest'
+
+pool:
+ vmImage: $[ variables.vmImage ]
+
+steps:
+- task: UsePythonVersion@0
+ inputs:
+ versionSpec: $(python.version)
+
+- script: pip install tox
+ displayName: 'Install tox'
+
+- script: tox -e py
+ displayName: 'Run tox'
\ No newline at end of file
From 6a7fd2b4018d4cf52f6f401ce66f5f7c5ed43566 Mon Sep 17 00:00:00 2001
From: Brian Cristante <33549821+brcrista@users.noreply.github.com>
Date: Mon, 6 May 2019 13:57:50 -0400
Subject: [PATCH 011/733] Upload test results
---
azure-pipelines.yml | 33 ++++++++++++++++++++++-----------
1 file changed, 22 insertions(+), 11 deletions(-)
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 331f7134e..5f75f24b5 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -5,28 +5,28 @@ strategy:
matrix:
Python37Linux:
python.version: '3.7'
- vmImage: 'ubuntu-latest'
+ vmImage: ubuntu-latest
Python37Windows:
python.version: '3.7'
- vmImage: 'windows-latest'
+ vmImage: windows-latest
Python37Mac:
python.version: '3.7'
- vmImage: 'macos-latest'
+ vmImage: macos-latest
# Pypy3Linux:
# python.version: 'pypy3'
- # vmImage: 'ubuntu-latest'
+ # vmImage: ubuntu-latest
# Python36Linux:
# python.version: '3.6'
- # vmImage: 'ubuntu-latest'
+ # vmImage: ubuntu-latest
# Python35Linux:
# python.version: '3.5'
- # vmImage: 'ubuntu-latest'
+ # vmImage: ubuntu-latest
# Python27Linux:
# python.version: '2.7'
- # vmImage: 'ubuntu-latest'
+ # vmImage: ubuntu-latest
# Python27Windows:
# python.version: '2.7'
- # vmImage: 'windows-latest'
+ # vmImage: windows-latest
pool:
vmImage: $[ variables.vmImage ]
@@ -35,9 +35,20 @@ steps:
- task: UsePythonVersion@0
inputs:
versionSpec: $(python.version)
+ displayName: Use Python $(python.version)
+
+- script: python -m pip install --upgrade pip
+ displayName: Use latest Pip
- script: pip install tox
- displayName: 'Install tox'
+ displayName: Install tox
+
+- script: tox -e py -- --junit-xml=junit/tox-run-$(python.version).xml
+ displayName: Run tox
-- script: tox -e py
- displayName: 'Run tox'
\ No newline at end of file
+- task: PublishTestResults@2
+ displayName: Publish Test Results
+ inputs:
+ testResultsFiles: junit/*.xml
+ testRunTitle: 'Python $(python.version)'
+ condition: succeededOrFailed()
\ No newline at end of file
From e5d31287a208ca1534b1a130689d48d5dc5da537 Mon Sep 17 00:00:00 2001
From: Brian Cristante <33549821+brcrista@users.noreply.github.com>
Date: Mon, 6 May 2019 14:33:55 -0400
Subject: [PATCH 012/733] Normalize CRLF for text files in .gitattributes
---
.gitattributes | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/.gitattributes b/.gitattributes
index 5946e8238..fb7705c58 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1 +1,7 @@
+# Normalize CRLF to LF for all text files
+* text=auto
+
+# Declare binary file types so they won't be normalized
+*.png binary
+*.jpg binary
tests/**/*.http binary
From 36c678b8c39acc3fbceefabae602b96f73fb271c Mon Sep 17 00:00:00 2001
From: Brian Cristante <33549821+brcrista@users.noreply.github.com>
Date: Mon, 6 May 2019 14:39:46 -0400
Subject: [PATCH 013/733] tests/res/test.txt binary
---
.gitattributes | 1 +
1 file changed, 1 insertion(+)
diff --git a/.gitattributes b/.gitattributes
index fb7705c58..764a4428d 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -5,3 +5,4 @@
*.png binary
*.jpg binary
tests/**/*.http binary
+tests/res/test.txt binary
From 15169e6d31ac31a8ceac485413a5804fb1c00045 Mon Sep 17 00:00:00 2001
From: Brian Cristante <33549821+brcrista@users.noreply.github.com>
Date: Mon, 6 May 2019 14:49:30 -0400
Subject: [PATCH 014/733] build full matrix
---
azure-pipelines.yml | 39 +++++++++++++++++++++------------------
1 file changed, 21 insertions(+), 18 deletions(-)
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 5f75f24b5..1958b39c0 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -3,30 +3,33 @@ trigger:
strategy:
matrix:
- Python37Linux:
+ Python37-Linux:
python.version: '3.7'
vmImage: ubuntu-latest
- Python37Windows:
+ Python37-Windows:
python.version: '3.7'
vmImage: windows-latest
- Python37Mac:
+ Python37-Mac:
python.version: '3.7'
vmImage: macos-latest
- # Pypy3Linux:
- # python.version: 'pypy3'
- # vmImage: ubuntu-latest
- # Python36Linux:
- # python.version: '3.6'
- # vmImage: ubuntu-latest
- # Python35Linux:
- # python.version: '3.5'
- # vmImage: ubuntu-latest
- # Python27Linux:
- # python.version: '2.7'
- # vmImage: ubuntu-latest
- # Python27Windows:
- # python.version: '2.7'
- # vmImage: windows-latest
+ Python36-Linux:
+ python.version: '3.6'
+ vmImage: ubuntu-latest
+ Python35-Linux:
+ python.version: '3.5'
+ vmImage: ubuntu-latest
+ Python27-Linux:
+ python.version: '2.7'
+ vmImage: ubuntu-latest
+ Python27-Windows:
+ python.version: '2.7'
+ vmImage: windows-latest
+ Pypy3-Linux:
+ python.version: 'pypy3'
+ vmImage: ubuntu-latest
+ Pypy2-Linux:
+ python.version: 'pypy2'
+ vmImage: ubuntu-latest
pool:
vmImage: $[ variables.vmImage ]
From c40a9a764a0c747112ddf9e60c0817680c6f2182 Mon Sep 17 00:00:00 2001
From: Brian Cristante
Date: Mon, 6 May 2019 15:16:19 -0400
Subject: [PATCH 015/733] xfail tests with Python 2 on Windows
---
tests/middleware/test_shared_data.py | 4 ++++
tests/test_serving.py | 2 ++
2 files changed, 6 insertions(+)
diff --git a/tests/middleware/test_shared_data.py b/tests/middleware/test_shared_data.py
index 8a1eee0c8..937712a6f 100644
--- a/tests/middleware/test_shared_data.py
+++ b/tests/middleware/test_shared_data.py
@@ -1,7 +1,10 @@
# -*- coding: utf-8 -*-
import os
+import sys
from contextlib import closing
+import pytest
+
from werkzeug._compat import to_native
from werkzeug.middleware.shared_data import SharedDataMiddleware
from werkzeug.test import create_environ
@@ -13,6 +16,7 @@ def test_get_file_loader():
assert callable(app.get_file_loader("foo"))
+@pytest.mark.xfail(sys.version_info.major == 2 and sys.platform == "win32", reason="TODO fix test for Python 2 on Windows")
def test_shared_data_middleware(tmpdir):
def null_application(environ, start_response):
start_response("404 NOT FOUND", [("Content-Type", "text/plain")])
diff --git a/tests/test_serving.py b/tests/test_serving.py
index 9e3f4b49b..ac6dab9cb 100644
--- a/tests/test_serving.py
+++ b/tests/test_serving.py
@@ -147,6 +147,7 @@ def test_make_ssl_devcert(tmpdir):
@pytest.mark.skipif(watchdog is None, reason="Watchdog not installed.")
+@pytest.mark.xfail(sys.version_info.major == 2 and sys.platform == "win32", reason="TODO fix test for Python 2 on Windows")
def test_reloader_broken_imports(tmpdir, dev_server):
# We explicitly assert that the server reloads on change, even though in
# this case the import could've just been retried. This is to assert
@@ -237,6 +238,7 @@ def real_app(environ, start_response):
@pytest.mark.skipif(watchdog is None, reason="Watchdog not installed.")
+@pytest.mark.xfail(sys.version_info.major == 2 and sys.platform == "win32", reason="TODO fix test for Python 2 on Windows")
def test_reloader_reports_correct_file(tmpdir, dev_server):
real_app = tmpdir.join("real_app.py")
real_app.write(
From c1ec0d62a24ee3bacc8d2d0c41944e717cbc1d70 Mon Sep 17 00:00:00 2001
From: Brian Cristante
Date: Mon, 6 May 2019 15:35:30 -0400
Subject: [PATCH 016/733] azure-pipelines.yml formatting
---
azure-pipelines.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 1958b39c0..239e18207 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -50,8 +50,8 @@ steps:
displayName: Run tox
- task: PublishTestResults@2
- displayName: Publish Test Results
inputs:
testResultsFiles: junit/*.xml
testRunTitle: 'Python $(python.version)'
- condition: succeededOrFailed()
\ No newline at end of file
+ condition: succeededOrFailed()
+ displayName: Publish test results
\ No newline at end of file
From d7a2f2ed1e6e20f61c175e8338177a68ad213bfc Mon Sep 17 00:00:00 2001
From: David Lord
Date: Mon, 20 May 2019 13:04:52 -0700
Subject: [PATCH 017/733] azure pipelines configuration
---
.appveyor.yml | 26 -----------
.azure-pipelines.yml | 64 ++++++++++++++++++++++++++++
.travis.yml | 46 --------------------
azure-pipelines.yml | 57 -------------------------
tests/middleware/test_shared_data.py | 5 ++-
tests/test_serving.py | 10 ++++-
tox.ini | 15 +++----
7 files changed, 83 insertions(+), 140 deletions(-)
delete mode 100644 .appveyor.yml
create mode 100644 .azure-pipelines.yml
delete mode 100644 .travis.yml
delete mode 100644 azure-pipelines.yml
diff --git a/.appveyor.yml b/.appveyor.yml
deleted file mode 100644
index bdd3d40c6..000000000
--- a/.appveyor.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-environment:
- global:
- TOXENV: py,codecov
-
- matrix:
- - PYTHON: C:\Python37-x64
- - PYTHON: C:\Python27-x64
-
-init:
- - SET PATH=%PYTHON%;%PATH%
-
-install:
- - python -m pip install -U tox
-
-build: false
-
-test_script:
- - python -m tox --skip-missing-interpreters false
-
-branches:
- only:
- - master
- - /^\d+(\.\d+)*(\.x)?$/
-
-cache:
- - '%LOCALAPPDATA%\pip\Cache'
diff --git a/.azure-pipelines.yml b/.azure-pipelines.yml
new file mode 100644
index 000000000..c85fe0539
--- /dev/null
+++ b/.azure-pipelines.yml
@@ -0,0 +1,64 @@
+trigger:
+ - master
+ - '*.x'
+
+variables:
+ vmImage: ubuntu-latest
+ python.version: 3.7
+ TOXENV: py,coverage-ci
+ hasTestResults: true
+
+strategy:
+ matrix:
+ Python 3.7 Linux:
+ vmImage: ubuntu-latest
+ Python 3.7 Windows:
+ vmImage: windows-latest
+ Python 3.7 Mac:
+ vmImage: macos-latest
+ PyPy 3 Linux:
+ python.version: pypy3
+ Python 3.6 Linux:
+ python.version: 3.6
+ Python 3.5 Linux:
+ python.version: 3.5
+ Python 2.7 Linux:
+ python.version: 2.7
+ Python 2.7 Windows:
+ python.version: 2.7
+ vmImage: windows-latest
+ Docs:
+ TOXENV: docs-html
+ hasTestResults: false
+ Style:
+ TOXENV: style
+ hasTestResults: false
+
+pool:
+ vmImage: $[ variables.vmImage ]
+
+steps:
+ - task: UsePythonVersion@0
+ inputs:
+ versionSpec: $(python.version)
+ displayName: Use Python $(python.version)
+
+ - script: pip --disable-pip-version-check install -U tox
+ displayName: Install tox
+
+ - script: tox -s false -- --junit-xml=test-results.xml
+ displayName: Run tox
+
+ - task: PublishTestResults@2
+ inputs:
+ testResultsFiles: test-results.xml
+ testRunTitle: $(Agent.JobName)
+ condition: eq(variables['hasTestResults'], 'true')
+ displayName: Publish test results
+
+ - task: PublishCodeCoverageResults@1
+ inputs:
+ codeCoverageTool: Cobertura
+ summaryFileLocation: coverage.xml
+ condition: eq(variables['hasTestResults'], 'true')
+ displayName: Publish coverage results
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 5677b16f5..000000000
--- a/.travis.yml
+++ /dev/null
@@ -1,46 +0,0 @@
-os: linux
-dist: xenial
-language: python
-python:
- - "3.7"
- - "3.6"
- - "3.5"
- - "2.7"
- - "nightly"
- - "pypy3.5-6.0"
-env: TOXENV=py,codecov
-
-matrix:
- include:
- - env: TOXENV=stylecheck,docs-html
- - os: osx
- language: generic
- env: TOXENV=py3,codecov
- cache:
- directories:
- - $HOME/Library/Caches/Homebrew
- - $HOME/Library/Caches/pip
- allow_failures:
- - python: nightly
- - python: pypy3.5-6.0
- - os: osx
- fast_finish: true
-
-install:
- - pip install -U tox
-
-script:
- - tox --skip-missing-interpreters false
-
-cache:
- directories:
- - $HOME/.cache/pip
- - $HOME/.cache/pre-commit
-
-branches:
- only:
- - master
- - /^\d+(\.\d+)*(\.x)?$/
-
-notifications:
- email: false
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
deleted file mode 100644
index 239e18207..000000000
--- a/azure-pipelines.yml
+++ /dev/null
@@ -1,57 +0,0 @@
-trigger:
-- master
-
-strategy:
- matrix:
- Python37-Linux:
- python.version: '3.7'
- vmImage: ubuntu-latest
- Python37-Windows:
- python.version: '3.7'
- vmImage: windows-latest
- Python37-Mac:
- python.version: '3.7'
- vmImage: macos-latest
- Python36-Linux:
- python.version: '3.6'
- vmImage: ubuntu-latest
- Python35-Linux:
- python.version: '3.5'
- vmImage: ubuntu-latest
- Python27-Linux:
- python.version: '2.7'
- vmImage: ubuntu-latest
- Python27-Windows:
- python.version: '2.7'
- vmImage: windows-latest
- Pypy3-Linux:
- python.version: 'pypy3'
- vmImage: ubuntu-latest
- Pypy2-Linux:
- python.version: 'pypy2'
- vmImage: ubuntu-latest
-
-pool:
- vmImage: $[ variables.vmImage ]
-
-steps:
-- task: UsePythonVersion@0
- inputs:
- versionSpec: $(python.version)
- displayName: Use Python $(python.version)
-
-- script: python -m pip install --upgrade pip
- displayName: Use latest Pip
-
-- script: pip install tox
- displayName: Install tox
-
-- script: tox -e py -- --junit-xml=junit/tox-run-$(python.version).xml
- displayName: Run tox
-
-- task: PublishTestResults@2
- inputs:
- testResultsFiles: junit/*.xml
- testRunTitle: 'Python $(python.version)'
- condition: succeededOrFailed()
- displayName: Publish test results
\ No newline at end of file
diff --git a/tests/middleware/test_shared_data.py b/tests/middleware/test_shared_data.py
index 937712a6f..20bd88cb5 100644
--- a/tests/middleware/test_shared_data.py
+++ b/tests/middleware/test_shared_data.py
@@ -16,7 +16,10 @@ def test_get_file_loader():
assert callable(app.get_file_loader("foo"))
-@pytest.mark.xfail(sys.version_info.major == 2 and sys.platform == "win32", reason="TODO fix test for Python 2 on Windows")
+@pytest.mark.xfail(
+ sys.version_info.major == 2 and sys.platform == "win32",
+ reason="TODO fix test for Python 2 on Windows",
+)
def test_shared_data_middleware(tmpdir):
def null_application(environ, start_response):
start_response("404 NOT FOUND", [("Content-Type", "text/plain")])
diff --git a/tests/test_serving.py b/tests/test_serving.py
index ac6dab9cb..cf31bd2e6 100644
--- a/tests/test_serving.py
+++ b/tests/test_serving.py
@@ -147,7 +147,10 @@ def test_make_ssl_devcert(tmpdir):
@pytest.mark.skipif(watchdog is None, reason="Watchdog not installed.")
-@pytest.mark.xfail(sys.version_info.major == 2 and sys.platform == "win32", reason="TODO fix test for Python 2 on Windows")
+@pytest.mark.xfail(
+ sys.version_info.major == 2 and sys.platform == "win32",
+ reason="TODO fix test for Python 2 on Windows",
+)
def test_reloader_broken_imports(tmpdir, dev_server):
# We explicitly assert that the server reloads on change, even though in
# this case the import could've just been retried. This is to assert
@@ -238,7 +241,10 @@ def real_app(environ, start_response):
@pytest.mark.skipif(watchdog is None, reason="Watchdog not installed.")
-@pytest.mark.xfail(sys.version_info.major == 2 and sys.platform == "win32", reason="TODO fix test for Python 2 on Windows")
+@pytest.mark.xfail(
+ sys.version_info.major == 2 and sys.platform == "win32",
+ reason="TODO fix test for Python 2 on Windows",
+)
def test_reloader_reports_correct_file(tmpdir, dev_server):
real_app = tmpdir.join("real_app.py")
real_app.write(
diff --git a/tox.ini b/tox.ini
index f36a069df..abda16f0e 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,9 +1,9 @@
[tox]
envlist =
py{37,36,35,27,py3,py}
- stylecheck
+ style
docs-html
- coverage-report
+ coverage
skip_missing_interpreters = true
[testenv]
@@ -19,7 +19,7 @@ deps =
watchdog
commands = coverage run -p -m pytest --tb=short --basetemp={envtmpdir} {posargs}
-[testenv:stylecheck]
+[testenv:style]
deps = pre-commit
skip_install = true
commands = pre-commit run --all-files --show-diff-on-failure
@@ -31,7 +31,7 @@ deps =
sphinx-issues
commands = sphinx-build -W -b html -d {envtmpdir}/doctrees docs {envtmpdir}/html
-[testenv:coverage-report]
+[testenv:coverage]
deps = coverage
skip_install = true
commands =
@@ -39,11 +39,10 @@ commands =
coverage html
coverage report
-[testenv:codecov]
-passenv = CI TRAVIS TRAVIS_* APPVEYOR APPVEYOR_*
-deps = codecov
+[testenv:coverage-ci]
+deps = coverage
skip_install = true
commands =
coverage combine
- codecov
+ coverage xml
coverage report
From d86887b31c95034278156f38b6e0b32bb15c1f33 Mon Sep 17 00:00:00 2001
From: "michal.cyprian"
Date: Thu, 16 May 2019 12:17:32 +0200
Subject: [PATCH 018/733] Prevent utils.invalidate_cached_property from
deleting attributes
Fixes: https://github.com/pallets/werkzeug/issues/1547
---
src/werkzeug/utils.py | 5 +++++
tests/test_utils.py | 10 ++++++++++
2 files changed, 15 insertions(+)
diff --git a/src/werkzeug/utils.py b/src/werkzeug/utils.py
index 9a002c6fe..196bff560 100644
--- a/src/werkzeug/utils.py
+++ b/src/werkzeug/utils.py
@@ -114,6 +114,11 @@ def invalidate_cached_property(obj, name):
You must pass the name of the cached property as the second argument.
"""
+ if not isinstance(getattr(obj.__class__, name, None), cached_property):
+ raise TypeError(
+ "Attribute {} of object {} is not a cached_property, "
+ "cannot be invalidated".format(name, obj)
+ )
obj.__dict__[name] = _missing
diff --git a/tests/test_utils.py b/tests/test_utils.py
index a1c3f874e..403823036 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -131,6 +131,16 @@ def prop(self):
assert foo == [42, 42]
+def test_invalidate_cached_property_on_non_property():
+ class A(object):
+ def __init__(self):
+ self.prop = 42
+
+ a = A()
+ with pytest.raises(TypeError):
+ utils.invalidate_cached_property(a, "prop")
+
+
def test_inspect_treats_cached_property_as_property():
class A(object):
@utils.cached_property
From 29ffda85b44c3df7cdbd840d1c0a6d9ec3b4f920 Mon Sep 17 00:00:00 2001
From: John Zeringue
Date: Fri, 31 May 2019 09:27:43 -0400
Subject: [PATCH 019/733] Detect opera as browser in user_agent
Now, we'll recognize `OPR` as "Opera" in a user agent string.
Fixes #1556
---
src/werkzeug/useragents.py | 2 +-
tests/test_wrappers.py | 8 ++++++++
2 files changed, 9 insertions(+), 1 deletion(-)
diff --git a/src/werkzeug/useragents.py b/src/werkzeug/useragents.py
index 9416b0d7a..720a952ae 100644
--- a/src/werkzeug/useragents.py
+++ b/src/werkzeug/useragents.py
@@ -46,7 +46,7 @@ class UserAgentParser(object):
("yahoo", "yahoo"),
("ask jeeves", "ask"),
(r"aol|america\s+online\s+browser", "aol"),
- ("opera", "opera"),
+ (r"opera|opr", "opera"),
("edge", "edge"),
("chrome|crios", "chrome"),
("seamonkey", "seamonkey"),
diff --git a/tests/test_wrappers.py b/tests/test_wrappers.py
index b4a865677..64327c0e2 100644
--- a/tests/test_wrappers.py
+++ b/tests/test_wrappers.py
@@ -616,6 +616,14 @@ def test_user_agent_mixin():
"3.5.1",
"de",
),
+ (
+ "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_5) AppleWebKit/537.36"
+ "(KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36 OPR/60.0.3255.95",
+ "opera",
+ "macos",
+ "60.0.3255.95",
+ None,
+ ),
]
for ua, browser, platform, version, lang in user_agents:
request = wrappers.Request({"HTTP_USER_AGENT": ua})
From 6e7c8bea0f307633e70d979fe0a89e046accc598 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Fri, 31 May 2019 09:47:20 -0400
Subject: [PATCH 020/733] add changelog for #1564
---
CHANGES.rst | 2 ++
1 file changed, 2 insertions(+)
diff --git a/CHANGES.rst b/CHANGES.rst
index 2633fc832..2ea163421 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -18,6 +18,8 @@ Unreleased
- The interactive debugger handles outer variables in nested scopes
such as lambdas and comprehensions. :issue:`913`, :issue:`1037`,
:pr:`1532`
+- The user agent for Opera 60 on Mac is correctly reported as
+ "opera" instead of "chrome". :issue:`1556`
Version 0.15.5
From 359276baae27acca69cfc51fb31a5318924afc1b Mon Sep 17 00:00:00 2001
From: Joshua Bronson
Date: Fri, 31 May 2019 10:49:39 -0400
Subject: [PATCH 021/733] rm spurious use_memcache=False param
---
examples/coolmagic/utils.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/examples/coolmagic/utils.py b/examples/coolmagic/utils.py
index f4cf20d5b..a1bc1ea31 100644
--- a/examples/coolmagic/utils.py
+++ b/examples/coolmagic/utils.py
@@ -25,7 +25,7 @@
local = Local()
local_manager = LocalManager([local])
template_env = Environment(
- loader=FileSystemLoader(join(dirname(__file__), "templates"), use_memcache=False)
+ loader=FileSystemLoader(join(dirname(__file__), "templates"))
)
exported_views = {}
From f2211e6663f0fd8a97563b32c19c1ad459a8b364 Mon Sep 17 00:00:00 2001
From: Joshua Bronson
Date: Fri, 31 May 2019 11:37:08 -0400
Subject: [PATCH 022/733] Fix __delitem__ signature (does not accept value)
---
examples/cupoftee/db.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/examples/cupoftee/db.py b/examples/cupoftee/db.py
index 7f0412207..e86c071f7 100644
--- a/examples/cupoftee/db.py
+++ b/examples/cupoftee/db.py
@@ -40,7 +40,7 @@ def _load_key(self, key):
def __setitem__(self, key, value):
self._local[key] = value
- def __delitem__(self, key, value):
+ def __delitem__(self, key):
with self._lock:
self._local.pop(key, None)
if key in self._fs:
From c2231cd288dde0f8e780fe76e58a783e9110bf0d Mon Sep 17 00:00:00 2001
From: John Zeringue
Date: Fri, 31 May 2019 12:33:42 -0400
Subject: [PATCH 023/733] Suggest venv named "env" instead of "venv"
"env" is already in our `.gitignore` and is the name suggested in
Flask's `CONTRIBUTING.rst`
---
CONTRIBUTING.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 1d89ae60b..33a2e3129 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -64,7 +64,7 @@ First time setup
- Create a virtualenv::
- python3 -m venv venv
+ python3 -m venv env
. venv/bin/activate
# or "venv\Scripts\activate" on Windows
From 37c640f2800e8e83de0b6cceb1a076bf8f42d984 Mon Sep 17 00:00:00 2001
From: Joshua Bronson
Date: Fri, 31 May 2019 13:25:24 -0400
Subject: [PATCH 024/733] rm superfluous return None
BaseHTTPRequestHandler.handle() returns None,
so there is no need to capture and return its return value.
---
src/werkzeug/serving.py | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py
index ff9f8805f..dd6224c07 100644
--- a/src/werkzeug/serving.py
+++ b/src/werkzeug/serving.py
@@ -321,9 +321,8 @@ def execute(app):
def handle(self):
"""Handles a request ignoring dropped connections."""
- rv = None
try:
- rv = BaseHTTPRequestHandler.handle(self)
+ BaseHTTPRequestHandler.handle(self)
except (_ConnectionError, socket.timeout) as e:
self.connection_dropped(e)
except Exception as e:
@@ -331,7 +330,6 @@ def handle(self):
raise
if self.server.shutdown_signal:
self.initiate_shutdown()
- return rv
def initiate_shutdown(self):
"""A horrible, horrible way to kill the server for Python 2.6 and
From cff0b4303947211fe610bd9f35b155e39c65f3a2 Mon Sep 17 00:00:00 2001
From: Lewis Headden
Date: Fri, 31 May 2019 13:30:43 -0400
Subject: [PATCH 025/733] fix: Parse Crosswalk UA as Android not ChromeOS
(#1024)
---
src/werkzeug/useragents.py | 2 +-
tests/test_wrappers.py | 9 +++++++++
2 files changed, 10 insertions(+), 1 deletion(-)
diff --git a/src/werkzeug/useragents.py b/src/werkzeug/useragents.py
index 720a952ae..74f2fa4fd 100644
--- a/src/werkzeug/useragents.py
+++ b/src/werkzeug/useragents.py
@@ -18,7 +18,7 @@ class UserAgentParser(object):
"""A simple user agent parser. Used by the `UserAgent`."""
platforms = (
- ("cros", "chromeos"),
+ (" cros ", "chromeos"),
("iphone|ios", "iphone"),
("ipad", "ipad"),
(r"darwin|mac|os\s*x", "macos"),
diff --git a/tests/test_wrappers.py b/tests/test_wrappers.py
index 64327c0e2..4a60278d9 100644
--- a/tests/test_wrappers.py
+++ b/tests/test_wrappers.py
@@ -624,6 +624,15 @@ def test_user_agent_mixin():
"60.0.3255.95",
None,
),
+ (
+ "Mozilla/5.0 (Linux; Android 4.4.4; Google Nexus 7 2013 - 4.4.4 - "
+ "API 19 - 1200x1920 Build/KTU84P) AppleWebKit/537.36 (KHTML, like Gecko) "
+ "Chrome/51.0.2704.106 Crosswalk/21.51.546.7 Safari/537.36",
+ "chrome",
+ "android",
+ "51.0.2704.106",
+ None,
+ ),
]
for ua, browser, platform, version, lang in user_agents:
request = wrappers.Request({"HTTP_USER_AGENT": ua})
From bdef20634c5c28a911c0b22eec562ab68c366bdf Mon Sep 17 00:00:00 2001
From: Joshua Bronson
Date: Fri, 31 May 2019 13:39:36 -0400
Subject: [PATCH 026/733] avoid shadowing (for value in value)
---
src/werkzeug/datastructures.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index c2b4c021b..91d1b8343 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -46,8 +46,8 @@ def iter_multi_items(mapping):
elif isinstance(mapping, dict):
for key, value in iteritems(mapping):
if isinstance(value, (tuple, list)):
- for value in value:
- yield key, value
+ for v in value:
+ yield key, v
else:
yield key, value
else:
From 2b581c005309902c4d01029615d5418c0d617c32 Mon Sep 17 00:00:00 2001
From: Joshua Bronson
Date: Fri, 31 May 2019 14:13:00 -0400
Subject: [PATCH 027/733] rm superfluous ` = None` last line
...of (nested) function. This statement has no effect.
---
src/werkzeug/serving.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py
index ff9f8805f..6127761c3 100644
--- a/src/werkzeug/serving.py
+++ b/src/werkzeug/serving.py
@@ -297,7 +297,6 @@ def execute(app):
finally:
if hasattr(application_iter, "close"):
application_iter.close()
- application_iter = None
try:
execute(self.server.app)
From 0a672d311de069b84b36456b1ecc5296c26bad48 Mon Sep 17 00:00:00 2001
From: Lewis Headden
Date: Fri, 31 May 2019 14:46:05 -0400
Subject: [PATCH 028/733] Add CHANGES entry
---
CHANGES.rst | 2 ++
1 file changed, 2 insertions(+)
diff --git a/CHANGES.rst b/CHANGES.rst
index 2ea163421..ab23dadfe 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -20,6 +20,8 @@ Unreleased
:pr:`1532`
- The user agent for Opera 60 on Mac is correctly reported as
"opera" instead of "chrome". :issue:`1556`
+- The platform for Crosswalk on Android is correctly reported as
+ "android" instead of "chromeos". (:pr:`1572`)
Version 0.15.5
From d6b54e3731c92059083ac6f1ffec69b1cbf8e31b Mon Sep 17 00:00:00 2001
From: Joshua Bronson
Date: Fri, 31 May 2019 19:57:00 +0000
Subject: [PATCH 029/733] have you heard the good news about with statements
---
src/werkzeug/debug/tbtools.py | 9 ++++-----
1 file changed, 4 insertions(+), 5 deletions(-)
diff --git a/src/werkzeug/debug/tbtools.py b/src/werkzeug/debug/tbtools.py
index 70a5bef47..b34403dce 100644
--- a/src/werkzeug/debug/tbtools.py
+++ b/src/werkzeug/debug/tbtools.py
@@ -573,13 +573,12 @@ def sourcelines(self):
if source is None:
try:
- f = open(to_native(self.filename, get_filesystem_encoding()), mode="rb")
+ with open(
+ to_native(self.filename, get_filesystem_encoding()), mode="rb"
+ ) as f:
+ source = f.read()
except IOError:
return []
- try:
- source = f.read()
- finally:
- f.close()
# already unicode? return right away
if isinstance(source, text_type):
From b82ef621773ca88c13ccbf24ab3e2688f0b43a8c Mon Sep 17 00:00:00 2001
From: Mark Roth
Date: Fri, 31 May 2019 14:11:04 -0400
Subject: [PATCH 030/733] warn when current server name doesn't match
configured server name
---
CHANGES.rst | 2 ++
src/werkzeug/routing.py | 8 +++++++-
tests/test_routing.py | 10 ++++++++++
3 files changed, 19 insertions(+), 1 deletion(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 5177b3e74..c25dbbdd3 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -22,6 +22,8 @@ Unreleased
"opera" instead of "chrome". :issue:`1556`
- The platform for Crosswalk on Android is correctly reported as
"android" instead of "chromeos". (:pr:`1572`)
+- Issue warning when current server name does not match configured
+ server name. :issue:`760`
Version 0.15.5
diff --git a/src/werkzeug/routing.py b/src/werkzeug/routing.py
index 8ff7df180..c37f73d7e 100644
--- a/src/werkzeug/routing.py
+++ b/src/werkzeug/routing.py
@@ -100,6 +100,7 @@
import posixpath
import re
import uuid
+import warnings
from pprint import pformat
from threading import Lock
@@ -1512,10 +1513,15 @@ def bind_to_environ(self, environ, server_name=None, subdomain=None):
offset = -len(real_server_name)
if cur_server_name[offset:] != real_server_name:
# This can happen even with valid configs if the server was
- # accesssed directly by IP address under some situations.
+ # accessed directly by IP address under some situations.
# Instead of raising an exception like in Werkzeug 0.7 or
# earlier we go by an invalid subdomain which will result
# in a 404 error on matching.
+ warnings.warn(
+ "Current server name '{}' doesn't match configured "
+ "server name '{}'".format(wsgi_server_name, real_server_name),
+ stacklevel=2,
+ )
subdomain = ""
else:
subdomain = ".".join(filter(None, cur_server_name[:offset]))
diff --git a/tests/test_routing.py b/tests/test_routing.py
index 537883dbd..56a6be6a5 100644
--- a/tests/test_routing.py
+++ b/tests/test_routing.py
@@ -358,6 +358,16 @@ def test_http_host_before_server_name():
assert adapter.build("index") == "http://wiki.example.com/"
+def test_invalid_subdomain_warning():
+ env = create_environ("/foo")
+ env["SERVER_NAME"] = env["HTTP_HOST"] = "foo.example.com"
+ m = r.Map([r.Rule("/foo", endpoint="foo")])
+ with pytest.warns(UserWarning) as record:
+ a = m.bind_to_environ(env, server_name="bar.example.com")
+ assert a.subdomain == ""
+ assert len(record) == 1
+
+
def test_adapter_url_parameter_sorting():
map = r.Map(
[r.Rule("/", endpoint="index")], sort_parameters=True, sort_key=lambda x: x[1]
From da92dc7dc43e39a83ca064cf6b536e1bbe482bec Mon Sep 17 00:00:00 2001
From: David Lord
Date: Mon, 10 Jun 2019 13:59:50 -0700
Subject: [PATCH 031/733] match server names with default scheme and port
---
CHANGES.rst | 7 +++++--
src/werkzeug/routing.py | 31 +++++++++++++++++++++++--------
tests/test_routing.py | 11 +++++++++++
3 files changed, 39 insertions(+), 10 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index c25dbbdd3..8d33e32f9 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -22,8 +22,11 @@ Unreleased
"opera" instead of "chrome". :issue:`1556`
- The platform for Crosswalk on Android is correctly reported as
"android" instead of "chromeos". (:pr:`1572`)
-- Issue warning when current server name does not match configured
- server name. :issue:`760`
+- Issue a warning when the current server name does not match the
+ configured server name. :issue:`760`
+- A configured server name with the default port for a scheme will
+ match the current server name without the port if the current scheme
+ matches. :pr:`1584`
Version 0.15.5
diff --git a/src/werkzeug/routing.py b/src/werkzeug/routing.py
index c37f73d7e..b7b5a066e 100644
--- a/src/werkzeug/routing.py
+++ b/src/werkzeug/routing.py
@@ -1485,32 +1485,47 @@ def bind_to_environ(self, environ, server_name=None, subdomain=None):
:class:`MapAdapter` so that you don't have to pass the path info to
the match method.
- .. versionchanged:: 0.5
- previously this method accepted a bogus `calculate_subdomain`
- parameter that did not have any effect. It was removed because
- of that.
+ .. versionchanged:: 1.0.0
+ If the passed server name specifies port 443, it will match
+ if the incoming scheme is ``https`` without a port.
+
+ .. versionchanged:: 1.0.0
+ A warning is shown when the passed server name does not
+ match the incoming WSGI server name.
.. versionchanged:: 0.8
This will no longer raise a ValueError when an unexpected server
name was passed.
+ .. versionchanged:: 0.5
+ previously this method accepted a bogus `calculate_subdomain`
+ parameter that did not have any effect. It was removed because
+ of that.
+
:param environ: a WSGI environment.
:param server_name: an optional server name hint (see above).
:param subdomain: optionally the current subdomain (see above).
"""
environ = _get_environ(environ)
-
wsgi_server_name = get_host(environ).lower()
+ scheme = environ["wsgi.url_scheme"]
if server_name is None:
server_name = wsgi_server_name
else:
server_name = server_name.lower()
+ # strip standard port to match get_host()
+ if scheme == "http" and server_name.endswith(":80"):
+ server_name = server_name[:-3]
+ elif scheme == "https" and server_name.endswith(":443"):
+ server_name = server_name[:-4]
+
if subdomain is None and not self.host_matching:
cur_server_name = wsgi_server_name.split(".")
real_server_name = server_name.split(".")
offset = -len(real_server_name)
+
if cur_server_name[offset:] != real_server_name:
# This can happen even with valid configs if the server was
# accessed directly by IP address under some situations.
@@ -1518,8 +1533,8 @@ def bind_to_environ(self, environ, server_name=None, subdomain=None):
# earlier we go by an invalid subdomain which will result
# in a 404 error on matching.
warnings.warn(
- "Current server name '{}' doesn't match configured "
- "server name '{}'".format(wsgi_server_name, real_server_name),
+ "Current server name '{}' doesn't match configured"
+ " server name '{}'".format(wsgi_server_name, server_name),
stacklevel=2,
)
subdomain = ""
@@ -1539,7 +1554,7 @@ def _get_wsgi_string(name):
server_name,
script_name,
subdomain,
- environ["wsgi.url_scheme"],
+ scheme,
environ["REQUEST_METHOD"],
path_info,
query_args=query_args,
diff --git a/tests/test_routing.py b/tests/test_routing.py
index 56a6be6a5..07127e8a5 100644
--- a/tests/test_routing.py
+++ b/tests/test_routing.py
@@ -368,6 +368,17 @@ def test_invalid_subdomain_warning():
assert len(record) == 1
+@pytest.mark.parametrize(
+ ("base", "name"),
+ (("http://localhost", "localhost:80"), ("https://localhost", "localhost:443")),
+)
+def test_server_name_match_default_port(base, name):
+ environ = create_environ("/foo", base_url=base)
+ map = r.Map([r.Rule("/foo", endpoint="foo")])
+ adapter = map.bind_to_environ(environ, server_name=name)
+ assert adapter.match() == ("foo", {})
+
+
def test_adapter_url_parameter_sorting():
map = r.Map(
[r.Rule("/", endpoint="index")], sort_parameters=True, sort_key=lambda x: x[1]
From bc1b2e2593dea48b631b2410563dd9df9510456f Mon Sep 17 00:00:00 2001
From: David Lord
Date: Thu, 20 Jun 2019 10:35:45 -0700
Subject: [PATCH 032/733] add InternalServerError.original_exception attribute
---
CHANGES.rst | 3 +++
docs/exceptions.rst | 1 +
src/werkzeug/exceptions.py | 12 ++++++++++++
3 files changed, 16 insertions(+)
diff --git a/CHANGES.rst b/CHANGES.rst
index 8d33e32f9..3f6041cb2 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -27,6 +27,9 @@ Unreleased
- A configured server name with the default port for a scheme will
match the current server name without the port if the current scheme
matches. :pr:`1584`
+- :exc:`~exceptions.InternalServerError` has a ``original_exception``
+ attribute that frameworks can use to track the original cause of the
+ error. :pr:`1590`
Version 0.15.5
diff --git a/docs/exceptions.rst b/docs/exceptions.rst
index f491def57..6c82e64a7 100644
--- a/docs/exceptions.rst
+++ b/docs/exceptions.rst
@@ -53,6 +53,7 @@ The following error classes exist in Werkzeug:
.. autoexception:: RequestHeaderFieldsTooLarge
.. autoexception:: InternalServerError
+ :members:
.. autoexception:: NotImplemented
diff --git a/src/werkzeug/exceptions.py b/src/werkzeug/exceptions.py
index 9343cef38..fba808e9d 100644
--- a/src/werkzeug/exceptions.py
+++ b/src/werkzeug/exceptions.py
@@ -624,6 +624,9 @@ class InternalServerError(HTTPException):
Raise if an internal server error occurred. This is a good fallback if an
unknown error occurred in the dispatcher.
+
+ .. versionchanged:: 1.0.0
+ Added the :attr:`original_exception` attribute.
"""
code = 500
@@ -633,6 +636,15 @@ class InternalServerError(HTTPException):
" there is an error in the application."
)
+ def __init__(self, description=None, response=None, original_exception=None):
+ #: The original exception that caused this 500 error. Can be
+ #: used by frameworks to provide context when handling
+ #: unexpected errors.
+ self.original_exception = original_exception
+ super(InternalServerError, self).__init__(
+ description=description, response=response
+ )
+
class NotImplemented(HTTPException):
"""*501* `Not Implemented`
From f753a326343cf5d163720f0d954d62aba3c5618c Mon Sep 17 00:00:00 2001
From: David Lord
Date: Tue, 25 Jun 2019 14:13:17 -0700
Subject: [PATCH 033/733] fix server name warnings in tests
---
tests/test_routing.py | 17 ++++++++++++-----
1 file changed, 12 insertions(+), 5 deletions(-)
diff --git a/tests/test_routing.py b/tests/test_routing.py
index 07127e8a5..fe147df01 100644
--- a/tests/test_routing.py
+++ b/tests/test_routing.py
@@ -444,8 +444,10 @@ def test_server_name_interpolation():
assert adapter.match() == ("alt", {})
env = create_environ("/", "http://%s/" % server_name)
- adapter = map.bind_to_environ(env, server_name="foo")
- assert adapter.subdomain == ""
+
+ with pytest.warns(UserWarning):
+ adapter = map.bind_to_environ(env, server_name="foo")
+ assert adapter.subdomain == ""
def test_rule_emptying():
@@ -764,8 +766,10 @@ def test_external_building_with_port_bind_to_environ():
def test_external_building_with_port_bind_to_environ_wrong_servername():
map = r.Map([r.Rule("/", endpoint="index")])
environ = create_environ("/", "http://example.org:5000/")
- adapter = map.bind_to_environ(environ, server_name="example.org")
- assert adapter.subdomain == ""
+
+ with pytest.warns(UserWarning):
+ adapter = map.bind_to_environ(environ, server_name="example.org")
+ assert adapter.subdomain == ""
def test_converter_parser():
@@ -916,7 +920,10 @@ def test_server_name_casing():
env["SERVER_NAME"] = "127.0.0.1"
env["SERVER_PORT"] = "5000"
del env["HTTP_HOST"]
- a = m.bind_to_environ(env, server_name="example.com")
+
+ with pytest.warns(UserWarning):
+ a = m.bind_to_environ(env, server_name="example.com")
+
with pytest.raises(r.NotFound):
a.match()
From 701531ea26a36009e197b2c8bda1a6b47f09938c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E1=B4=87s=E1=B4=9B?=
Date: Wed, 3 Jul 2019 17:16:11 +0800
Subject: [PATCH 034/733] fix large cookie warn comment
I think the word is wrong here
---
src/werkzeug/http.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py
index 3f40b3080..f32032789 100644
--- a/src/werkzeug/http.py
+++ b/src/werkzeug/http.py
@@ -1195,9 +1195,9 @@ def dump_cookie(
if not PY2:
rv = rv.decode("latin1")
- # Warn if the final value of the cookie is less than the limit. If the
- # cookie is too large, then it may be silently ignored, which can be quite
- # hard to debug.
+ # Warn if the final value of the cookie is larger than the limit. If the
+ # cookie is too large, then it may be silently ignored by the browser,
+ # which can be quite hard to debug.
cookie_size = len(rv)
if max_size and cookie_size > max_size:
From 9844e0447e63366273c8b9f9f2400e19b45532d1 Mon Sep 17 00:00:00 2001
From: Min ho Kim
Date: Mon, 8 Jul 2019 01:23:43 +1000
Subject: [PATCH 035/733] Fix typos (#1602)
---
examples/README.rst | 2 +-
examples/coolmagic/utils.py | 2 +-
examples/cupoftee/application.py | 2 +-
examples/cupoftee/network.py | 2 +-
examples/cupoftee/templates/serverlist.html | 2 +-
examples/simplewiki/database.py | 2 +-
src/werkzeug/datastructures.py | 2 +-
src/werkzeug/middleware/proxy_fix.py | 2 +-
src/werkzeug/urls.py | 2 +-
src/werkzeug/utils.py | 2 +-
src/werkzeug/wrappers/request.py | 2 +-
11 files changed, 11 insertions(+), 11 deletions(-)
diff --git a/examples/README.rst b/examples/README.rst
index 2b9df866a..31b50ef5e 100644
--- a/examples/README.rst
+++ b/examples/README.rst
@@ -6,7 +6,7 @@ This directory contains various example applications and example code of
Werkzeug powered applications.
Beside the proof of concept applications and code snippets in the partial
-folder they all have external depencencies for template engines or database
+folder they all have external dependencies for template engines or database
adapters (SQLAlchemy only so far). Also, every application has click as
external dependency, used to create the command line interface.
diff --git a/examples/coolmagic/utils.py b/examples/coolmagic/utils.py
index a1bc1ea31..4140eb537 100644
--- a/examples/coolmagic/utils.py
+++ b/examples/coolmagic/utils.py
@@ -5,7 +5,7 @@
This module contains the subclasses of the base request and response
objects provided by werkzeug. The subclasses know about their charset
- and implement some additional functionallity like the ability to link
+ and implement some additional functionality like the ability to link
to view functions.
:copyright: 2007 Pallets
diff --git a/examples/cupoftee/application.py b/examples/cupoftee/application.py
index 540e3f59f..0e4b00235 100644
--- a/examples/cupoftee/application.py
+++ b/examples/cupoftee/application.py
@@ -3,7 +3,7 @@
cupoftee.application
~~~~~~~~~~~~~~~~~~~~
- The WSGI appliction for the cup of tee browser.
+ The WSGI application for the cup of tee browser.
:copyright: 2007 Pallets
:license: BSD-3-Clause
diff --git a/examples/cupoftee/network.py b/examples/cupoftee/network.py
index 74c775aa1..c083efd4a 100644
--- a/examples/cupoftee/network.py
+++ b/examples/cupoftee/network.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
"""
- cupyoftee.network
+ cupoftee.network
~~~~~~~~~~~~~~~~~
Query the servers for information.
diff --git a/examples/cupoftee/templates/serverlist.html b/examples/cupoftee/templates/serverlist.html
index 05d15f194..308564af1 100644
--- a/examples/cupoftee/templates/serverlist.html
+++ b/examples/cupoftee/templates/serverlist.html
@@ -8,7 +8,7 @@
Server List
This list was last synced on
{{ cup.master.last_sync.strftime('%d %B %Y at %H:%M UTC') }}.
{% else %}
- Syncronization with master server in progress. Reload the page in a minute
+ Synchronization with master server in progress. Reload the page in a minute
or two, to see the server list.
{% endif %}
diff --git a/examples/simplewiki/database.py b/examples/simplewiki/database.py
index f0cec34e2..b808aae0e 100644
--- a/examples/simplewiki/database.py
+++ b/examples/simplewiki/database.py
@@ -115,7 +115,7 @@ def __repr__(self):
class RevisionedPage(Page, Revision):
"""
- Represents a wiki page with a revision. Thanks to multiple inhertiance
+ Represents a wiki page with a revision. Thanks to multiple inheritance
and the ability of SQLAlchemy to map to joins we can combine `Page` and
`Revision` into one class here.
"""
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index 91d1b8343..1d5887118 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -1222,7 +1222,7 @@ def set(self, _key, _value, **kw):
ikey = _key.lower()
for idx, (old_key, _old_value) in enumerate(listiter):
if old_key.lower() == ikey:
- # replace first ocurrence
+ # replace first occurrence
self._list[idx] = (_key, _value)
break
else:
diff --git a/src/werkzeug/middleware/proxy_fix.py b/src/werkzeug/middleware/proxy_fix.py
index 0046799c8..11f9a6916 100644
--- a/src/werkzeug/middleware/proxy_fix.py
+++ b/src/werkzeug/middleware/proxy_fix.py
@@ -78,7 +78,7 @@ class ProxyFix(object):
Support ``X-Forwarded-Port`` and ``X-Forwarded-Prefix``.
.. versionchanged:: 0.15
- ``X-Fowarded-Host`` and ``X-Forwarded-Port`` modify
+ ``X-Forwarded-Host`` and ``X-Forwarded-Port`` modify
``SERVER_NAME`` and ``SERVER_PORT``.
"""
diff --git a/src/werkzeug/urls.py b/src/werkzeug/urls.py
index 38e9e5adf..4a1146fb1 100644
--- a/src/werkzeug/urls.py
+++ b/src/werkzeug/urls.py
@@ -578,7 +578,7 @@ def url_unparse(components):
# We generally treat file:///x and file:/x the same which is also
# what browsers seem to do. This also allows us to ignore a schema
- # register for netloc utilization or having to differenciate between
+ # register for netloc utilization or having to differentiate between
# empty and missing netloc.
if netloc or (scheme and path.startswith(s("/"))):
if path and path[:1] != s("/"):
diff --git a/src/werkzeug/utils.py b/src/werkzeug/utils.py
index 196bff560..59c6f2708 100644
--- a/src/werkzeug/utils.py
+++ b/src/werkzeug/utils.py
@@ -303,7 +303,7 @@ def get_content_type(mimetype, charset):
:param charset: The charset to be appended for text mimetypes.
:return: The content type.
- .. verionchanged:: 0.15
+ .. versionchanged:: 0.15
Any type that ends with ``+xml`` gets a charset, not just those
that start with ``application/``. Known text types such as
``application/javascript`` are also given charsets.
diff --git a/src/werkzeug/wrappers/request.py b/src/werkzeug/wrappers/request.py
index d1c71b647..4aafa6fd8 100644
--- a/src/werkzeug/wrappers/request.py
+++ b/src/werkzeug/wrappers/request.py
@@ -25,7 +25,7 @@ class Request(
class StreamOnlyMixin(object):
- """If mixed in before the request object this will change the bahavior
+ """If mixed in before the request object this will change the behavior
of it to disable handling of form parsing. This disables the
:attr:`files`, :attr:`form` attributes and will just provide a
:attr:`stream` attribute that however is always available.
From c13eba6b51c023eae0e405a6c47b3927cfd7f2c1 Mon Sep 17 00:00:00 2001
From: Mark McDonald
Date: Wed, 10 Jul 2019 14:29:09 +0800
Subject: [PATCH 036/733] Implements case-insensitive header equality
---
src/werkzeug/datastructures.py | 7 ++++++-
tests/test_datastructures.py | 14 ++++++++++++++
2 files changed, 20 insertions(+), 1 deletion(-)
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index 1d5887118..a109dda38 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -977,7 +977,12 @@ def __getitem__(self, key, _get_mode=False):
raise exceptions.BadRequestKeyError(key)
def __eq__(self, other):
- return other.__class__ is self.__class__ and set(other._list) == set(self._list)
+ def lowered(item):
+ return (item[0].lower(),) + item[1:]
+
+ return other.__class__ is self.__class__ and set(
+ map(lowered, other._list)
+ ) == set(map(lowered, self._list))
__hash__ = None
diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py
index 97b4b205a..bf6b3f7cc 100644
--- a/tests/test_datastructures.py
+++ b/tests/test_datastructures.py
@@ -800,6 +800,20 @@ def test_to_wsgi_list_bytes(self):
strict_eq(key, u"Key")
strict_eq(value, u"Value")
+ def test_equality(self):
+ # test equality, given keys are case insensitive
+ h1 = self.storage_class()
+ h1.add("X-Foo", "foo")
+ h1.add("X-Bar", "bah")
+ h1.add("X-Bar", "humbug")
+
+ h2 = self.storage_class()
+ h2.add("x-foo", "foo")
+ h2.add("x-bar", "bah")
+ h2.add("x-bar", "humbug")
+
+ assert h1 == h2
+
class TestEnvironHeaders(object):
storage_class = datastructures.EnvironHeaders
From dc670d9b4f68b0509339f565d9cc142631226b7b Mon Sep 17 00:00:00 2001
From: Mark McDonald
Date: Thu, 11 Jul 2019 10:22:09 +0800
Subject: [PATCH 037/733] Add changelog for header eq
---
CHANGES.rst | 2 ++
1 file changed, 2 insertions(+)
diff --git a/CHANGES.rst b/CHANGES.rst
index e4a08b698..0f771e93e 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -30,6 +30,8 @@ Unreleased
- :exc:`~exceptions.InternalServerError` has a ``original_exception``
attribute that frameworks can use to track the original cause of the
error. :pr:`1590`
+- Headers are tested for equality independent of the header key case,
+ such that ``X-Foo`` is the same as ``x-foo``. :pr:`1605`
Version 0.15.5
From dbd43f90add3d52cc05efc4e893a99814c2c6af3 Mon Sep 17 00:00:00 2001
From: Joey Surls
Date: Sat, 18 May 2019 08:25:05 -0500
Subject: [PATCH 038/733] Add 'None' as a legal value for samesite
---
CHANGES.rst | 2 ++
src/werkzeug/http.py | 15 ++++++++++-----
tests/test_http.py | 17 +++++++++++------
3 files changed, 23 insertions(+), 11 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 0f771e93e..3453eec50 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -32,6 +32,8 @@ Unreleased
error. :pr:`1590`
- Headers are tested for equality independent of the header key case,
such that ``X-Foo`` is the same as ``x-foo``. :pr:`1605`
+- :meth:`http.dump_cookie` accepts ``'None'`` as a value for
+ ``samesite``. :issue:`1549`
Version 0.15.5
diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py
index f32032789..8b16351ed 100644
--- a/src/werkzeug/http.py
+++ b/src/werkzeug/http.py
@@ -1135,10 +1135,13 @@ def dump_cookie(
:param max_size: Warn if the final header value exceeds this size. The
default, 4093, should be safely `supported by most browsers
`_. Set to 0 to disable this check.
- :param samesite: Limits the scope of the cookie such that it will only
- be attached to requests if those requests are "same-site".
+ :param samesite: Limits the scope of the cookie such that it will
+ only be attached to requests if those requests are same-site.
.. _`cookie`: http://browsercookielimits.squawky.net/
+
+ .. versionchanged:: 1.0.0
+ The string ``'None'`` is accepted for ``samesite``.
"""
key = to_bytes(key, charset)
value = to_bytes(value, charset)
@@ -1154,9 +1157,11 @@ def dump_cookie(
elif max_age is not None and sync_expires:
expires = to_bytes(cookie_date(time() + max_age))
- samesite = samesite.title() if samesite else None
- if samesite not in ("Strict", "Lax", None):
- raise ValueError("invalid SameSite value; must be 'Strict', 'Lax' or None")
+ if samesite is not None:
+ samesite = samesite.title()
+
+ if samesite not in {"Strict", "Lax", "None"}:
+ raise ValueError("SameSite must be 'Strict', 'Lax', or 'None'.")
buf = [key + b"=" + _cookie_quote(value)]
diff --git a/tests/test_http.py b/tests/test_http.py
index 52275c654..400fc9d78 100644
--- a/tests/test_http.py
+++ b/tests/test_http.py
@@ -543,16 +543,21 @@ def test_cookie_maxsize(self, recwarn):
assert "the limit is 512 bytes" in str(w.message)
@pytest.mark.parametrize(
- "input, expected",
- [
+ ("samesite", "expected"),
+ (
("strict", "foo=bar; Path=/; SameSite=Strict"),
("lax", "foo=bar; Path=/; SameSite=Lax"),
+ ("none", "foo=bar; Path=/; SameSite=None"),
(None, "foo=bar; Path=/"),
- ],
+ ),
)
- def test_cookie_samesite_attribute(self, input, expected):
- val = http.dump_cookie("foo", "bar", samesite=input)
- strict_eq(val, expected)
+ def test_cookie_samesite_attribute(self, samesite, expected):
+ value = http.dump_cookie("foo", "bar", samesite=samesite)
+ assert value == expected
+
+ def test_cookie_samesite_invalid(self):
+ with pytest.raises(ValueError):
+ http.dump_cookie("foo", "bar", samesite="invalid")
class TestRange(object):
From af739bb2a295204ebc0f0fcca136f011652f9b15 Mon Sep 17 00:00:00 2001
From: gokcegrbl
Date: Fri, 31 May 2019 16:18:09 +0000
Subject: [PATCH 039/733] Use @property decorator for properties
---
examples/couchy/utils.py | 29 +++-
examples/cupoftee/application.py | 4 +-
examples/plnt/utils.py | 29 +++-
examples/shorty/utils.py | 29 +++-
src/werkzeug/datastructures.py | 127 +++++++++--------
src/werkzeug/debug/__init__.py | 9 +-
src/werkzeug/local.py | 9 +-
src/werkzeug/test.py | 185 ++++++++++++-------------
src/werkzeug/wrappers/base_response.py | 24 ++--
src/werkzeug/wrappers/etag.py | 26 ++--
10 files changed, 258 insertions(+), 213 deletions(-)
diff --git a/examples/couchy/utils.py b/examples/couchy/utils.py
index 571a7ed98..7a06e690f 100644
--- a/examples/couchy/utils.py
+++ b/examples/couchy/utils.py
@@ -75,8 +75,27 @@ def entries(self):
)
]
- has_previous = property(lambda self: self.page > 1)
- has_next = property(lambda self: self.page < self.pages)
- previous = property(lambda self: url_for(self.endpoint, page=self.page - 1))
- next = property(lambda self: url_for(self.endpoint, page=self.page + 1))
- pages = property(lambda self: max(0, self.count - 1) // self.per_page + 1)
+ @property
+ def has_previous(self):
+ """Return True if there are pages before the current one."""
+ return self.page > 1
+
+ @property
+ def has_next(self):
+ """Return True if there are pages after the current one."""
+ return self.page < self.pages
+
+ @property
+ def previous(self):
+ """Return the URL for the previous page."""
+ return url_for(self.endpoint, page=self.page - 1)
+
+ @property
+ def next(self):
+ """Return the URL for the next page."""
+ return url_for(self.endpoint, page=self.page + 1)
+
+ @property
+ def pages(self):
+ """Return the number of pages."""
+ return max(0, self.count - 1) // self.per_page + 1
diff --git a/examples/cupoftee/application.py b/examples/cupoftee/application.py
index 0e4b00235..17c8e53a9 100644
--- a/examples/cupoftee/application.py
+++ b/examples/cupoftee/application.py
@@ -47,7 +47,9 @@ def __init__(cls, name, bases, d):
Rule(cls.url_rule, endpoint=cls.identifier, **cls.url_arguments)
)
- identifier = property(lambda self: self.__name__.lower())
+ @property
+ def identifier(cls):
+ return cls.__name__.lower()
def _with_metaclass(meta, *bases):
diff --git a/examples/plnt/utils.py b/examples/plnt/utils.py
index 5c6f0d0bb..936d22efd 100644
--- a/examples/plnt/utils.py
+++ b/examples/plnt/utils.py
@@ -135,8 +135,27 @@ def entries(self):
def count(self):
return self.query.count()
- has_previous = property(lambda self: self.page > 1)
- has_next = property(lambda self: self.page < self.pages)
- previous = property(lambda self: url_for(self.endpoint, page=self.page - 1))
- next = property(lambda self: url_for(self.endpoint, page=self.page + 1))
- pages = property(lambda self: max(0, self.count - 1) // self.per_page + 1)
+ @property
+ def has_previous(self):
+ """Return True if there are pages before the current one."""
+ return self.page > 1
+
+ @property
+ def has_next(self):
+ """Return True if there are pages after the current one."""
+ return self.page < self.pages
+
+ @property
+ def previous(self):
+ """Return the URL for the previous page."""
+ return url_for(self.endpoint, page=self.page - 1)
+
+ @property
+ def next(self):
+ """Return the URL for the next page."""
+ return url_for(self.endpoint, page=self.page + 1)
+
+ @property
+ def pages(self):
+ """Return the number of pages."""
+ return max(0, self.count - 1) // self.per_page + 1
diff --git a/examples/shorty/utils.py b/examples/shorty/utils.py
index f61f9ef98..2a2a766c5 100644
--- a/examples/shorty/utils.py
+++ b/examples/shorty/utils.py
@@ -85,8 +85,27 @@ def entries(self):
.all()
)
- has_previous = property(lambda self: self.page > 1)
- has_next = property(lambda self: self.page < self.pages)
- previous = property(lambda self: url_for(self.endpoint, page=self.page - 1))
- next = property(lambda self: url_for(self.endpoint, page=self.page + 1))
- pages = property(lambda self: max(0, self.count - 1) // self.per_page + 1)
+ @property
+ def has_previous(self):
+ """Return True if there are pages before the current one."""
+ return self.page > 1
+
+ @property
+ def has_next(self):
+ """Return True if there are pages after the current one."""
+ return self.page < self.pages
+
+ @property
+ def previous(self):
+ """Return the URL for the previous page."""
+ return url_for(self.endpoint, page=self.page - 1)
+
+ @property
+ def next(self):
+ """Return the URL for the next page."""
+ return url_for(self.endpoint, page=self.page + 1)
+
+ @property
+ def pages(self):
+ """Return the number of pages."""
+ return max(0, self.count - 1) // self.per_page + 1
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index a109dda38..9cfa1eb13 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -2455,70 +2455,77 @@ def __init__(self, auth_type, data=None):
dict.__init__(self, data or {})
self.type = auth_type
- username = property(
- lambda self: self.get("username"),
- doc="""
- The username transmitted. This is set for both basic and digest
- auth all the time.""",
- )
- password = property(
- lambda self: self.get("password"),
- doc="""
- When the authentication type is basic this is the password
- transmitted by the client, else `None`.""",
- )
- realm = property(
- lambda self: self.get("realm"),
- doc="""
- This is the server realm sent back for HTTP digest auth.""",
- )
- nonce = property(
- lambda self: self.get("nonce"),
- doc="""
- The nonce the server sent for digest auth, sent back by the client.
- A nonce should be unique for every 401 response for HTTP digest
- auth.""",
- )
- uri = property(
- lambda self: self.get("uri"),
- doc="""
- The URI from Request-URI of the Request-Line; duplicated because
+ @property
+ def username(self):
+ """The username transmitted. This is set for both basic and digest
+ auth all the time.
+ """
+ return self.get("username")
+
+ @property
+ def password(self):
+ """When the authentication type is basic this is the password
+ transmitted by the client, else `None`.
+ """
+ return self.get("password")
+
+ @property
+ def realm(self):
+ """This is the server realm sent back for HTTP digest auth."""
+ return self.get("realm")
+
+ @property
+ def nonce(self):
+ """The nonce the server sent for digest auth, sent back by the client.
+ A nonce should be unique for every 401 response for HTTP digest auth.
+ """
+ return self.get("nonce")
+
+ @property
+ def uri(self):
+ """The URI from Request-URI of the Request-Line; duplicated because
proxies are allowed to change the Request-Line in transit. HTTP
- digest auth only.""",
- )
- nc = property(
- lambda self: self.get("nc"),
- doc="""
- The nonce count value transmitted by clients if a qop-header is
- also transmitted. HTTP digest auth only.""",
- )
- cnonce = property(
- lambda self: self.get("cnonce"),
- doc="""
- If the server sent a qop-header in the ``WWW-Authenticate``
+ digest auth only.
+ """
+ return self.get("uri")
+
+ @property
+ def nc(self):
+ """The nonce count value transmitted by clients if a qop-header is
+ also transmitted. HTTP digest auth only.
+ """
+ return self.get("nc")
+
+ @property
+ def cnonce(self):
+ """If the server sent a qop-header in the ``WWW-Authenticate``
header, the client has to provide this value for HTTP digest auth.
- See the RFC for more details.""",
- )
- response = property(
- lambda self: self.get("response"),
- doc="""
- A string of 32 hex digits computed as defined in RFC 2617, which
- proves that the user knows a password. Digest auth only.""",
- )
- opaque = property(
- lambda self: self.get("opaque"),
- doc="""
- The opaque header from the server returned unchanged by the client.
+ See the RFC for more details.
+ """
+ return self.get("cnonce")
+
+ @property
+ def response(self):
+ """A string of 32 hex digits computed as defined in RFC 2617, which
+ proves that the user knows a password. Digest auth only.
+ """
+ return self.get("response")
+
+ @property
+ def opaque(self):
+ """The opaque header from the server returned unchanged by the client.
It is recommended that this string be base64 or hexadecimal data.
- Digest auth only.""",
- )
- qop = property(
- lambda self: self.get("qop"),
- doc="""
- Indicates what "quality of protection" the client has applied to
+ Digest auth only.
+ """
+ return self.get("opaque")
+
+ @property
+ def qop(self):
+ """Indicates what "quality of protection" the client has applied to
the message for HTTP digest auth. Note that this is a single token,
- not a quoted list of alternatives as in WWW-Authenticate.""",
- )
+ not a quoted list of alternatives as in WWW-Authenticate.
+ """
+ return self.get("qop")
class WWWAuthenticate(UpdateDictMixin, dict):
diff --git a/src/werkzeug/debug/__init__.py b/src/werkzeug/debug/__init__.py
index 381d60f24..bb188017a 100644
--- a/src/werkzeug/debug/__init__.py
+++ b/src/werkzeug/debug/__init__.py
@@ -276,17 +276,16 @@ def __init__(
else:
self.pin = None
- def _get_pin(self):
+ @property
+ def pin(self):
if not hasattr(self, "_pin"):
self._pin, self._pin_cookie = get_pin_and_cookie_name(self.app)
return self._pin
- def _set_pin(self, value):
+ @pin.setter
+ def pin(self, value):
self._pin = value
- pin = property(_get_pin, _set_pin)
- del _get_pin, _set_pin
-
@property
def pin_cookie_name(self):
"""The name of the pin cookie."""
diff --git a/src/werkzeug/local.py b/src/werkzeug/local.py
index 9a6088ccf..626b87b0f 100644
--- a/src/werkzeug/local.py
+++ b/src/werkzeug/local.py
@@ -121,15 +121,14 @@ def __init__(self):
def __release_local__(self):
self._local.__release_local__()
- def _get__ident_func__(self):
+ @property
+ def __ident_func__(self):
return self._local.__ident_func__
- def _set__ident_func__(self, value):
+ @__ident_func__.setter
+ def __ident_func__(self, value):
object.__setattr__(self._local, "__ident_func__", value)
- __ident_func__ = property(_get__ident_func__, _set__ident_func__)
- del _get__ident_func__, _set__ident_func__
-
def __call__(self):
def _lookup():
rv = self.top
diff --git a/src/werkzeug/test.py b/src/werkzeug/test.py
index 5476006a1..672e61c24 100644
--- a/src/werkzeug/test.py
+++ b/src/werkzeug/test.py
@@ -467,162 +467,147 @@ def base_url(self, value):
self.host = netloc
self.url_scheme = scheme
- def _get_content_type(self):
+ @property
+ def content_type(self):
+ """The content type for the request. Reflected from and to
+ the :attr:`headers`. Do not set if you set :attr:`files` or
+ :attr:`form` for auto detection.
+ """
ct = self.headers.get("Content-Type")
if ct is None and not self._input_stream:
if self._files:
return "multipart/form-data"
- elif self._form:
+ if self._form:
return "application/x-www-form-urlencoded"
return None
return ct
- def _set_content_type(self, value):
+ @content_type.setter
+ def content_type(self, value):
if value is None:
self.headers.pop("Content-Type", None)
else:
self.headers["Content-Type"] = value
- content_type = property(
- _get_content_type,
- _set_content_type,
- doc="""The content type for the request. Reflected from and to
- the :attr:`headers`. Do not set if you set :attr:`files` or
- :attr:`form` for auto detection.""",
- )
- del _get_content_type, _set_content_type
-
- def _get_content_length(self):
- return self.headers.get("Content-Length", type=int)
+ @property
+ def mimetype(self):
+ """The mimetype (content type without charset etc.)
- def _get_mimetype(self):
+ .. versionadded:: 0.14
+ """
ct = self.content_type
if ct:
return ct.split(";")[0].strip()
+ return None
- def _set_mimetype(self, value):
+ @mimetype.setter
+ def mimetype(self, value):
self.content_type = get_content_type(value, self.charset)
- def _get_mimetype_params(self):
+ @property
+ def mimetype_params(self):
+ """ The mimetype parameters as dict. For example if the
+ content type is ``text/html; charset=utf-8`` the params would be
+ ``{'charset': 'utf-8'}``.
+
+ .. versionadded:: 0.14
+ """
+
def on_update(d):
self.headers["Content-Type"] = dump_options_header(self.mimetype, d)
d = parse_options_header(self.headers.get("content-type", ""))[1]
return CallbackDict(d, on_update)
- mimetype = property(
- _get_mimetype,
- _set_mimetype,
- doc="""The mimetype (content type without charset etc.)
-
- .. versionadded:: 0.14
- """,
- )
- mimetype_params = property(
- _get_mimetype_params,
- doc=""" The mimetype parameters as dict. For example if the
- content type is ``text/html; charset=utf-8`` the params would be
- ``{'charset': 'utf-8'}``.
-
- .. versionadded:: 0.14
- """,
- )
- del _get_mimetype, _set_mimetype, _get_mimetype_params
+ @property
+ def content_length(self):
+ """The content length as integer. Reflected from and to the
+ :attr:`headers`. Do not set if you set :attr:`files` or
+ :attr:`form` for auto detection.
+ """
+ return self.headers.get("Content-Length", type=int)
- def _set_content_length(self, value):
+ @content_length.setter
+ def content_length(self, value):
if value is None:
self.headers.pop("Content-Length", None)
else:
self.headers["Content-Length"] = str(value)
- content_length = property(
- _get_content_length,
- _set_content_length,
- doc="""The content length as integer. Reflected from and to the
- :attr:`headers`. Do not set if you set :attr:`files` or
- :attr:`form` for auto detection.""",
- )
- del _get_content_length, _set_content_length
-
- def form_property(name, storage, doc): # noqa: B902
- key = "_" + name
-
- def getter(self):
- if self._input_stream is not None:
- raise AttributeError("an input stream is defined")
- rv = getattr(self, key)
- if rv is None:
- rv = storage()
- setattr(self, key, rv)
-
- return rv
-
- def setter(self, value):
- self._input_stream = None
- setattr(self, key, value)
-
- return property(getter, setter, doc=doc)
+ @property
+ def form(self):
+ """A :class:`MultiDict` of form values."""
+ if self.input_stream is not None:
+ raise AttributeError("an input stream is defined")
+ if self._form is None:
+ self._form = MultiDict()
+ return self._form
+
+ @form.setter
+ def form(self, value):
+ self._input_stream = None
+ self._form = value
- form = form_property("form", MultiDict, doc="A :class:`MultiDict` of form values.")
- files = form_property(
- "files",
- FileMultiDict,
- doc="""A :class:`FileMultiDict` of uploaded files. You can use
+ @property
+ def files(self):
+ """A :class:`FileMultiDict` of uploaded files. You can use
the :meth:`~FileMultiDict.add_file` method to add new files to
- the dict.""",
- )
- del form_property
+ the dict.
+ """
+ if self.input_stream is not None:
+ raise AttributeError("an input stream is defined")
+ if self._files is None:
+ self._files = FileMultiDict()
+ return self._files
+
+ @files.setter
+ def files(self, value):
+ self._input_stream = None
+ self._files = value
- def _get_input_stream(self):
+ @property
+ def input_stream(self):
+ """An optional input stream. If you set this it will clear
+ :attr:`form` and :attr:`files`.
+ """
return self._input_stream
- def _set_input_stream(self, value):
+ @input_stream.setter
+ def input_stream(self, value):
self._input_stream = value
- self._form = self._files = None
+ self._form = None
+ self._files = None
- input_stream = property(
- _get_input_stream,
- _set_input_stream,
- doc="""An optional input stream. If you set this it will clear
- :attr:`form` and :attr:`files`.""",
- )
- del _get_input_stream, _set_input_stream
-
- def _get_query_string(self):
+ @property
+ def query_string(self):
+ """The query string. If you set this to a string
+ :attr:`args` will no longer be available.
+ """
if self._query_string is None:
if self._args is not None:
return url_encode(self._args, charset=self.charset)
return ""
return self._query_string
- def _set_query_string(self, value):
+ @query_string.setter
+ def query_string(self, value):
self._query_string = value
self._args = None
- query_string = property(
- _get_query_string,
- _set_query_string,
- doc="""The query string. If you set this to a string
- :attr:`args` will no longer be available.""",
- )
- del _get_query_string, _set_query_string
-
- def _get_args(self):
+ @property
+ def args(self):
+ """The URL arguments as :class:`MultiDict`."""
if self._query_string is not None:
raise AttributeError("a query string is defined")
if self._args is None:
self._args = MultiDict()
return self._args
- def _set_args(self, value):
+ @args.setter
+ def args(self, value):
self._query_string = None
self._args = value
- args = property(
- _get_args, _set_args, doc="The URL arguments as :class:`MultiDict`."
- )
- del _get_args, _set_args
-
@property
def server_name(self):
"""The server name (read-only, use :attr:`host` to set)"""
@@ -634,7 +619,7 @@ def server_port(self):
pieces = self.host.split(":", 1)
if len(pieces) == 2 and pieces[1].isdigit():
return int(pieces[1])
- elif self.url_scheme == "https":
+ if self.url_scheme == "https":
return 443
return 80
diff --git a/src/werkzeug/wrappers/base_response.py b/src/werkzeug/wrappers/base_response.py
index d944a7d22..f473d5b1c 100644
--- a/src/werkzeug/wrappers/base_response.py
+++ b/src/werkzeug/wrappers/base_response.py
@@ -286,25 +286,26 @@ def from_app(cls, app, environ, buffered=False):
"""
return cls(*_run_wsgi_app(app, environ, buffered))
- def _get_status_code(self):
+ @property
+ def status_code(self):
+ """The HTTP Status code as number."""
return self._status_code
- def _set_status_code(self, code):
+ @status_code.setter
+ def status_code(self, code):
self._status_code = code
try:
self._status = "%d %s" % (code, HTTP_STATUS_CODES[code].upper())
except KeyError:
self._status = "%d UNKNOWN" % code
- status_code = property(
- _get_status_code, _set_status_code, doc="The HTTP Status code as number"
- )
- del _get_status_code, _set_status_code
-
- def _get_status(self):
+ @property
+ def status(self):
+ """The HTTP Status code."""
return self._status
- def _set_status(self, value):
+ @status.setter
+ def status(self, value):
try:
self._status = to_native(value)
except AttributeError:
@@ -318,9 +319,6 @@ def _set_status(self, value):
except IndexError:
raise ValueError("Empty status argument")
- status = property(_get_status, _set_status, doc="The HTTP Status code")
- del _get_status, _set_status
-
def get_data(self, as_text=False):
"""The string representation of the request body. Whenever you call
this property the request iterable is encoded and flattened. This
@@ -341,7 +339,7 @@ def get_data(self, as_text=False):
return rv
def set_data(self, value):
- """Sets a new string as response. The value set must either by a
+ """Sets a new string as response. The value set must be either a
unicode or bytestring. If a unicode string is set it's encoded
automatically to the charset of the response (utf-8 by default).
diff --git a/src/werkzeug/wrappers/etag.py b/src/werkzeug/wrappers/etag.py
index 0733506f1..03f5a4f68 100644
--- a/src/werkzeug/wrappers/etag.py
+++ b/src/werkzeug/wrappers/etag.py
@@ -268,7 +268,16 @@ def freeze(self, no_etag=False):
.. versionadded:: 0.7""",
)
- def _get_content_range(self):
+ @property
+ def content_range(self):
+ """The ``Content-Range`` header as
+ :class:`~werkzeug.datastructures.ContentRange` object. Even if
+ the header is not set it will provide such an object for easier
+ manipulation.
+
+ .. versionadded:: 0.7
+ """
+
def on_update(rng):
if not rng:
del self.headers["content-range"]
@@ -283,22 +292,11 @@ def on_update(rng):
rv = ContentRange(None, None, None, on_update=on_update)
return rv
- def _set_content_range(self, value):
+ @content_range.setter
+ def content_range(self, value):
if not value:
del self.headers["content-range"]
elif isinstance(value, string_types):
self.headers["Content-Range"] = value
else:
self.headers["Content-Range"] = value.to_header()
-
- content_range = property(
- _get_content_range,
- _set_content_range,
- doc="""The ``Content-Range`` header as
- :class:`~werkzeug.datastructures.ContentRange` object. Even if
- the header is not set it wil provide such an object for easier
- manipulation.
-
- .. versionadded:: 0.7""",
- )
- del _get_content_range, _set_content_range
From aecf542d5d426ea94f238d8511b9cf888d124db4 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Fri, 12 Jul 2019 09:55:21 -0700
Subject: [PATCH 040/733] extract common test form and files properties
behavior
---
src/werkzeug/test.py | 55 ++++++++++++++++----------
src/werkzeug/wrappers/base_response.py | 4 +-
src/werkzeug/wrappers/etag.py | 7 ++--
3 files changed, 40 insertions(+), 26 deletions(-)
diff --git a/src/werkzeug/test.py b/src/werkzeug/test.py
index 672e61c24..6746c500f 100644
--- a/src/werkzeug/test.py
+++ b/src/werkzeug/test.py
@@ -496,9 +496,7 @@ def mimetype(self):
.. versionadded:: 0.14
"""
ct = self.content_type
- if ct:
- return ct.split(";")[0].strip()
- return None
+ return ct.split(";")[0].strip() if ct else None
@mimetype.setter
def mimetype(self, value):
@@ -534,36 +532,53 @@ def content_length(self, value):
else:
self.headers["Content-Length"] = str(value)
+ def _get_form(self, name, storage):
+ """Common behavior for getting the :attr:`form` and
+ :attr:`files` properties.
+
+ :param name: Name of the internal cached attribute.
+ :param storage: Storage class used for the data.
+ """
+ if self.input_stream is not None:
+ raise AttributeError("an input stream is defined")
+
+ rv = getattr(self, name)
+
+ if rv is None:
+ rv = storage()
+ setattr(self, name, rv)
+
+ return rv
+
+ def _set_form(self, name, value):
+ """Common behavior for setting the :attr:`form` and
+ :attr:`files` properties.
+
+ :param name: Name of the internal cached attribute.
+ :param value: Value to assign to the attribute.
+ """
+ self._input_stream = None
+ setattr(self, name, value)
+
@property
def form(self):
"""A :class:`MultiDict` of form values."""
- if self.input_stream is not None:
- raise AttributeError("an input stream is defined")
- if self._form is None:
- self._form = MultiDict()
- return self._form
+ return self._get_form("_form", MultiDict)
@form.setter
def form(self, value):
- self._input_stream = None
- self._form = value
+ self._set_form("_form", value)
@property
def files(self):
- """A :class:`FileMultiDict` of uploaded files. You can use
- the :meth:`~FileMultiDict.add_file` method to add new files to
- the dict.
+ """A :class:`FileMultiDict` of uploaded files. Use
+ :meth:`~FileMultiDict.add_file` to add new files.
"""
- if self.input_stream is not None:
- raise AttributeError("an input stream is defined")
- if self._files is None:
- self._files = FileMultiDict()
- return self._files
+ return self._get_form("_files", FileMultiDict)
@files.setter
def files(self, value):
- self._input_stream = None
- self._files = value
+ self._set_form("_files", value)
@property
def input_stream(self):
diff --git a/src/werkzeug/wrappers/base_response.py b/src/werkzeug/wrappers/base_response.py
index f473d5b1c..00b9640c5 100644
--- a/src/werkzeug/wrappers/base_response.py
+++ b/src/werkzeug/wrappers/base_response.py
@@ -288,7 +288,7 @@ def from_app(cls, app, environ, buffered=False):
@property
def status_code(self):
- """The HTTP Status code as number."""
+ """The HTTP status code as a number."""
return self._status_code
@status_code.setter
@@ -301,7 +301,7 @@ def status_code(self, code):
@property
def status(self):
- """The HTTP Status code."""
+ """The HTTP status code as a string."""
return self._status
@status.setter
diff --git a/src/werkzeug/wrappers/etag.py b/src/werkzeug/wrappers/etag.py
index 03f5a4f68..ac2860a05 100644
--- a/src/werkzeug/wrappers/etag.py
+++ b/src/werkzeug/wrappers/etag.py
@@ -270,10 +270,9 @@ def freeze(self, no_etag=False):
@property
def content_range(self):
- """The ``Content-Range`` header as
- :class:`~werkzeug.datastructures.ContentRange` object. Even if
- the header is not set it will provide such an object for easier
- manipulation.
+ """The ``Content-Range`` header as a
+ :class:`~werkzeug.datastructures.ContentRange` object. Available
+ even if the header is not set.
.. versionadded:: 0.7
"""
From e4c07a6ef24a398e63e81a30b2e6a451c8e7619a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Batuhan=20Ta=C5=9Fkaya?=
Date: Sat, 11 May 2019 06:51:57 +0300
Subject: [PATCH 041/733] highlight server log with Click instead of termcolor
---
CHANGES.rst | 2 ++
docs/installation.rst | 4 ++--
docs/serving.rst | 11 ++++++++---
setup.py | 1 -
src/werkzeug/serving.py | 22 +++++++++++-----------
5 files changed, 23 insertions(+), 17 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 3453eec50..88b0de9a1 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -34,6 +34,8 @@ Unreleased
such that ``X-Foo`` is the same as ``x-foo``. :pr:`1605`
- :meth:`http.dump_cookie` accepts ``'None'`` as a value for
``samesite``. :issue:`1549`
+- Optional request log highlighting with the development server is
+ handled by Click instead of termcolor. :issue:`1235`
Version 0.15.5
diff --git a/docs/installation.rst b/docs/installation.rst
index 2d4acbabd..913171edb 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -26,13 +26,13 @@ detect and use them if you install them.
* `SimpleJSON`_ is a fast JSON implementation that is compatible with
Python's ``json`` module. It is preferred for JSON operations if it is
installed.
-* `termcolor`_ provides request log highlighting when using the
+* `Click`_ provides request log highlighting when using the
development server.
* `Watchdog`_ provides a faster, more efficient reloader for the
development server.
.. _SimpleJSON: https://simplejson.readthedocs.io/en/latest/
-.. _termcolor: https://pypi.org/project/termcolor/
+.. _Click: https://pypi.org/project/click/
.. _Watchdog: https://pypi.org/project/watchdog/
diff --git a/docs/serving.rst b/docs/serving.rst
index defb33480..e35975dc9 100644
--- a/docs/serving.rst
+++ b/docs/serving.rst
@@ -72,11 +72,16 @@ polling and ``'watchdog'`` forces it to the watchdog backend.
handled by the stat reloader for performance reasons. The watchdog reloader
monitors such files too.
+
Colored Logging
---------------
-Werkzeug is able to color the output of request logs when ran from a terminal, just install the `termcolor
-`_ package. Windows users need to install `colorama
-`_ in addition to termcolor for this to work.
+
+The development server can optionally highlight the request logs in
+different colors based on the status code. Install `Click`_ to enable
+this feature.
+
+.. _Click: https://pypi.org/project/click/
+
Virtual Hosts
-------------
diff --git a/setup.py b/setup.py
index 02b460301..d12c3b6ee 100644
--- a/setup.py
+++ b/setup.py
@@ -54,7 +54,6 @@
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
extras_require={
"watchdog": ["watchdog"],
- "termcolor": ["termcolor"],
"dev": [
"pytest",
"coverage",
diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py
index 8863d4bdf..88ba1cc37 100644
--- a/src/werkzeug/serving.py
+++ b/src/werkzeug/serving.py
@@ -72,9 +72,9 @@ def __getattr__(self, name):
ssl = _SslDummy()
try:
- import termcolor
+ import click
except ImportError:
- termcolor = None
+ click = None
def _get_openssl_crypto_module():
@@ -392,23 +392,23 @@ def log_request(self, code="-", size="-"):
code = str(code)
- if termcolor:
- color = termcolor.colored
+ if click:
+ color = click.style
if code[0] == "1": # 1xx - Informational
- msg = color(msg, attrs=["bold"])
+ msg = color(msg, bold=True)
elif code[0] == "2": # 2xx - Success
- msg = color(msg, color="white")
+ msg = color(msg, fg="white")
elif code == "304": # 304 - Resource Not Modified
- msg = color(msg, color="cyan")
+ msg = color(msg, fg="cyan")
elif code[0] == "3": # 3xx - Redirection
- msg = color(msg, color="green")
+ msg = color(msg, fg="green")
elif code == "404": # 404 - Resource Not Found
- msg = color(msg, color="yellow")
+ msg = color(msg, fg="yellow")
elif code[0] == "4": # 4xx - Client Error
- msg = color(msg, color="red", attrs=["bold"])
+ msg = color(msg, fg="red", bold=True)
else: # 5xx, or any other response
- msg = color(msg, color="magenta", attrs=["bold"])
+ msg = color(msg, fg="magenta", bold=True)
self.log("info", '"%s" %s %s', msg, code, size)
From 84c98f2d57406495f9c577dd5ddbf34e54bb55f6 Mon Sep 17 00:00:00 2001
From: alex
Date: Wed, 22 May 2019 08:42:49 +0200
Subject: [PATCH 042/733] use cryptography instead of pyOpenSSL
pyOpenSSL is only a wrapper around cryptography now. It recommends
using cryptography directly for our use case.
---
CHANGES.rst | 2 +
docs/serving.rst | 2 +-
src/werkzeug/serving.py | 101 ++++++++++++++++++++++------------------
tests/test_serving.py | 12 +++--
tox.ini | 2 +-
5 files changed, 66 insertions(+), 53 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 88b0de9a1..1ca865da7 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -36,6 +36,8 @@ Unreleased
``samesite``. :issue:`1549`
- Optional request log highlighting with the development server is
handled by Click instead of termcolor. :issue:`1235`
+- Optional ad-hoc TLS support for the development server is handled
+ by cryptography instead of pyOpenSSL. :pr:`1555`
Version 0.15.5
diff --git a/docs/serving.rst b/docs/serving.rst
index e35975dc9..7016ec9b3 100644
--- a/docs/serving.rst
+++ b/docs/serving.rst
@@ -229,7 +229,7 @@ certificate each time the server is reloaded. Adhoc certificates are
discouraged because modern browsers do a bad job at supporting them for
security reasons.
-This feature requires the pyOpenSSL library to be installed.
+This feature requires the cryptography library to be installed.
Unix Sockets
diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py
index 88ba1cc37..9ebd125f9 100644
--- a/src/werkzeug/serving.py
+++ b/src/werkzeug/serving.py
@@ -40,6 +40,8 @@
import signal
import socket
import sys
+from datetime import datetime as dt
+from datetime import timedelta
import werkzeug
from ._compat import PY2
@@ -77,15 +79,6 @@ def __getattr__(self, name):
click = None
-def _get_openssl_crypto_module():
- try:
- from OpenSSL import crypto
- except ImportError:
- raise TypeError("Using ad-hoc certificates requires the pyOpenSSL library.")
- else:
- return crypto
-
-
ThreadingMixIn = socketserver.ThreadingMixIn
can_fork = hasattr(os, "fork")
@@ -481,32 +474,39 @@ def get_header_items(self):
def generate_adhoc_ssl_pair(cn=None):
- from random import random
-
- crypto = _get_openssl_crypto_module()
+ try:
+ from cryptography import x509
+ from cryptography.x509.oid import NameOID
+ from cryptography.hazmat.backends import default_backend
+ from cryptography.hazmat.primitives import hashes
+ from cryptography.hazmat.primitives.asymmetric import rsa
+ except ImportError:
+ raise TypeError("Using ad-hoc certificates requires the cryptography library.")
+ pkey = rsa.generate_private_key(
+ public_exponent=65537, key_size=2048, backend=default_backend()
+ )
# pretty damn sure that this is not actually accepted by anyone
if cn is None:
- cn = "*"
-
- cert = crypto.X509()
- cert.set_serial_number(int(random() * sys.maxsize))
- cert.gmtime_adj_notBefore(0)
- cert.gmtime_adj_notAfter(60 * 60 * 24 * 365)
+ cn = u"*"
- subject = cert.get_subject()
- subject.CN = cn
- subject.O = "Dummy Certificate" # noqa: E741
-
- issuer = cert.get_issuer()
- issuer.CN = subject.CN
- issuer.O = subject.O # noqa: E741
-
- pkey = crypto.PKey()
- pkey.generate_key(crypto.TYPE_RSA, 2048)
- cert.set_pubkey(pkey)
- cert.sign(pkey, "sha256")
+ subject = x509.Name(
+ [
+ x509.NameAttribute(NameOID.ORGANIZATION_NAME, u"Dummy Certificate"),
+ x509.NameAttribute(NameOID.COMMON_NAME, cn),
+ ]
+ )
+ cert = (
+ x509.CertificateBuilder()
+ .subject_name(subject)
+ .issuer_name(subject)
+ .public_key(pkey.public_key())
+ .serial_number(x509.random_serial_number())
+ .not_valid_before(dt.utcnow())
+ .not_valid_after(dt.utcnow() + timedelta(days=365))
+ .sign(pkey, hashes.SHA256(), default_backend())
+ )
return cert, pkey
@@ -528,37 +528,54 @@ def make_ssl_devcert(base_path, host=None, cn=None):
for the `cn`.
:param cn: the `CN` to use.
"""
- from OpenSSL import crypto
if host is not None:
- cn = "*.%s/CN=%s" % (host, host)
+ cn = u"*.%s/CN=%s" % (host, host)
cert, pkey = generate_adhoc_ssl_pair(cn=cn)
+ from cryptography.hazmat.primitives import serialization
+
cert_file = base_path + ".crt"
pkey_file = base_path + ".key"
with open(cert_file, "wb") as f:
- f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
+ f.write(cert.public_bytes(serialization.Encoding.PEM))
with open(pkey_file, "wb") as f:
- f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
+ f.write(
+ pkey.private_bytes(
+ encoding=serialization.Encoding.PEM,
+ format=serialization.PrivateFormat.TraditionalOpenSSL,
+ encryption_algorithm=serialization.NoEncryption(),
+ )
+ )
return cert_file, pkey_file
def generate_adhoc_ssl_context():
"""Generates an adhoc SSL context for the development server."""
- crypto = _get_openssl_crypto_module()
import tempfile
import atexit
cert, pkey = generate_adhoc_ssl_pair()
+
+ from cryptography.hazmat.primitives import serialization
+
cert_handle, cert_file = tempfile.mkstemp()
pkey_handle, pkey_file = tempfile.mkstemp()
atexit.register(os.remove, pkey_file)
atexit.register(os.remove, cert_file)
- os.write(cert_handle, crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
- os.write(pkey_handle, crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
+ os.write(cert_handle, cert.public_bytes(serialization.Encoding.PEM))
+ os.write(
+ pkey_handle,
+ pkey.private_bytes(
+ encoding=serialization.Encoding.PEM,
+ format=serialization.PrivateFormat.TraditionalOpenSSL,
+ encryption_algorithm=serialization.NoEncryption(),
+ ),
+ )
+
os.close(cert_handle)
os.close(pkey_handle)
ctx = load_ssl_context(cert_file, pkey_file)
@@ -611,17 +628,9 @@ def wrap_socket(self, sock, **kwargs):
def is_ssl_error(error=None):
"""Checks if the given error (or the current one) is an SSL error."""
- exc_types = (ssl.SSLError,)
- try:
- from OpenSSL.SSL import Error
-
- exc_types += (Error,)
- except ImportError:
- pass
-
if error is None:
error = sys.exc_info()[1]
- return isinstance(error, exc_types)
+ return isinstance(error, ssl.SSLError)
def select_address_family(host, port):
diff --git a/tests/test_serving.py b/tests/test_serving.py
index cf31bd2e6..59ee43aba 100644
--- a/tests/test_serving.py
+++ b/tests/test_serving.py
@@ -24,9 +24,9 @@
from werkzeug import serving
try:
- import OpenSSL
+ import cryptography
except ImportError:
- OpenSSL = None
+ cryptography = None
try:
import watchdog
@@ -101,7 +101,9 @@ def app(environ, start_response):
not hasattr(ssl, "SSLContext"),
reason="Missing PEP 466 (Python 2.7.9+) or Python 3.",
)
-@pytest.mark.skipif(OpenSSL is None, reason="OpenSSL is required for cert generation.")
+@pytest.mark.skipif(
+ cryptography is None, reason="cryptography is required for cert generation."
+)
def test_stdlib_ssl_contexts(dev_server, tmpdir):
certificate, private_key = serving.make_ssl_devcert(str(tmpdir.mkdir("certs")))
@@ -124,7 +126,7 @@ def app(environ, start_response):
assert r.content == b"hello"
-@pytest.mark.skipif(OpenSSL is None, reason="OpenSSL is not installed.")
+@pytest.mark.skipif(cryptography is None, reason="cryptography is not installed.")
def test_ssl_context_adhoc(dev_server):
server = dev_server(
"""
@@ -139,7 +141,7 @@ def app(environ, start_response):
assert r.content == b"hello"
-@pytest.mark.skipif(OpenSSL is None, reason="OpenSSL is not installed.")
+@pytest.mark.skipif(cryptography is None, reason="cryptography is not installed.")
def test_make_ssl_devcert(tmpdir):
certificate, private_key = serving.make_ssl_devcert(str(tmpdir))
assert os.path.isfile(certificate)
diff --git a/tox.ini b/tox.ini
index abda16f0e..bdcb6b0c2 100644
--- a/tox.ini
+++ b/tox.ini
@@ -14,7 +14,7 @@ deps =
pytest-xprocess
requests
requests_unixsocket
- pyopenssl
+ cryptography
greenlet
watchdog
commands = coverage run -p -m pytest --tb=short --basetemp={envtmpdir} {posargs}
From 679ec2b53da4f4b620acfd5cabb73291fb1ac872 Mon Sep 17 00:00:00 2001
From: linchiwei123 <40888469+linchiwei123@users.noreply.github.com>
Date: Sun, 14 Jul 2019 14:46:29 +0800
Subject: [PATCH 043/733] update docstring of class ResponseStreamMixin
---
src/werkzeug/wrappers/response.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/werkzeug/wrappers/response.py b/src/werkzeug/wrappers/response.py
index cd86cacdb..58707a6ae 100644
--- a/src/werkzeug/wrappers/response.py
+++ b/src/werkzeug/wrappers/response.py
@@ -51,7 +51,7 @@ def encoding(self):
class ResponseStreamMixin(object):
- """Mixin for :class:`BaseRequest` subclasses. Classes that inherit from
+ """Mixin for :class:`BaseResponse` subclasses. Classes that inherit from
this mixin will automatically get a :attr:`stream` property that provides
a write-only interface to the response iterable.
"""
From a00ece1bae2c06167a2a42051e56e73a3e1aa0d4 Mon Sep 17 00:00:00 2001
From: Philip Jones
Date: Tue, 23 Jul 2019 10:09:29 +0100
Subject: [PATCH 044/733] Minor bugfix to CacheControl datastructure
This ensures that CacheControl directives can be set to None (removed)
multiple times without raising an error, i.e.:
cc = CacheControl()
cc.no_cache = None
cc.no_cache = None # Previously this would raise a KeyError
---
src/werkzeug/datastructures.py | 2 +-
tests/test_datastructures.py | 6 ++++++
2 files changed, 7 insertions(+), 1 deletion(-)
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index 9cfa1eb13..d43517e95 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -1940,7 +1940,7 @@ def _set_cache_value(self, key, value, type):
self.pop(key, None)
else:
if value is None:
- self.pop(key)
+ self.pop(key, None)
elif value is True:
self[key] = None
else:
diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py
index bf6b3f7cc..c930a67e5 100644
--- a/tests/test_datastructures.py
+++ b/tests/test_datastructures.py
@@ -978,6 +978,12 @@ def test_repr(self):
cc = datastructures.RequestCacheControl([("max-age", "0"), ("private", "True")])
assert repr(cc) == ""
+ def test_set_none(self):
+ cc = datastructures.ResponseCacheControl([("max-age", "0")])
+ assert cc.no_cache is None
+ cc.no_cache = None
+ assert cc.no_cache is None
+
class TestAccept(object):
storage_class = datastructures.Accept
From 1e88b5baafa5326b66bc83ee9451da7975380cfa Mon Sep 17 00:00:00 2001
From: David Lord
Date: Wed, 24 Jul 2019 07:36:42 -0700
Subject: [PATCH 045/733] fix deprecated top-level imports
---
src/werkzeug/__init__.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/werkzeug/__init__.py b/src/werkzeug/__init__.py
index 952cd7518..7b8e14e5c 100644
--- a/src/werkzeug/__init__.py
+++ b/src/werkzeug/__init__.py
@@ -66,8 +66,6 @@
"redirect",
"cached_property",
"import_string",
- "dump_cookie",
- "parse_cookie",
"unescape",
"format_string",
"find_modules",
@@ -146,6 +144,8 @@
"unquote_header_value",
"quote_header_value",
"HTTP_STATUS_CODES",
+ "dump_cookie",
+ "parse_cookie",
],
"werkzeug.wrappers": [
"BaseResponse",
From 3ad8635342eef81397a65bf224325aa2b4792a31 Mon Sep 17 00:00:00 2001
From: Kyle
Date: Fri, 30 Aug 2019 18:27:04 +0900
Subject: [PATCH 046/733] Fix misspelled `rooute` var in test
---
tests/test_routing.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/test_routing.py b/tests/test_routing.py
index fe147df01..ef2605867 100644
--- a/tests/test_routing.py
+++ b/tests/test_routing.py
@@ -607,7 +607,7 @@ class MyMap(r.Map):
def test_uuid_converter():
m = r.Map([r.Rule("/a/", endpoint="a")])
a = m.bind("example.org", "/")
- rooute, kwargs = a.match("/a/a8098c1a-f86e-11da-bd1a-00112444be1e")
+ route, kwargs = a.match("/a/a8098c1a-f86e-11da-bd1a-00112444be1e")
assert type(kwargs["a_uuid"]) == uuid.UUID
From 6c3f85118646d64f67d995d802cac14db0fb40ee Mon Sep 17 00:00:00 2001
From: frostming
Date: Fri, 16 Aug 2019 09:37:36 +0800
Subject: [PATCH 047/733] Fix the bug of reloader with windows path
---
CHANGES.rst | 2 ++
src/werkzeug/_reloader.py | 2 +-
2 files changed, 3 insertions(+), 1 deletion(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index b3faa0e03..02317cc79 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,5 +1,7 @@
.. currentmodule:: werkzeug
+- Fix a bug that the reloader doesn't populate the path correctly on
+ Windows. :issue:`1614`
Version 0.15.5
--------------
diff --git a/src/werkzeug/_reloader.py b/src/werkzeug/_reloader.py
index b04432012..9b8c11f27 100644
--- a/src/werkzeug/_reloader.py
+++ b/src/werkzeug/_reloader.py
@@ -72,7 +72,7 @@ def _get_args_for_reloading():
# The value of __package__ indicates how Python was called. It may
# not exist if a setuptools script is installed as an egg.
- if getattr(__main__, "__package__", None) is None:
+ if not getattr(__main__, "__package__", None):
# Executed a file, like "python app.py".
py_script = os.path.abspath(py_script)
From 87da8da23a162f978e552b724006dc3a39246f8b Mon Sep 17 00:00:00 2001
From: David Lord
Date: Sat, 31 Aug 2019 12:49:38 -0700
Subject: [PATCH 048/733] make reloader workaround more specific
---
CHANGES.rst | 13 +++++++++++--
src/werkzeug/_reloader.py | 12 +++++++++---
2 files changed, 20 insertions(+), 5 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 02317cc79..c6007d68a 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,7 +1,16 @@
.. currentmodule:: werkzeug
-- Fix a bug that the reloader doesn't populate the path correctly on
- Windows. :issue:`1614`
+Version 0.15.6
+--------------
+
+Unreleased
+
+- Work around a bug in pip that caused the reloader to fail on
+ Windows when the script was an entry point. This fixes the issue
+ with Flask's `flask run` command failing with "No module named
+ Scripts\flask". :issue:`1614`
+
+
Version 0.15.5
--------------
diff --git a/src/werkzeug/_reloader.py b/src/werkzeug/_reloader.py
index 9b8c11f27..c2a9c8e05 100644
--- a/src/werkzeug/_reloader.py
+++ b/src/werkzeug/_reloader.py
@@ -71,8 +71,14 @@ def _get_args_for_reloading():
__main__ = sys.modules["__main__"]
# The value of __package__ indicates how Python was called. It may
- # not exist if a setuptools script is installed as an egg.
- if not getattr(__main__, "__package__", None):
+ # not exist if a setuptools script is installed as an egg. It may be
+ # set incorrectly for entry points created with pip on Windows.
+ if getattr(__main__, "__package__", None) is None or (
+ os.name == "nt"
+ and __main__.__package__ == ""
+ and not os.path.exists(py_script)
+ and os.path.exists(py_script + ".exe")
+ ):
# Executed a file, like "python app.py".
py_script = os.path.abspath(py_script)
@@ -83,7 +89,7 @@ def _get_args_for_reloading():
py_script += ".exe"
if (
- os.path.splitext(rv[0])[1] == ".exe"
+ os.path.splitext(sys.executable)[1] == ".exe"
and os.path.splitext(py_script)[1] == ".exe"
):
rv.pop(0)
From c78b97998f4a6a27c3a9bf2b419573c620b3a64d Mon Sep 17 00:00:00 2001
From: Pieter van Beek
Date: Mon, 2 Sep 2019 20:37:42 +0200
Subject: [PATCH 049/733] Added missing HTTPException subclasses to docs.
---
docs/exceptions.rst | 10 ++++++++++
1 file changed, 10 insertions(+)
diff --git a/docs/exceptions.rst b/docs/exceptions.rst
index 6c82e64a7..8f4ab3725 100644
--- a/docs/exceptions.rst
+++ b/docs/exceptions.rst
@@ -44,6 +44,10 @@ The following error classes exist in Werkzeug:
.. autoexception:: ImATeapot
+.. autoexception:: UnprocessableEntity
+
+.. autoexception:: Locked
+
.. autoexception:: FailedDependency
.. autoexception:: PreconditionRequired
@@ -52,6 +56,8 @@ The following error classes exist in Werkzeug:
.. autoexception:: RequestHeaderFieldsTooLarge
+.. autoexception:: UnavailableForLegalReasons
+
.. autoexception:: InternalServerError
:members:
@@ -61,6 +67,10 @@ The following error classes exist in Werkzeug:
.. autoexception:: ServiceUnavailable
+.. autoexception:: GatewayTimeout
+
+.. autoexception:: HTTPVersionNotSupported
+
.. exception:: HTTPUnicodeError
This exception is used to signal unicode decode errors of request
From 92d5f5f0c03f201577291e613fafb1687cbca744 Mon Sep 17 00:00:00 2001
From: Tim Gates
Date: Tue, 3 Sep 2019 17:02:38 +1000
Subject: [PATCH 050/733] Fix simple typo: underlaying -> underlying
---
src/werkzeug/wsgi.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/werkzeug/wsgi.py b/src/werkzeug/wsgi.py
index 807b462ad..aa4e7139b 100644
--- a/src/werkzeug/wsgi.py
+++ b/src/werkzeug/wsgi.py
@@ -964,7 +964,7 @@ def readline(self, size=None):
def readlines(self, size=None):
"""Reads a file into a list of strings. It calls :meth:`readline`
until the file is read to the end. It does support the optional
- `size` argument if the underlaying stream supports it for
+ `size` argument if the underlying stream supports it for
`readline`.
"""
last_pos = self._pos
From 04ff06241c010025d01ed5a7a601aeb4eaa8dc6d Mon Sep 17 00:00:00 2001
From: David Lord
Date: Wed, 24 Jul 2019 07:36:42 -0700
Subject: [PATCH 051/733] fix deprecated top-level imports
---
src/werkzeug/__init__.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/werkzeug/__init__.py b/src/werkzeug/__init__.py
index 0a518acb3..e5990c19d 100644
--- a/src/werkzeug/__init__.py
+++ b/src/werkzeug/__init__.py
@@ -66,8 +66,6 @@
"redirect",
"cached_property",
"import_string",
- "dump_cookie",
- "parse_cookie",
"unescape",
"format_string",
"find_modules",
@@ -146,6 +144,8 @@
"unquote_header_value",
"quote_header_value",
"HTTP_STATUS_CODES",
+ "dump_cookie",
+ "parse_cookie",
],
"werkzeug.wrappers": [
"BaseResponse",
From 8da65dd9e9ba0f8800c3fbb44ac1fa245424fa5b Mon Sep 17 00:00:00 2001
From: David Lord
Date: Tue, 3 Sep 2019 08:42:45 -0700
Subject: [PATCH 052/733] ProxyFix.x_proto defaults to 1
num_proxies sets x_proto and x_host as well
---
CHANGES.rst | 6 ++++++
src/werkzeug/middleware/proxy_fix.py | 8 ++++++--
tests/middleware/test_proxy_fix.py | 5 ++++-
3 files changed, 16 insertions(+), 3 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index c6007d68a..fe7e7393d 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -9,6 +9,12 @@ Unreleased
Windows when the script was an entry point. This fixes the issue
with Flask's `flask run` command failing with "No module named
Scripts\flask". :issue:`1614`
+- ``ProxyFix`` trusts the ``X-Forwarded-Proto`` header by default.
+ :issue:`1630`
+- The deprecated ``num_proxies`` argument to ``ProxyFix`` sets
+ ``x_for``, ``x_proto``, and ``x_host`` to match 0.14 behavior. This
+ is intended to make intermediate upgrades less disruptive, but the
+ argument will still be removed in 1.0. :issue:`1630`
Version 0.15.5
diff --git a/src/werkzeug/middleware/proxy_fix.py b/src/werkzeug/middleware/proxy_fix.py
index dc1dacc8c..bbe181401 100644
--- a/src/werkzeug/middleware/proxy_fix.py
+++ b/src/werkzeug/middleware/proxy_fix.py
@@ -77,7 +77,7 @@ class ProxyFix(object):
"""
def __init__(
- self, app, num_proxies=None, x_for=1, x_proto=0, x_host=0, x_port=0, x_prefix=0
+ self, app, num_proxies=None, x_for=1, x_proto=1, x_host=0, x_port=0, x_prefix=0
):
self.app = app
self.x_for = x_for
@@ -112,11 +112,15 @@ def num_proxies(self, value):
if value is not None:
warnings.warn(
"'num_proxies' is deprecated as of version 0.15 and"
- " will be removed in version 1.0. Use 'x_for' instead.",
+ " will be removed in version 1.0. Use"
+ " 'x_for={value}, x_proto={value}, x_host={value}'"
+ " instead.".format(value=value),
DeprecationWarning,
stacklevel=2,
)
self.x_for = value
+ self.x_proto = value
+ self.x_host = value
def get_remote_addr(self, forwarded_for):
"""Get the real ``remote_addr`` by looking backwards ``x_for``
diff --git a/tests/middleware/test_proxy_fix.py b/tests/middleware/test_proxy_fix.py
index f13d10f4b..bcd64ba9e 100644
--- a/tests/middleware/test_proxy_fix.py
+++ b/tests/middleware/test_proxy_fix.py
@@ -18,8 +18,9 @@
"REMOTE_ADDR": "192.168.0.2",
"HTTP_HOST": "spam",
"HTTP_X_FORWARDED_FOR": "192.168.0.1",
+ "HTTP_X_FORWARDED_PROTO": "https",
},
- "http://spam/",
+ "https://spam/",
id="for",
),
pytest.param(
@@ -178,6 +179,8 @@ def app(request):
def test_proxy_fix_deprecations():
app = pytest.deprecated_call(ProxyFix, None, 2)
assert app.x_for == 2
+ assert app.x_proto == 2
+ assert app.x_host == 2
with pytest.deprecated_call():
assert app.num_proxies == 2
From 71eab19be2c83fb476de51275e2f9bdf69d5cc10 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Wed, 4 Sep 2019 13:09:26 -0700
Subject: [PATCH 053/733] release version 0.15.6
---
CHANGES.rst | 2 +-
src/werkzeug/__init__.py | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index fe7e7393d..7c2c9076b 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -3,7 +3,7 @@
Version 0.15.6
--------------
-Unreleased
+Released 2019-09-04
- Work around a bug in pip that caused the reloader to fail on
Windows when the script was an entry point. This fixes the issue
diff --git a/src/werkzeug/__init__.py b/src/werkzeug/__init__.py
index e5990c19d..2ce1a3cb0 100644
--- a/src/werkzeug/__init__.py
+++ b/src/werkzeug/__init__.py
@@ -17,7 +17,7 @@
import sys
from types import ModuleType
-__version__ = "0.15.5"
+__version__ = "0.15.6"
# This import magic raises concerns quite often which is why the implementation
# and motivation is explained here in detail now.
From 5336ebd104aa13a4d31428262092eee9af129f95 Mon Sep 17 00:00:00 2001
From: Philip Jones
Date: Mon, 22 Jul 2019 14:09:24 +0100
Subject: [PATCH 054/733] Add a ContentSecurityPolicy datastructure
This should help make CSP headers easier to construct and read, by
adding structure for the directives. It is based on today's version of
https://w3c.github.io/webappsec-csp/ .
---
src/werkzeug/datastructures.py | 92 +++++++++++++++++++++
src/werkzeug/http.py | 40 +++++++++
src/werkzeug/wrappers/common_descriptors.py | 10 +++
tests/test_datastructures.py | 20 +++++
tests/test_http.py | 8 ++
5 files changed, 170 insertions(+)
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index d43517e95..70a961fa3 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -2012,6 +2012,97 @@ class ResponseCacheControl(_CacheControl):
_CacheControl.cache_property = staticmethod(cache_property)
+def csp_property(key):
+ """Return a new property object for a content security policy header.
+ Useful if you want to add support for a csp extension in a
+ subclass.
+ """
+ return property(
+ lambda x: x._get_value(key),
+ lambda x, v: x._set_value(key, v),
+ lambda x: x._del_value(key),
+ "accessor for %r" % key,
+ )
+
+
+class ContentSecurityPolicy(UpdateDictMixin, dict):
+ """Subclass of a dict that stores values for a Content Security Policy
+ header. It has accessors for all the level 3 policies.
+
+ Because the csp directives in the HTTP header use dashes the
+ python descriptors use underscores for that.
+
+ To get a header of the :class:`ContentSecurityPolicy` object again
+ you can convert the object into a string or call the
+ :meth:`to_header` method. If you plan to subclass it and add your
+ own items have a look at the sourcecode for that class.
+
+ .. versionadded:: 1.0.0
+ Support for Content Security Policy headers was added.
+
+ """
+
+ base_uri = csp_property("base-uri")
+ child_src = csp_property("child-src")
+ connect_src = csp_property("connect-src")
+ default_src = csp_property("default-src")
+ font_src = csp_property("font-src")
+ form_action = csp_property("form-action")
+ frame_ancestors = csp_property("frame-ancestors")
+ frame_src = csp_property("frame-src")
+ img_src = csp_property("img-src")
+ manifest_src = csp_property("manifest-src")
+ media_src = csp_property("media-src")
+ navigate_to = csp_property("navigate-to")
+ object_src = csp_property("object-src")
+ prefetch_src = csp_property("prefetch-src")
+ plugin_types = csp_property("plugin-types")
+ report_to = csp_property("report-to")
+ report_uri = csp_property("report-uri")
+ sandbox = csp_property("sandbox")
+ script_src = csp_property("script-src")
+ script_src_attr = csp_property("script-src-attr")
+ script_src_elem = csp_property("script-src-elem")
+ style_src = csp_property("style-src")
+ style_src_attr = csp_property("style-src-attr")
+ style_src_elem = csp_property("style-src-elem")
+ worker_src = csp_property("worker-src")
+
+ def __init__(self, values=(), on_update=None):
+ dict.__init__(self, values or ())
+ self.on_update = on_update
+ self.provided = values is not None
+
+ def _get_value(self, key):
+ """Used internally by the accessor properties."""
+ return self.get(key)
+
+ def _set_value(self, key, value):
+ """Used internally by the accessor properties."""
+ if value is None:
+ self.pop(key, None)
+ else:
+ self[key] = value
+
+ def _del_value(self, key):
+ """Used internally by the accessor properties."""
+ if key in self:
+ del self[key]
+
+ def to_header(self):
+ """Convert the stored values into a Content Security Policy header."""
+ return dump_csp_header(self)
+
+ def __str__(self):
+ return self.to_header()
+
+ def __repr__(self):
+ return "<%s %s>" % (
+ self.__class__.__name__,
+ " ".join("%s=%r" % (k, v) for k, v in sorted(self.items())),
+ )
+
+
class CallbackDict(UpdateDictMixin, dict):
"""A dict that calls a function passed every time something is changed.
The function is passed the dict instance.
@@ -2837,6 +2928,7 @@ def __repr__(self):
# circular dependencies
from . import exceptions
+from .http import dump_csp_header
from .http import dump_header
from .http import dump_options_header
from .http import generate_etag
diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py
index 8b16351ed..6d85ffb20 100644
--- a/src/werkzeug/http.py
+++ b/src/werkzeug/http.py
@@ -304,6 +304,19 @@ def dump_header(iterable, allow_token=True):
return ", ".join(items)
+def dump_csp_header(header):
+ """Dump a Content Security Policy header.
+
+ These are structured into policies such as "default-src 'self';
+ script-src 'self'".
+
+ .. versionadded:: 1.0.0
+ Support for Content Security Policy headers was added.
+
+ """
+ return "; ".join("%s %s" % (key, value) for key, value in iteritems(header))
+
+
def parse_list_header(value):
"""Parse lists as described by RFC 2068 Section 2.
@@ -504,6 +517,32 @@ def parse_cache_control_header(value, on_update=None, cls=None):
return cls(parse_dict_header(value), on_update)
+def parse_csp_header(value, on_update=None, cls=None):
+ """Parse a Content Security Policy header.
+
+ .. versionadded:: 1.0.0
+ Support for Content Security Policy headers was added.
+
+ :param value: a csp header to be parsed.
+ :param on_update: an optional callable that is called every time a value
+ on the object is changed.
+ :param cls: the class for the returned object. By default
+ :class:`~werkzeug.datastructures.ContentSecurityPolicy` is used.
+ :return: a `cls` object.
+ """
+
+ if cls is None:
+ cls = ContentSecurityPolicy
+ items = []
+ for policy in value.split(";"):
+ policy = policy.strip()
+ # Ignore badly formatted policies (no space)
+ if " " in policy:
+ directive, value = policy.strip().split(" ", 1)
+ items.append((directive.strip(), value.strip()))
+ return cls(items, on_update)
+
+
def parse_set_header(value, on_update=None):
"""Parse a set-like header and return a
:class:`~werkzeug.datastructures.HeaderSet` object:
@@ -1244,6 +1283,7 @@ def is_byte_range_valid(start, stop, length):
from .datastructures import Accept
from .datastructures import Authorization
from .datastructures import ContentRange
+from .datastructures import ContentSecurityPolicy
from .datastructures import ETags
from .datastructures import HeaderSet
from .datastructures import IfRange
diff --git a/src/werkzeug/wrappers/common_descriptors.py b/src/werkzeug/wrappers/common_descriptors.py
index e4107ee01..1479248d8 100644
--- a/src/werkzeug/wrappers/common_descriptors.py
+++ b/src/werkzeug/wrappers/common_descriptors.py
@@ -4,10 +4,12 @@
from .._compat import string_types
from ..datastructures import CallbackDict
from ..http import dump_age
+from ..http import dump_csp_header
from ..http import dump_header
from ..http import dump_options_header
from ..http import http_date
from ..http import parse_age
+from ..http import parse_csp_header
from ..http import parse_date
from ..http import parse_options_header
from ..http import parse_set_header
@@ -220,6 +222,14 @@ def on_update(d):
modification of the entity-body in transit, but is not proof
against malicious attacks.)""",
)
+ content_security_policy = header_property(
+ "Content-Security-Policy",
+ None,
+ parse_csp_header,
+ dump_csp_header,
+ doc="""The Content-Security-Policy header adds an additional layer of
+ security to help detect and mitigate certain types of attacks.""",
+ )
date = header_property(
"Date",
None,
diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py
index c930a67e5..c55903480 100644
--- a/tests/test_datastructures.py
+++ b/tests/test_datastructures.py
@@ -985,6 +985,26 @@ def test_set_none(self):
assert cc.no_cache is None
+class TestContentSecurityPolicy(object):
+ def test_construct(self):
+ csp = datastructures.ContentSecurityPolicy(
+ [("font-src", "'self'"), ("media-src", "*")]
+ )
+ assert csp.font_src == "'self'"
+ assert csp.media_src == "*"
+ policies = [policy.strip() for policy in csp.to_header().split(";")]
+ assert "font-src 'self'" in policies
+ assert "media-src *" in policies
+
+ def test_properties(self):
+ csp = datastructures.ContentSecurityPolicy()
+ csp.default_src = "* 'self' quart.com"
+ csp.img_src = "'none'"
+ policies = [policy.strip() for policy in csp.to_header().split(";")]
+ assert "default-src * 'self' quart.com" in policies
+ assert "img-src 'none'" in policies
+
+
class TestAccept(object):
storage_class = datastructures.Accept
diff --git a/tests/test_http.py b/tests/test_http.py
index 400fc9d78..3709daf08 100644
--- a/tests/test_http.py
+++ b/tests/test_http.py
@@ -118,6 +118,14 @@ def test_cache_control_header(self):
assert c.private is None
assert c.to_header() == "no-cache"
+ def test_csp_header(self):
+ csp = http.parse_csp_header(
+ "default-src 'self'; script-src 'unsafe-inline' *; img-src"
+ )
+ assert csp.default_src == "'self'"
+ assert csp.script_src == "'unsafe-inline' *"
+ assert csp.img_src is None
+
def test_authorization_header(self):
a = http.parse_authorization_header("Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==")
assert a.type == "basic"
From e513bcd5f7101adde935f2f4f62ff321e15a01a8 Mon Sep 17 00:00:00 2001
From: pgjones
Date: Thu, 8 Aug 2019 23:58:00 +0100
Subject: [PATCH 055/733] Add a CSP Report Only header property to responses
This follows the CSP header and allows for monitoring rather than
enforcement of content security policies.
---
src/werkzeug/wrappers/common_descriptors.py | 9 +++++++++
1 file changed, 9 insertions(+)
diff --git a/src/werkzeug/wrappers/common_descriptors.py b/src/werkzeug/wrappers/common_descriptors.py
index 1479248d8..f169959bb 100644
--- a/src/werkzeug/wrappers/common_descriptors.py
+++ b/src/werkzeug/wrappers/common_descriptors.py
@@ -230,6 +230,15 @@ def on_update(d):
doc="""The Content-Security-Policy header adds an additional layer of
security to help detect and mitigate certain types of attacks.""",
)
+ content_security_policy_report_only = header_property(
+ "Content-Security-Policy-Report-Only",
+ None,
+ parse_csp_header,
+ dump_csp_header,
+ doc="""The Content-Security-Policy-Report-Only header adds a csp policy
+ that is not enforced but is reported thereby helping detect
+ certain types of attacks.""",
+ )
date = header_property(
"Date",
None,
From 6f462cad6c3313f3901442060658995d0a0640c7 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Thu, 5 Sep 2019 06:19:28 -0700
Subject: [PATCH 056/733] add changelog for content security policy
---
CHANGES.rst | 2 ++
1 file changed, 2 insertions(+)
diff --git a/CHANGES.rst b/CHANGES.rst
index e08883f0a..1edbccf97 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -34,6 +34,8 @@ Unreleased
such that ``X-Foo`` is the same as ``x-foo``. :pr:`1605`
- :meth:`http.dump_cookie` accepts ``'None'`` as a value for
``samesite``. :issue:`1549`
+- Support the Content Security Policy header through the
+ `Response.content_security_policy` data structure. :pr:`1617`
- Optional request log highlighting with the development server is
handled by Click instead of termcolor. :issue:`1235`
- Optional ad-hoc TLS support for the development server is handled
From 9fc4f95ff0613f707b8def9d7e2d10f4d24b14f9 Mon Sep 17 00:00:00 2001
From: Charles Ross
Date: Sat, 6 Apr 2019 08:45:43 -0700
Subject: [PATCH 057/733] Add venv to gitignore
---
.gitignore | 1 +
1 file changed, 1 insertion(+)
diff --git a/.gitignore b/.gitignore
index 9b312a490..d2d81e7f2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -20,3 +20,4 @@ htmlcov
test_uwsgi_failed
.idea
.pytest_cache/
+venv/
From 6f699b8229b0d36cc3b0bfdfb70b0c46865b22b6 Mon Sep 17 00:00:00 2001
From: Charles Ross
Date: Sat, 6 Apr 2019 08:46:57 -0700
Subject: [PATCH 058/733] LanguageAccept falls back to language only
---
src/werkzeug/datastructures.py | 13 +++++++++++++
tests/test_datastructures.py | 29 +++++++++++++++++++++++++++++
2 files changed, 42 insertions(+)
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index 70a961fa3..61b26ee01 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -1849,6 +1849,19 @@ def _normalize(language):
return item == "*" or _normalize(value) == _normalize(item)
+ def best_match(self, matches, default=None):
+ """Calls the super version of `best_match` and if it returns none,
+ attempts to fall back to language only matches.
+
+ :param matches: a list of matches to check for
+ :param default: the value that is returned if none match
+ """
+ result = super(LanguageAccept, self).best_match(matches)
+ if result is not None:
+ return result
+ fallback = Accept([(item[0][0:2], item[1]) for item in self])
+ return fallback.best_match(matches, default=default)
+
class CharsetAccept(Accept):
"""Like :class:`Accept` but with normalization for charsets."""
diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py
index c55903480..304ed5def 100644
--- a/tests/test_datastructures.py
+++ b/tests/test_datastructures.py
@@ -1100,6 +1100,35 @@ def test_accept_wildcard_specificity(self):
assert accept.best_match(["text/plain", "image/png"]) == "image/png"
+class TestLanguageAccept(object):
+ storage_class = datastructures.LanguageAccept
+
+ def test_best_match_fallback(self):
+ accept = self.storage_class([("en-us", 1)])
+ assert accept.best_match(["en"]) == "en"
+
+ accept = self.storage_class([("de_AT", 1), ("de", 0.9)])
+ assert accept.best_match(["en"]) is None
+
+ accept = self.storage_class([("de_AT", 1), ("de", 0.9), ("en-US", 0.8)])
+ assert accept.best_match(["de", "en"]) == "de"
+
+ accept = self.storage_class([("de_AT", 0.9), ("en-US", 1)])
+ assert accept.best_match(["en"]) == "en"
+
+ accept = self.storage_class([("en-us", 1)])
+ assert accept.best_match(["en-us"]) == "en-us"
+
+ accept = self.storage_class([("en-us", 1)])
+ assert accept.best_match(["en-us", "en"]) == "en-us"
+
+ accept = self.storage_class([("en-GB", 1)])
+ assert accept.best_match(["en-US", "en"], default="en-US") == "en"
+
+ accept = self.storage_class([("de-AT", 1)])
+ assert accept.best_match(["en-US", "en"], default="en-US") == "en-US"
+
+
class TestFileStorage(object):
storage_class = datastructures.FileStorage
From 9bfca43636ef87b6e1d58c4918d8e4be601fefab Mon Sep 17 00:00:00 2001
From: David Lord
Date: Thu, 5 Sep 2019 08:26:54 -0700
Subject: [PATCH 059/733] extend LanguageAccept fallback
If no exact match is found, first tries modifying the accepted values to
use primary tags only, then tries modifying the matched values to use
primary tags only.
If the client only accepts "en-US", "en" will match. If the client only
accepts "en", "en-US" will match. 2 and 3 letter codes are supported.
Fallback matching is not performed with other subtags.
---
CHANGES.rst | 3 ++
src/werkzeug/datastructures.py | 58 +++++++++++++++++++++++++++-------
tests/test_datastructures.py | 58 ++++++++++++++++------------------
3 files changed, 77 insertions(+), 42 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 1edbccf97..63fda5361 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -36,6 +36,9 @@ Unreleased
``samesite``. :issue:`1549`
- Support the Content Security Policy header through the
`Response.content_security_policy` data structure. :pr:`1617`
+- ``AcceptLanguage`` will fall back to matching "en" for "en-US" or
+ "en-US" for "en" to better support clients or translations that
+ only match at the primary language tag. :issue:`450`, :pr:`1507`
- Optional request log highlighting with the development server is
handled by Click instead of termcolor. :issue:`1235`
- Optional ad-hoc TLS support for the development server is handled
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index 61b26ee01..aa371cd76 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -1840,27 +1840,63 @@ def accept_json(self):
return "application/json" in self
+def _normalize_lang(value):
+ """Process a language tag for matching."""
+ return _locale_delim_re.split(value.lower())
+
+
class LanguageAccept(Accept):
- """Like :class:`Accept` but with normalization for languages."""
+ """Like :class:`Accept` but with normalization for language tags."""
def _value_matches(self, value, item):
- def _normalize(language):
- return _locale_delim_re.split(language.lower())
-
- return item == "*" or _normalize(value) == _normalize(item)
+ return item == "*" or _normalize_lang(value) == _normalize_lang(item)
def best_match(self, matches, default=None):
- """Calls the super version of `best_match` and if it returns none,
- attempts to fall back to language only matches.
+ """Given a list of supported values, finds the best match from
+ the list of accepted values.
- :param matches: a list of matches to check for
- :param default: the value that is returned if none match
+ Language tags are normalized for the purpose of matching, but
+ are returned unchanged.
+
+ If no exact match is found, this will fall back to matching
+ the first subtag (primary language only), first with the
+ accepted values then with the match values. This partial matching is not
+ applied to any other language subtags.
+
+ The default is returned if no exact or fallback match is found.
+
+ :param matches: A list of supported languages to find a match.
+ :param default: The value that is returned if none match.
"""
+ # Look for an exact match first. If a client accepts "en-US",
+ # "en-US" is a valid match at this point.
result = super(LanguageAccept, self).best_match(matches)
+
if result is not None:
return result
- fallback = Accept([(item[0][0:2], item[1]) for item in self])
- return fallback.best_match(matches, default=default)
+
+ # Fall back to accepting primary tags. If a client accepts
+ # "en-US", "en" is a valid match at this point. Need to use
+ # re.split to account for 2 or 3 letter codes.
+ fallback = Accept(
+ [(_locale_delim_re.split(item[0], 1)[0], item[1]) for item in self]
+ )
+ result = fallback.best_match(matches)
+
+ if result is not None:
+ return result
+
+ # Fall back to matching primary tags. If the client accepts
+ # "en", "en-US" is a valid match at this point.
+ fallback_matches = [_locale_delim_re.split(item, 1)[0] for item in matches]
+ result = super(LanguageAccept, self).best_match(fallback_matches)
+
+ # Return a value from the original match list. Find the first
+ # original value that starts with the matched primary tag.
+ if result is not None:
+ return next(item for item in matches if item.startswith(result))
+
+ return default
class CharsetAccept(Accept):
diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py
index 304ed5def..6ae16b137 100644
--- a/tests/test_datastructures.py
+++ b/tests/test_datastructures.py
@@ -37,6 +37,7 @@
from werkzeug._compat import itervalues
from werkzeug._compat import PY2
from werkzeug._compat import text_type
+from werkzeug.datastructures import LanguageAccept
from werkzeug.datastructures import Range
from werkzeug.exceptions import BadRequestKeyError
@@ -911,12 +912,12 @@ def make_call_asserter(func=None):
:param func: Additional callback for each function call.
- >>> assert_calls, func = make_call_asserter()
- >>> with assert_calls(2):
- ... func()
- ... func()
+ .. code-block:: python
+ assert_calls, func = make_call_asserter()
+ with assert_calls(2):
+ func()
+ func()
"""
-
calls = [0]
@contextmanager
@@ -1101,32 +1102,27 @@ def test_accept_wildcard_specificity(self):
class TestLanguageAccept(object):
- storage_class = datastructures.LanguageAccept
-
- def test_best_match_fallback(self):
- accept = self.storage_class([("en-us", 1)])
- assert accept.best_match(["en"]) == "en"
-
- accept = self.storage_class([("de_AT", 1), ("de", 0.9)])
- assert accept.best_match(["en"]) is None
-
- accept = self.storage_class([("de_AT", 1), ("de", 0.9), ("en-US", 0.8)])
- assert accept.best_match(["de", "en"]) == "de"
-
- accept = self.storage_class([("de_AT", 0.9), ("en-US", 1)])
- assert accept.best_match(["en"]) == "en"
-
- accept = self.storage_class([("en-us", 1)])
- assert accept.best_match(["en-us"]) == "en-us"
-
- accept = self.storage_class([("en-us", 1)])
- assert accept.best_match(["en-us", "en"]) == "en-us"
-
- accept = self.storage_class([("en-GB", 1)])
- assert accept.best_match(["en-US", "en"], default="en-US") == "en"
-
- accept = self.storage_class([("de-AT", 1)])
- assert accept.best_match(["en-US", "en"], default="en-US") == "en-US"
+ @pytest.mark.parametrize(
+ ("values", "matches", "default", "expect"),
+ (
+ ([("en-us", 1)], ["en"], None, "en"),
+ ([("en", 1)], ["en_US"], None, "en_US"),
+ ([("en-GB", 1)], ["en-US"], None, None),
+ ([("de_AT", 1), ("de", 0.9)], ["en"], None, None),
+ ([("de_AT", 1), ("de", 0.9), ("en-US", 0.8)], ["de", "en"], None, "de"),
+ ([("de_AT", 0.9), ("en-US", 1)], ["en"], None, "en"),
+ ([("en-us", 1)], ["en-us"], None, "en-us"),
+ ([("en-us", 1)], ["en-us", "en"], None, "en-us"),
+ ([("en-GB", 1)], ["en-US", "en"], "en-US", "en"),
+ ([("de_AT", 1)], ["en-US", "en"], "en-US", "en-US"),
+ ([("aus-EN", 1)], ["aus"], None, "aus"),
+ ([("aus", 1)], ["aus-EN"], None, "aus-EN"),
+ ),
+ )
+ def test_best_match_fallback(self, values, matches, default, expect):
+ accept = LanguageAccept(values)
+ best = accept.best_match(matches, default=default)
+ assert best == expect
class TestFileStorage(object):
From f8442efa50369bc65427c7cfaee00c65ac906edd Mon Sep 17 00:00:00 2001
From: John Zeringue
Date: Fri, 31 May 2019 14:05:31 -0400
Subject: [PATCH 060/733] Use MIME parameters to match Accept header
Previously, we didn't parse MIME parameters or use them to match or
prioritize content types. Now, we'll normalize and compare MIME
parameters and consider a MIME type with parameters higher priority than
one without them.
Fixes #458
---
CHANGES.rst | 1 +
src/werkzeug/datastructures.py | 17 ++++++++++++-----
tests/test_datastructures.py | 4 ++++
tests/test_http.py | 7 +++++++
4 files changed, 24 insertions(+), 5 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 63fda5361..1a4f5f3f8 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -20,6 +20,7 @@ Unreleased
:pr:`1532`
- The user agent for Opera 60 on Mac is correctly reported as
"opera" instead of "chrome". :issue:`1556`
+- Use MIME parameters to better match Accept header. :issue:`458`
- The platform for Crosswalk on Android is correctly reported as
"android" instead of "chromeos". (:pr:`1572`)
- Issue a warning when the current server name does not match the
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index aa371cd76..383f370a4 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -1785,30 +1785,37 @@ def best(self):
return self[0][0]
+_mime_re = re.compile(r"/|(?:\s*;\s*)")
+
+
class MIMEAccept(Accept):
"""Like :class:`Accept` but with special methods and behavior for
mimetypes.
"""
def _specificity(self, value):
- return tuple(x != "*" for x in value.split("/", 1))
+ return tuple(x != "*" for x in _mime_re.split(value))
def _value_matches(self, value, item):
def _normalize(x):
x = x.lower()
- return ("*", "*") if x == "*" else x.split("/", 1)
+ return _mime_re.split(x)
# this is from the application which is trusted. to avoid developer
# frustration we actually check these for valid values
if "/" not in value:
raise ValueError("invalid mimetype %r" % value)
- value_type, value_subtype = _normalize(value)
+ normalized_value = _normalize(value)
+ value_type, value_subtype = normalized_value[:2]
+ value_params = sorted(normalized_value[2:])
if value_type == "*" and value_subtype != "*":
raise ValueError("invalid mimetype %r" % value)
if "/" not in item:
return False
- item_type, item_subtype = _normalize(item)
+ normalized_item = _normalize(item)
+ item_type, item_subtype = normalized_item[:2]
+ item_params = sorted(normalized_item[2:])
if item_type == "*" and item_subtype != "*":
return False
return (
@@ -1818,7 +1825,7 @@ def _normalize(x):
and (
item_subtype == "*"
or value_subtype == "*"
- or item_subtype == value_subtype
+ or (item_subtype == value_subtype and item_params == value_params)
)
)
diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py
index 6ae16b137..e7a05826c 100644
--- a/tests/test_datastructures.py
+++ b/tests/test_datastructures.py
@@ -1099,6 +1099,10 @@ def test_accept_wildcard_specificity(self):
accept = self.storage_class([("*/*", 1), ("text/html", 1), ("image/*", 1)])
assert accept.best_match(["image/png", "text/html"]) == "text/html"
assert accept.best_match(["text/plain", "image/png"]) == "image/png"
+ accept = self.storage_class([("text/html", 1), ("text/html; level=1", 1)])
+ assert (
+ accept.best_match(["text/html", "text/html;level=1"]) == "text/html;level=1"
+ )
class TestLanguageAccept(object):
diff --git a/tests/test_http.py b/tests/test_http.py
index 3709daf08..7fbef44f7 100644
--- a/tests/test_http.py
+++ b/tests/test_http.py
@@ -61,6 +61,13 @@ def test_accept_matches(self):
assert a.best_match(["foo/bar", "bar/foo"], default="foo/bar") == "foo/bar"
assert a.best_match(["application/xml", "text/xml"]) == "application/xml"
+ def test_accept_mime_specificity(self):
+ a = http.parse_accept_header(
+ "text/*, text/html, text/html;level=1, */*", datastructures.MIMEAccept
+ )
+ assert a.best_match(["text/html; version=1", "text/html"]) == "text/html"
+ assert a.best_match(["text/html", "text/html; level=1"]) == "text/html; level=1"
+
def test_charset_accept(self):
a = http.parse_accept_header(
"ISO-8859-1,utf-8;q=0.7,*;q=0.7", datastructures.CharsetAccept
From 37b3c48f6b6c384e9a1fabe6342c3708f227ed20 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Thu, 5 Sep 2019 10:35:52 -0700
Subject: [PATCH 061/733] add changelog, parametrize test
---
CHANGES.rst | 5 +--
src/werkzeug/datastructures.py | 40 ++++++++++++++--------
tests/test_datastructures.py | 62 +++++++++++++++++++++++-----------
3 files changed, 72 insertions(+), 35 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 1a4f5f3f8..01f386f90 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -20,7 +20,6 @@ Unreleased
:pr:`1532`
- The user agent for Opera 60 on Mac is correctly reported as
"opera" instead of "chrome". :issue:`1556`
-- Use MIME parameters to better match Accept header. :issue:`458`
- The platform for Crosswalk on Android is correctly reported as
"android" instead of "chromeos". (:pr:`1572`)
- Issue a warning when the current server name does not match the
@@ -37,9 +36,11 @@ Unreleased
``samesite``. :issue:`1549`
- Support the Content Security Policy header through the
`Response.content_security_policy` data structure. :pr:`1617`
-- ``AcceptLanguage`` will fall back to matching "en" for "en-US" or
+- ``LanguageAccept`` will fall back to matching "en" for "en-US" or
"en-US" for "en" to better support clients or translations that
only match at the primary language tag. :issue:`450`, :pr:`1507`
+- ``MIMEAccept`` uses MIME parameters for specificity when matching.
+ :issue:`458`, :pr:`1574`
- Optional request log highlighting with the development server is
handled by Click instead of termcolor. :issue:`1235`
- Optional ad-hoc TLS support for the development server is handled
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index 383f370a4..f22d38de4 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -29,8 +29,6 @@
from ._internal import _missing
from .filesystem import get_filesystem_encoding
-_locale_delim_re = re.compile(r"[_-]")
-
def is_immutable(self):
raise TypeError("%r objects are immutable" % self.__class__.__name__)
@@ -1785,7 +1783,11 @@ def best(self):
return self[0][0]
-_mime_re = re.compile(r"/|(?:\s*;\s*)")
+_mime_split_re = re.compile(r"/|(?:\s*;\s*)")
+
+
+def _normalize_mime(value):
+ return _mime_split_re.split(value.lower())
class MIMEAccept(Accept):
@@ -1794,32 +1796,39 @@ class MIMEAccept(Accept):
"""
def _specificity(self, value):
- return tuple(x != "*" for x in _mime_re.split(value))
+ return tuple(x != "*" for x in _mime_split_re.split(value))
def _value_matches(self, value, item):
- def _normalize(x):
- x = x.lower()
- return _mime_re.split(x)
+ # item comes from the client, can't match if it's invalid.
+ if "/" not in item:
+ return False
- # this is from the application which is trusted. to avoid developer
- # frustration we actually check these for valid values
+ # value comes from the application, tell the developer when it
+ # doesn't look valid.
if "/" not in value:
raise ValueError("invalid mimetype %r" % value)
- normalized_value = _normalize(value)
+
+ # Split the match value into type, subtype, and a sorted list of parameters.
+ normalized_value = _normalize_mime(value)
value_type, value_subtype = normalized_value[:2]
value_params = sorted(normalized_value[2:])
+
+ # "*/*" is the only valid value that can start with "*".
if value_type == "*" and value_subtype != "*":
raise ValueError("invalid mimetype %r" % value)
- if "/" not in item:
- return False
- normalized_item = _normalize(item)
+ # Split the accept item into type, subtype, and parameters.
+ normalized_item = _normalize_mime(item)
item_type, item_subtype = normalized_item[:2]
item_params = sorted(normalized_item[2:])
+
+ # "*/not-*" from the client is invalid, can't match.
if item_type == "*" and item_subtype != "*":
return False
+
return (
- item_type == item_subtype == "*" or value_type == value_subtype == "*"
+ (item_type == "*" and item_subtype == "*")
+ or (value_type == "*" and value_subtype == "*")
) or (
item_type == value_type
and (
@@ -1847,6 +1856,9 @@ def accept_json(self):
return "application/json" in self
+_locale_delim_re = re.compile(r"[_-]")
+
+
def _normalize_lang(value):
"""Process a language tag for matching."""
return _locale_delim_re.split(value.lower())
diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py
index e7a05826c..800da86b0 100644
--- a/tests/test_datastructures.py
+++ b/tests/test_datastructures.py
@@ -38,6 +38,7 @@
from werkzeug._compat import PY2
from werkzeug._compat import text_type
from werkzeug.datastructures import LanguageAccept
+from werkzeug.datastructures import MIMEAccept
from werkzeug.datastructures import Range
from werkzeug.exceptions import BadRequestKeyError
@@ -1084,25 +1085,48 @@ def test_accept_wildcard_specificity(self):
class TestMIMEAccept(object):
- storage_class = datastructures.MIMEAccept
-
- def test_accept_wildcard_subtype(self):
- accept = self.storage_class([("text/*", 1)])
- assert accept.best_match(["text/html"], default=None) == "text/html"
- assert accept.best_match(["image/png", "text/plain"]) == "text/plain"
- assert accept.best_match(["image/png"], default=None) is None
-
- def test_accept_wildcard_specificity(self):
- accept = self.storage_class([("*/*", 1), ("text/html", 1)])
- assert accept.best_match(["image/png", "text/html"]) == "text/html"
- assert accept.best_match(["image/png", "text/plain"]) == "image/png"
- accept = self.storage_class([("*/*", 1), ("text/html", 1), ("image/*", 1)])
- assert accept.best_match(["image/png", "text/html"]) == "text/html"
- assert accept.best_match(["text/plain", "image/png"]) == "image/png"
- accept = self.storage_class([("text/html", 1), ("text/html; level=1", 1)])
- assert (
- accept.best_match(["text/html", "text/html;level=1"]) == "text/html;level=1"
- )
+ @pytest.mark.parametrize(
+ ("values", "matches", "default", "expect"),
+ [
+ ([("text/*", 1)], ["text/html"], None, "text/html"),
+ ([("text/*", 1)], ["image/png"], "text/plain", "text/plain"),
+ ([("text/*", 1)], ["image/png"], None, None),
+ (
+ [("*/*", 1), ("text/html", 1)],
+ ["image/png", "text/html"],
+ None,
+ "text/html",
+ ),
+ (
+ [("*/*", 1), ("text/html", 1)],
+ ["image/png", "text/plain"],
+ None,
+ "image/png",
+ ),
+ (
+ [("*/*", 1), ("text/html", 1), ("image/*", 1)],
+ ["image/png", "text/html"],
+ None,
+ "text/html",
+ ),
+ (
+ [("*/*", 1), ("text/html", 1), ("image/*", 1)],
+ ["text/plain", "image/png"],
+ None,
+ "image/png",
+ ),
+ (
+ [("text/html", 1), ("text/html; level=1", 1)],
+ ["text/html;level=1"],
+ None,
+ "text/html;level=1",
+ ),
+ ],
+ )
+ def test_mime_accept(self, values, matches, default, expect):
+ accept = MIMEAccept(values)
+ match = accept.best_match(matches, default=default)
+ assert match == expect
class TestLanguageAccept(object):
From 4fb73773e95f708278ea9507c1c14a9628ebaa65 Mon Sep 17 00:00:00 2001
From: Steve Genoud
Date: Thu, 14 Feb 2019 14:44:06 +0100
Subject: [PATCH 062/733] Multiple cookies can have the same name
---
src/werkzeug/wrappers/base_request.py | 5 ++---
tests/test_wrappers.py | 11 +++++++----
2 files changed, 9 insertions(+), 7 deletions(-)
diff --git a/src/werkzeug/wrappers/base_request.py b/src/werkzeug/wrappers/base_request.py
index d4655a4fa..24ed43f55 100644
--- a/src/werkzeug/wrappers/base_request.py
+++ b/src/werkzeug/wrappers/base_request.py
@@ -9,7 +9,6 @@
from ..datastructures import EnvironHeaders
from ..datastructures import ImmutableList
from ..datastructures import ImmutableMultiDict
-from ..datastructures import ImmutableTypeConversionDict
from ..datastructures import iter_multi_items
from ..datastructures import MultiDict
from ..formparser import default_stream_factory
@@ -118,11 +117,11 @@ class Request(BaseRequest, ETagRequestMixin):
#: the type to be used for dict values from the incoming WSGI environment.
#: By default an
- #: :class:`~werkzeug.datastructures.ImmutableTypeConversionDict` is used
+ #: :class:`~werkzeug.datastructures.ImmutableMultiDict` is used
#: (for example for :attr:`cookies`).
#:
#: .. versionadded:: 0.6
- dict_storage_class = ImmutableTypeConversionDict
+ dict_storage_class = ImmutableMultiDict
#: The form data parser that shoud be used. Can be replaced to customize
#: the form date parsing.
diff --git a/tests/test_wrappers.py b/tests/test_wrappers.py
index 030053257..b0ade1032 100644
--- a/tests/test_wrappers.py
+++ b/tests/test_wrappers.py
@@ -28,8 +28,8 @@
from werkzeug.datastructures import CombinedMultiDict
from werkzeug.datastructures import Headers
from werkzeug.datastructures import ImmutableList
+from werkzeug.datastructures import ImmutableMultiDict
from werkzeug.datastructures import ImmutableOrderedMultiDict
-from werkzeug.datastructures import ImmutableTypeConversionDict
from werkzeug.datastructures import LanguageAccept
from werkzeug.datastructures import MIMEAccept
from werkzeug.datastructures import MultiDict
@@ -1247,9 +1247,12 @@ class MyRequest(wrappers.Request):
assert type(req.values) is CombinedMultiDict
assert req.values["foo"] == u"baz"
- req = wrappers.Request.from_values(headers={"Cookie": "foo=bar"})
- assert type(req.cookies) is ImmutableTypeConversionDict
- assert req.cookies == {"foo": "bar"}
+ req = wrappers.Request.from_values(headers={"Cookie": "foo=bar;foo=baz"})
+ assert type(req.cookies) is ImmutableMultiDict
+ assert req.cookies.to_dict() == {"foo": "bar"}
+
+ # it is possible to have multiple cookies with the same name
+ assert req.cookies.getlist("foo") == ["bar", "baz"]
assert type(req.access_route) is ImmutableList
MyRequest.list_storage_class = tuple
From 0ccb76a6422f82bd0eb064caeafecbabfabf3cad Mon Sep 17 00:00:00 2001
From: David Lord
Date: Fri, 6 Sep 2019 07:31:20 -0700
Subject: [PATCH 063/733] add changelog for multiple cookies
---
CHANGES.rst | 4 ++++
src/werkzeug/wrappers/base_request.py | 10 ++++++----
2 files changed, 10 insertions(+), 4 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 01f386f90..c312d6bb5 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -13,6 +13,10 @@ Unreleased
- Directive keys for the ``Set-Cookie`` response header are not
ignored when parsing the ``Cookie`` request header. This allows
cookies with names such as "expires" and "version". (:issue:`1495`)
+- Cookies are parsed into a ``MultiDict`` to capture all values for
+ cookies with the same key. ``cookies[key]`` returns the first value
+ received, rather than the last. Use ``cookies.getlist(key)`` to get
+ all values. :issue:`1562`, :pr:`1458`
- Add ``charset=utf-8`` to an HTTP exception response's
``CONTENT_TYPE`` header. (:pr:`1526`)
- The interactive debugger handles outer variables in nested scopes
diff --git a/src/werkzeug/wrappers/base_request.py b/src/werkzeug/wrappers/base_request.py
index 24ed43f55..e18949b33 100644
--- a/src/werkzeug/wrappers/base_request.py
+++ b/src/werkzeug/wrappers/base_request.py
@@ -115,10 +115,12 @@ class Request(BaseRequest, ETagRequestMixin):
#: .. versionadded:: 0.6
list_storage_class = ImmutableList
- #: the type to be used for dict values from the incoming WSGI environment.
- #: By default an
- #: :class:`~werkzeug.datastructures.ImmutableMultiDict` is used
- #: (for example for :attr:`cookies`).
+ #: The type to be used for dict values from the incoming WSGI
+ #: environment. (For example for :attr:`cookies`.) By default an
+ #: :class:`~werkzeug.datastructures.ImmutableMultiDict` is used.
+ #:
+ #: .. versionchanged:: 1.0.0
+ #: Changed to ``ImmutableMultiDict`` to support multiple values.
#:
#: .. versionadded:: 0.6
dict_storage_class = ImmutableMultiDict
From 386fdae96b63f0f8b44cc0de0adaa73b456308c7 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Fri, 6 Sep 2019 08:03:56 -0700
Subject: [PATCH 064/733] parse_cookie uses MultiDict
---
CHANGES.rst | 9 ++---
src/werkzeug/http.py | 42 ++++++++++++-----------
tests/test_http.py | 79 +++++++++++++++++++++-----------------------
3 files changed, 64 insertions(+), 66 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index c312d6bb5..12da41a96 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -13,10 +13,11 @@ Unreleased
- Directive keys for the ``Set-Cookie`` response header are not
ignored when parsing the ``Cookie`` request header. This allows
cookies with names such as "expires" and "version". (:issue:`1495`)
-- Cookies are parsed into a ``MultiDict`` to capture all values for
- cookies with the same key. ``cookies[key]`` returns the first value
- received, rather than the last. Use ``cookies.getlist(key)`` to get
- all values. :issue:`1562`, :pr:`1458`
+- Request cookies are parsed into a ``MultiDict`` to capture all
+ values for cookies with the same key. ``cookies[key]`` returns the
+ first value rather than the last. Use ``cookies.getlist(key)`` to
+ get all values. ``parse_cookie`` also defaults to a ``MultiDict``.
+ :issue:`1562`, :pr:`1458`
- Add ``charset=utf-8`` to an HTTP exception response's
``CONTENT_TYPE`` header. (:pr:`1526`)
- The interactive debugger handles outer variables in nested scopes
diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py
index 6d85ffb20..d5490bccf 100644
--- a/src/werkzeug/http.py
+++ b/src/werkzeug/http.py
@@ -1078,38 +1078,40 @@ def is_hop_by_hop_header(header):
def parse_cookie(header, charset="utf-8", errors="replace", cls=None):
- """Parse a cookie. Either from a string or WSGI environ.
+ """Parse a cookie from a string or WSGI environ.
- Per default encoding errors are ignored. If you want a different behavior
- you can set `errors` to ``'replace'`` or ``'strict'``. In strict mode a
- :exc:`HTTPUnicodeError` is raised.
+ The same key can be provided multiple times, the values are stored
+ in-order. The default :class:`MultiDict` will have the first value
+ first, and all values can be retrieved with
+ :meth:`MultiDict.getlist`.
+
+ :param header: The cookie header as a string, or a WSGI environ dict
+ with a ``HTTP_COOKIE`` key.
+ :param charset: The charset for the cookie values.
+ :param errors: The error behavior for the charset decoding.
+ :param cls: A dict-like class to store the parsed cookies in.
+ Defaults to :class:`MultiDict`.
+
+ .. versionchanged:: 1.0.0
+ Returns a :class:`MultiDict` instead of a
+ ``TypeConversionDict``.
.. versionchanged:: 0.5
- This function now returns a :class:`TypeConversionDict` instead of a
- regular dict. The `cls` parameter was added.
-
- :param header: the header to be used to parse the cookie. Alternatively
- this can be a WSGI environment.
- :param charset: the charset for the cookie values.
- :param errors: the error behavior for the charset decoding.
- :param cls: an optional dict class to use. If this is not specified
- or `None` the default :class:`TypeConversionDict` is
- used.
+ Returns a :class:`TypeConversionDict` instead of a regular dict.
+ The ``cls`` parameter was added.
"""
if isinstance(header, dict):
header = header.get("HTTP_COOKIE", "")
elif header is None:
header = ""
- # If the value is an unicode string it's mangled through latin1. This
- # is done because on PEP 3333 on Python 3 all headers are assumed latin1
- # which however is incorrect for cookies, which are sent in page encoding.
- # As a result we
+ # On Python 3, PEP 3333 sends headers through the environ as latin1
+ # decoded strings. Encode strings back to bytes for parsing.
if isinstance(header, text_type):
header = header.encode("latin1", "replace")
if cls is None:
- cls = TypeConversionDict
+ cls = MultiDict
def _parse_pairs():
for key, val in _cookie_parse_impl(header):
@@ -1287,8 +1289,8 @@ def is_byte_range_valid(start, stop, length):
from .datastructures import ETags
from .datastructures import HeaderSet
from .datastructures import IfRange
+from .datastructures import MultiDict
from .datastructures import Range
from .datastructures import RequestCacheControl
-from .datastructures import TypeConversionDict
from .datastructures import WWWAuthenticate
from .urls import iri_to_uri
diff --git a/tests/test_http.py b/tests/test_http.py
index 7fbef44f7..61386b34c 100644
--- a/tests/test_http.py
+++ b/tests/test_http.py
@@ -442,22 +442,22 @@ def test_date_formatting(self):
assert http.http_date(0) == "Thu, 01 Jan 1970 00:00:00 GMT"
assert http.http_date(datetime(1970, 1, 1)) == "Thu, 01 Jan 1970 00:00:00 GMT"
- def test_cookies(self):
- strict_eq(
- dict(
- http.parse_cookie(
- "dismiss-top=6; CP=null*; PHPSESSID=0a539d42abc001cd"
- 'c762809248d4beed; a=42; b="\\";"'
- )
- ),
- {
- "CP": u"null*",
- "PHPSESSID": u"0a539d42abc001cdc762809248d4beed",
- "a": u"42",
- "dismiss-top": u"6",
- "b": u'";',
- },
- )
+ def test_parse_cookie(self):
+ cookies = http.parse_cookie(
+ "dismiss-top=6; CP=null*; PHPSESSID=0a539d42abc001cdc762809248d4beed;"
+ ' a=42; b="\\";"; ; fo234{=bar;blub=Blah;'
+ )
+ assert cookies.to_dict() == {
+ "CP": u"null*",
+ "PHPSESSID": u"0a539d42abc001cdc762809248d4beed",
+ "a": u"42",
+ "dismiss-top": u"6",
+ "b": u'";',
+ "fo234{": u"bar",
+ "blub": u"Blah",
+ }
+
+ def test_dump_cookie(self):
rv = http.dump_cookie(
"foo", "bar baz blub", 360, httponly=True, sync_expires=False
)
@@ -468,42 +468,37 @@ def test_cookies(self):
"Path=/",
'foo="bar baz blub"',
}
-
- strict_eq(
- dict(http.parse_cookie("fo234{=bar; blub=Blah")),
- {"fo234{": u"bar", "blub": u"Blah"},
- )
-
- strict_eq(http.dump_cookie("key", "xxx/"), "key=xxx/; Path=/")
- strict_eq(http.dump_cookie("key", "xxx="), "key=xxx=; Path=/")
+ assert http.dump_cookie("key", "xxx/") == "key=xxx/; Path=/"
+ assert http.dump_cookie("key", "xxx=") == "key=xxx=; Path=/"
def test_bad_cookies(self):
- strict_eq(
- dict(
- http.parse_cookie(
- "first=IamTheFirst ; a=1; oops ; a=2 ;second = andMeTwo;"
- )
- ),
- {"first": u"IamTheFirst", "a": u"2", "oops": u"", "second": u"andMeTwo"},
- )
+ cookies = http.parse_cookie(
+ "first=IamTheFirst ; a=1; oops ; a=2 ;second = andMeTwo;"
+ )
+ expect = {
+ "first": [u"IamTheFirst"],
+ "a": [u"1", u"2"],
+ "oops": [u""],
+ "second": [u"andMeTwo"],
+ }
+ assert cookies.to_dict(flat=False) == expect
+ assert cookies["a"] == u"1"
+ assert cookies.getlist("a") == [u"1", u"2"]
def test_empty_keys_are_ignored(self):
- strict_eq(
- dict(
- http.parse_cookie("first=IamTheFirst ; a=1; a=2 ;second=andMeTwo; ; ")
- ),
- {"first": u"IamTheFirst", "a": u"2", "second": u"andMeTwo"},
- )
+ cookies = http.parse_cookie("spam=ham; duck=mallard; ; ")
+ expect = {"spam": u"ham", "duck": u"mallard"}
+ assert cookies.to_dict() == expect
def test_cookie_quoting(self):
val = http.dump_cookie("foo", "?foo")
- strict_eq(val, 'foo="?foo"; Path=/')
- strict_eq(dict(http.parse_cookie(val)), {"foo": u"?foo", "Path": u"/"})
- strict_eq(dict(http.parse_cookie(r'foo="foo\054bar"')), {"foo": u"foo,bar"})
+ assert val == 'foo="?foo"; Path=/'
+ assert http.parse_cookie(val).to_dict() == {"foo": u"?foo", "Path": u"/"}
+ assert http.parse_cookie(r'foo="foo\054bar"').to_dict(), {"foo": u"foo,bar"}
def test_parse_set_cookie_directive(self):
val = 'foo="?foo"; version="0.1";'
- strict_eq(dict(http.parse_cookie(val)), {"foo": u"?foo", "version": u"0.1"})
+ assert http.parse_cookie(val).to_dict() == {"foo": u"?foo", "version": u"0.1"}
def test_cookie_domain_resolving(self):
val = http.dump_cookie("foo", "bar", domain=u"\N{SNOWMAN}.com")
From 9896e6c1e2146c058423962dc627e87001c86bbd Mon Sep 17 00:00:00 2001
From: Lucas Barsand
Date: Fri, 27 Oct 2017 13:02:27 -0200
Subject: [PATCH 065/733] implement TLS peer authentication
---
src/werkzeug/serving.py | 21 ++++++++++++++++++++-
1 file changed, 20 insertions(+), 1 deletion(-)
diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py
index 9ebd125f9..137060480 100644
--- a/src/werkzeug/serving.py
+++ b/src/werkzeug/serving.py
@@ -225,6 +225,17 @@ def shutdown_server():
if request_url.scheme and request_url.netloc:
environ["HTTP_HOST"] = request_url.netloc
+ try:
+ peer_cert = self.connection.getpeercert()
+ if peer_cert is not None:
+ environ["SSL_CLIENT_CERT"] = peer_cert
+ except ValueError:
+ self.server.log("error", "Cannot fetch SSL peer certificate info")
+ except AttributeError:
+ # This error indicates that no TLS setup was made, and it is
+ # raised because socket will not have such function getpeercert()
+ pass
+
return environ
def run_wsgi(self):
@@ -716,17 +727,25 @@ def __init__(
self.server_address = self.socket.getsockname()
if ssl_context is not None:
+ ssl_kwargs = {"server_side": True}
if isinstance(ssl_context, tuple):
ssl_context = load_ssl_context(*ssl_context)
+ if isinstance(ssl_context, dict):
+ cert_file = ssl_context.pop("cert_file")
+ pkey_file = ssl_context.pop("pkey_file")
+ for key in ssl_context:
+ ssl_kwargs[key] = ssl_context[key]
+ ssl_context = load_ssl_context(cert_file, pkey_file)
if ssl_context == "adhoc":
ssl_context = generate_adhoc_ssl_context()
+
# If we are on Python 2 the return value from socket.fromfd
# is an internal socket object but what we need for ssl wrap
# is the wrapper around it :(
sock = self.socket
if PY2 and not isinstance(sock, socket.socket):
sock = socket.socket(sock.family, sock.type, sock.proto, sock)
- self.socket = ssl_context.wrap_socket(sock, server_side=True)
+ self.socket = ssl_context.wrap_socket(sock, **ssl_kwargs)
self.ssl_context = ssl_context
else:
self.ssl_context = None
From 320abb6faed3139abd235e1a7746277cdc6c44b8 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Sat, 7 Sep 2019 11:20:45 -0700
Subject: [PATCH 066/733] remove passing ssl_context a dict
---
CHANGES.rst | 4 ++++
src/werkzeug/serving.py | 26 +++++++++++++-------------
2 files changed, 17 insertions(+), 13 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 12da41a96..89fb3d084 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -46,6 +46,10 @@ Unreleased
only match at the primary language tag. :issue:`450`, :pr:`1507`
- ``MIMEAccept`` uses MIME parameters for specificity when matching.
:issue:`458`, :pr:`1574`
+- If the development server is started with an ``SSLContext``
+ configured to verify client certificates, the certificate in PEM
+ format will be available as ``environ["SSL_CLIENT_CERT"]``.
+ :pr:`1469`
- Optional request log highlighting with the development server is
handled by Click instead of termcolor. :issue:`1235`
- Optional ad-hoc TLS support for the development server is handled
diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py
index 137060480..436fd3a9f 100644
--- a/src/werkzeug/serving.py
+++ b/src/werkzeug/serving.py
@@ -226,14 +226,17 @@ def shutdown_server():
environ["HTTP_HOST"] = request_url.netloc
try:
- peer_cert = self.connection.getpeercert()
+ # binary_form=False gives nicer information, but wouldn't be compatible with
+ # what Nginx or Apache could return.
+ peer_cert = self.connection.getpeercert(binary_form=True)
if peer_cert is not None:
- environ["SSL_CLIENT_CERT"] = peer_cert
+ # Nginx and Apache use PEM format.
+ environ["SSL_CLIENT_CERT"] = ssl.DER_cert_to_PEM_cert(peer_cert)
except ValueError:
+ # SSL handshake hasn't finished.
self.server.log("error", "Cannot fetch SSL peer certificate info")
except AttributeError:
- # This error indicates that no TLS setup was made, and it is
- # raised because socket will not have such function getpeercert()
+ # Not using TLS, the socket will not have getpeercert().
pass
return environ
@@ -605,7 +608,11 @@ def load_ssl_context(cert_file, pkey_file=None, protocol=None):
module. Defaults to ``PROTOCOL_SSLv23``.
"""
if protocol is None:
- protocol = ssl.PROTOCOL_SSLv23
+ try:
+ protocol = ssl.PROTOCOL_TLS_SERVER
+ except AttributeError:
+ # Python <= 3.5 compat
+ protocol = ssl.PROTOCOL_SSLv23
ctx = _SSLContext(protocol)
ctx.load_cert_chain(cert_file, pkey_file)
return ctx
@@ -727,15 +734,8 @@ def __init__(
self.server_address = self.socket.getsockname()
if ssl_context is not None:
- ssl_kwargs = {"server_side": True}
if isinstance(ssl_context, tuple):
ssl_context = load_ssl_context(*ssl_context)
- if isinstance(ssl_context, dict):
- cert_file = ssl_context.pop("cert_file")
- pkey_file = ssl_context.pop("pkey_file")
- for key in ssl_context:
- ssl_kwargs[key] = ssl_context[key]
- ssl_context = load_ssl_context(cert_file, pkey_file)
if ssl_context == "adhoc":
ssl_context = generate_adhoc_ssl_context()
@@ -745,7 +745,7 @@ def __init__(
sock = self.socket
if PY2 and not isinstance(sock, socket.socket):
sock = socket.socket(sock.family, sock.type, sock.proto, sock)
- self.socket = ssl_context.wrap_socket(sock, **ssl_kwargs)
+ self.socket = ssl_context.wrap_socket(sock, server_side=True)
self.ssl_context = ssl_context
else:
self.ssl_context = None
From aa022fb1e7df5d0727b2f4099f776997b61f16ce Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Lafr=C3=A9choux?=
Date: Fri, 6 Sep 2019 21:51:58 +0200
Subject: [PATCH 067/733] Update "Test Utilities" doc to Python 3
---
docs/test.rst | 17 +++++++++--------
1 file changed, 9 insertions(+), 8 deletions(-)
diff --git a/docs/test.rst b/docs/test.rst
index c7e213f85..bbeadcdcc 100644
--- a/docs/test.rst
+++ b/docs/test.rst
@@ -32,9 +32,9 @@ in test functionality.
>>> resp.status_code
200
>>> resp.headers
-Headers([('Content-Type', 'text/html; charset=utf-8'), ('Content-Length', '8339')])
+Headers([('Content-Type', 'text/html; charset=utf-8'), ('Content-Length', '6658')])
>>> resp.data.splitlines()[0]
-'>> status
'200 OK'
>>> headers
-[('Content-Type', 'text/html; charset=utf-8'), ('Content-Length', '8339')]
->>> ''.join(app_iter).splitlines()[0]
+[('Content-Type', 'text/html; charset=utf-8'), ('Content-Length', '6658')]
+>>> ''.join(i.decode() for i in app_iter).splitlines()[0]
'>> env = builder.get_environ()
The resulting environment is a regular WSGI environment that can be used for
@@ -72,11 +72,11 @@ further processing:
>>> from werkzeug.wrappers import Request
>>> req = Request(env)
>>> req.form['foo']
-u'this is some text'
+'this is some text'
>>> req.files['file']
>>> req.files['file'].read()
-'my file contents'
+b'my file contents'
The :class:`EnvironBuilder` figures out the content type automatically if you
pass a dict to the constructor as `data`. If you provide a string or an
@@ -85,6 +85,7 @@ input stream you have to do that yourself.
By default it will try to use ``application/x-www-form-urlencoded`` and only
use ``multipart/form-data`` if files are uploaded:
+>>> from io import StringIO
>>> builder = EnvironBuilder(method='POST', data={'foo': 'bar'})
>>> builder.content_type
'application/x-www-form-urlencoded'
From 9c0d934bdd59d3653c5b37e235ac4fba251f4148 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Mon, 9 Sep 2019 07:38:49 -0700
Subject: [PATCH 068/733] more test.rst py3 compat
---
docs/test.rst | 11 +++++------
1 file changed, 5 insertions(+), 6 deletions(-)
diff --git a/docs/test.rst b/docs/test.rst
index bbeadcdcc..d726123f3 100644
--- a/docs/test.rst
+++ b/docs/test.rst
@@ -43,9 +43,9 @@ Or without a wrapper defined:
>>> status
'200 OK'
>>> headers
-[('Content-Type', 'text/html; charset=utf-8'), ('Content-Length', '6658')]
->>> ''.join(i.decode() for i in app_iter).splitlines()[0]
-'>> b''.join(app_iter).splitlines()[0]
+b'>> env = builder.get_environ()
The resulting environment is a regular WSGI environment that can be used for
@@ -85,11 +85,10 @@ input stream you have to do that yourself.
By default it will try to use ``application/x-www-form-urlencoded`` and only
use ``multipart/form-data`` if files are uploaded:
->>> from io import StringIO
>>> builder = EnvironBuilder(method='POST', data={'foo': 'bar'})
>>> builder.content_type
'application/x-www-form-urlencoded'
->>> builder.files['foo'] = StringIO('contents')
+>>> builder.files['foo'] = BytesIO('contents'.encode("utf8"))
>>> builder.content_type
'multipart/form-data'
From c43bb51106d0f3d21c5860c830fa34afe11b5dec Mon Sep 17 00:00:00 2001
From: David Lord
Date: Mon, 9 Sep 2019 08:03:31 -0700
Subject: [PATCH 069/733] is_resource_modified works for all methods
---
CHANGES.rst | 2 ++
src/werkzeug/http.py | 5 +++--
tests/test_http.py | 4 ++--
3 files changed, 7 insertions(+), 4 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 89fb3d084..2d37eaa20 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -50,6 +50,8 @@ Unreleased
configured to verify client certificates, the certificate in PEM
format will be available as ``environ["SSL_CLIENT_CERT"]``.
:pr:`1469`
+- ``is_resource_modified`` will run for methods other than ``GET`` and
+ ``HEAD``, rather than always returning ``False``. :issue:`409`
- Optional request log highlighting with the development server is
handled by Click instead of termcolor. :issue:`1235`
- Optional ad-hoc TLS support for the development server is handled
diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py
index d5490bccf..7fc6bd75d 100644
--- a/src/werkzeug/http.py
+++ b/src/werkzeug/http.py
@@ -967,13 +967,14 @@ def is_resource_modified(
:param ignore_if_range: If `False`, `If-Range` header will be taken into
account.
:return: `True` if the resource was modified, otherwise `False`.
+
+ .. versionchanged:: 1.0.0
+ The check is run for methods other than ``GET`` and ``HEAD``.
"""
if etag is None and data is not None:
etag = generate_etag(data)
elif data is not None:
raise TypeError("both data and etag given")
- if environ["REQUEST_METHOD"] not in ("GET", "HEAD"):
- return False
unmodified = False
if isinstance(last_modified, string_types):
diff --git a/tests/test_http.py b/tests/test_http.py
index 61386b34c..86359609b 100644
--- a/tests/test_http.py
+++ b/tests/test_http.py
@@ -386,9 +386,9 @@ def test_dump_header(self):
def test_is_resource_modified(self):
env = create_environ()
- # ignore POST
+ # any method is allowed
env["REQUEST_METHOD"] = "POST"
- assert not http.is_resource_modified(env, etag="testing")
+ assert http.is_resource_modified(env, etag="testing")
env["REQUEST_METHOD"] = "GET"
# etagify from data
From 487ab5846932cf15d688451a3d61014981cd6e1e Mon Sep 17 00:00:00 2001
From: David Lord
Date: Thu, 12 Sep 2019 10:32:56 -0700
Subject: [PATCH 070/733] remove lazy importer, fix circular imports
---
src/werkzeug/__init__.py | 360 ++++++++++++---------------------
src/werkzeug/datastructures.py | 2 +-
src/werkzeug/exceptions.py | 16 +-
src/werkzeug/formparser.py | 4 +-
src/werkzeug/http.py | 5 +-
src/werkzeug/urls.py | 8 +-
6 files changed, 150 insertions(+), 245 deletions(-)
diff --git a/src/werkzeug/__init__.py b/src/werkzeug/__init__.py
index 7b8e14e5c..e16fbdcc9 100644
--- a/src/werkzeug/__init__.py
+++ b/src/werkzeug/__init__.py
@@ -1,233 +1,141 @@
-# -*- coding: utf-8 -*-
"""
- werkzeug
- ~~~~~~~~
+werkzeug
+~~~~~~~~
- Werkzeug is the Swiss Army knife of Python web development.
+Werkzeug is the Swiss Army knife of Python web development.
- It provides useful classes and functions for any WSGI application to make
- the life of a python web developer much easier. All of the provided
- classes are independent from each other so you can mix it with any other
- library.
+It provides useful classes and functions for any WSGI application to
+make the life of a python web developer much easier. All of the provided
+classes are independent from each other so you can mix it with any other
+library.
-
- :copyright: 2007 Pallets
- :license: BSD-3-Clause
+:copyright: 2007 Pallets
+:license: BSD-3-Clause
"""
-import sys
-from types import ModuleType
+from . import exceptions
+from . import routing
+from ._internal import _easteregg
+from .datastructures import Accept
+from .datastructures import Authorization
+from .datastructures import CallbackDict
+from .datastructures import CharsetAccept
+from .datastructures import CombinedMultiDict
+from .datastructures import EnvironHeaders
+from .datastructures import ETags
+from .datastructures import FileMultiDict
+from .datastructures import FileStorage
+from .datastructures import Headers
+from .datastructures import HeaderSet
+from .datastructures import ImmutableDict
+from .datastructures import ImmutableList
+from .datastructures import ImmutableMultiDict
+from .datastructures import ImmutableOrderedMultiDict
+from .datastructures import ImmutableTypeConversionDict
+from .datastructures import LanguageAccept
+from .datastructures import MIMEAccept
+from .datastructures import MultiDict
+from .datastructures import OrderedMultiDict
+from .datastructures import RequestCacheControl
+from .datastructures import ResponseCacheControl
+from .datastructures import TypeConversionDict
+from .datastructures import WWWAuthenticate
+from .debug import DebuggedApplication
+from .exceptions import abort
+from .exceptions import Aborter
+from .formparser import parse_form_data
+from .http import cookie_date
+from .http import dump_cookie
+from .http import dump_header
+from .http import dump_options_header
+from .http import generate_etag
+from .http import http_date
+from .http import HTTP_STATUS_CODES
+from .http import is_entity_header
+from .http import is_hop_by_hop_header
+from .http import is_resource_modified
+from .http import parse_accept_header
+from .http import parse_authorization_header
+from .http import parse_cache_control_header
+from .http import parse_cookie
+from .http import parse_date
+from .http import parse_dict_header
+from .http import parse_etags
+from .http import parse_list_header
+from .http import parse_options_header
+from .http import parse_set_header
+from .http import parse_www_authenticate_header
+from .http import quote_etag
+from .http import quote_header_value
+from .http import remove_entity_headers
+from .http import remove_hop_by_hop_headers
+from .http import unquote_etag
+from .http import unquote_header_value
+from .local import Local
+from .local import LocalManager
+from .local import LocalProxy
+from .local import LocalStack
+from .local import release_local
+from .middleware.dispatcher import DispatcherMiddleware
+from .middleware.shared_data import SharedDataMiddleware
+from .security import check_password_hash
+from .security import generate_password_hash
+from .serving import run_simple
+from .test import Client
+from .test import create_environ
+from .test import EnvironBuilder
+from .test import run_wsgi_app
+from .testapp import test_app
+from .urls import Href
+from .urls import iri_to_uri
+from .urls import uri_to_iri
+from .urls import url_decode
+from .urls import url_encode
+from .urls import url_fix
+from .urls import url_quote
+from .urls import url_quote_plus
+from .urls import url_unquote
+from .urls import url_unquote_plus
+from .useragents import UserAgent
+from .utils import append_slash_redirect
+from .utils import ArgumentValidationError
+from .utils import bind_arguments
+from .utils import cached_property
+from .utils import environ_property
+from .utils import escape
+from .utils import find_modules
+from .utils import format_string
+from .utils import header_property
+from .utils import html
+from .utils import HTMLBuilder
+from .utils import import_string
+from .utils import redirect
+from .utils import secure_filename
+from .utils import unescape
+from .utils import validate_arguments
+from .utils import xhtml
+from .wrappers import AcceptMixin
+from .wrappers import AuthorizationMixin
+from .wrappers import BaseRequest
+from .wrappers import BaseResponse
+from .wrappers import CommonRequestDescriptorsMixin
+from .wrappers import CommonResponseDescriptorsMixin
+from .wrappers import ETagRequestMixin
+from .wrappers import ETagResponseMixin
+from .wrappers import Request
+from .wrappers import Response
+from .wrappers import ResponseStreamMixin
+from .wrappers import UserAgentMixin
+from .wrappers import WWWAuthenticateMixin
+from .wsgi import ClosingIterator
+from .wsgi import extract_path_info
+from .wsgi import FileWrapper
+from .wsgi import get_current_url
+from .wsgi import get_host
+from .wsgi import LimitedStream
+from .wsgi import make_line_iter
+from .wsgi import peek_path_info
+from .wsgi import pop_path_info
+from .wsgi import responder
+from .wsgi import wrap_file
__version__ = "1.0.0.dev0"
-
-# This import magic raises concerns quite often which is why the implementation
-# and motivation is explained here in detail now.
-#
-# The majority of the functions and classes provided by Werkzeug work on the
-# HTTP and WSGI layer. There is no useful grouping for those which is why
-# they are all importable from "werkzeug" instead of the modules where they are
-# implemented. The downside of that is, that now everything would be loaded at
-# once, even if unused.
-#
-# The implementation of a lazy-loading module in this file replaces the
-# werkzeug package when imported from within. Attribute access to the werkzeug
-# module will then lazily import from the modules that implement the objects.
-
-# import mapping to objects in other modules
-all_by_module = {
- "werkzeug.debug": ["DebuggedApplication"],
- "werkzeug.local": [
- "Local",
- "LocalManager",
- "LocalProxy",
- "LocalStack",
- "release_local",
- ],
- "werkzeug.serving": ["run_simple"],
- "werkzeug.test": ["Client", "EnvironBuilder", "create_environ", "run_wsgi_app"],
- "werkzeug.testapp": ["test_app"],
- "werkzeug.exceptions": ["abort", "Aborter"],
- "werkzeug.urls": [
- "url_decode",
- "url_encode",
- "url_quote",
- "url_quote_plus",
- "url_unquote",
- "url_unquote_plus",
- "url_fix",
- "Href",
- "iri_to_uri",
- "uri_to_iri",
- ],
- "werkzeug.formparser": ["parse_form_data"],
- "werkzeug.utils": [
- "escape",
- "environ_property",
- "append_slash_redirect",
- "redirect",
- "cached_property",
- "import_string",
- "unescape",
- "format_string",
- "find_modules",
- "header_property",
- "html",
- "xhtml",
- "HTMLBuilder",
- "validate_arguments",
- "ArgumentValidationError",
- "bind_arguments",
- "secure_filename",
- ],
- "werkzeug.wsgi": [
- "get_current_url",
- "get_host",
- "pop_path_info",
- "peek_path_info",
- "ClosingIterator",
- "FileWrapper",
- "make_line_iter",
- "LimitedStream",
- "responder",
- "wrap_file",
- "extract_path_info",
- ],
- "werkzeug.datastructures": [
- "MultiDict",
- "CombinedMultiDict",
- "Headers",
- "EnvironHeaders",
- "ImmutableList",
- "ImmutableDict",
- "ImmutableMultiDict",
- "TypeConversionDict",
- "ImmutableTypeConversionDict",
- "Accept",
- "MIMEAccept",
- "CharsetAccept",
- "LanguageAccept",
- "RequestCacheControl",
- "ResponseCacheControl",
- "ETags",
- "HeaderSet",
- "WWWAuthenticate",
- "Authorization",
- "FileMultiDict",
- "CallbackDict",
- "FileStorage",
- "OrderedMultiDict",
- "ImmutableOrderedMultiDict",
- ],
- "werkzeug.useragents": ["UserAgent"],
- "werkzeug.http": [
- "parse_etags",
- "parse_date",
- "http_date",
- "cookie_date",
- "parse_cache_control_header",
- "is_resource_modified",
- "parse_accept_header",
- "parse_set_header",
- "quote_etag",
- "unquote_etag",
- "generate_etag",
- "dump_header",
- "parse_list_header",
- "parse_dict_header",
- "parse_authorization_header",
- "parse_www_authenticate_header",
- "remove_entity_headers",
- "is_entity_header",
- "remove_hop_by_hop_headers",
- "parse_options_header",
- "dump_options_header",
- "is_hop_by_hop_header",
- "unquote_header_value",
- "quote_header_value",
- "HTTP_STATUS_CODES",
- "dump_cookie",
- "parse_cookie",
- ],
- "werkzeug.wrappers": [
- "BaseResponse",
- "BaseRequest",
- "Request",
- "Response",
- "AcceptMixin",
- "ETagRequestMixin",
- "ETagResponseMixin",
- "ResponseStreamMixin",
- "CommonResponseDescriptorsMixin",
- "UserAgentMixin",
- "AuthorizationMixin",
- "WWWAuthenticateMixin",
- "CommonRequestDescriptorsMixin",
- ],
- "werkzeug.middleware.dispatcher": ["DispatcherMiddleware"],
- "werkzeug.middleware.shared_data": ["SharedDataMiddleware"],
- "werkzeug.security": ["generate_password_hash", "check_password_hash"],
- # the undocumented easteregg ;-)
- "werkzeug._internal": ["_easteregg"],
-}
-
-# modules that should be imported when accessed as attributes of werkzeug
-attribute_modules = frozenset(["exceptions", "routing"])
-
-object_origins = {}
-for module, items in all_by_module.items():
- for item in items:
- object_origins[item] = module
-
-
-class module(ModuleType):
- """Automatically import objects from the modules."""
-
- def __getattr__(self, name):
- if name in object_origins:
- module = __import__(object_origins[name], None, None, [name])
- for extra_name in all_by_module[module.__name__]:
- setattr(self, extra_name, getattr(module, extra_name))
- return getattr(module, name)
- elif name in attribute_modules:
- __import__("werkzeug." + name)
- return ModuleType.__getattribute__(self, name)
-
- def __dir__(self):
- """Just show what we want to show."""
- result = list(new_module.__all__)
- result.extend(
- (
- "__file__",
- "__doc__",
- "__all__",
- "__docformat__",
- "__name__",
- "__path__",
- "__package__",
- "__version__",
- )
- )
- return result
-
-
-# keep a reference to this module so that it's not garbage collected
-old_module = sys.modules["werkzeug"]
-
-
-# setup the new module and patch it into the dict of loaded modules
-new_module = sys.modules["werkzeug"] = module("werkzeug")
-new_module.__dict__.update(
- {
- "__file__": __file__,
- "__package__": "werkzeug",
- "__path__": __path__,
- "__doc__": __doc__,
- "__version__": __version__,
- "__all__": tuple(object_origins) + tuple(attribute_modules),
- "__docformat__": "restructuredtext en",
- }
-)
-
-
-# Due to bootstrapping issues we need to import exceptions here.
-# Don't ask :-(
-__import__("werkzeug.exceptions")
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index f22d38de4..2286f8dd4 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -14,6 +14,7 @@
from copy import deepcopy
from itertools import repeat
+from . import exceptions
from ._compat import BytesIO
from ._compat import collections_abc
from ._compat import integer_types
@@ -2995,7 +2996,6 @@ def __repr__(self):
# circular dependencies
-from . import exceptions
from .http import dump_csp_header
from .http import dump_header
from .http import dump_options_header
diff --git a/src/werkzeug/exceptions.py b/src/werkzeug/exceptions.py
index 26d03c6ae..c337830d4 100644
--- a/src/werkzeug/exceptions.py
+++ b/src/werkzeug/exceptions.py
@@ -59,18 +59,12 @@ def application(environ, start_response):
"""
import sys
-import werkzeug
-
-# Because of bootstrapping reasons we need to manually patch ourselves
-# onto our parent module.
-werkzeug.exceptions = sys.modules[__name__]
-
from ._compat import implements_to_string
from ._compat import integer_types
from ._compat import iteritems
from ._compat import text_type
from ._internal import _get_environ
-from .wrappers import Response
+from .utils import escape
@implements_to_string
@@ -141,6 +135,8 @@ def description(self, value):
@property
def name(self):
"""The status name."""
+ from .http import HTTP_STATUS_CODES
+
return HTTP_STATUS_CODES.get(self.code, "Unknown Error")
def get_description(self, environ=None):
@@ -176,6 +172,8 @@ def get_response(self, environ=None):
on how the request looked like.
:return: a :class:`Response` object or a subclass thereof.
"""
+ from .wrappers.response import Response
+
if self.response is not None:
return self.response
if environ is not None:
@@ -792,7 +790,3 @@ def abort(status, *args, **kwargs):
#: An exception that is used to signal both a :exc:`KeyError` and a
#: :exc:`BadRequest`. Used by many of the datastructures.
BadRequestKeyError = BadRequest.wrap(KeyError)
-
-# imported here because of circular dependencies of werkzeug.utils
-from .http import HTTP_STATUS_CODES
-from .utils import escape
diff --git a/src/werkzeug/formparser.py b/src/werkzeug/formparser.py
index 0ddc5c8ff..ffdb9b0f1 100644
--- a/src/werkzeug/formparser.py
+++ b/src/werkzeug/formparser.py
@@ -16,6 +16,7 @@
from itertools import repeat
from itertools import tee
+from . import exceptions
from ._compat import BytesIO
from ._compat import text_type
from ._compat import to_native
@@ -581,6 +582,3 @@ def parse(self, file, boundary, content_length):
form = (p[1] for p in formstream if p[0] == "form")
files = (p[1] for p in filestream if p[0] == "file")
return self.cls(form), self.cls(files)
-
-
-from . import exceptions
diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py
index 7fc6bd75d..56dbb0fe5 100644
--- a/src/werkzeug/http.py
+++ b/src/werkzeug/http.py
@@ -1189,6 +1189,8 @@ def dump_cookie(
value = to_bytes(value, charset)
if path is not None:
+ from .urls import iri_to_uri
+
path = iri_to_uri(path, charset)
domain = _make_cookie_domain(domain)
if isinstance(max_age, timedelta):
@@ -1282,7 +1284,7 @@ def is_byte_range_valid(start, stop, length):
return 0 <= start < length
-# circular dependency fun
+# circular dependencies
from .datastructures import Accept
from .datastructures import Authorization
from .datastructures import ContentRange
@@ -1294,4 +1296,3 @@ def is_byte_range_valid(start, stop, length):
from .datastructures import Range
from .datastructures import RequestCacheControl
from .datastructures import WWWAuthenticate
-from .urls import iri_to_uri
diff --git a/src/werkzeug/urls.py b/src/werkzeug/urls.py
index 4a1146fb1..d5e487b3a 100644
--- a/src/werkzeug/urls.py
+++ b/src/werkzeug/urls.py
@@ -31,8 +31,6 @@
from ._compat import try_coerce_native
from ._internal import _decode_idna
from ._internal import _encode_idna
-from .datastructures import iter_multi_items
-from .datastructures import MultiDict
# A regular expression for what a valid schema looks like
_scheme_re = re.compile(r"^[a-zA-Z0-9+-.]+$")
@@ -415,6 +413,8 @@ def _unquote_to_bytes(string, unsafe=""):
def _url_encode_impl(obj, charset, encode_keys, sort, key):
+ from .datastructures import iter_multi_items
+
iterable = iter_multi_items(obj)
if sort:
iterable = sorted(iterable, key=key)
@@ -825,6 +825,8 @@ def url_decode(
or `None` the default :class:`MultiDict` is used.
"""
if cls is None:
+ from .datastructures import MultiDict
+
cls = MultiDict
if isinstance(s, text_type) and not isinstance(separator, text_type):
separator = separator.decode(charset or "ascii")
@@ -884,6 +886,8 @@ def url_decode_stream(
return decoder
if cls is None:
+ from .datastructures import MultiDict
+
cls = MultiDict
return cls(decoder)
From 08536c457c7125c05e0947e62487fbc4bcf51717 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Sat, 14 Sep 2019 10:08:54 -0700
Subject: [PATCH 071/733] deprecate top-level imports
---
CHANGES.rst | 8 +
src/werkzeug/__init__.py | 320 ++++++++++++++++++++++++---------------
2 files changed, 206 insertions(+), 122 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 2d37eaa20..b71c28790 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -8,6 +8,14 @@ Unreleased
- Drop support for Python 3.4. (:issue:`1478`)
- Remove code that issued deprecation warnings in version 0.15.
(:issue:`1477`)
+- Deprecate most top-level attributes provided by the ``werkzeug``
+ module in favor of direct imports. For example, instead of
+ ``import werkzeug; werkzeug.url_quote``, do
+  ``from werkzeug.urls import url_quote``. A deprecation warning will
+ show the correct import to use. ``werkzeug.exceptions`` and
+ ``werkzeug.routing`` should also be imported instead of accessed,
+ but for technical reasons can't show a warning.
+ :issue:`2`, :pr:`1640`
- Added ``utils.invalidate_cached_property()`` to invalidate cached
properties. (:pr:`1474`)
- Directive keys for the ``Set-Cookie`` response header are not
diff --git a/src/werkzeug/__init__.py b/src/werkzeug/__init__.py
index e16fbdcc9..b67e90f16 100644
--- a/src/werkzeug/__init__.py
+++ b/src/werkzeug/__init__.py
@@ -5,137 +5,213 @@
Werkzeug is the Swiss Army knife of Python web development.
It provides useful classes and functions for any WSGI application to
-make the life of a python web developer much easier. All of the provided
+make the life of a Python web developer much easier. All of the provided
classes are independent from each other so you can mix it with any other
library.
:copyright: 2007 Pallets
:license: BSD-3-Clause
"""
-from . import exceptions
-from . import routing
-from ._internal import _easteregg
-from .datastructures import Accept
-from .datastructures import Authorization
-from .datastructures import CallbackDict
-from .datastructures import CharsetAccept
-from .datastructures import CombinedMultiDict
-from .datastructures import EnvironHeaders
-from .datastructures import ETags
-from .datastructures import FileMultiDict
-from .datastructures import FileStorage
-from .datastructures import Headers
-from .datastructures import HeaderSet
-from .datastructures import ImmutableDict
-from .datastructures import ImmutableList
-from .datastructures import ImmutableMultiDict
-from .datastructures import ImmutableOrderedMultiDict
-from .datastructures import ImmutableTypeConversionDict
-from .datastructures import LanguageAccept
-from .datastructures import MIMEAccept
-from .datastructures import MultiDict
-from .datastructures import OrderedMultiDict
-from .datastructures import RequestCacheControl
-from .datastructures import ResponseCacheControl
-from .datastructures import TypeConversionDict
-from .datastructures import WWWAuthenticate
-from .debug import DebuggedApplication
-from .exceptions import abort
-from .exceptions import Aborter
-from .formparser import parse_form_data
-from .http import cookie_date
-from .http import dump_cookie
-from .http import dump_header
-from .http import dump_options_header
-from .http import generate_etag
-from .http import http_date
-from .http import HTTP_STATUS_CODES
-from .http import is_entity_header
-from .http import is_hop_by_hop_header
-from .http import is_resource_modified
-from .http import parse_accept_header
-from .http import parse_authorization_header
-from .http import parse_cache_control_header
-from .http import parse_cookie
-from .http import parse_date
-from .http import parse_dict_header
-from .http import parse_etags
-from .http import parse_list_header
-from .http import parse_options_header
-from .http import parse_set_header
-from .http import parse_www_authenticate_header
-from .http import quote_etag
-from .http import quote_header_value
-from .http import remove_entity_headers
-from .http import remove_hop_by_hop_headers
-from .http import unquote_etag
-from .http import unquote_header_value
-from .local import Local
-from .local import LocalManager
-from .local import LocalProxy
-from .local import LocalStack
-from .local import release_local
-from .middleware.dispatcher import DispatcherMiddleware
-from .middleware.shared_data import SharedDataMiddleware
-from .security import check_password_hash
-from .security import generate_password_hash
+import sys
+from types import ModuleType
+
from .serving import run_simple
from .test import Client
-from .test import create_environ
-from .test import EnvironBuilder
-from .test import run_wsgi_app
-from .testapp import test_app
-from .urls import Href
-from .urls import iri_to_uri
-from .urls import uri_to_iri
-from .urls import url_decode
-from .urls import url_encode
-from .urls import url_fix
-from .urls import url_quote
-from .urls import url_quote_plus
-from .urls import url_unquote
-from .urls import url_unquote_plus
-from .useragents import UserAgent
-from .utils import append_slash_redirect
-from .utils import ArgumentValidationError
-from .utils import bind_arguments
-from .utils import cached_property
-from .utils import environ_property
-from .utils import escape
-from .utils import find_modules
-from .utils import format_string
-from .utils import header_property
-from .utils import html
-from .utils import HTMLBuilder
-from .utils import import_string
-from .utils import redirect
-from .utils import secure_filename
-from .utils import unescape
-from .utils import validate_arguments
-from .utils import xhtml
-from .wrappers import AcceptMixin
-from .wrappers import AuthorizationMixin
-from .wrappers import BaseRequest
-from .wrappers import BaseResponse
-from .wrappers import CommonRequestDescriptorsMixin
-from .wrappers import CommonResponseDescriptorsMixin
-from .wrappers import ETagRequestMixin
-from .wrappers import ETagResponseMixin
from .wrappers import Request
from .wrappers import Response
-from .wrappers import ResponseStreamMixin
-from .wrappers import UserAgentMixin
-from .wrappers import WWWAuthenticateMixin
-from .wsgi import ClosingIterator
-from .wsgi import extract_path_info
-from .wsgi import FileWrapper
-from .wsgi import get_current_url
-from .wsgi import get_host
-from .wsgi import LimitedStream
-from .wsgi import make_line_iter
-from .wsgi import peek_path_info
-from .wsgi import pop_path_info
-from .wsgi import responder
-from .wsgi import wrap_file
__version__ = "1.0.0.dev0"
+
+__all__ = ["run_simple", "Client", "Request", "Response", "__version__"]
+
+
+class DeprecatedImportModule(ModuleType):
+    """Wrap a module in order to raise deprecation warnings on attribute access."""
+
+ def __init__(self, name, available, removed_in):
+ super(DeprecatedImportModule, self).__init__(name) # noqa F821
+ self._real_module = sys.modules[name] # noqa F821
+ self._removed_in = removed_in
+ self._origin = {item: mod for mod, items in available.items() for item in items}
+ self.__all__ = sorted(self._real_module.__all__ + list(self._origin))
+
+ def __getattr__(self, item):
+ # Don't export internal variables.
+ if item in {"_real_module", "_origin", "_removed_in"}:
+ raise AttributeError(item)
+
+ if item in self._origin:
+ from importlib import import_module
+
+ origin = self._origin[item]
+
+ if origin == ".":
+ # No warning for the "submodule as attribute" case, it's way too messy
+            # and unreliable to try to distinguish 'from werkzeug import
+ # exceptions' and 'import werkzeug; werkzeug.exceptions'.
+ value = import_module(origin + item, self.__name__)
+ else:
+ from warnings import warn
+
+ # Import the module, get the attribute, and show a warning about where
+ # to correctly import it from.
+ mod = import_module(origin, self.__name__)
+ value = getattr(mod, item)
+ warn(
+ "The top-level '{name}.{item}' is deprecated and will be removed in"
+ " {removed_in}. Use 'from {name}{origin} import {item}'"
+ " instead.".format(
+ name=self.__name__,
+ item=item,
+ removed_in=self._removed_in,
+ origin=origin,
+ ),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ else:
+ value = getattr(self._real_module, item)
+
+ # Cache the value so it won't go through this process on subsequent accesses.
+ setattr(self, item, value)
+ return value
+
+ def __dir__(self):
+ return sorted(dir(self._real_module) + list(self._origin))
+
+
+sys.modules["werkzeug"] = DeprecatedImportModule(
+ "werkzeug",
+ {
+ ".": ["exceptions", "routing"],
+ "._internal": ["_easteregg"],
+ ".datastructures": [
+ "Accept",
+ "Authorization",
+ "CallbackDict",
+ "CharsetAccept",
+ "CombinedMultiDict",
+ "EnvironHeaders",
+ "ETags",
+ "FileMultiDict",
+ "FileStorage",
+ "Headers",
+ "HeaderSet",
+ "ImmutableDict",
+ "ImmutableList",
+ "ImmutableMultiDict",
+ "ImmutableOrderedMultiDict",
+ "ImmutableTypeConversionDict",
+ "LanguageAccept",
+ "MIMEAccept",
+ "MultiDict",
+ "OrderedMultiDict",
+ "RequestCacheControl",
+ "ResponseCacheControl",
+ "TypeConversionDict",
+ "WWWAuthenticate",
+ ],
+ ".debug": ["DebuggedApplication"],
+ ".exceptions": ["abort", "Aborter"],
+ ".formparser": ["parse_form_data"],
+ ".http": [
+ "cookie_date",
+ "dump_cookie",
+ "dump_header",
+ "dump_options_header",
+ "generate_etag",
+ "http_date",
+ "HTTP_STATUS_CODES",
+ "is_entity_header",
+ "is_hop_by_hop_header",
+ "is_resource_modified",
+ "parse_accept_header",
+ "parse_authorization_header",
+ "parse_cache_control_header",
+ "parse_cookie",
+ "parse_date",
+ "parse_dict_header",
+ "parse_etags",
+ "parse_list_header",
+ "parse_options_header",
+ "parse_set_header",
+ "parse_www_authenticate_header",
+ "quote_etag",
+ "quote_header_value",
+ "remove_entity_headers",
+ "remove_hop_by_hop_headers",
+ "unquote_etag",
+ "unquote_header_value",
+ ],
+ ".local": [
+ "Local",
+ "LocalManager",
+ "LocalProxy",
+ "LocalStack",
+ "release_local",
+ ],
+ ".middleware.dispatcher": ["DispatcherMiddleware"],
+ ".middleware.shared_data": ["SharedDataMiddleware"],
+ ".security": ["check_password_hash", "generate_password_hash"],
+ ".test": ["create_environ", "EnvironBuilder", "run_wsgi_app"],
+ ".testapp": ["test_app"],
+ ".urls": [
+ "Href",
+ "iri_to_uri",
+ "uri_to_iri",
+ "url_decode",
+ "url_encode",
+ "url_fix",
+ "url_quote",
+ "url_quote_plus",
+ "url_unquote",
+ "url_unquote_plus",
+ ],
+ ".useragents": ["UserAgent"],
+ ".utils": [
+ "append_slash_redirect",
+ "ArgumentValidationError",
+ "bind_arguments",
+ "cached_property",
+ "environ_property",
+ "escape",
+ "find_modules",
+ "format_string",
+ "header_property",
+ "html",
+ "HTMLBuilder",
+ "import_string",
+ "redirect",
+ "secure_filename",
+ "unescape",
+ "validate_arguments",
+ "xhtml",
+ ],
+ ".wrappers.accept": ["AcceptMixin"],
+ ".wrappers.auth": ["AuthorizationMixin", "WWWAuthenticateMixin"],
+ ".wrappers.base_request": ["BaseRequest"],
+ ".wrappers.base_response": ["BaseResponse"],
+ ".wrappers.common_descriptors": [
+ "CommonRequestDescriptorsMixin",
+ "CommonResponseDescriptorsMixin",
+ ],
+ ".wrappers.etag": ["ETagRequestMixin", "ETagResponseMixin"],
+ ".wrappers.response": ["ResponseStreamMixin"],
+ ".wrappers.user_agent": ["UserAgentMixin"],
+ ".wsgi": [
+ "ClosingIterator",
+ "extract_path_info",
+ "FileWrapper",
+ "get_current_url",
+ "get_host",
+ "LimitedStream",
+ "make_line_iter",
+ "peek_path_info",
+ "pop_path_info",
+ "responder",
+ "wrap_file",
+ ],
+ },
+ "Werkzeug 2.0",
+)
+del sys, ModuleType, DeprecatedImportModule
From 54939182e9f207b420fe5f676ac58456e2cad2c8 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Thu, 12 Sep 2019 10:32:56 -0700
Subject: [PATCH 072/733] remove lazy importer, fix circular imports
(cherry picked from commit 487ab5846932cf15d688451a3d61014981cd6e1e)
deprecate top-level imports
(cherry picked from commit 08536c457c7125c05e0947e62487fbc4bcf51717)
---
CHANGES.rst | 18 ++
src/werkzeug/__init__.py | 430 ++++++++++++++--------------
src/werkzeug/contrib/sessions.py | 4 +-
src/werkzeug/datastructures.py | 2 +-
src/werkzeug/exceptions.py | 17 +-
src/werkzeug/formparser.py | 4 +-
src/werkzeug/http.py | 64 +----
src/werkzeug/serving.py | 5 +-
src/werkzeug/testapp.py | 4 +-
src/werkzeug/urls.py | 8 +-
src/werkzeug/useragents.py | 20 +-
src/werkzeug/utils.py | 94 ++----
src/werkzeug/wrappers/user_agent.py | 3 +-
src/werkzeug/wsgi.py | 74 +----
tests/contrib/test_securecookie.py | 2 +-
tests/test_compat.py | 40 ---
16 files changed, 290 insertions(+), 499 deletions(-)
delete mode 100644 tests/test_compat.py
diff --git a/CHANGES.rst b/CHANGES.rst
index 7c2c9076b..f0ad588af 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,5 +1,23 @@
.. currentmodule:: werkzeug
+Version 0.16.0
+--------------
+
+Unreleased
+
+- Deprecate most top-level attributes provided by the ``werkzeug``
+ module in favor of direct imports. The deprecated imports will be
+ removed in version 1.0.
+
+ For example, instead of ``import werkzeug; werkzeug.url_quote``, do
+  ``from werkzeug.urls import url_quote``. A deprecation warning will
+ show the correct import to use. ``werkzeug.exceptions`` and
+ ``werkzeug.routing`` should also be imported instead of accessed,
+ but for technical reasons can't show a warning.
+
+ :issue:`2`, :pr:`1640`
+
+
Version 0.15.6
--------------
diff --git a/src/werkzeug/__init__.py b/src/werkzeug/__init__.py
index 2ce1a3cb0..49907e93c 100644
--- a/src/werkzeug/__init__.py
+++ b/src/werkzeug/__init__.py
@@ -1,233 +1,221 @@
-# -*- coding: utf-8 -*-
"""
- werkzeug
- ~~~~~~~~
+werkzeug
+~~~~~~~~
- Werkzeug is the Swiss Army knife of Python web development.
+Werkzeug is the Swiss Army knife of Python web development.
- It provides useful classes and functions for any WSGI application to make
- the life of a python web developer much easier. All of the provided
- classes are independent from each other so you can mix it with any other
- library.
+It provides useful classes and functions for any WSGI application to
+make the life of a Python web developer much easier. All of the provided
+classes are independent from each other so you can mix it with any other
+library.
-
- :copyright: 2007 Pallets
- :license: BSD-3-Clause
+:copyright: 2007 Pallets
+:license: BSD-3-Clause
"""
-import sys
from types import ModuleType
-__version__ = "0.15.6"
-
-# This import magic raises concerns quite often which is why the implementation
-# and motivation is explained here in detail now.
-#
-# The majority of the functions and classes provided by Werkzeug work on the
-# HTTP and WSGI layer. There is no useful grouping for those which is why
-# they are all importable from "werkzeug" instead of the modules where they are
-# implemented. The downside of that is, that now everything would be loaded at
-# once, even if unused.
-#
-# The implementation of a lazy-loading module in this file replaces the
-# werkzeug package when imported from within. Attribute access to the werkzeug
-# module will then lazily import from the modules that implement the objects.
-
-# import mapping to objects in other modules
-all_by_module = {
- "werkzeug.debug": ["DebuggedApplication"],
- "werkzeug.local": [
- "Local",
- "LocalManager",
- "LocalProxy",
- "LocalStack",
- "release_local",
- ],
- "werkzeug.serving": ["run_simple"],
- "werkzeug.test": ["Client", "EnvironBuilder", "create_environ", "run_wsgi_app"],
- "werkzeug.testapp": ["test_app"],
- "werkzeug.exceptions": ["abort", "Aborter"],
- "werkzeug.urls": [
- "url_decode",
- "url_encode",
- "url_quote",
- "url_quote_plus",
- "url_unquote",
- "url_unquote_plus",
- "url_fix",
- "Href",
- "iri_to_uri",
- "uri_to_iri",
- ],
- "werkzeug.formparser": ["parse_form_data"],
- "werkzeug.utils": [
- "escape",
- "environ_property",
- "append_slash_redirect",
- "redirect",
- "cached_property",
- "import_string",
- "unescape",
- "format_string",
- "find_modules",
- "header_property",
- "html",
- "xhtml",
- "HTMLBuilder",
- "validate_arguments",
- "ArgumentValidationError",
- "bind_arguments",
- "secure_filename",
- ],
- "werkzeug.wsgi": [
- "get_current_url",
- "get_host",
- "pop_path_info",
- "peek_path_info",
- "ClosingIterator",
- "FileWrapper",
- "make_line_iter",
- "LimitedStream",
- "responder",
- "wrap_file",
- "extract_path_info",
- ],
- "werkzeug.datastructures": [
- "MultiDict",
- "CombinedMultiDict",
- "Headers",
- "EnvironHeaders",
- "ImmutableList",
- "ImmutableDict",
- "ImmutableMultiDict",
- "TypeConversionDict",
- "ImmutableTypeConversionDict",
- "Accept",
- "MIMEAccept",
- "CharsetAccept",
- "LanguageAccept",
- "RequestCacheControl",
- "ResponseCacheControl",
- "ETags",
- "HeaderSet",
- "WWWAuthenticate",
- "Authorization",
- "FileMultiDict",
- "CallbackDict",
- "FileStorage",
- "OrderedMultiDict",
- "ImmutableOrderedMultiDict",
- ],
- "werkzeug.useragents": ["UserAgent"],
- "werkzeug.http": [
- "parse_etags",
- "parse_date",
- "http_date",
- "cookie_date",
- "parse_cache_control_header",
- "is_resource_modified",
- "parse_accept_header",
- "parse_set_header",
- "quote_etag",
- "unquote_etag",
- "generate_etag",
- "dump_header",
- "parse_list_header",
- "parse_dict_header",
- "parse_authorization_header",
- "parse_www_authenticate_header",
- "remove_entity_headers",
- "is_entity_header",
- "remove_hop_by_hop_headers",
- "parse_options_header",
- "dump_options_header",
- "is_hop_by_hop_header",
- "unquote_header_value",
- "quote_header_value",
- "HTTP_STATUS_CODES",
- "dump_cookie",
- "parse_cookie",
- ],
- "werkzeug.wrappers": [
- "BaseResponse",
- "BaseRequest",
- "Request",
- "Response",
- "AcceptMixin",
- "ETagRequestMixin",
- "ETagResponseMixin",
- "ResponseStreamMixin",
- "CommonResponseDescriptorsMixin",
- "UserAgentMixin",
- "AuthorizationMixin",
- "WWWAuthenticateMixin",
- "CommonRequestDescriptorsMixin",
- ],
- "werkzeug.middleware.dispatcher": ["DispatcherMiddleware"],
- "werkzeug.middleware.shared_data": ["SharedDataMiddleware"],
- "werkzeug.security": ["generate_password_hash", "check_password_hash"],
- # the undocumented easteregg ;-)
- "werkzeug._internal": ["_easteregg"],
-}
-
-# modules that should be imported when accessed as attributes of werkzeug
-attribute_modules = frozenset(["exceptions", "routing"])
-
-object_origins = {}
-for module, items in all_by_module.items():
- for item in items:
- object_origins[item] = module
-
-
-class module(ModuleType):
- """Automatically import objects from the modules."""
-
- def __getattr__(self, name):
- if name in object_origins:
- module = __import__(object_origins[name], None, None, [name])
- for extra_name in all_by_module[module.__name__]:
- setattr(self, extra_name, getattr(module, extra_name))
- return getattr(module, name)
- elif name in attribute_modules:
- __import__("werkzeug." + name)
- return ModuleType.__getattribute__(self, name)
+__version__ = "0.16.0.dev0"
+
+__all__ = ["run_simple", "Client", "Request", "Response", "__version__"]
+
+
+class _DeprecatedImportModule(ModuleType):
+ """Wrap a module in order to raise """
+
+ def __init__(self, name, available, removed_in):
+ import sys
+
+ super(_DeprecatedImportModule, self).__init__(name) # noqa F821
+ self._real_module = sys.modules[name] # noqa F821
+ sys.modules[name] = self
+ self._removed_in = removed_in
+ self._origin = {item: mod for mod, items in available.items() for item in items}
+ mod_all = getattr(self._real_module, "__all__", dir(self._real_module))
+ self.__all__ = sorted(mod_all + list(self._origin))
+
+ def __getattr__(self, item):
+ # Don't export internal variables.
+ if item in {"_real_module", "_origin", "_removed_in"}:
+ raise AttributeError(item)
+
+ if item in self._origin:
+ from importlib import import_module
+
+ origin = self._origin[item]
+
+ if origin == ".":
+ # No warning for the "submodule as attribute" case, it's way too messy
+ # and unreliable to try to distinguish 'from werkzueug import
+ # exceptions' and 'import werkzeug; werkzeug.exceptions'.
+ value = import_module(origin + item, self.__name__)
+ else:
+ from warnings import warn
+
+ # Import the module, get the attribute, and show a warning about where
+ # to correctly import it from.
+ mod = import_module(origin, self.__name__.rsplit(".")[0])
+ value = getattr(mod, item)
+ warn(
+ "The import '{name}.{item}' is deprecated and will be removed in"
+ " {removed_in}. Use 'from {name}{origin} import {item}'"
+ " instead.".format(
+ name=self.__name__,
+ item=item,
+ removed_in=self._removed_in,
+ origin=origin,
+ ),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ else:
+ value = getattr(self._real_module, item)
+
+ # Cache the value so it won't go through this process on subsequent accesses.
+ setattr(self, item, value)
+ return value
def __dir__(self):
- """Just show what we want to show."""
- result = list(new_module.__all__)
- result.extend(
- (
- "__file__",
- "__doc__",
- "__all__",
- "__docformat__",
- "__name__",
- "__path__",
- "__package__",
- "__version__",
- )
- )
- return result
-
-
-# keep a reference to this module so that it's not garbage collected
-old_module = sys.modules["werkzeug"]
-
-
-# setup the new module and patch it into the dict of loaded modules
-new_module = sys.modules["werkzeug"] = module("werkzeug")
-new_module.__dict__.update(
+ return sorted(dir(self._real_module) + list(self._origin))
+
+
+del ModuleType
+
+_DeprecatedImportModule(
+ __name__,
{
- "__file__": __file__,
- "__package__": "werkzeug",
- "__path__": __path__,
- "__doc__": __doc__,
- "__version__": __version__,
- "__all__": tuple(object_origins) + tuple(attribute_modules),
- "__docformat__": "restructuredtext en",
- }
+ ".": ["exceptions", "routing"],
+ "._internal": ["_easteregg"],
+ ".datastructures": [
+ "Accept",
+ "Authorization",
+ "CallbackDict",
+ "CharsetAccept",
+ "CombinedMultiDict",
+ "EnvironHeaders",
+ "ETags",
+ "FileMultiDict",
+ "FileStorage",
+ "Headers",
+ "HeaderSet",
+ "ImmutableDict",
+ "ImmutableList",
+ "ImmutableMultiDict",
+ "ImmutableOrderedMultiDict",
+ "ImmutableTypeConversionDict",
+ "LanguageAccept",
+ "MIMEAccept",
+ "MultiDict",
+ "OrderedMultiDict",
+ "RequestCacheControl",
+ "ResponseCacheControl",
+ "TypeConversionDict",
+ "WWWAuthenticate",
+ ],
+ ".debug": ["DebuggedApplication"],
+ ".exceptions": ["abort", "Aborter"],
+ ".formparser": ["parse_form_data"],
+ ".http": [
+ "cookie_date",
+ "dump_cookie",
+ "dump_header",
+ "dump_options_header",
+ "generate_etag",
+ "http_date",
+ "HTTP_STATUS_CODES",
+ "is_entity_header",
+ "is_hop_by_hop_header",
+ "is_resource_modified",
+ "parse_accept_header",
+ "parse_authorization_header",
+ "parse_cache_control_header",
+ "parse_cookie",
+ "parse_date",
+ "parse_dict_header",
+ "parse_etags",
+ "parse_list_header",
+ "parse_options_header",
+ "parse_set_header",
+ "parse_www_authenticate_header",
+ "quote_etag",
+ "quote_header_value",
+ "remove_entity_headers",
+ "remove_hop_by_hop_headers",
+ "unquote_etag",
+ "unquote_header_value",
+ ],
+ ".local": [
+ "Local",
+ "LocalManager",
+ "LocalProxy",
+ "LocalStack",
+ "release_local",
+ ],
+ ".middleware.dispatcher": ["DispatcherMiddleware"],
+ ".middleware.shared_data": ["SharedDataMiddleware"],
+ ".security": ["check_password_hash", "generate_password_hash"],
+ ".test": ["create_environ", "EnvironBuilder", "run_wsgi_app"],
+ ".testapp": ["test_app"],
+ ".urls": [
+ "Href",
+ "iri_to_uri",
+ "uri_to_iri",
+ "url_decode",
+ "url_encode",
+ "url_fix",
+ "url_quote",
+ "url_quote_plus",
+ "url_unquote",
+ "url_unquote_plus",
+ ],
+ ".useragents": ["UserAgent"],
+ ".utils": [
+ "append_slash_redirect",
+ "ArgumentValidationError",
+ "bind_arguments",
+ "cached_property",
+ "environ_property",
+ "escape",
+ "find_modules",
+ "format_string",
+ "header_property",
+ "html",
+ "HTMLBuilder",
+ "import_string",
+ "redirect",
+ "secure_filename",
+ "unescape",
+ "validate_arguments",
+ "xhtml",
+ ],
+ ".wrappers.accept": ["AcceptMixin"],
+ ".wrappers.auth": ["AuthorizationMixin", "WWWAuthenticateMixin"],
+ ".wrappers.base_request": ["BaseRequest"],
+ ".wrappers.base_response": ["BaseResponse"],
+ ".wrappers.common_descriptors": [
+ "CommonRequestDescriptorsMixin",
+ "CommonResponseDescriptorsMixin",
+ ],
+ ".wrappers.etag": ["ETagRequestMixin", "ETagResponseMixin"],
+ ".wrappers.response": ["ResponseStreamMixin"],
+ ".wrappers.user_agent": ["UserAgentMixin"],
+ ".wsgi": [
+ "ClosingIterator",
+ "extract_path_info",
+ "FileWrapper",
+ "get_current_url",
+ "get_host",
+ "LimitedStream",
+ "make_line_iter",
+ "peek_path_info",
+ "pop_path_info",
+ "responder",
+ "wrap_file",
+ ],
+ },
+ "Werkzeug 1.0",
)
-
-# Due to bootstrapping issues we need to import exceptions here.
-# Don't ask :-(
-__import__("werkzeug.exceptions")
+from .serving import run_simple
+from .test import Client
+from .wrappers import Request
+from .wrappers import Response
diff --git a/src/werkzeug/contrib/sessions.py b/src/werkzeug/contrib/sessions.py
index 866e827c1..53567a1cc 100644
--- a/src/werkzeug/contrib/sessions.py
+++ b/src/werkzeug/contrib/sessions.py
@@ -67,9 +67,9 @@ def application(environ, start_response):
from .._compat import text_type
from ..datastructures import CallbackDict
from ..filesystem import get_filesystem_encoding
+from ..http import dump_cookie
+from ..http import parse_cookie
from ..posixemulation import rename
-from ..utils import dump_cookie
-from ..utils import parse_cookie
from ..wsgi import ClosingIterator
warnings.warn(
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index 9643db96c..0b8097def 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -14,6 +14,7 @@
from copy import deepcopy
from itertools import repeat
+from . import exceptions
from ._compat import BytesIO
from ._compat import collections_abc
from ._compat import integer_types
@@ -2839,7 +2840,6 @@ def __repr__(self):
# circular dependencies
-from . import exceptions
from .http import dump_header
from .http import dump_options_header
from .http import generate_etag
diff --git a/src/werkzeug/exceptions.py b/src/werkzeug/exceptions.py
index bfd20dc1d..a7295ca7c 100644
--- a/src/werkzeug/exceptions.py
+++ b/src/werkzeug/exceptions.py
@@ -59,18 +59,12 @@ def application(environ, start_response):
"""
import sys
-import werkzeug
-
-# Because of bootstrapping reasons we need to manually patch ourselves
-# onto our parent module.
-werkzeug.exceptions = sys.modules[__name__]
-
from ._compat import implements_to_string
from ._compat import integer_types
from ._compat import iteritems
from ._compat import text_type
from ._internal import _get_environ
-from .wrappers import Response
+from .utils import escape
@implements_to_string
@@ -141,6 +135,8 @@ def description(self, value):
@property
def name(self):
"""The status name."""
+ from .http import HTTP_STATUS_CODES
+
return HTTP_STATUS_CODES.get(self.code, "Unknown Error")
def get_description(self, environ=None):
@@ -176,6 +172,8 @@ def get_response(self, environ=None):
on how the request looked like.
:return: a :class:`Response` object or a subclass thereof.
"""
+ from .wrappers.response import Response
+
if self.response is not None:
return self.response
if environ is not None:
@@ -776,11 +774,6 @@ def abort(status, *args, **kwargs):
_aborter = Aborter()
-
#: An exception that is used to signal both a :exc:`KeyError` and a
#: :exc:`BadRequest`. Used by many of the datastructures.
BadRequestKeyError = BadRequest.wrap(KeyError)
-
-# imported here because of circular dependencies of werkzeug.utils
-from .http import HTTP_STATUS_CODES
-from .utils import escape
diff --git a/src/werkzeug/formparser.py b/src/werkzeug/formparser.py
index 0ddc5c8ff..ffdb9b0f1 100644
--- a/src/werkzeug/formparser.py
+++ b/src/werkzeug/formparser.py
@@ -16,6 +16,7 @@
from itertools import repeat
from itertools import tee
+from . import exceptions
from ._compat import BytesIO
from ._compat import text_type
from ._compat import to_native
@@ -581,6 +582,3 @@ def parse(self, file, boundary, content_length):
form = (p[1] for p in formstream if p[0] == "form")
files = (p[1] for p in filestream if p[0] == "file")
return self.cls(form), self.cls(files)
-
-
-from . import exceptions
diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py
index af3200750..686824c12 100644
--- a/src/werkzeug/http.py
+++ b/src/werkzeug/http.py
@@ -1144,6 +1144,8 @@ def dump_cookie(
value = to_bytes(value, charset)
if path is not None:
+ from .urls import iri_to_uri
+
path = iri_to_uri(path, charset)
domain = _make_cookie_domain(domain)
if isinstance(max_age, timedelta):
@@ -1235,7 +1237,7 @@ def is_byte_range_valid(start, stop, length):
return 0 <= start < length
-# circular dependency fun
+# circular dependencies
from .datastructures import Accept
from .datastructures import Authorization
from .datastructures import ContentRange
@@ -1246,58 +1248,12 @@ def is_byte_range_valid(start, stop, length):
from .datastructures import RequestCacheControl
from .datastructures import TypeConversionDict
from .datastructures import WWWAuthenticate
-from .urls import iri_to_uri
-
-# DEPRECATED
-from .datastructures import CharsetAccept as _CharsetAccept
-from .datastructures import Headers as _Headers
-from .datastructures import LanguageAccept as _LanguageAccept
-from .datastructures import MIMEAccept as _MIMEAccept
-
-class MIMEAccept(_MIMEAccept):
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.http.MIMEAccept' has moved to 'werkzeug"
- ".datastructures.MIMEAccept' as of version 0.5. This old"
- " import will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(MIMEAccept, self).__init__(*args, **kwargs)
-
-
-class CharsetAccept(_CharsetAccept):
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.http.CharsetAccept' has moved to 'werkzeug"
- ".datastructures.CharsetAccept' as of version 0.5. This old"
- " import will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(CharsetAccept, self).__init__(*args, **kwargs)
+from werkzeug import _DeprecatedImportModule
-
-class LanguageAccept(_LanguageAccept):
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.http.LanguageAccept' has moved to 'werkzeug"
- ".datastructures.LanguageAccept' as of version 0.5. This"
- " old import will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(LanguageAccept, self).__init__(*args, **kwargs)
-
-
-class Headers(_Headers):
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.http.Headers' has moved to 'werkzeug"
- ".datastructures.Headers' as of version 0.5. This old"
- " import will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(Headers, self).__init__(*args, **kwargs)
+_DeprecatedImportModule(
+ __name__,
+ {".datastructures": ["CharsetAccept", "Headers", "LanguageAccept", "MIMEAccept"]},
+ "Werkzeug 1.0",
+)
+del _DeprecatedImportModule
diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py
index ff9f8805f..d817120f2 100644
--- a/src/werkzeug/serving.py
+++ b/src/werkzeug/serving.py
@@ -41,7 +41,6 @@
import socket
import sys
-import werkzeug
from ._compat import PY2
from ._compat import reraise
from ._compat import WIN
@@ -174,7 +173,9 @@ class WSGIRequestHandler(BaseHTTPRequestHandler, object):
@property
def server_version(self):
- return "Werkzeug/" + werkzeug.__version__
+ from . import __version__
+
+ return "Werkzeug/" + __version__
def make_environ(self):
request_url = url_parse(self.path)
diff --git a/src/werkzeug/testapp.py b/src/werkzeug/testapp.py
index 8ea23bee1..5ea854904 100644
--- a/src/werkzeug/testapp.py
+++ b/src/werkzeug/testapp.py
@@ -14,7 +14,7 @@
import sys
from textwrap import wrap
-import werkzeug
+from . import __version__ as _werkzeug_version
from .utils import escape
from .wrappers import BaseRequest as Request
from .wrappers import BaseResponse as Response
@@ -205,7 +205,7 @@ def render_testapp(req):
"os": escape(os.name),
"api_version": sys.api_version,
"byteorder": sys.byteorder,
- "werkzeug_version": werkzeug.__version__,
+ "werkzeug_version": _werkzeug_version,
"python_eggs": "\n".join(python_eggs),
"wsgi_env": "\n".join(wsgi_env),
"sys_path": "\n".join(sys_path),
diff --git a/src/werkzeug/urls.py b/src/werkzeug/urls.py
index 38e9e5adf..566017d7f 100644
--- a/src/werkzeug/urls.py
+++ b/src/werkzeug/urls.py
@@ -31,8 +31,6 @@
from ._compat import try_coerce_native
from ._internal import _decode_idna
from ._internal import _encode_idna
-from .datastructures import iter_multi_items
-from .datastructures import MultiDict
# A regular expression for what a valid schema looks like
_scheme_re = re.compile(r"^[a-zA-Z0-9+-.]+$")
@@ -415,6 +413,8 @@ def _unquote_to_bytes(string, unsafe=""):
def _url_encode_impl(obj, charset, encode_keys, sort, key):
+ from .datastructures import iter_multi_items
+
iterable = iter_multi_items(obj)
if sort:
iterable = sorted(iterable, key=key)
@@ -825,6 +825,8 @@ def url_decode(
or `None` the default :class:`MultiDict` is used.
"""
if cls is None:
+ from .datastructures import MultiDict
+
cls = MultiDict
if isinstance(s, text_type) and not isinstance(separator, text_type):
separator = separator.decode(charset or "ascii")
@@ -884,6 +886,8 @@ def url_decode_stream(
return decoder
if cls is None:
+ from .datastructures import MultiDict
+
cls = MultiDict
return cls(decoder)
diff --git a/src/werkzeug/useragents.py b/src/werkzeug/useragents.py
index e265e0939..8fce41538 100644
--- a/src/werkzeug/useragents.py
+++ b/src/werkzeug/useragents.py
@@ -12,7 +12,6 @@
:license: BSD-3-Clause
"""
import re
-import warnings
class UserAgentParser(object):
@@ -203,18 +202,9 @@ def __repr__(self):
return "<%s %r/%s>" % (self.__class__.__name__, self.browser, self.version)
-# DEPRECATED
-from .wrappers import UserAgentMixin as _UserAgentMixin
+from werkzeug import _DeprecatedImportModule
-
-class UserAgentMixin(_UserAgentMixin):
- @property
- def user_agent(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.useragents.UserAgentMixin' should be imported"
- " from 'werkzeug.wrappers.UserAgentMixin'. This old import"
- " will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- return super(_UserAgentMixin, self).user_agent
+_DeprecatedImportModule(
+ __name__, {".wrappers.user_agent": ["UserAgentMixin"]}, "Werkzeug 1.0"
+)
+del _DeprecatedImportModule
diff --git a/src/werkzeug/utils.py b/src/werkzeug/utils.py
index 20620572c..477164e30 100644
--- a/src/werkzeug/utils.py
+++ b/src/werkzeug/utils.py
@@ -15,7 +15,6 @@
import pkgutil
import re
import sys
-import warnings
from ._compat import iteritems
from ._compat import PY2
@@ -757,80 +756,19 @@ def __repr__(self):
)
-# DEPRECATED
-from .datastructures import CombinedMultiDict as _CombinedMultiDict
-from .datastructures import EnvironHeaders as _EnvironHeaders
-from .datastructures import Headers as _Headers
-from .datastructures import MultiDict as _MultiDict
-from .http import dump_cookie as _dump_cookie
-from .http import parse_cookie as _parse_cookie
-
-
-class MultiDict(_MultiDict):
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.utils.MultiDict' has moved to 'werkzeug"
- ".datastructures.MultiDict' as of version 0.5. This old"
- " import will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(MultiDict, self).__init__(*args, **kwargs)
-
-
-class CombinedMultiDict(_CombinedMultiDict):
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.utils.CombinedMultiDict' has moved to 'werkzeug"
- ".datastructures.CombinedMultiDict' as of version 0.5. This"
- " old import will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(CombinedMultiDict, self).__init__(*args, **kwargs)
-
-
-class Headers(_Headers):
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.utils.Headers' has moved to 'werkzeug"
- ".datastructures.Headers' as of version 0.5. This old"
- " import will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(Headers, self).__init__(*args, **kwargs)
-
-
-class EnvironHeaders(_EnvironHeaders):
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.utils.EnvironHeaders' has moved to 'werkzeug"
- ".datastructures.EnvironHeaders' as of version 0.5. This"
- " old import will be removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(EnvironHeaders, self).__init__(*args, **kwargs)
-
-
-def parse_cookie(*args, **kwargs):
- warnings.warn(
- "'werkzeug.utils.parse_cookie' as moved to 'werkzeug.http"
- ".parse_cookie' as of version 0.5. This old import will be"
- " removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- return _parse_cookie(*args, **kwargs)
-
-
-def dump_cookie(*args, **kwargs):
- warnings.warn(
- "'werkzeug.utils.dump_cookie' as moved to 'werkzeug.http"
- ".dump_cookie' as of version 0.5. This old import will be"
- " removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- return _dump_cookie(*args, **kwargs)
+from werkzeug import _DeprecatedImportModule
+
+_DeprecatedImportModule(
+ __name__,
+ {
+ ".datastructures": [
+ "CombinedMultiDict",
+ "EnvironHeaders",
+ "Headers",
+ "MultiDict",
+ ],
+ ".http": ["dump_cookie", "parse_cookie"],
+ },
+ "Werkzeug 1.0",
+)
+del _DeprecatedImportModule
diff --git a/src/werkzeug/wrappers/user_agent.py b/src/werkzeug/wrappers/user_agent.py
index 72588dd94..a32d8acd2 100644
--- a/src/werkzeug/wrappers/user_agent.py
+++ b/src/werkzeug/wrappers/user_agent.py
@@ -1,3 +1,4 @@
+from ..useragents import UserAgent
from ..utils import cached_property
@@ -10,6 +11,4 @@ class UserAgentMixin(object):
@cached_property
def user_agent(self):
"""The current user agent."""
- from ..useragents import UserAgent
-
return UserAgent(self.environ)
diff --git a/src/werkzeug/wsgi.py b/src/werkzeug/wsgi.py
index f069f2d86..741195547 100644
--- a/src/werkzeug/wsgi.py
+++ b/src/werkzeug/wsgi.py
@@ -10,7 +10,6 @@
"""
import io
import re
-import warnings
from functools import partial
from functools import update_wrapper
from itertools import chain
@@ -1001,67 +1000,14 @@ def readable(self):
return True
-# DEPRECATED
-from .middleware.dispatcher import DispatcherMiddleware as _DispatcherMiddleware
-from .middleware.http_proxy import ProxyMiddleware as _ProxyMiddleware
-from .middleware.shared_data import SharedDataMiddleware as _SharedDataMiddleware
+from werkzeug import _DeprecatedImportModule
-
-class ProxyMiddleware(_ProxyMiddleware):
- """
- .. deprecated:: 0.15
- ``werkzeug.wsgi.ProxyMiddleware`` has moved to
- :mod:`werkzeug.middleware.http_proxy`. This import will be
- removed in 1.0.
- """
-
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.wsgi.ProxyMiddleware' has moved to 'werkzeug"
- ".middleware.http_proxy.ProxyMiddleware'. This import is"
- " deprecated as of version 0.15 and will be removed in"
- " version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(ProxyMiddleware, self).__init__(*args, **kwargs)
-
-
-class SharedDataMiddleware(_SharedDataMiddleware):
- """
- .. deprecated:: 0.15
- ``werkzeug.wsgi.SharedDataMiddleware`` has moved to
- :mod:`werkzeug.middleware.shared_data`. This import will be
- removed in 1.0.
- """
-
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.wsgi.SharedDataMiddleware' has moved to"
- " 'werkzeug.middleware.shared_data.SharedDataMiddleware'."
- " This import is deprecated as of version 0.15 and will be"
- " removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(SharedDataMiddleware, self).__init__(*args, **kwargs)
-
-
-class DispatcherMiddleware(_DispatcherMiddleware):
- """
- .. deprecated:: 0.15
- ``werkzeug.wsgi.DispatcherMiddleware`` has moved to
- :mod:`werkzeug.middleware.dispatcher`. This import will be
- removed in 1.0.
- """
-
- def __init__(self, *args, **kwargs):
- warnings.warn(
- "'werkzeug.wsgi.DispatcherMiddleware' has moved to"
- " 'werkzeug.middleware.dispatcher.DispatcherMiddleware'."
- " This import is deprecated as of version 0.15 and will be"
- " removed in version 1.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- super(DispatcherMiddleware, self).__init__(*args, **kwargs)
+_DeprecatedImportModule(
+ __name__,
+ {
+ ".middleware.dispatcher": ["DispatcherMiddleware"],
+ ".middleware.http_proxy": ["ProxyMiddleware"],
+ ".middleware.shared_data": ["SharedDataMiddleware"],
+ },
+ "Werkzeug 1.0",
+)
diff --git a/tests/contrib/test_securecookie.py b/tests/contrib/test_securecookie.py
index 7231ac889..cea072c2e 100644
--- a/tests/contrib/test_securecookie.py
+++ b/tests/contrib/test_securecookie.py
@@ -14,7 +14,7 @@
from werkzeug._compat import to_native
from werkzeug.contrib.securecookie import SecureCookie
-from werkzeug.utils import parse_cookie
+from werkzeug.http import parse_cookie
from werkzeug.wrappers import Request
from werkzeug.wrappers import Response
diff --git a/tests/test_compat.py b/tests/test_compat.py
deleted file mode 100644
index 98851ba28..000000000
--- a/tests/test_compat.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# -*- coding: utf-8 -*-
-# flake8: noqa
-"""
- tests.compat
- ~~~~~~~~~~~~
-
- Ensure that old stuff does not break on update.
-
- :copyright: 2007 Pallets
- :license: BSD-3-Clause
-"""
-from werkzeug.test import create_environ
-from werkzeug.wrappers import Response
-
-
-def test_old_imports():
- from werkzeug.utils import (
- Headers,
- MultiDict,
- CombinedMultiDict,
- Headers,
- EnvironHeaders,
- )
- from werkzeug.http import (
- Accept,
- MIMEAccept,
- CharsetAccept,
- LanguageAccept,
- ETags,
- HeaderSet,
- WWWAuthenticate,
- Authorization,
- )
-
-
-def test_exposed_werkzeug_mod():
- import werkzeug
-
- for key in werkzeug.__all__:
- getattr(werkzeug, key)
From 73590e813b2312c50949e8baa50ee6e040945eb1 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Tue, 17 Sep 2019 11:07:13 -0700
Subject: [PATCH 073/733] fix changelog formatting
---
CHANGES.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index f0ad588af..b3b177f50 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -10,7 +10,7 @@ Unreleased
removed in version 1.0.
For example, instead of ``import werkzeug; werkzeug.url_quote``, do
- ``from werkzeug.urls import url_quote. A deprecation warning will
+ ``from werkzeug.urls import url_quote``. A deprecation warning will
show the correct import to use. ``werkzeug.exceptions`` and
``werkzeug.routing`` should also be imported instead of accessed,
but for technical reasons can't show a warning.
From d50618e3651ad5d4d3118e903a040b733c4d0233 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Tue, 17 Sep 2019 12:16:57 -0700
Subject: [PATCH 074/733] remove deprecated top-level imports
---
CHANGES.rst | 4 +-
docs/index.rst | 1 -
docs/transition.rst | 55 -------
src/werkzeug/__init__.py | 205 +-----------------------
werkzeug-import-rewrite.py | 310 -------------------------------------
5 files changed, 4 insertions(+), 571 deletions(-)
delete mode 100644 docs/transition.rst
delete mode 100644 werkzeug-import-rewrite.py
diff --git a/CHANGES.rst b/CHANGES.rst
index 92260f3f5..a58fd226b 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -11,8 +11,8 @@ Unreleased
- Remove most top-level attributes provided by the ``werkzeug``
module in favor of direct imports. For example, instead of
``import werkzeug; werkzeug.url_quote``, do
- ``from werkzeug.urls import url_quote. Install version 0.16 first to
- see deprecation warnings while upgrading. :issue:`2`, :pr:`1640`
+ ``from werkzeug.urls import url_quote``. Install version 0.16 first
+ to see deprecation warnings while upgrading. :issue:`2`, :pr:`1640`
- Added ``utils.invalidate_cached_property()`` to invalidate cached
properties. (:pr:`1474`)
- Directive keys for the ``Set-Cookie`` response header are not
diff --git a/docs/index.rst b/docs/index.rst
index cb16d6670..f1ad4613d 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -22,7 +22,6 @@ Getting Started
:maxdepth: 2
installation
- transition
tutorial
levels
quickstart
diff --git a/docs/transition.rst b/docs/transition.rst
deleted file mode 100644
index 25a23e9af..000000000
--- a/docs/transition.rst
+++ /dev/null
@@ -1,55 +0,0 @@
-Transition to Werkzeug 1.0
-==========================
-
-Werkzeug originally had a magical import system hook that enabled
-everything to be imported from one module and still loading the actual
-implementations lazily as necessary. Unfortunately this turned out to be
-slow and also unreliable on alternative Python implementations and
-Google's App Engine.
-
-Starting with 0.7 we recommend against the short imports and strongly
-encourage starting importing from the actual implementation module.
-Werkzeug 1.0 will disable the magical import hook completely.
-
-Because finding out where the actual functions are imported and rewriting
-them by hand is a painful and boring process we wrote a tool that aids in
-making this transition.
-
-Automatically Rewriting Imports
--------------------------------
-
-For instance, with Werkzeug < 0.7 the recommended way to use the escape function
-was this::
-
- from werkzeug import escape
-
-With Werkzeug 0.7, the recommended way to import this function is
-directly from the utils module (and with 1.0 this will become mandatory).
-To automatically rewrite all imports one can use the
-`werkzeug-import-rewrite `_ script.
-
-You can use it by executing it with Python and with a list of folders with
-Werkzeug based code. It will then spit out a hg/git compatible patch
-file. Example patch file creation::
-
- $ python werkzeug-import-rewrite.py . > new-imports.udiff
-
-To apply the patch one of the following methods work:
-
-hg:
-
- ::
-
- hg import new-imports.udiff
-
-git:
-
- ::
-
- git apply new-imports.udiff
-
-patch:
-
- ::
-
- patch -p1 < new-imports.udiff
diff --git a/src/werkzeug/__init__.py b/src/werkzeug/__init__.py
index 49907e93c..6a7f50fbc 100644
--- a/src/werkzeug/__init__.py
+++ b/src/werkzeug/__init__.py
@@ -12,210 +12,9 @@
:copyright: 2007 Pallets
:license: BSD-3-Clause
"""
-from types import ModuleType
-
-__version__ = "0.16.0.dev0"
-
-__all__ = ["run_simple", "Client", "Request", "Response", "__version__"]
-
-
-class _DeprecatedImportModule(ModuleType):
- """Wrap a module in order to raise """
-
- def __init__(self, name, available, removed_in):
- import sys
-
- super(_DeprecatedImportModule, self).__init__(name) # noqa F821
- self._real_module = sys.modules[name] # noqa F821
- sys.modules[name] = self
- self._removed_in = removed_in
- self._origin = {item: mod for mod, items in available.items() for item in items}
- mod_all = getattr(self._real_module, "__all__", dir(self._real_module))
- self.__all__ = sorted(mod_all + list(self._origin))
-
- def __getattr__(self, item):
- # Don't export internal variables.
- if item in {"_real_module", "_origin", "_removed_in"}:
- raise AttributeError(item)
-
- if item in self._origin:
- from importlib import import_module
-
- origin = self._origin[item]
-
- if origin == ".":
- # No warning for the "submodule as attribute" case, it's way too messy
- # and unreliable to try to distinguish 'from werkzueug import
- # exceptions' and 'import werkzeug; werkzeug.exceptions'.
- value = import_module(origin + item, self.__name__)
- else:
- from warnings import warn
-
- # Import the module, get the attribute, and show a warning about where
- # to correctly import it from.
- mod = import_module(origin, self.__name__.rsplit(".")[0])
- value = getattr(mod, item)
- warn(
- "The import '{name}.{item}' is deprecated and will be removed in"
- " {removed_in}. Use 'from {name}{origin} import {item}'"
- " instead.".format(
- name=self.__name__,
- item=item,
- removed_in=self._removed_in,
- origin=origin,
- ),
- DeprecationWarning,
- stacklevel=2,
- )
- else:
- value = getattr(self._real_module, item)
-
- # Cache the value so it won't go through this process on subsequent accesses.
- setattr(self, item, value)
- return value
-
- def __dir__(self):
- return sorted(dir(self._real_module) + list(self._origin))
-
-
-del ModuleType
-
-_DeprecatedImportModule(
- __name__,
- {
- ".": ["exceptions", "routing"],
- "._internal": ["_easteregg"],
- ".datastructures": [
- "Accept",
- "Authorization",
- "CallbackDict",
- "CharsetAccept",
- "CombinedMultiDict",
- "EnvironHeaders",
- "ETags",
- "FileMultiDict",
- "FileStorage",
- "Headers",
- "HeaderSet",
- "ImmutableDict",
- "ImmutableList",
- "ImmutableMultiDict",
- "ImmutableOrderedMultiDict",
- "ImmutableTypeConversionDict",
- "LanguageAccept",
- "MIMEAccept",
- "MultiDict",
- "OrderedMultiDict",
- "RequestCacheControl",
- "ResponseCacheControl",
- "TypeConversionDict",
- "WWWAuthenticate",
- ],
- ".debug": ["DebuggedApplication"],
- ".exceptions": ["abort", "Aborter"],
- ".formparser": ["parse_form_data"],
- ".http": [
- "cookie_date",
- "dump_cookie",
- "dump_header",
- "dump_options_header",
- "generate_etag",
- "http_date",
- "HTTP_STATUS_CODES",
- "is_entity_header",
- "is_hop_by_hop_header",
- "is_resource_modified",
- "parse_accept_header",
- "parse_authorization_header",
- "parse_cache_control_header",
- "parse_cookie",
- "parse_date",
- "parse_dict_header",
- "parse_etags",
- "parse_list_header",
- "parse_options_header",
- "parse_set_header",
- "parse_www_authenticate_header",
- "quote_etag",
- "quote_header_value",
- "remove_entity_headers",
- "remove_hop_by_hop_headers",
- "unquote_etag",
- "unquote_header_value",
- ],
- ".local": [
- "Local",
- "LocalManager",
- "LocalProxy",
- "LocalStack",
- "release_local",
- ],
- ".middleware.dispatcher": ["DispatcherMiddleware"],
- ".middleware.shared_data": ["SharedDataMiddleware"],
- ".security": ["check_password_hash", "generate_password_hash"],
- ".test": ["create_environ", "EnvironBuilder", "run_wsgi_app"],
- ".testapp": ["test_app"],
- ".urls": [
- "Href",
- "iri_to_uri",
- "uri_to_iri",
- "url_decode",
- "url_encode",
- "url_fix",
- "url_quote",
- "url_quote_plus",
- "url_unquote",
- "url_unquote_plus",
- ],
- ".useragents": ["UserAgent"],
- ".utils": [
- "append_slash_redirect",
- "ArgumentValidationError",
- "bind_arguments",
- "cached_property",
- "environ_property",
- "escape",
- "find_modules",
- "format_string",
- "header_property",
- "html",
- "HTMLBuilder",
- "import_string",
- "redirect",
- "secure_filename",
- "unescape",
- "validate_arguments",
- "xhtml",
- ],
- ".wrappers.accept": ["AcceptMixin"],
- ".wrappers.auth": ["AuthorizationMixin", "WWWAuthenticateMixin"],
- ".wrappers.base_request": ["BaseRequest"],
- ".wrappers.base_response": ["BaseResponse"],
- ".wrappers.common_descriptors": [
- "CommonRequestDescriptorsMixin",
- "CommonResponseDescriptorsMixin",
- ],
- ".wrappers.etag": ["ETagRequestMixin", "ETagResponseMixin"],
- ".wrappers.response": ["ResponseStreamMixin"],
- ".wrappers.user_agent": ["UserAgentMixin"],
- ".wsgi": [
- "ClosingIterator",
- "extract_path_info",
- "FileWrapper",
- "get_current_url",
- "get_host",
- "LimitedStream",
- "make_line_iter",
- "peek_path_info",
- "pop_path_info",
- "responder",
- "wrap_file",
- ],
- },
- "Werkzeug 1.0",
-)
-
from .serving import run_simple
from .test import Client
from .wrappers import Request
from .wrappers import Response
+
+__version__ = "1.0.0.dev0"
diff --git a/werkzeug-import-rewrite.py b/werkzeug-import-rewrite.py
deleted file mode 100644
index 81af6a55a..000000000
--- a/werkzeug-import-rewrite.py
+++ /dev/null
@@ -1,310 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
- Werkzeug Import Rewriter
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Changes the deprecated werkzeug imports to the full canonical imports.
- This is a terrible hack, don't trust the diff untested.
-
- :copyright: 2007 Pallets
- :license: BSD-3-Clause
-"""
-import difflib
-import os
-import posixpath
-import re
-import sys
-
-
-_from_import_re = re.compile(r"(\s*(>>>|\.\.\.)?\s*)from werkzeug import\s+")
-_direct_usage = re.compile(r"(? 79:
- yield prefix + ", ".join(item_buffer[:-1]) + ", \\"
- item_buffer = [item_buffer[-1]]
- # doctest continuations
- indentation = indentation.replace(">", ".")
- prefix = indentation + " "
- yield prefix + ", ".join(item_buffer)
-
-
-def inject_imports(lines, imports):
- pos = 0
- for idx, line in enumerate(lines):
- if re.match(r"(from|import)\s+werkzeug", line):
- pos = idx
- break
- lines[pos:pos] = [
- "from %s import %s" % (mod, ", ".join(sorted(attrs)))
- for mod, attrs in sorted(imports.items())
- ]
-
-
-def rewrite_file(filename):
- with open(filename) as f:
- old_file = f.read().splitlines()
-
- new_file = []
- deferred_imports = {}
- lineiter = iter(old_file)
- for line in lineiter:
- # rewrite from imports
- match = _from_import_re.search(line)
- if match is not None:
- fromlist = line[match.end() :]
- new_file.extend(rewrite_from_imports(fromlist, match.group(1), lineiter))
- continue
-
- def _handle_match(match):
- # rewrite attribute access to 'werkzeug'
- attr = match.group(2)
- mod = find_module(attr)
- if mod == "werkzeug":
- return match.group(0)
- deferred_imports.setdefault(mod, []).append(attr)
- return attr
-
- new_file.append(_direct_usage.sub(_handle_match, line))
- if deferred_imports:
- inject_imports(new_file, deferred_imports)
-
- for line in difflib.unified_diff(
- old_file,
- new_file,
- posixpath.normpath(posixpath.join("a", filename)),
- posixpath.normpath(posixpath.join("b", filename)),
- lineterm="",
- ):
- print(line)
-
-
-def rewrite_in_folders(folders):
- for folder in folders:
- for dirpath, _dirnames, filenames in os.walk(folder):
- for filename in filenames:
- filename = os.path.join(dirpath, filename)
- if filename.endswith((".rst", ".py")):
- rewrite_file(filename)
-
-
-def main():
- if len(sys.argv) == 1:
- print("usage: werkzeug-import-rewrite.py [folders]")
- sys.exit(1)
- rewrite_in_folders(sys.argv[1:])
-
-
-if __name__ == "__main__":
- main()
From 1aba7408bbb1118ba86013f3e16585dbd3e56ede Mon Sep 17 00:00:00 2001
From: David Lord
Date: Thu, 19 Sep 2019 07:49:17 -0700
Subject: [PATCH 075/733] release version 0.16.0
---
CHANGES.rst | 2 +-
src/werkzeug/__init__.py | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index b3b177f50..5586909d5 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -3,7 +3,7 @@
Version 0.16.0
--------------
-Unreleased
+Released 2019-09-19
- Deprecate most top-level attributes provided by the ``werkzeug``
module in favor of direct imports. The deprecated imports will be
diff --git a/src/werkzeug/__init__.py b/src/werkzeug/__init__.py
index 49907e93c..d53bb3910 100644
--- a/src/werkzeug/__init__.py
+++ b/src/werkzeug/__init__.py
@@ -14,7 +14,7 @@
"""
from types import ModuleType
-__version__ = "0.16.0.dev0"
+__version__ = "0.16.0"
__all__ = ["run_simple", "Client", "Request", "Response", "__version__"]
From 38ec32dfb1fd44e6b51b778e619b67106dbdec93 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Fri, 20 Sep 2019 10:08:37 -0700
Subject: [PATCH 076/733] package directory access returns 404
---
CHANGES.rst | 4 ++
src/werkzeug/middleware/shared_data.py | 66 +++++++++++++++++++-------
tests/middleware/test_shared_data.py | 7 +--
3 files changed, 58 insertions(+), 19 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index b7cead45f..0f9742801 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -57,6 +57,10 @@ Unreleased
:pr:`1469`
- ``is_resource_modified`` will run for methods other than ``GET`` and
``HEAD``, rather than always returning ``False``. :issue:`409`
+- ``SharedDataMiddleware`` returns 404 rather than 500 when trying to
+ access a directory instead of a file with the package loader. The
+ dependency on setuptools and pkg_resources is removed.
+ :issue:`1599`, :pr:`1647`
- Optional request log highlighting with the development server is
handled by Click instead of termcolor. :issue:`1235`
- Optional ad-hoc TLS support for the development server is handled
diff --git a/src/werkzeug/middleware/shared_data.py b/src/werkzeug/middleware/shared_data.py
index 088504a92..5c000c978 100644
--- a/src/werkzeug/middleware/shared_data.py
+++ b/src/werkzeug/middleware/shared_data.py
@@ -10,6 +10,7 @@
"""
import mimetypes
import os
+import pkgutil
import posixpath
from datetime import datetime
from io import BytesIO
@@ -139,32 +140,65 @@ def get_file_loader(self, filename):
return lambda x: (os.path.basename(filename), self._opener(filename))
def get_package_loader(self, package, package_path):
- from pkg_resources import DefaultProvider, ResourceManager, get_provider
-
loadtime = datetime.utcnow()
- provider = get_provider(package)
- manager = ResourceManager()
- filesystem_bound = isinstance(provider, DefaultProvider)
+ provider = pkgutil.get_loader(package)
- def loader(path):
- if path is None:
- return None, None
+ if hasattr(provider, "get_resource_reader"):
+ # Python 3
+ reader = provider.get_resource_reader(package)
+
+ def loader(path):
+ if path is None:
+ return None, None
- path = safe_join(package_path, path)
+ path = safe_join(package_path, path)
+ basename = posixpath.basename(path)
- if not provider.has_resource(path):
- return None, None
+ try:
+ resource = reader.open_resource(path)
+ except IOError:
+ return None, None
- basename = posixpath.basename(path)
+ if isinstance(resource, BytesIO):
+ return (
+ basename,
+ lambda: (resource, loadtime, len(resource.getvalue())),
+ )
- if filesystem_bound:
return (
basename,
- self._opener(provider.get_resource_filename(manager, path)),
+ lambda: (
+ resource,
+ datetime.utcfromtimestamp(os.path.getmtime(resource.name)),
+ os.path.getsize(resource.name),
+ ),
)
- s = provider.get_resource_string(manager, path)
- return basename, lambda: (BytesIO(s), loadtime, len(s))
+ else:
+ # Python 2
+ package_filename = provider.get_filename(package)
+ is_filesystem = os.path.exists(package_filename)
+ root = os.path.join(os.path.dirname(package_filename), package_path)
+
+ def loader(path):
+ if path is None:
+ return None, None
+
+ path = safe_join(root, path)
+ basename = posixpath.basename(path)
+
+ if is_filesystem:
+ if not os.path.isfile(path):
+ return None, None
+
+ return basename, self._opener(path)
+
+ try:
+ data = provider.get_data(path)
+ except IOError:
+ return None, None
+
+ return basename, lambda: (BytesIO(data), loadtime, len(data))
return loader
diff --git a/tests/middleware/test_shared_data.py b/tests/middleware/test_shared_data.py
index 20bd88cb5..fb685f77c 100644
--- a/tests/middleware/test_shared_data.py
+++ b/tests/middleware/test_shared_data.py
@@ -61,6 +61,7 @@ def null_application(environ, start_response):
assert b"$(function() {" in contents
- app_iter, status, headers = run_wsgi_app(app, create_environ("/missing"))
- assert status == "404 NOT FOUND"
- assert b"".join(app_iter).strip() == b"NOT FOUND"
+ for path in ("/missing", "/pkg", "/pkg/", "/pkg/missing.txt"):
+ app_iter, status, headers = run_wsgi_app(app, create_environ(path))
+ assert status == "404 NOT FOUND"
+ assert b"".join(app_iter).strip() == b"NOT FOUND"
From 3f035da76555d0fa54ec320bb4ad9087011ef764 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Fri, 20 Sep 2019 11:49:43 -0700
Subject: [PATCH 077/733] support Cache-Control: immutable
---
CHANGES.rst | 5 ++++-
src/werkzeug/datastructures.py | 2 +-
2 files changed, 5 insertions(+), 2 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 0f9742801..2f79596ea 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -60,7 +60,10 @@ Unreleased
- ``SharedDataMiddleware`` returns 404 rather than 500 when trying to
access a directory instead of a file with the package loader. The
dependency on setuptools and pkg_resources is removed.
- :issue:`1599`, :pr:`1647`
+ :issue:`1599`
+- Add a ``response.cache_control.immutable`` flag. Keep in mind that
+ browser support for this ``Cache-Control`` header option is still
+ experimental and may not be implemented. :issue:`1185`
- Optional request log highlighting with the development server is
handled by Click instead of termcolor. :issue:`1235`
- Optional ad-hoc TLS support for the development server is handled
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index 2286f8dd4..e3231b081 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -2050,7 +2050,6 @@ class RequestCacheControl(ImmutableDictMixin, _CacheControl):
max_stale = cache_property("max-stale", "*", int)
min_fresh = cache_property("min-fresh", "*", int)
- no_transform = cache_property("no-transform", None, None)
only_if_cached = cache_property("only-if-cached", None, bool)
@@ -2074,6 +2073,7 @@ class ResponseCacheControl(_CacheControl):
must_revalidate = cache_property("must-revalidate", None, bool)
proxy_revalidate = cache_property("proxy-revalidate", None, bool)
s_maxage = cache_property("s-maxage", None, None)
+ immutable = cache_property("immutable", None, bool)
# attach cache_property to the _CacheControl as staticmethod
From b403bbb9a296a4057d72c560b58ae9774b5f4106 Mon Sep 17 00:00:00 2001
From: syndrowm
Date: Fri, 15 Nov 2019 22:57:50 -0700
Subject: [PATCH 078/733] adhoc cert support macos 10.15 requirements
---
src/werkzeug/serving.py | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py
index eac541b60..8b6093672 100644
--- a/src/werkzeug/serving.py
+++ b/src/werkzeug/serving.py
@@ -520,6 +520,10 @@ def generate_adhoc_ssl_pair(cn=None):
.serial_number(x509.random_serial_number())
.not_valid_before(dt.utcnow())
.not_valid_after(dt.utcnow() + timedelta(days=365))
+ .add_extension(
+ x509.ExtendedKeyUsage([x509.OID_SERVER_AUTH]), critical=False)
+ .add_extension(
+ x509.SubjectAlternativeName([x509.DNSName(u'*')]), critical=False)
.sign(pkey, hashes.SHA256(), default_backend())
)
return cert, pkey
From eddf90148802ed1b357f18d5b9201108f32f60e5 Mon Sep 17 00:00:00 2001
From: syndrowm
Date: Fri, 15 Nov 2019 23:11:58 -0700
Subject: [PATCH 079/733] style
---
src/werkzeug/serving.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py
index 8b6093672..acb71d461 100644
--- a/src/werkzeug/serving.py
+++ b/src/werkzeug/serving.py
@@ -520,10 +520,10 @@ def generate_adhoc_ssl_pair(cn=None):
.serial_number(x509.random_serial_number())
.not_valid_before(dt.utcnow())
.not_valid_after(dt.utcnow() + timedelta(days=365))
+ .add_extension(x509.ExtendedKeyUsage([x509.OID_SERVER_AUTH]), critical=False)
.add_extension(
- x509.ExtendedKeyUsage([x509.OID_SERVER_AUTH]), critical=False)
- .add_extension(
- x509.SubjectAlternativeName([x509.DNSName(u'*')]), critical=False)
+ x509.SubjectAlternativeName([x509.DNSName(u'*')]), critical=False
+ )
.sign(pkey, hashes.SHA256(), default_backend())
)
return cert, pkey
From 5ff85a816ba94927ce768c0802c9e6e408cf8cf3 Mon Sep 17 00:00:00 2001
From: syndrowm
Date: Fri, 15 Nov 2019 23:17:27 -0700
Subject: [PATCH 080/733] style
---
src/werkzeug/serving.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py
index acb71d461..8494c5efd 100644
--- a/src/werkzeug/serving.py
+++ b/src/werkzeug/serving.py
@@ -522,7 +522,7 @@ def generate_adhoc_ssl_pair(cn=None):
.not_valid_after(dt.utcnow() + timedelta(days=365))
.add_extension(x509.ExtendedKeyUsage([x509.OID_SERVER_AUTH]), critical=False)
.add_extension(
- x509.SubjectAlternativeName([x509.DNSName(u'*')]), critical=False
+ x509.SubjectAlternativeName([x509.DNSName(u"*")]), critical=False
)
.sign(pkey, hashes.SHA256(), default_backend())
)
From 7dcd28e1d96264dfee6439ce07f2c5cb013d720c Mon Sep 17 00:00:00 2001
From: syndrowm
Date: Sat, 16 Nov 2019 12:12:01 -0700
Subject: [PATCH 081/733] test ci
---
src/werkzeug/serving.py | 4 ----
1 file changed, 4 deletions(-)
diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py
index 8494c5efd..eac541b60 100644
--- a/src/werkzeug/serving.py
+++ b/src/werkzeug/serving.py
@@ -520,10 +520,6 @@ def generate_adhoc_ssl_pair(cn=None):
.serial_number(x509.random_serial_number())
.not_valid_before(dt.utcnow())
.not_valid_after(dt.utcnow() + timedelta(days=365))
- .add_extension(x509.ExtendedKeyUsage([x509.OID_SERVER_AUTH]), critical=False)
- .add_extension(
- x509.SubjectAlternativeName([x509.DNSName(u"*")]), critical=False
- )
.sign(pkey, hashes.SHA256(), default_backend())
)
return cert, pkey
From 6d961dfb81ff9d655ca38f7b8385b509014f95b4 Mon Sep 17 00:00:00 2001
From: syndrowm
Date: Sat, 16 Nov 2019 12:16:18 -0700
Subject: [PATCH 082/733] restore change
---
src/werkzeug/serving.py | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py
index eac541b60..8494c5efd 100644
--- a/src/werkzeug/serving.py
+++ b/src/werkzeug/serving.py
@@ -520,6 +520,10 @@ def generate_adhoc_ssl_pair(cn=None):
.serial_number(x509.random_serial_number())
.not_valid_before(dt.utcnow())
.not_valid_after(dt.utcnow() + timedelta(days=365))
+ .add_extension(x509.ExtendedKeyUsage([x509.OID_SERVER_AUTH]), critical=False)
+ .add_extension(
+ x509.SubjectAlternativeName([x509.DNSName(u"*")]), critical=False
+ )
.sign(pkey, hashes.SHA256(), default_backend())
)
return cert, pkey
From 99e1b14bce666ffb67a699d6492af4e6c7bea696 Mon Sep 17 00:00:00 2001
From: Chris Nickel
Date: Sun, 15 Dec 2019 10:46:58 -0500
Subject: [PATCH 083/733] Docs: Remove redundant abort code example and update
description
---
src/werkzeug/exceptions.py | 14 +++++---------
1 file changed, 5 insertions(+), 9 deletions(-)
diff --git a/src/werkzeug/exceptions.py b/src/werkzeug/exceptions.py
index ee06110f4..edc6454ab 100644
--- a/src/werkzeug/exceptions.py
+++ b/src/werkzeug/exceptions.py
@@ -767,17 +767,13 @@ def __call__(self, code, *args, **kwargs):
def abort(status, *args, **kwargs):
"""Raises an :py:exc:`HTTPException` for the given status code or WSGI
- application::
+ application.
- abort(404) # 404 Not Found
- abort(Response('Hello World'))
+ If a status code is given, it will be looked up in the list of
+ exceptions and will raise that exception. If passed a WSGI application,
+ it will wrap it in a proxy WSGI exception and raise that::
- Can be passed a WSGI application or a status code. If a status code is
- given it's looked up in the list of exceptions and will raise that
- exception, if passed a WSGI application it will wrap it in a proxy WSGI
- exception and raise that::
-
- abort(404)
+ abort(404) # 404 Not Found
abort(Response('Hello World'))
"""
From c0f7efe3e4acbe7253677266e2b77b1a127096c6 Mon Sep 17 00:00:00 2001
From: Adam Englander
Date: Sun, 1 Dec 2019 16:41:55 -0800
Subject: [PATCH 084/733] Update Tox and Azure Pipelines with Python 3.8
---
.azure-pipelines.yml | 10 ++++++----
tox.ini | 2 +-
2 files changed, 7 insertions(+), 5 deletions(-)
diff --git a/.azure-pipelines.yml b/.azure-pipelines.yml
index c85fe0539..afdb617f0 100644
--- a/.azure-pipelines.yml
+++ b/.azure-pipelines.yml
@@ -4,20 +4,22 @@ trigger:
variables:
vmImage: ubuntu-latest
- python.version: 3.7
+ python.version: 3.8
TOXENV: py,coverage-ci
hasTestResults: true
strategy:
matrix:
- Python 3.7 Linux:
+ Python 3.8 Linux:
vmImage: ubuntu-latest
- Python 3.7 Windows:
+ Python 3.8 Windows:
vmImage: windows-latest
- Python 3.7 Mac:
+ Python 3.8 Mac:
vmImage: macos-latest
PyPy 3 Linux:
python.version: pypy3
+ Python 3.7 Linux:
+ python.version: 3.7
Python 3.6 Linux:
python.version: 3.6
Python 3.5 Linux:
diff --git a/tox.ini b/tox.ini
index abda16f0e..46800a905 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,6 +1,6 @@
[tox]
envlist =
- py{37,36,35,27,py3,py}
+ py{38,37,36,35,27,py3,py}
style
docs-html
coverage
From 121d4f1c11ed453da51316de82a68920f1ce4438 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Fri, 3 Jan 2020 14:54:41 -0800
Subject: [PATCH 085/733] fix tests and update ci config
---
.azure-pipelines.yml | 20 ++++++++++----------
.pre-commit-config.yaml | 8 ++++----
setup.cfg | 1 +
src/werkzeug/formparser.py | 2 +-
tests/test_serving.py | 14 +++++++-------
tox.ini | 10 +++-------
6 files changed, 26 insertions(+), 29 deletions(-)
diff --git a/.azure-pipelines.yml b/.azure-pipelines.yml
index afdb617f0..eabcaf768 100644
--- a/.azure-pipelines.yml
+++ b/.azure-pipelines.yml
@@ -4,9 +4,9 @@ trigger:
variables:
vmImage: ubuntu-latest
- python.version: 3.8
+ python.version: '3.8'
TOXENV: py,coverage-ci
- hasTestResults: true
+ hasTestResults: 'true'
strategy:
matrix:
@@ -19,22 +19,22 @@ strategy:
PyPy 3 Linux:
python.version: pypy3
Python 3.7 Linux:
- python.version: 3.7
+ python.version: '3.7'
Python 3.6 Linux:
- python.version: 3.6
+ python.version: '3.6'
Python 3.5 Linux:
- python.version: 3.5
+ python.version: '3.5'
Python 2.7 Linux:
- python.version: 2.7
+ python.version: '2.7'
Python 2.7 Windows:
- python.version: 2.7
+ python.version: '2.7'
vmImage: windows-latest
Docs:
- TOXENV: docs-html
- hasTestResults: false
+ TOXENV: docs
+ hasTestResults: 'false'
Style:
TOXENV: style
- hasTestResults: false
+ hasTestResults: 'false'
pool:
vmImage: $[ variables.vmImage ]
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 2fb466196..a92c42137 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/asottile/reorder_python_imports
- rev: v1.4.0
+ rev: v1.8.0
hooks:
- id: reorder-python-imports
name: Reorder Python imports (src, tests)
@@ -11,16 +11,16 @@ repos:
files: "^examples/"
args: ["--application-directories", "examples"]
- repo: https://github.com/ambv/black
- rev: 18.9b0
+ rev: 19.10b0
hooks:
- id: black
- repo: https://gitlab.com/pycqa/flake8
- rev: 3.7.7
+ rev: 3.7.9
hooks:
- id: flake8
additional_dependencies: [flake8-bugbear]
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v2.1.0
+ rev: v2.4.0
hooks:
- id: check-byte-order-marker
- id: trailing-whitespace
diff --git a/setup.cfg b/setup.cfg
index 1def1f7ba..02e0db220 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -8,6 +8,7 @@ universal = true
testpaths = tests
norecursedirs = tests/hypothesis
filterwarnings =
+ error
ignore::requests.packages.urllib3.exceptions.InsecureRequestWarning
; warning about collections.abc fixed in watchdog master
ignore::DeprecationWarning:watchdog.utils.bricks:175
diff --git a/src/werkzeug/formparser.py b/src/werkzeug/formparser.py
index ffdb9b0f1..02ae2ce5e 100644
--- a/src/werkzeug/formparser.py
+++ b/src/werkzeug/formparser.py
@@ -137,7 +137,7 @@ def wrapper(self, stream, *args, **kwargs):
while 1:
chunk = stream.read(1024 * 64)
if not chunk:
- break
+ break # noqa: B012
return update_wrapper(wrapper, f)
diff --git a/tests/test_serving.py b/tests/test_serving.py
index cf31bd2e6..b60c39fcd 100644
--- a/tests/test_serving.py
+++ b/tests/test_serving.py
@@ -295,14 +295,14 @@ def app(environ, start_response):
raise RuntimeError("Change event not detected.")
-def test_windows_get_args_for_reloading(monkeypatch, tmpdir):
- test_py_exe = r"C:\Users\test\AppData\Local\Programs\Python\Python36\python.exe"
- monkeypatch.setattr(os, "name", "nt")
- monkeypatch.setattr(sys, "executable", test_py_exe)
- test_exe = tmpdir.mkdir("test").join("test.exe")
- monkeypatch.setattr(sys, "argv", [test_exe.strpath, "run"])
+def test_windows_get_args_for_reloading(monkeypatch, tmp_path):
+ argv = [str(tmp_path / "test.exe"), "run"]
+ monkeypatch.setattr("sys.executable", str(tmp_path / "python.exe"))
+ monkeypatch.setattr("sys.argv", argv)
+ monkeypatch.setattr("__main__.__package__", None)
+ monkeypatch.setattr("os.name", "nt")
rv = _reloader._get_args_for_reloading()
- assert rv == [test_exe.strpath, "run"]
+ assert rv == argv
def test_monkeypatched_sleep(tmpdir):
diff --git a/tox.ini b/tox.ini
index 46800a905..fdab3b3a5 100644
--- a/tox.ini
+++ b/tox.ini
@@ -2,7 +2,7 @@
envlist =
py{38,37,36,35,27,py3,py}
style
- docs-html
+ docs
coverage
skip_missing_interpreters = true
@@ -24,11 +24,8 @@ deps = pre-commit
skip_install = true
commands = pre-commit run --all-files --show-diff-on-failure
-[testenv:docs-html]
-deps =
- Sphinx
- Pallets-Sphinx-Themes
- sphinx-issues
+[testenv:docs]
+deps = -r docs/requirements.txt
commands = sphinx-build -W -b html -d {envtmpdir}/doctrees docs {envtmpdir}/html
[testenv:coverage]
@@ -45,4 +42,3 @@ skip_install = true
commands =
coverage combine
coverage xml
- coverage report
From 6d5c6e203d82552eb272e021fa7347000473b7e5 Mon Sep 17 00:00:00 2001
From: manlix
Date: Thu, 17 Oct 2019 20:21:13 +0300
Subject: [PATCH 086/733] Add support for pathlib to save uploaded file
---
CHANGES.rst | 1 +
src/werkzeug/datastructures.py | 7 +++++++
tests/test_datastructures.py | 25 +++++++++++++++++++++++++
3 files changed, 33 insertions(+)
diff --git a/CHANGES.rst b/CHANGES.rst
index 2f79596ea..b19aeb3ee 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -68,6 +68,7 @@ Unreleased
handled by Click instead of termcolor. :issue:`1235`
- Optional ad-hoc TLS support for the development server is handled
by cryptography instead of pyOpenSSL. :pr:`1555`
+- Add support for pathlib to save uploaded file. :issue:`1653`
Version 0.16.0
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index e3231b081..4667b847a 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -30,6 +30,11 @@
from ._internal import _missing
from .filesystem import get_filesystem_encoding
+try:
+ import pathlib
+except ImportError:
+ pathlib = None
+
def is_immutable(self):
raise TypeError("%r objects are immutable" % self.__class__.__name__)
@@ -2952,6 +2957,8 @@ def save(self, dst, buffer_size=16384):
from shutil import copyfileobj
close_dst = False
+ if pathlib is not None and isinstance(dst, pathlib.PurePath):
+ dst = str(dst)
if isinstance(dst, string_types):
dst = open(dst, "wb")
close_dst = True
diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py
index 800da86b0..e4b35c02f 100644
--- a/tests/test_datastructures.py
+++ b/tests/test_datastructures.py
@@ -1188,6 +1188,31 @@ def test_proxy_can_access_stream_attrs(self, stream):
for name in ("fileno", "writable", "readable", "seekable"):
assert hasattr(file_storage, name)
+ @pytest.mark.skipif(PY2, reason="Test only needed in PY3")
+ def test_save_to_pathlib_dst(self, tmp_path):
+ import pathlib
+
+ tmp = str(tmp_path)
+
+ dst = pathlib.Path(tmp, "dst.txt")
+ assert isinstance(dst, pathlib.PurePath)
+
+ test_text = "test text"
+
+ src = pathlib.Path(tmp, "src.txt")
+ src.write_text(test_text)
+ file_storage = self.storage_class(src.open("rb"))
+ file_storage.save(dst)
+
+ result = pathlib.Path(dst).read_text()
+ assert result == "test text"
+
+ def test_string_instead_valid_stream(self):
+ file_storage = self.storage_class("string_instead_valid_stream")
+
+ with pytest.raises(AttributeError):
+ file_storage.read()
+
@pytest.mark.parametrize("ranges", ([(0, 1), (-5, None)], [(5, None)]))
def test_range_to_header(ranges):
From ad0ef8c51aebf4062ffb47a3da28bc1ce0fb88f4 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Thu, 2 Jan 2020 18:23:54 -0800
Subject: [PATCH 087/733] use fspath for general compatibility
---
CHANGES.rst | 3 ++-
src/werkzeug/_compat.py | 9 +++++++++
src/werkzeug/datastructures.py | 23 +++++++++++------------
tests/test_datastructures.py | 29 ++++++-----------------------
4 files changed, 28 insertions(+), 36 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index b19aeb3ee..f62a395d4 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -68,7 +68,8 @@ Unreleased
handled by Click instead of termcolor. :issue:`1235`
- Optional ad-hoc TLS support for the development server is handled
by cryptography instead of pyOpenSSL. :pr:`1555`
-- Add support for pathlib to save uploaded file. :issue:`1653`
+- ``FileStorage.save()`` supports ``pathlib`` and :pep:`519`
+ ``PathLike`` objects. :issue:`1653`
Version 0.16.0
diff --git a/src/werkzeug/_compat.py b/src/werkzeug/_compat.py
index 1097983e0..1d8c81040 100644
--- a/src/werkzeug/_compat.py
+++ b/src/werkzeug/_compat.py
@@ -217,3 +217,12 @@ def to_unicode(
if charset is None and allow_none_charset:
return x
return x.decode(charset, errors)
+
+
+try:
+ from os import fspath
+except ImportError:
+ # Python < 3.6
+ # https://www.python.org/dev/peps/pep-0519/#backwards-compatibility
+ def fspath(path):
+ return path.__fspath__() if hasattr(path, "__fspath__") else path
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index 4667b847a..e194dd732 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -17,6 +17,7 @@
from . import exceptions
from ._compat import BytesIO
from ._compat import collections_abc
+from ._compat import fspath
from ._compat import integer_types
from ._compat import iteritems
from ._compat import iterkeys
@@ -30,11 +31,6 @@
from ._internal import _missing
from .filesystem import get_filesystem_encoding
-try:
- import pathlib
-except ImportError:
- pathlib = None
-
def is_immutable(self):
raise TypeError("%r objects are immutable" % self.__class__.__name__)
@@ -2948,20 +2944,23 @@ def save(self, dst, buffer_size=16384):
For secure file saving also have a look at :func:`secure_filename`.
- :param dst: a filename or open file object the uploaded file
- is saved to.
- :param buffer_size: the size of the buffer. This works the same as
- the `length` parameter of
- :func:`shutil.copyfileobj`.
+ :param dst: a filename, :class:`os.PathLike`, or open file
+ object to write to.
+ :param buffer_size: Passed as the ``length`` parameter of
+ :func:`shutil.copyfileobj`.
+
+ .. versionchanged:: 1.0
+ Supports :mod:`pathlib`.
"""
from shutil import copyfileobj
close_dst = False
- if pathlib is not None and isinstance(dst, pathlib.PurePath):
- dst = str(dst)
+ dst = fspath(dst)
+
if isinstance(dst, string_types):
dst = open(dst, "wb")
close_dst = True
+
try:
copyfileobj(self.stream, dst, buffer_size)
finally:
diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py
index e4b35c02f..8ff556a58 100644
--- a/tests/test_datastructures.py
+++ b/tests/test_datastructures.py
@@ -1188,30 +1188,13 @@ def test_proxy_can_access_stream_attrs(self, stream):
for name in ("fileno", "writable", "readable", "seekable"):
assert hasattr(file_storage, name)
- @pytest.mark.skipif(PY2, reason="Test only needed in PY3")
def test_save_to_pathlib_dst(self, tmp_path):
- import pathlib
-
- tmp = str(tmp_path)
-
- dst = pathlib.Path(tmp, "dst.txt")
- assert isinstance(dst, pathlib.PurePath)
-
- test_text = "test text"
-
- src = pathlib.Path(tmp, "src.txt")
- src.write_text(test_text)
- file_storage = self.storage_class(src.open("rb"))
- file_storage.save(dst)
-
- result = pathlib.Path(dst).read_text()
- assert result == "test text"
-
- def test_string_instead_valid_stream(self):
- file_storage = self.storage_class("string_instead_valid_stream")
-
- with pytest.raises(AttributeError):
- file_storage.read()
+ src = tmp_path / "src.txt"
+ src.write_text(u"test")
+ storage = self.storage_class(src.open("rb"))
+ dst = tmp_path / "dst.txt"
+ storage.save(dst)
+ assert dst.read_text() == "test"
@pytest.mark.parametrize("ranges", ([(0, 1), (-5, None)], [(5, None)]))
From 617309a7c317ae1ade428de48f5bc4a906c2950f Mon Sep 17 00:00:00 2001
From: David Lord
Date: Sat, 4 Jan 2020 12:08:47 -0800
Subject: [PATCH 088/733] get_machine_id unique for podman
---
CHANGES.rst | 2 +
src/werkzeug/debug/__init__.py | 70 ++++++++++++++++++----------------
2 files changed, 39 insertions(+), 33 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index f62a395d4..712b01346 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -70,6 +70,8 @@ Unreleased
by cryptography instead of pyOpenSSL. :pr:`1555`
- ``FileStorage.save()`` supports ``pathlib`` and :pep:`519`
``PathLike`` objects. :issue:`1653`
+- The debugger security pin is unique in containers managed by Podman.
+ :issue:`1661`
Version 0.16.0
diff --git a/src/werkzeug/debug/__init__.py b/src/werkzeug/debug/__init__.py
index bb188017a..f9f6e8531 100644
--- a/src/werkzeug/debug/__init__.py
+++ b/src/werkzeug/debug/__init__.py
@@ -47,59 +47,62 @@ def hash_pin(pin):
def get_machine_id():
global _machine_id
- rv = _machine_id
- if rv is not None:
- return rv
- def _generate():
- # docker containers share the same machine id, get the
- # container id instead
- try:
- with open("/proc/self/cgroup") as f:
- value = f.readline()
- except IOError:
- pass
- else:
- value = value.strip().partition("/docker/")[2]
+ if _machine_id is not None:
+ return _machine_id
- if value:
- return value
+ def _generate():
+ linux = b""
- # Potential sources of secret information on linux. The machine-id
- # is stable across boots, the boot id is not
+ # machine-id is stable across boots, boot_id is not.
for filename in "/etc/machine-id", "/proc/sys/kernel/random/boot_id":
try:
with open(filename, "rb") as f:
- return f.readline().strip()
+ value = f.readline().strip()
except IOError:
continue
- # On OS X we can use the computer's serial number assuming that
- # ioreg exists and can spit out that information.
+ if value:
+ linux += value
+ break
+
+ # Containers share the same machine id, add some cgroup
+ # information. This is used outside containers too but should be
+ # relatively stable across boots.
try:
- # Also catch import errors: subprocess may not be available, e.g.
- # Google App Engine
- # See https://github.com/pallets/werkzeug/issues/925
+ with open("/proc/self/cgroup", "rb") as f:
+ linux += f.readline().strip().rpartition(b"/")[2]
+ except IOError:
+ pass
+
+ if linux:
+ return linux
+
+ # On OS X, use ioreg to get the computer's serial number.
+ try:
+ # subprocess may not be available, e.g. Google App Engine
+ # https://github.com/pallets/werkzeug/issues/925
from subprocess import Popen, PIPE
dump = Popen(
["ioreg", "-c", "IOPlatformExpertDevice", "-d", "2"], stdout=PIPE
).communicate()[0]
match = re.search(b'"serial-number" = <([^>]+)', dump)
+
if match is not None:
return match.group(1)
except (OSError, ImportError):
pass
- # On Windows we can use winreg to get the machine guid
- wr = None
+ # On Windows, use winreg to get the machine guid.
try:
import winreg as wr
except ImportError:
try:
import _winreg as wr
except ImportError:
- pass
+ wr = None
+
if wr is not None:
try:
with wr.OpenKey(
@@ -108,16 +111,17 @@ def _generate():
0,
wr.KEY_READ | wr.KEY_WOW64_64KEY,
) as rk:
- machineGuid, wrType = wr.QueryValueEx(rk, "MachineGuid")
- if wrType == wr.REG_SZ:
- return machineGuid.encode("utf-8")
- else:
- return machineGuid
+ guid, guid_type = wr.QueryValueEx(rk, "MachineGuid")
+
+ if guid_type == wr.REG_SZ:
+ return guid.encode("utf-8")
+
+ return guid
except WindowsError:
pass
- _machine_id = rv = _generate()
- return rv
+ _machine_id = _generate()
+ return _machine_id
class _ConsoleFrame(object):
From e3a880bf1dc06231ce756a4fbf31896101d190fc Mon Sep 17 00:00:00 2001
From: junnplus
Date: Sat, 16 Nov 2019 00:11:39 +0800
Subject: [PATCH 089/733] fix rename DeprecationWarning message for subpackages
---
CHANGES.rst | 7 +++++++
src/werkzeug/__init__.py | 6 ++++--
2 files changed, 11 insertions(+), 2 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 5586909d5..50b85678c 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,5 +1,12 @@
.. currentmodule:: werkzeug
+Version 0.16.1
+--------------
+
+- Fix import location in deprecation messages for subpackages.
+ :issue:`1663`
+
+
Version 0.16.0
--------------
diff --git a/src/werkzeug/__init__.py b/src/werkzeug/__init__.py
index d53bb3910..4be464264 100644
--- a/src/werkzeug/__init__.py
+++ b/src/werkzeug/__init__.py
@@ -53,15 +53,17 @@ def __getattr__(self, item):
# Import the module, get the attribute, and show a warning about where
# to correctly import it from.
- mod = import_module(origin, self.__name__.rsplit(".")[0])
+ package = self.__name__.rsplit(".")[0]
+ mod = import_module(origin, package)
value = getattr(mod, item)
warn(
"The import '{name}.{item}' is deprecated and will be removed in"
- " {removed_in}. Use 'from {name}{origin} import {item}'"
+ " {removed_in}. Use 'from {package}{origin} import {item}'"
" instead.".format(
name=self.__name__,
item=item,
removed_in=self._removed_in,
+ package=package,
origin=origin,
),
DeprecationWarning,
From 0978789dcb6ce7d3d8811ee46cae3f447d072bbd Mon Sep 17 00:00:00 2001
From: David Lord
Date: Sat, 4 Jan 2020 12:54:47 -0800
Subject: [PATCH 090/733] fix newline issue in test on pypy 2.7
---
tests/test_debug.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/tests/test_debug.py b/tests/test_debug.py
index ac795c785..15e5b942d 100644
--- a/tests/test_debug.py
+++ b/tests/test_debug.py
@@ -363,9 +363,9 @@ def test_console_closure_variables(monkeypatch):
c = console.Console()
c.eval("y = 5")
c.eval("x = lambda: y")
- ret = c.eval("x()")
- expected = ">>> x()\n5" if PY2 else ">>> x()\n5\n"
- assert ret == expected
+ # strip() is only needed for Python 2 compat
+ ret = c.eval("x()").strip()
+ assert ret == ">>> x()\n5"
@pytest.mark.skipif(PY2, reason="Python 2 doesn't have chained exceptions.")
From 1d8cd3c73c93ce2cd5bb6f433c1ec503b64f529f Mon Sep 17 00:00:00 2001
From: Adam Englander
Date: Sun, 1 Dec 2019 16:33:30 -0800
Subject: [PATCH 091/733] Only write body when there is a body to write.
Solves a serving bug that only exists in Python 3.5. There was no good
way to write a reliable test to reproduce the issue. As such, no
tests were added at this time.
---
CHANGES.rst | 4 ++++
src/werkzeug/serving.py | 4 +++-
2 files changed, 7 insertions(+), 1 deletion(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 50b85678c..580146876 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -3,8 +3,12 @@
Version 0.16.1
--------------
+Unreleased
+
- Fix import location in deprecation messages for subpackages.
:issue:`1663`
+- Fix an SSL error on Python 3.5 when the dev server responds with no
+ content. :issue:`1659`
Version 0.16.0
diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py
index d817120f2..19ac92fc8 100644
--- a/src/werkzeug/serving.py
+++ b/src/werkzeug/serving.py
@@ -273,7 +273,9 @@ def write(data):
self.end_headers()
assert isinstance(data, bytes), "applications must write bytes"
- self.wfile.write(data)
+ if data:
+ # Only write data if there is any to avoid Python 3.5 SSL bug
+ self.wfile.write(data)
self.wfile.flush()
def start_response(status, response_headers, exc_info=None):
From d0bc2470b0026fbf2357956d2945a8fb22bb5f2f Mon Sep 17 00:00:00 2001
From: pgjones
Date: Sat, 4 Jan 2020 21:52:58 +0000
Subject: [PATCH 092/733] add Python 3.8 trove classifier
---
setup.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/setup.py b/setup.py
index c53454c4d..54fda2ffc 100644
--- a/setup.py
+++ b/setup.py
@@ -40,6 +40,7 @@
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
From b7f0413399b0f164ac616b87cfe5aa120e542b8b Mon Sep 17 00:00:00 2001
From: Adam Englander
Date: Sun, 1 Dec 2019 16:46:21 -0800
Subject: [PATCH 093/733] Include host matching if possible when building URLs
---
CHANGES.rst | 3 +++
src/werkzeug/routing.py | 15 ++++++++++++---
tests/test_routing.py | 25 +++++++++++++++++++++++++
3 files changed, 40 insertions(+), 3 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 712b01346..afc558097 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -72,6 +72,9 @@ Unreleased
``PathLike`` objects. :issue:`1653`
- The debugger security pin is unique in containers managed by Podman.
:issue:`1661`
+- Building a URL when ``host_matching`` is enabled takes into account
+ the current host when there are duplicate endpoints with different
+ hosts. :issue:`488`
Version 0.16.0
diff --git a/src/werkzeug/routing.py b/src/werkzeug/routing.py
index b7b5a066e..0b42c97fd 100644
--- a/src/werkzeug/routing.py
+++ b/src/werkzeug/routing.py
@@ -1932,13 +1932,22 @@ def _partial_build(self, endpoint, values, method, append_unknown):
if rv is not None:
return rv
- # default method did not match or a specific method is passed,
- # check all and go with first result.
+ # Default method did not match or a specific method is passed.
+ # Check all for first match with matching host. If no matching
+ # host is found, go with first result.
+ first_match = None
+
for rule in self.map._rules_by_endpoint.get(endpoint, ()):
if rule.suitable_for(values, method):
rv = rule.build(values, append_unknown)
+
if rv is not None:
- return rv
+ if rv[0] == self.server_name:
+ return rv
+ elif first_match is None:
+ first_match = rv
+
+ return first_match
def build(
self,
diff --git a/tests/test_routing.py b/tests/test_routing.py
index ef2605867..a7172e4a9 100644
--- a/tests/test_routing.py
+++ b/tests/test_routing.py
@@ -1131,3 +1131,28 @@ def test_build_url_with_arg_keyword():
ret = adapter.build("foo", {"class": "bar"})
assert ret == "http://example.org/foo/bar"
+
+
+def test_build_url_same_endpoint_multiple_hosts():
+ m = r.Map(
+ [
+ r.Rule("/", endpoint="index", host="alpha.example.com"),
+ r.Rule("/", endpoint="index", host="beta.example.com"),
+ r.Rule("/", endpoint="gamma", host="gamma.example.com"),
+ ],
+ host_matching=True,
+ )
+
+ alpha = m.bind("alpha.example.com")
+ assert alpha.build("index") == "/"
+ assert alpha.build("gamma") == "http://gamma.example.com/"
+
+ alpha_case = m.bind("AlPhA.ExAmPlE.CoM")
+ assert alpha_case.build("index") == "/"
+ assert alpha_case.build("gamma") == "http://gamma.example.com/"
+
+ beta = m.bind("beta.example.com")
+ assert beta.build("index") == "/"
+
+ beta_case = m.bind("BeTa.ExAmPlE.CoM")
+ assert beta_case.build("index") == "/"
From a9c46f1c4f6c2de51b603359c7dfddefb2887758 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Sat, 4 Jan 2020 15:34:34 -0800
Subject: [PATCH 094/733] avoid building all rules when not host matching
---
src/werkzeug/routing.py | 9 ++++++---
1 file changed, 6 insertions(+), 3 deletions(-)
diff --git a/src/werkzeug/routing.py b/src/werkzeug/routing.py
index 0b42c97fd..952423f3e 100644
--- a/src/werkzeug/routing.py
+++ b/src/werkzeug/routing.py
@@ -1942,10 +1942,13 @@ def _partial_build(self, endpoint, values, method, append_unknown):
rv = rule.build(values, append_unknown)
if rv is not None:
- if rv[0] == self.server_name:
+ if self.map.host_matching:
+ if rv[0] == self.server_name:
+ return rv
+ elif first_match is None:
+ first_match = rv
+ else:
return rv
- elif first_match is None:
- first_match = rv
return first_match
From ccfd0750ae69c874594de1d8dd439c6318761b6b Mon Sep 17 00:00:00 2001
From: Adam Englander
Date: Sun, 1 Dec 2019 18:52:15 -0800
Subject: [PATCH 095/733] Add ability to set Retry-After header to 429 Too Many
Requests response.
Added optional retry_after_secs to TooManyRequests exception init.
This brings the output in line with the HTTP spec.
---
src/werkzeug/exceptions.py | 18 ++++++++++++++++++
tests/test_exceptions.py | 10 ++++++++++
2 files changed, 28 insertions(+)
diff --git a/src/werkzeug/exceptions.py b/src/werkzeug/exceptions.py
index edc6454ab..7b64048f9 100644
--- a/src/werkzeug/exceptions.py
+++ b/src/werkzeug/exceptions.py
@@ -600,11 +600,29 @@ class TooManyRequests(HTTPException):
to identify users and their request rates). The server may include a
"Retry-After" header to indicate how long the user should wait before
retrying.
+
+ .. versionchanged:: 0.16.1
+ ``retry_after_secs`` was added as the first argument, ahead of
+ ``description``.
"""
code = 429
description = "This user has exceeded an allotted request count. Try again later."
+ def __init__(self, description=None, retry_after_secs=None):
+ """
+ Use the optional value of retry_after_secs to specify the number of seconds
+ to wait for a retry attempt.
+ """
+ HTTPException.__init__(self, description)
+ self.retry_after_secs = retry_after_secs
+
+ def get_headers(self, environ=None):
+ headers = HTTPException.get_headers(self, environ)
+ if self.retry_after_secs:
+ headers.append(("Retry-After", str(self.retry_after_secs)))
+ return headers
+
class RequestHeaderFieldsTooLarge(HTTPException):
"""*431* `Request Header Fields Too Large`
diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py
index 809667eaa..6ccfad0f5 100644
--- a/tests/test_exceptions.py
+++ b/tests/test_exceptions.py
@@ -120,3 +120,13 @@ def test_response_header_content_type_should_contain_charset():
exc = exceptions.HTTPException("An error message")
h = exc.get_response({})
assert h.headers["Content-Type"] == "text/html; charset=utf-8"
+
+
+def test_too_many_requests_retry_after():
+ exc = exceptions.TooManyRequests(retry_after_secs=20)
+ h = dict(exc.get_headers({}))
+ assert h["Retry-After"] == "20"
+ assert (
+ "This user has exceeded an allotted request count. Try again later."
+ in exc.get_description()
+ )
From 88ee0a580252f94f8224f5b94c3b808c302e03fd Mon Sep 17 00:00:00 2001
From: David Lord
Date: Sat, 4 Jan 2020 19:15:05 -0800
Subject: [PATCH 096/733] extract retry_after to base class
---
CHANGES.rst | 3 ++
src/werkzeug/exceptions.py | 74 +++++++++++++++++++++++++-------------
tests/test_exceptions.py | 25 ++++++++-----
3 files changed, 69 insertions(+), 33 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index afc558097..31404332c 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -75,6 +75,9 @@ Unreleased
- Building a URL when ``host_matching`` is enabled takes into account
the current host when there are duplicate endpoints with different
hosts. :issue:`488`
+- The ``429 TooManyRequests`` and ``503 ServiceUnavailable`` HTTP
+ exceptions takes a ``retry_after`` parameter to set the
+ ``Retry-After`` header. :issue:`1657`
Version 0.16.0
diff --git a/src/werkzeug/exceptions.py b/src/werkzeug/exceptions.py
index 7b64048f9..82e99c2e5 100644
--- a/src/werkzeug/exceptions.py
+++ b/src/werkzeug/exceptions.py
@@ -58,6 +58,7 @@ def application(environ, start_response):
:license: BSD-3-Clause
"""
import sys
+from datetime import datetime
from ._compat import implements_to_string
from ._compat import integer_types
@@ -592,37 +593,52 @@ class PreconditionRequired(HTTPException):
)
-class TooManyRequests(HTTPException):
+class _RetryAfter(HTTPException):
+ """Adds an optional ``retry_after`` parameter which will set the
+ ``Retry-After`` header. May be an :class:`int` number of seconds or
+ a :class:`~datetime.datetime`.
+ """
+
+ def __init__(self, description=None, response=None, retry_after=None):
+ super(_RetryAfter, self).__init__(description, response)
+ self.retry_after = retry_after
+
+ def get_headers(self, environ=None):
+ headers = super(_RetryAfter, self).get_headers(environ)
+
+ if self.retry_after:
+ if isinstance(self.retry_after, datetime):
+ from .http import http_date
+
+ value = http_date(self.retry_after)
+ else:
+ value = str(self.retry_after)
+
+ headers.append(("Retry-After", value))
+
+ return headers
+
+
+class TooManyRequests(_RetryAfter):
"""*429* `Too Many Requests`
- The server is limiting the rate at which this user receives responses, and
- this request exceeds that rate. (The server may use any convenient method
- to identify users and their request rates). The server may include a
- "Retry-After" header to indicate how long the user should wait before
- retrying.
+ The server is limiting the rate at which this user receives
+ responses, and this request exceeds that rate. (The server may use
+ any convenient method to identify users and their request rates).
+ The server may include a "Retry-After" header to indicate how long
+ the user should wait before retrying.
- .. versionchanged:: 0.16.1
- ``retry_after_secs`` was added as the first argument, ahead of
- ``description``.
+ :param retry_after: If given, set the ``Retry-After`` header to this
+ value. May be an :class:`int` number of seconds or a
+ :class:`~datetime.datetime`.
+
+ .. versionchanged:: 1.0
+ Added ``retry_after`` parameter.
"""
code = 429
description = "This user has exceeded an allotted request count. Try again later."
- def __init__(self, description=None, retry_after_secs=None):
- """
- Use the optional value of retry_after_secs to specify the number of seconds
- to wait for a retry attempt.
- """
- HTTPException.__init__(self, description)
- self.retry_after_secs = retry_after_secs
-
- def get_headers(self, environ=None):
- headers = HTTPException.get_headers(self, environ)
- if self.retry_after_secs:
- headers.append(("Retry-After", str(self.retry_after_secs)))
- return headers
-
class RequestHeaderFieldsTooLarge(HTTPException):
"""*431* `Request Header Fields Too Large`
@@ -699,10 +715,18 @@ class BadGateway(HTTPException):
)
-class ServiceUnavailable(HTTPException):
+class ServiceUnavailable(_RetryAfter):
"""*503* `Service Unavailable`
- Status code you should return if a service is temporarily unavailable.
+ Status code you should return if a service is temporarily
+ unavailable.
+
+ :param retry_after: If given, set the ``Retry-After`` header to this
+ value. May be an :class:`int` number of seconds or a
+ :class:`~datetime.datetime`.
+
+ .. versionchanged:: 1.0
+ Added ``retry_after`` parameter.
"""
code = 503
diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py
index 6ccfad0f5..5b2ed1a4a 100644
--- a/tests/test_exceptions.py
+++ b/tests/test_exceptions.py
@@ -12,6 +12,8 @@
:copyright: 2007 Pallets
:license: BSD-3-Clause
"""
+from datetime import datetime
+
import pytest
from werkzeug import exceptions
@@ -122,11 +124,18 @@ def test_response_header_content_type_should_contain_charset():
assert h.headers["Content-Type"] == "text/html; charset=utf-8"
-def test_too_many_requests_retry_after():
- exc = exceptions.TooManyRequests(retry_after_secs=20)
- h = dict(exc.get_headers({}))
- assert h["Retry-After"] == "20"
- assert (
- "This user has exceeded an allotted request count. Try again later."
- in exc.get_description()
- )
+@pytest.mark.parametrize(
+ ("cls", "value", "expect"),
+ [
+ (exceptions.TooManyRequests, 20, "20"),
+ (
+ exceptions.ServiceUnavailable,
+ datetime(2020, 1, 4, 18, 52, 16),
+ "Sat, 04 Jan 2020 18:52:16 GMT",
+ ),
+ ],
+)
+def test_retry_after_mixin(cls, value, expect):
+ e = cls(retry_after=value)
+ h = dict(e.get_headers({}))
+ assert h["Retry-After"] == expect
From 82dd5fdb68fc993a17f1ea91b3f79c73252f5cf6 Mon Sep 17 00:00:00 2001
From: Ed Kellett
Date: Mon, 23 Apr 2018 00:15:08 +0100
Subject: [PATCH 097/733] merge slashes and redirect when matching
---
src/werkzeug/routing.py | 60 +++++++++++++++++++++++++++++++++++++----
tests/test_routing.py | 30 +++++++++++++++++++++
2 files changed, 85 insertions(+), 5 deletions(-)
diff --git a/src/werkzeug/routing.py b/src/werkzeug/routing.py
index 952423f3e..82decb144 100644
--- a/src/werkzeug/routing.py
+++ b/src/werkzeug/routing.py
@@ -161,6 +161,10 @@
)
+class InvalidURLWarning(Warning):
+ pass
+
+
_PYTHON_CONSTANTS = {"None": None, "True": True, "False": False}
@@ -253,9 +257,14 @@ def get_response(self, environ):
return redirect(self.new_url, self.code)
-class RequestSlash(RoutingException):
+class RequestPath(RoutingException):
"""Internal exception."""
+ __slots__ = ("path_info",)
+
+ def __init__(self, path_info):
+ self.path_info = path_info
+
class RequestAliasRedirect(RoutingException): # noqa: B903
"""This rule is an alias and wants to redirect to the canonical URL."""
@@ -582,6 +591,11 @@ class Rule(RuleFactory):
Override the `Map` setting for `strict_slashes` only for this rule. If
not specified the `Map` setting is used.
+ `merge_slashes`
+ Override the `Map` setting for `merge_slashes` for this rule.
+
+ .. versionadded:: 0.15
+
`build_only`
Set this to True and the rule will never match but will create a URL
that can be build. This is useful if you have resources on a subdomain
@@ -634,17 +648,25 @@ def __init__(
build_only=False,
endpoint=None,
strict_slashes=None,
+ merge_slashes=None,
redirect_to=None,
alias=False,
host=None,
):
if not string.startswith("/"):
raise ValueError("urls must start with a leading slash")
- self.rule = string
+ self.rule = re.sub(r"//+", "/", string)
+ if self.rule != string:
+ warnings.warn(
+ "Consecutive '/' separators will be stripped from URL: %r" % string,
+ InvalidURLWarning,
+ stacklevel=2,
+ )
self.is_leaf = not string.endswith("/")
self.map = None
self.strict_slashes = strict_slashes
+ self.merge_slashes = merge_slashes
self.subdomain = subdomain
self.host = host
self.defaults = defaults
@@ -726,6 +748,8 @@ def bind(self, map, rebind=False):
self.map = map
if self.strict_slashes is None:
self.strict_slashes = map.strict_slashes
+ if self.merge_slashes is None:
+ self.merge_slashes = map.merge_slashes
if self.subdomain is None:
self.subdomain = map.default_subdomain
self.compile()
@@ -817,6 +841,12 @@ def match(self, path, method=None):
:internal:
"""
if not self.build_only:
+ require_redirect = False
+
+ if self.merge_slashes and "//" in path:
+ path = re.sub(r"//+", "/", path)
+ require_redirect = True
+
m = self._regex.search(path)
if m is not None:
groups = m.groupdict()
@@ -832,7 +862,8 @@ def match(self, path, method=None):
method is None or self.methods is None or method in self.methods
)
):
- raise RequestSlash()
+ path += "/"
+ require_redirect = True
# if we are not in strict slashes mode we have to remove
# a __suffix__
elif not self.strict_slashes:
@@ -848,6 +879,10 @@ def match(self, path, method=None):
if self.defaults:
result.update(self.defaults)
+ if require_redirect:
+ path = path.split("|", 1)[1]
+ raise RequestPath(path)
+
if self.alias and self.map.redirect_defaults:
raise RequestAliasRedirect(result)
@@ -1300,6 +1335,7 @@ class Map(object):
subdomain defined.
:param charset: charset of the url. defaults to ``"utf-8"``
:param strict_slashes: Take care of trailing slashes.
+ :param merge_slashes: Take care of repeated slashes.
:param redirect_defaults: This will redirect to the default rule if it
wasn't visited that way. This helps creating
unique URLs.
@@ -1320,6 +1356,9 @@ class Map(object):
.. versionadded:: 0.7
`encoding_errors` and `host_matching` was added.
+
+ .. versionadded:: 1.0.0
+ Added ``merge_slashes``.
"""
#: A dict of default converters to be used.
@@ -1331,6 +1370,7 @@ def __init__(
default_subdomain="",
charset="utf-8",
strict_slashes=True,
+ merge_slashes=True,
redirect_defaults=True,
converters=None,
sort_parameters=False,
@@ -1347,6 +1387,7 @@ def __init__(
self.charset = charset
self.encoding_errors = encoding_errors
self.strict_slashes = strict_slashes
+ self.merge_slashes = merge_slashes
self.redirect_defaults = redirect_defaults
self.host_matching = host_matching
@@ -1752,6 +1793,8 @@ def match(self, path_info=None, method=None, return_rule=False, query_args=None)
query_args = self.query_args
method = (method or self.default_method).upper()
+ require_redirect = False
+
path = u"%s|%s" % (
self.map.host_matching and self.server_name or self.subdomain,
path_info and "/%s" % path_info.lstrip("/"),
@@ -1761,10 +1804,10 @@ def match(self, path_info=None, method=None, return_rule=False, query_args=None)
for rule in self.map._rules:
try:
rv = rule.match(path, method)
- except RequestSlash:
+ except RequestPath as e:
raise RequestRedirect(
self.make_redirect_url(
- url_quote(path_info, self.map.charset, safe="/:|+") + "/",
+ url_quote(e.path_info, self.map.charset, safe="/:|+"),
query_args,
)
)
@@ -1810,6 +1853,13 @@ def _handle_match(match):
)
)
+ if require_redirect:
+ raise RequestRedirect(
+ self.make_redirect_url(
+ url_quote(path_info, self.map.charset, safe="/:|+"), query_args
+ )
+ )
+
if return_rule:
return rule, rv
else:
diff --git a/tests/test_routing.py b/tests/test_routing.py
index a7172e4a9..047422119 100644
--- a/tests/test_routing.py
+++ b/tests/test_routing.py
@@ -62,6 +62,36 @@ def test_basic_routing():
assert excinfo.value.new_url == "http://example.org/bar/?foo=bar"
+def test_multi_slash():
+ map = r.Map(
+ [
+ r.Rule("/frob/zarf", endpoint="blorwoop"),
+ r.Rule("/bleeg/bloog/", endpoint="bluff"),
+ r.Rule("/quux/", endpoint="zoop"),
+ ]
+ )
+ adapter = map.bind("localhost", "/")
+ with pytest.raises(r.RequestRedirect) as excinfo:
+ adapter.match("/frob//zarf")
+ assert excinfo.value.new_url.endswith("/frob/zarf")
+
+ with pytest.raises(r.RequestRedirect) as excinfo:
+ adapter.match("/bleeg//bloog")
+ assert excinfo.value.new_url.endswith("/bleeg/bloog/")
+
+ # test some negatives too
+ adapter.match("/frob/zarf")
+ adapter.match("/bleeg/bloog/")
+
+ ep, rv = adapter.match("/quux/http://splud/")
+ assert rv["slub"] == "http://splud/"
+
+ with pytest.warns(r.InvalidURLWarning):
+ map = r.Map([r.Rule("/frob//zarf", endpoint="blorwoop")])
+ adapter = map.bind("localhost", "/")
+ assert adapter.build("blorwoop") == "/frob/zarf"
+
+
def test_strict_slashes_redirect():
map = r.Map(
[
From 0d193937c814032bbdee193fc6e80e08faf253ec Mon Sep 17 00:00:00 2001
From: Ed Kellett
Date: Tue, 24 Apr 2018 12:44:29 +0100
Subject: [PATCH 098/733] be more selective about merging slashes
* don't merge slashes immediately after ':'
* don't merge slashes on construction
We don't know whether the Map wants us to yet, so there's no ideal
place to do it at all. The associated warning and its corresponding
test are removed. merge_slashes is now enforced at `Rule.compile`
time for building as well as parsing.
* merge slashes only in static parts
---
CHANGES.rst | 3 +++
src/werkzeug/routing.py | 39 +++++++++++++++++++++------------------
tests/test_routing.py | 5 +++--
3 files changed, 27 insertions(+), 20 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 31404332c..c5bd221d5 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -78,6 +78,9 @@ Unreleased
- The ``429 TooManyRequests`` and ``503 ServiceUnavailable`` HTTP
exceptions takes a ``retry_after`` parameter to set the
``Retry-After`` header. :issue:`1657`
+- ``Map`` and ``Rule`` have a ``merge_slashes`` option to collapse
+ multiple slashes into one, similar to how many HTTP servers behave.
+ :pr:`1286`
Version 0.16.0
diff --git a/src/werkzeug/routing.py b/src/werkzeug/routing.py
index 82decb144..e31c47def 100644
--- a/src/werkzeug/routing.py
+++ b/src/werkzeug/routing.py
@@ -161,10 +161,6 @@
)
-class InvalidURLWarning(Warning):
- pass
-
-
_PYTHON_CONSTANTS = {"None": None, "True": True, "False": False}
@@ -655,13 +651,7 @@ def __init__(
):
if not string.startswith("/"):
raise ValueError("urls must start with a leading slash")
- self.rule = re.sub(r"//+", "/", string)
- if self.rule != string:
- warnings.warn(
- "Consecutive '/' separators will be stripped from URL: %r" % string,
- InvalidURLWarning,
- stacklevel=2,
- )
+ self.rule = string
self.is_leaf = not string.endswith("/")
self.map = None
@@ -790,9 +780,18 @@ def _build_regex(rule):
index = 0
for converter, arguments, variable in parse_rule(rule):
if converter is None:
- regex_parts.append(re.escape(variable))
- self._trace.append((False, variable))
- for part in variable.split("/"):
+ for match in re.finditer(r"/+|[^/]+", variable):
+ part = match.group(0)
+ if part.startswith("/"):
+ if self.merge_slashes:
+ regex_parts.append(r"/+?")
+ self._trace.append((False, "/"))
+ else:
+ regex.parts.append(part)
+ self._trace.append((False, part))
+ continue
+ self._trace.append((False, part))
+ regex_parts.append(re.escape(part))
if part:
self._static_weights.append((index, -len(part)))
else:
@@ -843,10 +842,6 @@ def match(self, path, method=None):
if not self.build_only:
require_redirect = False
- if self.merge_slashes and "//" in path:
- path = re.sub(r"//+", "/", path)
- require_redirect = True
-
m = self._regex.search(path)
if m is not None:
groups = m.groupdict()
@@ -879,6 +874,14 @@ def match(self, path, method=None):
if self.defaults:
result.update(self.defaults)
+ if self.merge_slashes:
+ new_path = "|".join(self.build(result, False))
+ if path.endswith("/") and not new_path.endswith("/"):
+ new_path += "/"
+ if new_path.count("/") < path.count("/"):
+ path = new_path
+ require_redirect = True
+
if require_redirect:
path = path.split("|", 1)[1]
raise RequestPath(path)
diff --git a/tests/test_routing.py b/tests/test_routing.py
index 047422119..38ea83123 100644
--- a/tests/test_routing.py
+++ b/tests/test_routing.py
@@ -85,9 +85,10 @@ def test_multi_slash():
ep, rv = adapter.match("/quux/http://splud/")
assert rv["slub"] == "http://splud/"
+ ep, rv = adapter.match("/quux/x//splud/")
+ assert rv["slub"] == "x//splud/"
- with pytest.warns(r.InvalidURLWarning):
- map = r.Map([r.Rule("/frob//zarf", endpoint="blorwoop")])
+ map = r.Map([r.Rule("/frob//zarf", endpoint="blorwoop")])
adapter = map.bind("localhost", "/")
assert adapter.build("blorwoop") == "/frob/zarf"
From c7041890c969eb1c959b24af5ac292c626cf7d3b Mon Sep 17 00:00:00 2001
From: David Lord
Date: Sun, 5 Jan 2020 09:15:22 -0800
Subject: [PATCH 099/733] add tests for merge_slashes=False
fix error in merge_slashes=False code
rename paths in test for clarity
---
src/werkzeug/routing.py | 2 +-
tests/test_routing.py | 53 +++++++++++++++++++++++++----------------
2 files changed, 34 insertions(+), 21 deletions(-)
diff --git a/src/werkzeug/routing.py b/src/werkzeug/routing.py
index e31c47def..230e01010 100644
--- a/src/werkzeug/routing.py
+++ b/src/werkzeug/routing.py
@@ -787,7 +787,7 @@ def _build_regex(rule):
regex_parts.append(r"/+?")
self._trace.append((False, "/"))
else:
- regex.parts.append(part)
+ regex_parts.append(part)
self._trace.append((False, part))
continue
self._trace.append((False, part))
diff --git a/tests/test_routing.py b/tests/test_routing.py
index 38ea83123..77952778a 100644
--- a/tests/test_routing.py
+++ b/tests/test_routing.py
@@ -62,35 +62,48 @@ def test_basic_routing():
assert excinfo.value.new_url == "http://example.org/bar/?foo=bar"
-def test_multi_slash():
- map = r.Map(
+def test_merge_slashes_match():
+ url_map = r.Map(
[
- r.Rule("/frob/zarf", endpoint="blorwoop"),
- r.Rule("/bleeg/bloog/", endpoint="bluff"),
- r.Rule("/quux/", endpoint="zoop"),
+ r.Rule("/no/tail", endpoint="no_tail"),
+ r.Rule("/yes/tail/", endpoint="yes_tail"),
+ r.Rule("/with/", endpoint="with_path"),
+ r.Rule("/no//merge", endpoint="no_merge", merge_slashes=False),
]
)
- adapter = map.bind("localhost", "/")
+ adapter = url_map.bind("localhost", "/")
+
with pytest.raises(r.RequestRedirect) as excinfo:
- adapter.match("/frob//zarf")
- assert excinfo.value.new_url.endswith("/frob/zarf")
+ adapter.match("/no//tail")
+
+ assert excinfo.value.new_url.endswith("/no/tail")
with pytest.raises(r.RequestRedirect) as excinfo:
- adapter.match("/bleeg//bloog")
- assert excinfo.value.new_url.endswith("/bleeg/bloog/")
+ adapter.match("/yes//tail")
- # test some negatives too
- adapter.match("/frob/zarf")
- adapter.match("/bleeg/bloog/")
+ assert excinfo.value.new_url.endswith("/yes/tail/")
- ep, rv = adapter.match("/quux/http://splud/")
- assert rv["slub"] == "http://splud/"
- ep, rv = adapter.match("/quux/x//splud/")
- assert rv["slub"] == "x//splud/"
+ assert adapter.match("/no/tail")[0] == "no_tail"
+ assert adapter.match("/yes/tail/")[0] == "yes_tail"
- map = r.Map([r.Rule("/frob//zarf", endpoint="blorwoop")])
- adapter = map.bind("localhost", "/")
- assert adapter.build("blorwoop") == "/frob/zarf"
+ _, rv = adapter.match("/with/http://example.com/")
+ assert rv["path"] == "http://example.com/"
+ _, rv = adapter.match("/with/x//y")
+ assert rv["path"] == "x//y"
+
+ assert adapter.match("/no//merge")[0] == "no_merge"
+
+
+def test_merge_slashes_build():
+ url_map = r.Map(
+ [
+ r.Rule("/yes//merge", endpoint="yes_merge"),
+ r.Rule("/no//merge", endpoint="no_merge", merge_slashes=False),
+ ]
+ )
+ adapter = url_map.bind("localhost", "/")
+ assert adapter.build("yes_merge") == "/yes/merge"
+ assert adapter.build("no_merge") == "/no//merge"
def test_strict_slashes_redirect():
From 64b194259bc52fc4ddbacfe5a45a9a168a2e47b8 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Sun, 5 Jan 2020 09:40:04 -0800
Subject: [PATCH 100/733] docs for merge_slashes
---
CHANGES.rst | 2 +-
docs/conf.py | 1 +
docs/requirements.txt | 1 +
docs/routing.rst | 42 +++++++++++++++++++++++------------------
src/werkzeug/routing.py | 23 ++++++++++++----------
5 files changed, 40 insertions(+), 29 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index c5bd221d5..a71ce7632 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -80,7 +80,7 @@ Unreleased
``Retry-After`` header. :issue:`1657`
- ``Map`` and ``Rule`` have a ``merge_slashes`` option to collapse
multiple slashes into one, similar to how many HTTP servers behave.
- :pr:`1286`
+ This is enabled by default. :pr:`1286`
Version 0.16.0
diff --git a/docs/conf.py b/docs/conf.py
index a053617f8..53e6a1ef5 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -16,6 +16,7 @@
"sphinx.ext.intersphinx",
"pallets_sphinx_themes",
"sphinx_issues",
+ "sphinxcontrib.log_cabinet",
]
intersphinx_mapping = {"python": ("https://docs.python.org/3/", None)}
issues_github_path = "pallets/werkzeug"
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 5d106d6c9..cd135e3d5 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1,3 +1,4 @@
Sphinx~=1.8.3
Pallets-Sphinx-Themes~=1.1.2
sphinx-issues~=1.2.0
+sphinxcontrib-log-cabinet~=1.0.1
diff --git a/docs/routing.rst b/docs/routing.rst
index f5a768cc1..68f74cc1c 100644
--- a/docs/routing.rst
+++ b/docs/routing.rst
@@ -70,24 +70,30 @@ exceptions have a look at the documentation of the :meth:`MapAdapter.match` meth
Rule Format
===========
-Rule strings basically are just normal URL paths with placeholders in the
-format ````, where converter and the arguments
-are optional. If no converter is defined, the `default` converter is used
-(which means `string` in the normal configuration).
-
-URL rules that end with a slash are branch URLs, others are leaves. If you
-have `strict_slashes` enabled (which is the default), all branch URLs that are
-visited without a trailing slash will trigger a redirect to the same URL with
-that slash appended.
-
-The list of converters can be extended, the default converters are explained
-below.
-
-
-Builtin Converters
-==================
-
-Here a list of converters that come with Werkzeug:
+Rule strings are URL paths with placeholders for variable parts in the
+format ````. ``converter`` and ``arguments``
+(with parentheses) are optional. If no converter is given, the
+``default`` converter is used (``string`` by default). The available
+converters are discussed below.
+
+Rules that end with a slash are "branches", others are "leaves". If
+``strict_slashes`` is enabled (the default), visiting a branch URL
+without a trailing slash will redirect to the URL with a slash appended.
+
+Many HTTP servers merge consecutive slashes into one when receiving
+requests. If ``merge_slashes`` is enabled (the default), rules will
+merge slashes in non-variable parts when matching and building. Visiting
+a URL with consecutive slashes will redirect to the URL with slashes
+merged. If you want to disable ``merge_slashes`` for a :class:`Rule` or
+:class:`Map`, you'll also need to configure your web server
+appropriately.
+
+
+Built-in Converters
+===================
+
+Converters for common types of URL variables are built-in. The available
+converters can be overridden or extended through :attr:`Map.converters`.
.. autoclass:: UnicodeConverter
diff --git a/src/werkzeug/routing.py b/src/werkzeug/routing.py
index 230e01010..5b5cbfbbb 100644
--- a/src/werkzeug/routing.py
+++ b/src/werkzeug/routing.py
@@ -588,9 +588,9 @@ class Rule(RuleFactory):
not specified the `Map` setting is used.
`merge_slashes`
- Override the `Map` setting for `merge_slashes` for this rule.
+ Override the ``Map`` setting for ``merge_slashes`` for this rule.
- .. versionadded:: 0.15
+ .. versionadded:: 1.0
`build_only`
Set this to True and the rule will never match but will create a URL
@@ -1337,8 +1337,11 @@ class Map(object):
:param default_subdomain: The default subdomain for rules without a
subdomain defined.
:param charset: charset of the url. defaults to ``"utf-8"``
- :param strict_slashes: Take care of trailing slashes.
- :param merge_slashes: Take care of repeated slashes.
+ :param strict_slashes: If a rule ends with a slash but the matched
+ URL does not, redirect to the URL with a trailing slash.
+ :param merge_slashes: Merge consecutive slashes when matching or
+ building URLs. Matches will redirect to the normalized URL.
+ Slashes in variable parts are not merged.
:param redirect_defaults: This will redirect to the default rule if it
wasn't visited that way. This helps creating
unique URLs.
@@ -1354,14 +1357,14 @@ class Map(object):
enabled the `host` parameter to rules is used
instead of the `subdomain` one.
- .. versionadded:: 0.5
- `sort_parameters` and `sort_key` was added.
+ .. versionchanged:: 1.0
+ Added ``merge_slashes``.
- .. versionadded:: 0.7
- `encoding_errors` and `host_matching` was added.
+ .. versionchanged:: 0.7
+ Added ``encoding_errors`` and ``host_matching``.
- .. versionadded:: 1.0.0
- Added ``merge_slashes``.
+ .. versionchanged:: 0.5
+ Added ``sort_parameters`` and ``sort_key``.
"""
#: A dict of default converters to be used.
From 2d4c4c703d989fdc4024943cf2b5507bf5031b85 Mon Sep 17 00:00:00 2001
From: Alejandro de Haro
Date: Tue, 3 Dec 2019 11:44:44 +0100
Subject: [PATCH 101/733] add some RFC HTTP status codes
---
CHANGES.rst | 2 ++
src/werkzeug/http.py | 7 +++++++
2 files changed, 9 insertions(+)
diff --git a/CHANGES.rst b/CHANGES.rst
index 1adf52877..1063744b1 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -81,6 +81,8 @@ Unreleased
- ``Map`` and ``Rule`` have a ``merge_slashes`` option to collapse
multiple slashes into one, similar to how many HTTP servers behave.
This is enabled by default. :pr:`1286`
+- Add HTTP 103, 208, 306, 425, 506, 508, and 511 to the list of status
+ codes. :pr:`1678`
Version 0.16.1
diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py
index 56dbb0fe5..b428ceeb2 100644
--- a/src/werkzeug/http.py
+++ b/src/werkzeug/http.py
@@ -140,6 +140,7 @@
100: "Continue",
101: "Switching Protocols",
102: "Processing",
+ 103: "Early Hints", # see RFC 8297
200: "OK",
201: "Created",
202: "Accepted",
@@ -148,6 +149,7 @@
205: "Reset Content",
206: "Partial Content",
207: "Multi Status",
+ 208: "Already Reported", # see RFC 5842
226: "IM Used", # see RFC 3229
300: "Multiple Choices",
301: "Moved Permanently",
@@ -155,6 +157,7 @@
303: "See Other",
304: "Not Modified",
305: "Use Proxy",
+ 306: "Switch Proxy", # unused
307: "Temporary Redirect",
308: "Permanent Redirect",
400: "Bad Request",
@@ -180,6 +183,7 @@
422: "Unprocessable Entity",
423: "Locked",
424: "Failed Dependency",
+ 425: "Too Early", # see RFC 8470
426: "Upgrade Required",
428: "Precondition Required", # see RFC 6585
429: "Too Many Requests",
@@ -192,8 +196,11 @@
503: "Service Unavailable",
504: "Gateway Timeout",
505: "HTTP Version Not Supported",
+ 506: "Variant Also Negotiates", # see RFC 2295
507: "Insufficient Storage",
+ 508: "Loop Detected", # see RFC 5842
510: "Not Extended",
+ 511: "Network Authentication Failed", # see RFC 6585
}
From baa7bdc19bdbf2db66d0f01ffdf98c4ab5a178ab Mon Sep 17 00:00:00 2001
From: pgjones
Date: Sun, 29 Dec 2019 11:17:54 +0000
Subject: [PATCH 102/733] add an update method to the Headers data structure
This allows a typical dict action (namely a.update(b)) to be possible
with Headers instances. Notably I think the update action should
emulate that of standard python dictionaries and overwrite rather than
extend (the multi keys). This is the opposite to how the Werkzeug
MultiDict works.
---
CHANGES.rst | 2 ++
src/werkzeug/datastructures.py | 20 ++++++++++++++++++++
tests/test_datastructures.py | 11 +++++++++++
3 files changed, 33 insertions(+)
diff --git a/CHANGES.rst b/CHANGES.rst
index 1063744b1..3ca861ce0 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -83,6 +83,8 @@ Unreleased
This is enabled by default. :pr:`1286`
- Add HTTP 103, 208, 306, 425, 506, 508, and 511 to the list of status
codes. :pr:`1678`
+- Add an ``update`` method to the ``Headers`` data structure.
+ :pr:`1687`
Version 0.16.1
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index e194dd732..9695c7716 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -1265,6 +1265,26 @@ def __setitem__(self, key, value):
else:
self.set(key, value)
+ def update(self, *args, **kwargs):
+ """Update the headers with the key/value pairs from another
+ headers object and keyword arguments.
+
+ If provided, the first argument can be another :class:`Headers`
+ object, a :class:`MultiDict`, :class:`dict`, or iterable of
+ pairs.
+
+ .. versionadded:: 1.0
+ """
+ if len(args) > 1:
+ raise TypeError("update expected at most 1 arguments, got %d" % len(args))
+
+ if args:
+ for key, value in iter_multi_items(args[0]):
+ self[key] = value
+
+ for key, value in iter_multi_items(kwargs):
+ self[key] = value
+
def to_wsgi_list(self):
"""Convert the headers into a list suitable for WSGI.
diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py
index 8ff556a58..33ea5301c 100644
--- a/tests/test_datastructures.py
+++ b/tests/test_datastructures.py
@@ -780,6 +780,17 @@ def test_bytes_operations(self):
assert h.get("x-whoops", as_bytes=True) == b"\xff"
assert h.get("x-bytes") == "something"
+ def test_update(self):
+ h = self.storage_class()
+ h["x"] = "1"
+ h.update({"x": "2", "y": "1"})
+ assert h.getlist("x") == ["2"]
+ assert h.getlist("y") == ["1"]
+ h.update(z="2")
+ assert h.getlist("z") == ["2"]
+ h.update(self.storage_class([("a", "b")]))
+ assert h["a"] == "b"
+
def test_to_wsgi_list(self):
h = self.storage_class()
h.set(u"Key", u"Value")
From f4f183dfb339e2d8d0cbf55a208d64e89c6b9570 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Tue, 7 Jan 2020 08:51:56 -0800
Subject: [PATCH 103/733] merge slashes at the end of a URL
---
CHANGES.rst | 2 +-
src/werkzeug/routing.py | 14 ++++++++------
tests/test_routing.py | 5 +++++
3 files changed, 14 insertions(+), 7 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 3ca861ce0..c0e252fe7 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -80,7 +80,7 @@ Unreleased
``Retry-After`` header. :issue:`1657`
- ``Map`` and ``Rule`` have a ``merge_slashes`` option to collapse
multiple slashes into one, similar to how many HTTP servers behave.
- This is enabled by default. :pr:`1286`
+ This is enabled by default. :pr:`1286, 1694`
- Add HTTP 103, 208, 306, 425, 506, 508, and 511 to the list of status
codes. :pr:`1678`
- Add an ``update`` method to the ``Headers`` data structure.
diff --git a/src/werkzeug/routing.py b/src/werkzeug/routing.py
index 5b5cbfbbb..aa07892cb 100644
--- a/src/werkzeug/routing.py
+++ b/src/werkzeug/routing.py
@@ -820,12 +820,14 @@ def _build_regex(rule):
if self.build_only:
return
- regex = r"^%s%s$" % (
- u"".join(regex_parts),
- (not self.is_leaf or not self.strict_slashes)
- and "(?/?)"
- or "",
- )
+
+ if not (self.is_leaf and self.strict_slashes):
+ reps = u"*" if self.merge_slashes else u"?"
+ tail = u"(?/%s)" % reps
+ else:
+ tail = u""
+
+ regex = u"^%s%s$" % (u"".join(regex_parts), tail)
self._regex = re.compile(regex, re.UNICODE)
def match(self, path, method=None):
diff --git a/tests/test_routing.py b/tests/test_routing.py
index 77952778a..65d3ae8b0 100644
--- a/tests/test_routing.py
+++ b/tests/test_routing.py
@@ -83,6 +83,11 @@ def test_merge_slashes_match():
assert excinfo.value.new_url.endswith("/yes/tail/")
+ with pytest.raises(r.RequestRedirect) as excinfo:
+ adapter.match("/yes/tail//")
+
+ assert excinfo.value.new_url.endswith("/yes/tail/")
+
assert adapter.match("/no/tail")[0] == "no_tail"
assert adapter.match("/yes/tail/")[0] == "yes_tail"
From ba0ec96554c69d9afd8414b25524fbb0a26c5f85 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Tue, 7 Jan 2020 12:42:44 -0800
Subject: [PATCH 104/733] dev server accepts two slashes at start of path
---
CHANGES.rst | 2 ++
src/werkzeug/serving.py | 13 ++++++++++++-
2 files changed, 14 insertions(+), 1 deletion(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index c0e252fe7..7e87dad1f 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -85,6 +85,8 @@ Unreleased
codes. :pr:`1678`
- Add an ``update`` method to the ``Headers`` data structure.
:pr:`1687`
+- The development server accepts paths that start with two slashes,
+ rather than stripping off the first path segment. :issue:`491`
Version 0.16.1
diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py
index 4979c3ef8..f2a0dc95e 100644
--- a/src/werkzeug/serving.py
+++ b/src/werkzeug/serving.py
@@ -183,7 +183,16 @@ def shutdown_server():
self.client_address = (self.client_address, 0)
else:
pass
- path_info = url_unquote(request_url.path)
+
+ # If there was no scheme but the path started with two slashes,
+ # the first segment may have been incorrectly parsed as the
+ # netloc, prepend it to the path again.
+ if not request_url.scheme and request_url.netloc:
+ path_info = "/%s%s" % (request_url.netloc, request_url.path)
+ else:
+ path_info = request_url.path
+
+ path_info = url_unquote(path_info)
environ = {
"wsgi.version": (1, 0),
@@ -223,6 +232,8 @@ def shutdown_server():
environ["wsgi.input_terminated"] = True
environ["wsgi.input"] = DechunkedInput(environ["wsgi.input"])
+ # Per RFC 2616, if the URL is absolute, use that as the host.
+ # We're using "has a scheme" to indicate an absolute URL.
if request_url.scheme and request_url.netloc:
environ["HTTP_HOST"] = request_url.netloc
From 6eb110acf3616af20fcad2d2871e43f443c51426 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Sat, 11 Jan 2020 09:32:53 -0800
Subject: [PATCH 105/733] update flake8 noqa
---
setup.cfg | 1 +
src/werkzeug/formparser.py | 2 +-
2 files changed, 2 insertions(+), 1 deletion(-)
diff --git a/setup.cfg b/setup.cfg
index 02e0db220..9381383dc 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -53,3 +53,4 @@ per-file-ignores =
**/__init__.py: F401
# LocalProxy assigns lambdas
src/werkzeug/local.py: E731
+ src/werkzeug/contrib/*.py: B014
diff --git a/src/werkzeug/formparser.py b/src/werkzeug/formparser.py
index 02ae2ce5e..ffdb9b0f1 100644
--- a/src/werkzeug/formparser.py
+++ b/src/werkzeug/formparser.py
@@ -137,7 +137,7 @@ def wrapper(self, stream, *args, **kwargs):
while 1:
chunk = stream.read(1024 * 64)
if not chunk:
- break # noqa: B012
+ break
return update_wrapper(wrapper, f)
From 3c4783bb833818956f9d1558af6c177f0d2f3236 Mon Sep 17 00:00:00 2001
From: pgjones
Date: Wed, 8 Jan 2020 14:33:22 +0000
Subject: [PATCH 106/733] Add a setlist method to the Headers datastructure
The update method added in baa7bdc19bdbf2db66d0f01ffdf98c4ab5a178ab is
meant to replace headers with the mapping passed in. If a MultDict or
Headers object is passed in it would replace headers with the final
iterated value, rather than all the values iterated over. This could
lead to unexpected results, therefore this corrects the functionality
to what I think is expected.
Consider,
h1 = Headers()
h1.add("X-Multi", "value")
h2 = Headers()
h2.add("X-Multi", "newValue")
h2.add("X-Multi", "alternativeValue")
h1.update(h2)
previously `h1.getlist("X-Multi")` would likely equal
`["alternativeValue"]` whereas now it equals `["newValue",
"alternativeValue"]` which is as you'd expect.
---
src/werkzeug/datastructures.py | 32 ++++++++++++++++++++++++++++----
tests/test_datastructures.py | 7 +++++--
2 files changed, 33 insertions(+), 6 deletions(-)
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index 9695c7716..398eeb736 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -1235,6 +1235,19 @@ def set(self, _key, _value, **kw):
return
self._list[idx + 1 :] = [t for t in listiter if t[0].lower() != ikey]
+ def setlist(self, key, values):
+ """Set multiple header values at once.
+
+ The `values` argument should be iterable. This will replace
+ any existing values for the key with the values passed. It is
+ the inverse of the getlist method.
+
+ .. versionadded:: 1.0
+ """
+ self.set(key, values[0])
+ for value in values[1:]:
+ self.add(key, value)
+
def setdefault(self, key, default):
"""Returns the value for the key if it is in the dict, otherwise it
returns `default` and sets that value for `key`.
@@ -1279,10 +1292,21 @@ def update(self, *args, **kwargs):
raise TypeError("update expected at most 1 arguments, got %d" % len(args))
if args:
- for key, value in iter_multi_items(args[0]):
- self[key] = value
-
- for key, value in iter_multi_items(kwargs):
+ mapping = args[0]
+
+ if isinstance(mapping, (Headers, MultiDict)):
+ for key in iterkeys(mapping):
+ self.setlist(key, mapping.getlist(key))
+ elif isinstance(mapping, dict):
+ for key, value in iteritems(mapping):
+ if isinstance(value, (tuple, list)):
+ self.setlist(key, value)
+ else:
+ self[key] = value
+ else:
+ for item in mapping:
+ self[key] = item
+ for key, value in iteritems(kwargs):
self[key] = value
def to_wsgi_list(self):
diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py
index 33ea5301c..1209607d8 100644
--- a/tests/test_datastructures.py
+++ b/tests/test_datastructures.py
@@ -788,8 +788,11 @@ def test_update(self):
assert h.getlist("y") == ["1"]
h.update(z="2")
assert h.getlist("z") == ["2"]
- h.update(self.storage_class([("a", "b")]))
- assert h["a"] == "b"
+ h2 = self.storage_class([("a", "b")])
+ h2.add("a", "c")
+ h.update(h2, d="e")
+ assert h.getlist("a") == ["b", "c"]
+ assert h["d"] == "e"
def test_to_wsgi_list(self):
h = self.storage_class()
From 7d8b779fc3fffe60532934642d8fc7c36257dfc8 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Sun, 12 Jan 2020 13:33:56 -0800
Subject: [PATCH 107/733] improve Headers setlist, update, and related
setlist removes key if values is empty
setlist doesn't copy the values for iteration
update uses set instead of `__setitem__`
update passes list values in kwargs to setlist
add setlistdefault method
extend takes kwargs, supports MultiDict
add methods to ImmutableHeadersMixin
add tests
---
CHANGES.rst | 5 +-
src/werkzeug/datastructures.py | 123 ++++++++++++++++++++++++---------
tests/test_datastructures.py | 53 ++++++++++----
3 files changed, 133 insertions(+), 48 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 7e87dad1f..e66354b60 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -83,8 +83,9 @@ Unreleased
This is enabled by default. :pr:`1286, 1694`
- Add HTTP 103, 208, 306, 425, 506, 508, and 511 to the list of status
codes. :pr:`1678`
-- Add an ``update`` method to the ``Headers`` data structure.
- :pr:`1687`
+- Add ``update``, ``setlist``, and ``setlistdefault`` methods to the
+ ``Headers`` data structure. ``extend`` method can take ``MultiDict``
+ and kwargs. :pr:`1687, 1697`
- The development server accepts paths that start with two slashes,
rather than stripping off the first path segment. :issue:`491`
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index 398eeb736..fdff4c2d4 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -1080,21 +1080,30 @@ def values(self):
for _, value in iteritems(self):
yield value
- def extend(self, iterable):
- """Extend the headers with a dict or an iterable yielding keys and
- values.
+ def extend(self, *args, **kwargs):
+ """Extend headers in this object with items from another object
+ containing header items as well as keyword arguments.
+
+ To replace existing keys instead of extending, use
+ :meth:`update` instead.
+
+ If provided, the first argument can be another :class:`Headers`
+ object, a :class:`MultiDict`, :class:`dict`, or iterable of
+ pairs.
+
+ .. versionchanged:: 1.0
+ Support :class:`MultiDict`. Allow passing ``kwargs``.
"""
- if isinstance(iterable, dict):
- for key, value in iteritems(iterable):
- if isinstance(value, (tuple, list)):
- for v in value:
- self.add(key, v)
- else:
- self.add(key, value)
- else:
- for key, value in iterable:
+ if len(args) > 1:
+ raise TypeError("update expected at most 1 arguments, got %d" % len(args))
+
+ if args:
+ for key, value in iter_multi_items(args[0]):
self.add(key, value)
+ for key, value in iter_multi_items(kwargs):
+ self.add(key, value)
+
def __delitem__(self, key, _index_operation=True):
if _index_operation and isinstance(key, (integer_types, slice)):
del self._list[key]
@@ -1236,31 +1245,56 @@ def set(self, _key, _value, **kw):
self._list[idx + 1 :] = [t for t in listiter if t[0].lower() != ikey]
def setlist(self, key, values):
- """Set multiple header values at once.
+ """Remove any existing values for a header and add new ones.
- The `values` argument should be iterable. This will replace
- any existing values for the key with the values passed. It is
- the inverse of the getlist method.
+ :param key: The header key to set.
+ :param values: An iterable of values to set for the key.
.. versionadded:: 1.0
"""
- self.set(key, values[0])
- for value in values[1:]:
- self.add(key, value)
+ if values:
+ values_iter = iter(values)
+ self.set(key, next(values_iter))
+
+ for value in values_iter:
+ self.add(key, value)
+ else:
+ self.remove(key)
def setdefault(self, key, default):
- """Returns the value for the key if it is in the dict, otherwise it
- returns `default` and sets that value for `key`.
+ """Return the first value for the key if it is in the headers,
+ otherwise set the header to the value given by ``default`` and
+ return that.
- :param key: The key to be looked up.
- :param default: The default value to be returned if the key is not
- in the dict. If not further specified it's `None`.
+ :param key: The header key to get.
+ :param default: The value to set for the key if it is not in the
+ headers.
"""
if key in self:
return self[key]
+
self.set(key, default)
return default
+ def setlistdefault(self, key, default):
+ """Return the list of values for the key if it is in the
+ headers, otherwise set the header to the list of values given
+ by ``default`` and return that.
+
+ Unlike :meth:`MultiDict.setlistdefault`, modifying the returned
+ list will not affect the headers.
+
+ :param key: The header key to get.
+ :param default: An iterable of values to set for the key if it
+ is not in the headers.
+
+ .. versionadded:: 1.0
+ """
+ if key not in self:
+ self.setlist(key, default)
+
+ return self.getlist(key)
+
def __setitem__(self, key, value):
"""Like :meth:`set` but also supports index/slice based setting."""
if isinstance(key, (slice, integer_types)):
@@ -1279,9 +1313,12 @@ def __setitem__(self, key, value):
self.set(key, value)
def update(self, *args, **kwargs):
- """Update the headers with the key/value pairs from another
+ """Replace headers in this object with items from another
headers object and keyword arguments.
+ To extend existing keys instead of replacing, use :meth:`extend`
+ instead.
+
If provided, the first argument can be another :class:`Headers`
object, a :class:`MultiDict`, :class:`dict`, or iterable of
pairs.
@@ -1295,19 +1332,23 @@ def update(self, *args, **kwargs):
mapping = args[0]
if isinstance(mapping, (Headers, MultiDict)):
- for key in iterkeys(mapping):
+ for key in mapping.keys():
self.setlist(key, mapping.getlist(key))
elif isinstance(mapping, dict):
for key, value in iteritems(mapping):
- if isinstance(value, (tuple, list)):
+ if isinstance(value, (list, tuple)):
self.setlist(key, value)
else:
- self[key] = value
+ self.set(key, value)
else:
- for item in mapping:
- self[key] = item
+ for key, value in mapping:
+ self.set(key, value)
+
for key, value in iteritems(kwargs):
- self[key] = value
+ if isinstance(value, (list, tuple)):
+ self.setlist(key, value)
+ else:
+ self.set(key, value)
def to_wsgi_list(self):
"""Convert the headers into a list suitable for WSGI.
@@ -1355,14 +1396,25 @@ def __delitem__(self, key, **kwargs):
def __setitem__(self, key, value):
is_immutable(self)
- set = __setitem__
+ def set(self, key, value):
+ is_immutable(self)
+
+ def setlist(self, key, value):
+ is_immutable(self)
def add(self, item):
is_immutable(self)
- remove = add_header = add
+ def add_header(self, item):
+ is_immutable(self)
- def extend(self, iterable):
+ def remove(self, item):
+ is_immutable(self)
+
+ def extend(self, *args, **kwargs):
+ is_immutable(self)
+
+ def update(self, *args, **kwargs):
is_immutable(self)
def insert(self, pos, value):
@@ -1377,6 +1429,9 @@ def popitem(self):
def setdefault(self, key, default):
is_immutable(self)
+ def setlistdefault(self, key, default):
+ is_immutable(self)
+
class EnvironHeaders(ImmutableHeadersMixin, Headers):
"""Read only version of the headers from a WSGI environment. This
diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py
index 1209607d8..97ac1e287 100644
--- a/tests/test_datastructures.py
+++ b/tests/test_datastructures.py
@@ -780,19 +780,48 @@ def test_bytes_operations(self):
assert h.get("x-whoops", as_bytes=True) == b"\xff"
assert h.get("x-bytes") == "something"
+ def test_extend(self):
+ h = self.storage_class([("a", "0"), ("b", "1"), ("c", "2")])
+ h.extend(datastructures.Headers([("a", "3"), ("a", "4")]))
+ assert h.getlist("a") == ["0", "3", "4"]
+ h.extend(b=["5", "6"])
+ assert h.getlist("b") == ["1", "5", "6"]
+ h.extend({"c": "7", "d": ["8", "9"]}, c="10")
+ assert h.getlist("c") == ["2", "7", "10"]
+ assert h.getlist("d") == ["8", "9"]
+
+ with pytest.raises(TypeError):
+ h.extend({"x": "x"}, {"x": "x"})
+
def test_update(self):
- h = self.storage_class()
- h["x"] = "1"
- h.update({"x": "2", "y": "1"})
- assert h.getlist("x") == ["2"]
- assert h.getlist("y") == ["1"]
- h.update(z="2")
- assert h.getlist("z") == ["2"]
- h2 = self.storage_class([("a", "b")])
- h2.add("a", "c")
- h.update(h2, d="e")
- assert h.getlist("a") == ["b", "c"]
- assert h["d"] == "e"
+ h = self.storage_class([("a", "0"), ("b", "1"), ("c", "2")])
+ h.update(datastructures.Headers([("a", "3"), ("a", "4")]))
+ assert h.getlist("a") == ["3", "4"]
+ h.update(b=["5", "6"])
+ assert h.getlist("b") == ["5", "6"]
+ h.update({"c": "7", "d": ["8", "9"]})
+ assert h.getlist("c") == ["7"]
+ assert h.getlist("d") == ["8", "9"]
+ h.update({"c": "10"}, c="11")
+ assert h.getlist("c") == ["11"]
+
+ with pytest.raises(TypeError):
+ h.extend({"x": "x"}, {"x": "x"})
+
+ def test_setlist(self):
+ h = self.storage_class([("a", "0"), ("b", "1"), ("c", "2")])
+ h.setlist("b", ["3", "4"])
+ assert h[1] == ("b", "3")
+ assert h[-1] == ("b", "4")
+ h.setlist("b", [])
+ assert "b" not in h
+ h.setlist("d", ["5"])
+ assert h["d"] == "5"
+
+ def test_setlistdefault(self):
+ h = self.storage_class([("a", "0"), ("b", "1"), ("c", "2")])
+ assert h.setlistdefault("a", ["3"]) == ["0"]
+ assert h.setlistdefault("d", ["4", "5"]) == ["4", "5"]
def test_to_wsgi_list(self):
h = self.storage_class()
From bb64132db67553a73bbdd3327a0da28edacb5b98 Mon Sep 17 00:00:00 2001
From: pgjones
Date: Tue, 14 Jan 2020 21:43:23 +0000
Subject: [PATCH 108/733] Use parse_list_header to parse X-Forwarded-For
headers
This is a minor change to use the standardised and more robust parser
rather than splitting on `,`. In practice it may not matter as quotes
aren't meant to be present in the X-Forwarded-For header, and I can't
find reference to a bug. However there is no RFC and this seems the
more consistent way to parse this header.
---
src/werkzeug/wrappers/base_request.py | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/src/werkzeug/wrappers/base_request.py b/src/werkzeug/wrappers/base_request.py
index e18949b33..1f21db2e3 100644
--- a/src/werkzeug/wrappers/base_request.py
+++ b/src/werkzeug/wrappers/base_request.py
@@ -14,6 +14,7 @@
from ..formparser import default_stream_factory
from ..formparser import FormDataParser
from ..http import parse_cookie
+from ..http import parse_list_header
from ..http import parse_options_header
from ..urls import url_decode
from ..utils import cached_property
@@ -616,8 +617,9 @@ def access_route(self):
from the client ip to the last proxy server.
"""
if "HTTP_X_FORWARDED_FOR" in self.environ:
- addr = self.environ["HTTP_X_FORWARDED_FOR"].split(",")
- return self.list_storage_class([x.strip() for x in addr])
+ return self.list_storage_class(
+ parse_list_header(self.environ["HTTP_X_FORWARDED_FOR"])
+ )
elif "REMOTE_ADDR" in self.environ:
return self.list_storage_class([self.environ["REMOTE_ADDR"]])
return self.list_storage_class()
From 73358a809a4227960100c61e1286bae9123d877d Mon Sep 17 00:00:00 2001
From: pgjones
Date: Sat, 11 Jan 2020 21:02:18 +0000
Subject: [PATCH 109/733] Add Access Control, CORS (Cross Origin Request
Sharing) header methods
This should make it a little easier to get and set access control
headers as it ensures the types and naming is correct. It is also
intentionally very minimal like the other header accessors.
---
CHANGES.rst | 2 +
src/werkzeug/wrappers/cors.py | 102 ++++++++++++++++++++++++++++++
src/werkzeug/wrappers/request.py | 5 ++
src/werkzeug/wrappers/response.py | 8 ++-
tests/test_wrappers.py | 25 ++++++++
5 files changed, 140 insertions(+), 2 deletions(-)
create mode 100644 src/werkzeug/wrappers/cors.py
diff --git a/CHANGES.rst b/CHANGES.rst
index e66354b60..6c1ec0975 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -88,6 +88,8 @@ Unreleased
and kwargs. :pr:`1687, 1697`
- The development server accepts paths that start with two slashes,
rather than stripping off the first path segment. :issue:`491`
+- Add access control (Cross Origin Request Sharing, CORS) header
+ methods to the Request and Response wrappers. :pr:`1699`
Version 0.16.1
diff --git a/src/werkzeug/wrappers/cors.py b/src/werkzeug/wrappers/cors.py
new file mode 100644
index 000000000..0ea442e26
--- /dev/null
+++ b/src/werkzeug/wrappers/cors.py
@@ -0,0 +1,102 @@
+from ..http import dump_header
+from ..http import parse_set_header
+from ..utils import environ_property
+from ..utils import header_property
+
+
+class CORSRequestMixin(object):
+ """A mixin for :class:`BaseRequest` subclasses. ``Request`` classes
+ that subclass this will get descriptors for Cross Origin Resource
+ Sharing (CORS) headers.
+
+ .. versionadded:: 1.0
+ """
+
+ origin = environ_property(
+ "HTTP_ORIGIN",
+ doc=(
+ "The host that the request originated from. Set"
+ " :attr:`~CORSResponseMixin.access_control_allow_origin` on"
+ " the response to indicate which origins are allowed."
+ ),
+ )
+
+ access_control_request_headers = environ_property(
+ "HTTP_ACCESS_CONTROL_REQUEST_HEADERS",
+ load_func=parse_set_header,
+ doc=(
+ "Sent with a preflight request to indicate which headers"
+ " will be sent with the cross origin request. Set"
+ " :attr:`~CORSResponseMixin.access_control_allow_headers`"
+ " on the response to indicate which headers are allowed."
+ ),
+ )
+
+ access_control_request_method = environ_property(
+ "HTTP_ACCESS_CONTROL_REQUEST_METHOD",
+ doc=(
+ "Sent with a preflight request to indicate which method"
+ " will be used for the cross origin request. Set"
+ " :attr:`~CORSResponseMixin.access_control_allow_methods`"
+ " on the response to indicate which methods are allowed."
+ ),
+ )
+
+
+class CORSResponseMixin(object):
+ """A mixin for :class:`BaseResponse` subclasses. ``Response``
+ classes that subclass this will get descriptors for Cross Origin
+ Resource Sharing (CORS) headers.
+
+ .. versionadded:: 1.0
+ """
+
+ @property
+ def access_control_allow_credentials(self):
+ """Whether credentials can be shared by the browser to
+ JavaScript code. As part of the preflight request it indicates
+ whether credentials can be used on the cross origin request.
+ """
+ return "Access-Control-Allow-Credentials" in self.headers
+
+ @access_control_allow_credentials.setter
+ def access_control_allow_credentials(self, value):
+ if value is True:
+ self.headers["Access-Control-Allow-Credentials"] = "true"
+ else:
+ self.headers.pop("Access-Control-Allow-Credentials", None)
+
+ access_control_allow_headers = header_property(
+ "Access-Control-Allow-Headers",
+ load_func=parse_set_header,
+ dump_func=dump_header,
+ doc="Which headers can be sent with the cross origin request.",
+ )
+
+ access_control_allow_methods = header_property(
+ "Access-Control-Allow-Methods",
+ load_func=parse_set_header,
+ dump_func=dump_header,
+ doc="Which methods can be used for the cross origin request.",
+ )
+
+ access_control_allow_origin = header_property(
+ "Access-Control-Allow-Origin",
+ load_func=parse_set_header,
+ dump_func=dump_header,
+ doc="The origins that may make cross origin requests.",
+ )
+
+ access_control_expose_headers = header_property(
+ "Access-Control-Expose-Headers",
+ load_func=parse_set_header,
+ dump_func=dump_header,
+ doc="Which headers can be shared by the browser to JavaScript code.",
+ )
+
+ access_control_max_age = header_property(
+ "Access-Control-Max-Age",
+ load_func=int,
+ dump_func=str,
+ doc="The maximum age in seconds the access control settings can be cached for.",
+ )
diff --git a/src/werkzeug/wrappers/request.py b/src/werkzeug/wrappers/request.py
index 4aafa6fd8..3b469a1fa 100644
--- a/src/werkzeug/wrappers/request.py
+++ b/src/werkzeug/wrappers/request.py
@@ -2,6 +2,7 @@
from .auth import AuthorizationMixin
from .base_request import BaseRequest
from .common_descriptors import CommonRequestDescriptorsMixin
+from .cors import CORSRequestMixin
from .etag import ETagRequestMixin
from .user_agent import UserAgentMixin
@@ -12,6 +13,7 @@ class Request(
ETagRequestMixin,
UserAgentMixin,
AuthorizationMixin,
+ CORSRequestMixin,
CommonRequestDescriptorsMixin,
):
"""Full featured request object implementing the following mixins:
@@ -20,7 +22,10 @@ class Request(
- :class:`ETagRequestMixin` for etag and cache control handling
- :class:`UserAgentMixin` for user agent introspection
- :class:`AuthorizationMixin` for http auth handling
+ - :class:`RequestCORSMixin` for Cross Origin Resource Sharing
+ headers
- :class:`CommonRequestDescriptorsMixin` for common headers
+
"""
diff --git a/src/werkzeug/wrappers/response.py b/src/werkzeug/wrappers/response.py
index 58707a6ae..fad9f82f2 100644
--- a/src/werkzeug/wrappers/response.py
+++ b/src/werkzeug/wrappers/response.py
@@ -2,6 +2,7 @@
from .auth import WWWAuthenticateMixin
from .base_response import BaseResponse
from .common_descriptors import CommonResponseDescriptorsMixin
+from .cors import CORSResponseMixin
from .etag import ETagResponseMixin
@@ -65,14 +66,17 @@ def stream(self):
class Response(
BaseResponse,
ETagResponseMixin,
+ WWWAuthenticateMixin,
+ CORSResponseMixin,
ResponseStreamMixin,
CommonResponseDescriptorsMixin,
- WWWAuthenticateMixin,
):
"""Full featured response object implementing the following mixins:
- :class:`ETagResponseMixin` for etag and cache control handling
+ - :class:`WWWAuthenticateMixin` for HTTP authentication support
+ - :class:`ResponseCORSMixin` for Cross Origin Resource Sharing
+ headers
- :class:`ResponseStreamMixin` to add support for the `stream` property
- :class:`CommonResponseDescriptorsMixin` for various HTTP descriptors
- - :class:`WWWAuthenticateMixin` for HTTP authentication support
"""
diff --git a/tests/test_wrappers.py b/tests/test_wrappers.py
index b0ade1032..25a41c86a 100644
--- a/tests/test_wrappers.py
+++ b/tests/test_wrappers.py
@@ -270,6 +270,31 @@ def failing_application(request):
assert resp.status_code == 400
+def test_request_access_control():
+ request = wrappers.Request.from_values(
+ headers={
+ "Origin": "https://palletsprojects.com",
+ "Access-Control-Request-Headers": "X-A, X-B",
+ "Access-Control-Request-Method": "PUT",
+ },
+ )
+ assert request.origin == "https://palletsprojects.com"
+ assert request.access_control_request_headers == {"X-A", "X-B"}
+ assert request.access_control_request_method == "PUT"
+
+
+def test_response_access_control():
+ response = wrappers.Response("Hello World")
+ assert response.access_control_allow_credentials is False
+ response.access_control_allow_credentials = True
+ response.access_control_allow_headers = ["X-A", "X-B"]
+ assert response.headers["Access-Control-Allow-Credentials"] == "true"
+ assert set(response.headers["Access-Control-Allow-Headers"].split(", ")) == {
+ "X-A",
+ "X-B",
+ }
+
+
def test_base_response():
# unicode
response = wrappers.BaseResponse(u"öäü")
From 0a5f8929a31c1732093563ff5f33fdf02f4eac3d Mon Sep 17 00:00:00 2001
From: David Lord
Date: Tue, 14 Jan 2020 15:04:48 -0800
Subject: [PATCH 110/733] use parse_list_header in ProxyFix
---
src/werkzeug/middleware/proxy_fix.py | 28 ++++++++++++++--------------
1 file changed, 14 insertions(+), 14 deletions(-)
diff --git a/src/werkzeug/middleware/proxy_fix.py b/src/werkzeug/middleware/proxy_fix.py
index a562cfa2e..f393f61d3 100644
--- a/src/werkzeug/middleware/proxy_fix.py
+++ b/src/werkzeug/middleware/proxy_fix.py
@@ -21,6 +21,7 @@
:copyright: 2007 Pallets
:license: BSD-3-Clause
"""
+from werkzeug.http import parse_list_header
class ProxyFix(object):
@@ -90,20 +91,23 @@ def __init__(self, app, x_for=1, x_proto=1, x_host=0, x_port=0, x_prefix=0):
self.x_port = x_port
self.x_prefix = x_prefix
- def _get_trusted_comma(self, trusted, value):
- """Get the real value from a comma-separated header based on the
- configured number of trusted proxies.
+ def _get_real_value(self, trusted, value):
+ """Get the real value from a list header based on the configured
+ number of trusted proxies.
:param trusted: Number of values to trust in the header.
- :param value: Header value to parse.
+ :param value: Comma separated list header value to parse.
:return: The real value, or ``None`` if there are fewer values
than the number of trusted proxies.
+ .. versionchanged:: 1.0
+ Renamed from ``_get_trusted_comma``.
+
.. versionadded:: 0.15
"""
if not (trusted and value):
return
- values = [x.strip() for x in value.split(",")]
+ values = parse_list_header(value)
if len(values) >= trusted:
return values[-trusted]
@@ -129,19 +133,17 @@ def __call__(self, environ, start_response):
}
)
- x_for = self._get_trusted_comma(self.x_for, environ_get("HTTP_X_FORWARDED_FOR"))
+ x_for = self._get_real_value(self.x_for, environ_get("HTTP_X_FORWARDED_FOR"))
if x_for:
environ["REMOTE_ADDR"] = x_for
- x_proto = self._get_trusted_comma(
+ x_proto = self._get_real_value(
self.x_proto, environ_get("HTTP_X_FORWARDED_PROTO")
)
if x_proto:
environ["wsgi.url_scheme"] = x_proto
- x_host = self._get_trusted_comma(
- self.x_host, environ_get("HTTP_X_FORWARDED_HOST")
- )
+ x_host = self._get_real_value(self.x_host, environ_get("HTTP_X_FORWARDED_HOST"))
if x_host:
environ["HTTP_HOST"] = x_host
parts = x_host.split(":", 1)
@@ -149,9 +151,7 @@ def __call__(self, environ, start_response):
if len(parts) == 2:
environ["SERVER_PORT"] = parts[1]
- x_port = self._get_trusted_comma(
- self.x_port, environ_get("HTTP_X_FORWARDED_PORT")
- )
+ x_port = self._get_real_value(self.x_port, environ_get("HTTP_X_FORWARDED_PORT"))
if x_port:
host = environ.get("HTTP_HOST")
if host:
@@ -160,7 +160,7 @@ def __call__(self, environ, start_response):
environ["HTTP_HOST"] = "%s:%s" % (host, x_port)
environ["SERVER_PORT"] = x_port
- x_prefix = self._get_trusted_comma(
+ x_prefix = self._get_real_value(
self.x_prefix, environ_get("HTTP_X_FORWARDED_PREFIX")
)
if x_prefix:
From a4836ed10b9331bdf1a8af76f8468af281c7db97 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Tue, 14 Jan 2020 15:30:40 -0800
Subject: [PATCH 111/733] update wrapper mixin docs
---
CHANGES.rst | 2 +-
docs/wrappers.rst | 54 ++++++++++++++++++++++++-------
src/werkzeug/wrappers/cors.py | 12 +++----
src/werkzeug/wrappers/request.py | 14 ++++----
src/werkzeug/wrappers/response.py | 14 ++++----
5 files changed, 65 insertions(+), 31 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 6c1ec0975..951fc1117 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -89,7 +89,7 @@ Unreleased
- The development server accepts paths that start with two slashes,
rather than stripping off the first path segment. :issue:`491`
- Add access control (Cross Origin Request Sharing, CORS) header
- methods to the Request and Response wrappers. :pr:`1699`
+ properties to the ``Request`` and ``Response`` wrappers. :pr:`1699`
Version 0.16.1
diff --git a/docs/wrappers.rst b/docs/wrappers.rst
index 2a8c406f3..9d3de82a9 100644
--- a/docs/wrappers.rst
+++ b/docs/wrappers.rst
@@ -150,30 +150,64 @@ and :class:`BaseResponse` classes and implement all the mixins Werkzeug provides
.. autoclass:: Response
-.. autoclass:: AcceptMixin
- :members:
-.. autoclass:: AuthorizationMixin
- :members:
+Common Descriptors
+------------------
-.. autoclass:: ETagRequestMixin
+.. autoclass:: CommonRequestDescriptorsMixin
:members:
-.. autoclass:: ETagResponseMixin
+.. autoclass:: CommonResponseDescriptorsMixin
:members:
+
+Response Stream
+---------------
+
.. autoclass:: ResponseStreamMixin
:members:
-.. autoclass:: CommonRequestDescriptorsMixin
+
+Accept
+------
+
+.. autoclass:: AcceptMixin
:members:
-.. autoclass:: CommonResponseDescriptorsMixin
+
+Authentication
+--------------
+
+.. autoclass:: AuthorizationMixin
:members:
.. autoclass:: WWWAuthenticateMixin
:members:
+
+CORS
+----
+
+.. autoclass:: werkzeug.wrappers.cors.CORSRequestMixin
+ :members:
+
+.. autoclass:: werkzeug.wrappers.cors.CORSResponseMixin
+ :members:
+
+
+ETag
+----
+
+.. autoclass:: ETagRequestMixin
+ :members:
+
+.. autoclass:: ETagResponseMixin
+ :members:
+
+
+User Agent
+----------
+
.. autoclass:: UserAgentMixin
:members:
@@ -189,10 +223,8 @@ opted into by creating your own subclasses::
pass
-.. module:: werkzeug.wrappers.json
-
JSON
----
-.. autoclass:: JSONMixin
+.. autoclass:: werkzeug.wrappers.json.JSONMixin
:members:
diff --git a/src/werkzeug/wrappers/cors.py b/src/werkzeug/wrappers/cors.py
index 0ea442e26..790e50e9f 100644
--- a/src/werkzeug/wrappers/cors.py
+++ b/src/werkzeug/wrappers/cors.py
@@ -5,9 +5,9 @@
class CORSRequestMixin(object):
- """A mixin for :class:`BaseRequest` subclasses. ``Request`` classes
- that subclass this will get descriptors for Cross Origin Resource
- Sharing (CORS) headers.
+ """A mixin for :class:`~werkzeug.wrappers.BaseRequest` subclasses
+ that adds descriptors for Cross Origin Resource Sharing (CORS)
+ headers.
.. versionadded:: 1.0
"""
@@ -44,9 +44,9 @@ class CORSRequestMixin(object):
class CORSResponseMixin(object):
- """A mixin for :class:`BaseResponse` subclasses. ``Response``
- classes that subclass this will get descriptors for Cross Origin
- Resource Sharing (CORS) headers.
+ """A mixin for :class:`~werkzeug.wrappers.BaseResponse` subclasses
+ that adds descriptors for Cross Origin Resource Sharing (CORS)
+ headers.
.. versionadded:: 1.0
"""
diff --git a/src/werkzeug/wrappers/request.py b/src/werkzeug/wrappers/request.py
index 3b469a1fa..5c2fe1029 100644
--- a/src/werkzeug/wrappers/request.py
+++ b/src/werkzeug/wrappers/request.py
@@ -18,13 +18,13 @@ class Request(
):
"""Full featured request object implementing the following mixins:
- - :class:`AcceptMixin` for accept header parsing
- - :class:`ETagRequestMixin` for etag and cache control handling
- - :class:`UserAgentMixin` for user agent introspection
- - :class:`AuthorizationMixin` for http auth handling
- - :class:`RequestCORSMixin` for Cross Origin Resource Sharing
- headers
- - :class:`CommonRequestDescriptorsMixin` for common headers
+ - :class:`AcceptMixin` for accept header parsing
+ - :class:`ETagRequestMixin` for etag and cache control handling
+ - :class:`UserAgentMixin` for user agent introspection
+ - :class:`AuthorizationMixin` for http auth handling
+ - :class:`~werkzeug.wrappers.cors.CORSRequestMixin` for Cross
+ Origin Resource Sharing headers
+ - :class:`CommonRequestDescriptorsMixin` for common headers
"""
diff --git a/src/werkzeug/wrappers/response.py b/src/werkzeug/wrappers/response.py
index fad9f82f2..8f190f75b 100644
--- a/src/werkzeug/wrappers/response.py
+++ b/src/werkzeug/wrappers/response.py
@@ -73,10 +73,12 @@ class Response(
):
"""Full featured response object implementing the following mixins:
- - :class:`ETagResponseMixin` for etag and cache control handling
- - :class:`WWWAuthenticateMixin` for HTTP authentication support
- - :class:`ResponseCORSMixin` for Cross Origin Resource Sharing
- headers
- - :class:`ResponseStreamMixin` to add support for the `stream` property
- - :class:`CommonResponseDescriptorsMixin` for various HTTP descriptors
+ - :class:`ETagResponseMixin` for etag and cache control handling
+ - :class:`WWWAuthenticateMixin` for HTTP authentication support
+ - :class:`~werkzeug.wrappers.cors.CORSResponseMixin` for Cross
+ Origin Resource Sharing headers
+ - :class:`ResponseStreamMixin` to add support for the ``stream``
+ property
+ - :class:`CommonResponseDescriptorsMixin` for various HTTP
+ descriptors
"""
From 89995fb1148a19714573db9173606a65f88f8aca Mon Sep 17 00:00:00 2001
From: pgjones
Date: Thu, 16 Jan 2020 13:44:48 +0000
Subject: [PATCH 112/733] Conserve order for equal quality in accept
datastructures
Both RFC4647-2.3 and RFC7231 imply that user agents may put equal
quality tags (especially language tags) in order of their preferred
priority. This means that this would ideally be the case,
a = parse_accept_header("en-US,fr-FR", LanguageAccept)
assert a.best == "en-US"
which it previously wasn't as `f` is after `e`.
I've looked back through the commits and found the previous behaviour
(sorting by tag name) was added with Accept in
802a12c0850439088de46ae188c64ab259b16695. I can find no reason for the
previous behaviour to be a requirement.
---
CHANGES.rst | 2 ++
src/werkzeug/datastructures.py | 10 +++++++---
tests/test_datastructures.py | 4 ++++
tests/test_wrappers.py | 2 +-
4 files changed, 14 insertions(+), 4 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 951fc1117..22baeae09 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -90,6 +90,8 @@ Unreleased
rather than stripping off the first path segment. :issue:`491`
- Add access control (Cross Origin Request Sharing, CORS) header
properties to the ``Request`` and ``Response`` wrappers. :pr:`1699`
+- ``Accept`` values are no longer ordered alphabetically for equal
+ quality tags. Instead the initial order is preserved. :issue:`1686`
Version 0.16.1
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index fdff4c2d4..1cda034fc 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -1743,6 +1743,12 @@ class Accept(ImmutableList):
.. versionchanged:: 0.5
:class:`Accept` objects are forced immutable now.
+
+ .. versionchanged:: 1.0.0
+ :class:`Accept` internal values are no longer ordered
+ alphabetically for equal quality tags. Instead the initial
+ order is preserved.
+
"""
def __init__(self, values=()):
@@ -1755,9 +1761,7 @@ def __init__(self, values=()):
else:
self.provided = True
values = sorted(
- values,
- key=lambda x: (self._specificity(x[0]), x[1], x[0]),
- reverse=True,
+ values, key=lambda x: (self._specificity(x[0]), x[1]), reverse=True,
)
list.__init__(self, values)
diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py
index 97ac1e287..a64fa9e1b 100644
--- a/tests/test_datastructures.py
+++ b/tests/test_datastructures.py
@@ -1126,6 +1126,10 @@ def test_accept_wildcard_specificity(self):
assert accept.best_match(["asterisk", "times"], default=None) == "times"
assert accept.best_match(["asterisk"], default=None) is None
+ def test_accept_equal_quality(self):
+ accept = self.storage_class([("a", 1), ("b", 1)])
+ assert accept.best == "a"
+
class TestMIMEAccept(object):
@pytest.mark.parametrize(
diff --git a/tests/test_wrappers.py b/tests/test_wrappers.py
index 25a41c86a..6973a1c57 100644
--- a/tests/test_wrappers.py
+++ b/tests/test_wrappers.py
@@ -438,9 +438,9 @@ def test_accept_mixin():
assert request.accept_mimetypes == MIMEAccept(
[
("text/xml", 1),
- ("image/png", 1),
("application/xml", 1),
("application/xhtml+xml", 1),
+ ("image/png", 1),
("text/html", 0.9),
("text/plain", 0.8),
("*/*", 0.5),
From a578c8f7842df3b2c2fc0ccfe6ac16aa0bc7b0de Mon Sep 17 00:00:00 2001
From: David Lord
Date: Sun, 26 Jan 2020 15:28:19 -0800
Subject: [PATCH 113/733] remove old docs about decoding errors
---
docs/unicode.rst | 47 +++++++----------------------------------------
1 file changed, 7 insertions(+), 40 deletions(-)
diff --git a/docs/unicode.rst b/docs/unicode.rst
index 446febf9d..a993ca5eb 100644
--- a/docs/unicode.rst
+++ b/docs/unicode.rst
@@ -68,49 +68,16 @@ a text file from the file system looks like this::
There is also the codecs module which provides an open function that decodes
automatically from the given encoding.
+
Error Handling
==============
-With Werkzeug 0.3 onwards you can further control the way Werkzeug works with
-unicode. In the past Werkzeug ignored encoding errors silently on incoming
-data. This decision was made to avoid internal server errors if the user
-tampered with the submitted data. However there are situations where you
-want to abort with a `400 BAD REQUEST` instead of silently ignoring the error.
-
-All the functions that do internal decoding now accept an `errors` keyword
-argument that behaves like the `errors` parameter of the builtin string method
-`decode`. The following values are possible:
-
-`ignore`
- This is the default behavior and tells the codec to ignore characters that
- it doesn't understand silently.
-
-`replace`
- The codec will replace unknown characters with a replacement character
- (`U+FFFD` ``REPLACEMENT CHARACTER``)
-
-`strict`
- Raise an exception if decoding fails.
-
-Unlike the regular python decoding Werkzeug does not raise an
-:exc:`UnicodeDecodeError` if the decoding failed but an
-:exc:`~exceptions.HTTPUnicodeError` which
-is a direct subclass of `UnicodeError` and the `BadRequest` HTTP exception.
-The reason is that if this exception is not caught by the application but
-a catch-all for HTTP exceptions exists a default `400 BAD REQUEST` error
-page is displayed.
-
-There is additional error handling available which is a Werkzeug extension
-to the regular codec error handling which is called `fallback`. Often you
-want to use utf-8 but support latin1 as legacy encoding too if decoding
-failed. For this case you can use the `fallback` error handling. For
-example you can specify ``'fallback:iso-8859-15'`` to tell Werkzeug it should
-try with `iso-8859-15` if `utf-8` failed. If this decoding fails too (which
-should not happen for most legacy charsets such as `iso-8859-15`) the error
-is silently ignored as if the error handling was `ignore`.
-
-Further details are available as part of the API documentation of the concrete
-implementations of the functions or classes working with unicode.
+Functions that do internal encoding or decoding accept an ``errors``
+keyword argument that is passed to :meth:`str.decode` and
+:meth:`str.encode`. The default is ``'replace'`` so that errors are easy
+to spot. It might be useful to set it to ``'strict'`` in order to catch
+the error and report the bad data to the client.
+
Request and Response Objects
============================
From 2b2c4c3dd3cf7389e9f4aa06371b7332257c6289 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Sun, 26 Jan 2020 19:51:37 -0800
Subject: [PATCH 114/733] release version 0.16.1
---
CHANGES.rst | 2 +-
setup.cfg | 1 +
src/werkzeug/__init__.py | 2 +-
3 files changed, 3 insertions(+), 2 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 580146876..69f54795a 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -3,7 +3,7 @@
Version 0.16.1
--------------
-Unreleased
+Released 2020-01-27
- Fix import location in deprecation messages for subpackages.
:issue:`1663`
diff --git a/setup.cfg b/setup.cfg
index 9381383dc..260a2f926 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,5 +1,6 @@
[metadata]
license_file = LICENSE.rst
+long_description_content_type = text/x-rst
[bdist_wheel]
universal = true
diff --git a/src/werkzeug/__init__.py b/src/werkzeug/__init__.py
index 4be464264..c3395e0bb 100644
--- a/src/werkzeug/__init__.py
+++ b/src/werkzeug/__init__.py
@@ -14,7 +14,7 @@
"""
from types import ModuleType
-__version__ = "0.16.0"
+__version__ = "0.16.1"
__all__ = ["run_simple", "Client", "Request", "Response", "__version__"]
From 5ee2ffeca0447b8fe78dad6a168c5252a115a767 Mon Sep 17 00:00:00 2001
From: pgjones
Date: Sun, 19 Jan 2020 17:34:12 +0000
Subject: [PATCH 115/733] Allow routing Map lock type to be customized
This will allow a subclass to specify a different type of lock,
specifically an asyncio or trio lock, so that the routing can be
used with these event loops. Note that gevent/eventlet monkey-patch
``Lock``, so no changes are required for their usage.
---
CHANGES.rst | 2 ++
src/werkzeug/routing.py | 7 ++++++-
2 files changed, 8 insertions(+), 1 deletion(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 004ea10d7..f931152f1 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -92,6 +92,8 @@ Unreleased
properties to the ``Request`` and ``Response`` wrappers. :pr:`1699`
- ``Accept`` values are no longer ordered alphabetically for equal
quality tags. Instead the initial order is preserved. :issue:`1686`
+- Added ``Map.lock_class`` attribute for alternative
+ implementations. :pr:`1702`
Version 0.16.1
diff --git a/src/werkzeug/routing.py b/src/werkzeug/routing.py
index aa07892cb..6ca232136 100644
--- a/src/werkzeug/routing.py
+++ b/src/werkzeug/routing.py
@@ -1372,6 +1372,11 @@ class Map(object):
#: A dict of default converters to be used.
default_converters = ImmutableDict(DEFAULT_CONVERTERS)
+ #: The type of lock to use when updating.
+ #:
+ #: .. versionadded:: 1.0
+ lock_class = Lock
+
def __init__(
self,
rules=None,
@@ -1389,7 +1394,7 @@ def __init__(
self._rules = []
self._rules_by_endpoint = {}
self._remap = True
- self._remap_lock = Lock()
+ self._remap_lock = self.lock_class()
self.default_subdomain = default_subdomain
self.charset = charset
From 852c4e66aa59b66b82cfd13e6c2845db1060902a Mon Sep 17 00:00:00 2001
From: pgjones
Date: Sat, 25 Jan 2020 14:06:57 +0000
Subject: [PATCH 116/733] add samesite cookie support to test client
---
CHANGES.rst | 2 ++
src/werkzeug/test.py | 12 +++++++++++-
2 files changed, 13 insertions(+), 1 deletion(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index f931152f1..af67d1e43 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -44,6 +44,8 @@ Unreleased
such that ``X-Foo`` is the same as ``x-foo``. :pr:`1605`
- :meth:`http.dump_cookie` accepts ``'None'`` as a value for
``samesite``. :issue:`1549`
+- :meth:`~test.Client.set_cookie` accepts a ``samesite`` argument.
+ :pr:`1705`
- Support the Content Security Policy header through the
`Response.content_security_policy` data structure. :pr:`1617`
- ``LanguageAccept`` will fall back to matching "en" for "en-US" or
diff --git a/src/werkzeug/test.py b/src/werkzeug/test.py
index 6746c500f..c5ce50a06 100644
--- a/src/werkzeug/test.py
+++ b/src/werkzeug/test.py
@@ -824,6 +824,7 @@ def set_cookie(
domain=None,
secure=None,
httponly=False,
+ samesite=None,
charset="utf-8",
):
"""Sets a cookie in the client's cookie jar. The server name
@@ -832,7 +833,16 @@ def set_cookie(
"""
assert self.cookie_jar is not None, "cookies disabled"
header = dump_cookie(
- key, value, max_age, expires, path, domain, secure, httponly, charset
+ key,
+ value,
+ max_age,
+ expires,
+ path,
+ domain,
+ secure,
+ httponly,
+ charset,
+ samesite=samesite,
)
environ = create_environ(path, base_url="http://" + server_name)
headers = [("Set-Cookie", header)]
From e751e31bab28a80e83c5b4b6520979e553a61a10 Mon Sep 17 00:00:00 2001
From: pgjones
Date: Fri, 31 Jan 2020 16:37:18 +0000
Subject: [PATCH 117/733] Support for Python 3.4 has been dropped
(This was missed previously)
---
setup.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/setup.py b/setup.py
index 32eb48698..157d884ab 100644
--- a/setup.py
+++ b/setup.py
@@ -52,7 +52,7 @@
packages=find_packages("src"),
package_dir={"": "src"},
include_package_data=True,
- python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
+ python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
extras_require={
"watchdog": ["watchdog"],
"dev": [
From 49cf35b05142d05f9a3c4db80606a7d4722c9db9 Mon Sep 17 00:00:00 2001
From: pgjones
Date: Fri, 31 Jan 2020 18:48:33 +0000
Subject: [PATCH 118/733] Upgrade version to 1.0.0rc1
This follows the release of this release candidate version. Note
e751e31bab28a80e83c5b4b6520979e553a61a10 is the commit that
corresponds to 1.0.0rc1
---
src/werkzeug/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/werkzeug/__init__.py b/src/werkzeug/__init__.py
index 6a7f50fbc..ae4efeb75 100644
--- a/src/werkzeug/__init__.py
+++ b/src/werkzeug/__init__.py
@@ -17,4 +17,4 @@
from .wrappers import Request
from .wrappers import Response
-__version__ = "1.0.0.dev0"
+__version__ = "1.0.0rc1"
From e932a1f18f5d79c535aea1588ea5545c1870fde6 Mon Sep 17 00:00:00 2001
From: pgjones
Date: Wed, 29 Jan 2020 21:02:13 +0000
Subject: [PATCH 119/733] Add support for WebSocket rules in the routing
This allows for Rules to be marked as a WebSocket route and only
matched if the binding is websocket. It also ensures that when a
websocket rule is built with a scheme it defaults to the `ws` or `wss`
scheme.
---
CHANGES.rst | 2 +
docs/routing.rst | 29 ++++++++++++++
src/werkzeug/routing.py | 83 +++++++++++++++++++++++++++++++++++++----
tests/test_routing.py | 31 +++++++++++++++
4 files changed, 137 insertions(+), 8 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index af67d1e43..276e866ab 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -96,6 +96,8 @@ Unreleased
quality tags. Instead the initial order is preserved. :issue:`1686`
- Added ``Map.lock_class`` attribute for alternative
implementations. :pr:`1702`
+- Support WebSocket rules (binding to WebSocket requests) in the
+ routing systems. :pr:`1709`
Version 0.16.1
diff --git a/docs/routing.rst b/docs/routing.rst
index 68f74cc1c..d3accb114 100644
--- a/docs/routing.rst
+++ b/docs/routing.rst
@@ -227,3 +227,32 @@ Variable parts are of course also possible in the host section::
Rule('/', endpoint='www_index', host='www.example.com'),
Rule('/', endpoint='user_index', host='.example.com')
], host_matching=True)
+
+
+WebSockets
+==========
+
+.. versionadded:: 1.0
+
+With Werkzeug 1.0 onwards it is possible to mark a Rule as a websocket
+and only match it if the MapAdapter is created with a websocket
+bind. This functionality can be used as so::
+
+ url_map = Map([
+ Rule("/", endpoint="index", websocket=True),
+ ])
+ adapter = map.bind("example.org", "/", url_scheme="ws")
+ assert adapter.match("/") == ("index", {})
+
+If the only match is a WebSocket rule and the bind is http (or the
+only match is http and the bind is websocket) a
+:class:`WebsocketMismatch` (derives from :class:`BadRequest`)
+exception is raised.
+
+As WebSocket urls have a different scheme, WebSocket Rules are always
+built with a scheme and host i.e. as if ``force_external = True``.
+
+.. note::
+
+ Werkzeug has no further WebSocket support (beyond routing). This
+ functionality is mostly of use to ASGI projects.
diff --git a/src/werkzeug/routing.py b/src/werkzeug/routing.py
index 6ca232136..8422b0c2f 100644
--- a/src/werkzeug/routing.py
+++ b/src/werkzeug/routing.py
@@ -118,6 +118,7 @@
from .datastructures import ImmutableDict
from .datastructures import MultiDict
from .exceptions import BadHost
+from .exceptions import BadRequest
from .exceptions import HTTPException
from .exceptions import MethodNotAllowed
from .exceptions import NotFound
@@ -329,7 +330,15 @@ def __str__(self):
return u"".join(message)
+class WebsocketMismatch(BadRequest):
+ """The only matched rule is either a websocket and the request is http
+ or the rule is http and the request is a websocket."""
+
+ pass
+
+
class ValidationError(ValueError):
+
"""Validation error. If a rule converter raises this exception the rule
does not match the current URL and the next URL is tried.
"""
@@ -631,8 +640,15 @@ def foo_with_slug(adapter, id):
used to provide a match rule for the whole host. This also means
that the subdomain feature is disabled.
+ `websocket`
+ If True (defaults to False) this represents a WebSocket, rather than
+ a http route.
+
.. versionadded:: 0.7
The `alias` and `host` parameters were added.
+
+ .. versionadded:: 1.0
+ The `websocket` parameter was added.
"""
def __init__(
@@ -648,6 +664,7 @@ def __init__(
redirect_to=None,
alias=False,
host=None,
+ websocket=False,
):
if not string.startswith("/"):
raise ValueError("urls must start with a leading slash")
@@ -662,14 +679,27 @@ def __init__(
self.defaults = defaults
self.build_only = build_only
self.alias = alias
+ self.websocket = websocket
+ if methods is not None:
+ if isinstance(methods, str):
+ raise TypeError("param `methods` should be `Iterable[str]`, not `str`")
+ methods = set([x.upper() for x in methods])
+ if "HEAD" not in methods and "GET" in methods:
+ methods.add("HEAD")
+
+ if (
+ websocket
+ and methods is not None
+ and len(methods - {"GET", "HEAD", "OPTIONS"}) > 0
+ ):
+ raise ValueError(
+ "WebSocket Rules can only use 'GET', 'HEAD', or 'OPTIONS' methods"
+ )
+
if methods is None:
self.methods = None
else:
- if isinstance(methods, str):
- raise TypeError("param `methods` should be `Iterable[str]`, not `str`")
- self.methods = set([x.upper() for x in methods])
- if "HEAD" not in self.methods and "GET" in self.methods:
- self.methods.add("HEAD")
+ self.methods = methods
self.endpoint = endpoint
self.redirect_to = redirect_to
@@ -1490,8 +1520,12 @@ def bind(
.. versionadded:: 0.8
`query_args` can now also be a string.
+ .. versionadded:: 1.0
+ `websocket` added
+
.. versionchanged:: 0.15
``path_info`` defaults to ``'/'`` if ``None``.
+
"""
server_name = server_name.lower()
if self.host_matching:
@@ -1663,6 +1697,7 @@ def __init__(
self.path_info = to_unicode(path_info)
self.default_method = to_unicode(default_method)
self.query_args = query_args
+ self.websocket = self.url_scheme in {"ws", "wss"}
def dispatch(
self, view_func, path_info=None, method=None, catch_http_exceptions=False
@@ -1720,7 +1755,14 @@ def application(environ, start_response):
return e
raise
- def match(self, path_info=None, method=None, return_rule=False, query_args=None):
+ def match(
+ self,
+ path_info=None,
+ method=None,
+ return_rule=False,
+ query_args=None,
+ websocket=None,
+ ):
"""The usage is simple: you just pass the match method the current
path info as well as the method (which defaults to `GET`). The
following things can then happen:
@@ -1741,6 +1783,10 @@ def match(self, path_info=None, method=None, return_rule=False, query_args=None)
You can use the `RequestRedirect` instance as response-like object
similar to all other subclasses of `HTTPException`.
+ - you receive a ``WebsocketMismatch`` exception if the only match is
+ a websocket rule and the bind is to a http request, or if the match
+ is a http rule and the bind is to a websocket request.
+
- you get a tuple in the form ``(endpoint, arguments)`` if there is
a match (unless `return_rule` is True, in which case you get a tuple
in the form ``(rule, arguments)``)
@@ -1805,6 +1851,8 @@ def match(self, path_info=None, method=None, return_rule=False, query_args=None)
if query_args is None:
query_args = self.query_args
method = (method or self.default_method).upper()
+ if websocket is None:
+ websocket = self.websocket
require_redirect = False
@@ -1814,6 +1862,7 @@ def match(self, path_info=None, method=None, return_rule=False, query_args=None)
)
have_match_for = set()
+ websocket_mismatch = False
for rule in self.map._rules:
try:
rv = rule.match(path, method)
@@ -1835,6 +1884,9 @@ def match(self, path_info=None, method=None, return_rule=False, query_args=None)
if rule.methods is not None and method not in rule.methods:
have_match_for.update(rule.methods)
continue
+ if rule.websocket != websocket:
+ websocket_mismatch = True
+ continue
if self.map.redirect_defaults:
redirect_url = self.get_default_redirect(rule, method, rv, query_args)
@@ -1880,6 +1932,8 @@ def _handle_match(match):
if have_match_for:
raise MethodNotAllowed(valid_methods=list(have_match_for))
+ if websocket_mismatch:
+ raise WebsocketMismatch()
raise NotFound()
def test(self, path_info=None, method=None):
@@ -2005,6 +2059,7 @@ def _partial_build(self, endpoint, values, method, append_unknown):
rv = rule.build(values, append_unknown)
if rv is not None:
+ rv = (rv[0], rv[1], rule.websocket)
if self.map.host_matching:
if rv[0] == self.server_name:
return rv
@@ -2114,10 +2169,22 @@ def build(
rv = self._partial_build(endpoint, values, method, append_unknown)
if rv is None:
raise BuildError(endpoint, values, method, self)
- domain_part, path = rv
+ domain_part, path, websocket = rv
host = self.get_host(domain_part)
+ # Only build WebSocket routes with the scheme (as relative
+ # WebSocket paths aren't useful and are misleading). In
+ # addition if bound to a WebSocket ensure that http routes are
+ # built with a http scheme (if required).
+ url_scheme = self.url_scheme
+ secure = url_scheme in {"https", "wss"}
+ if websocket:
+ force_external = True
+ url_scheme = "wss" if secure else "ws"
+ elif url_scheme:
+ url_scheme = "https" if secure else "http"
+
# shortcut this.
if not force_external and (
(self.map.host_matching and host == self.server_name)
@@ -2127,7 +2194,7 @@ def build(
return str(
"%s//%s%s/%s"
% (
- self.url_scheme + ":" if self.url_scheme else "",
+ url_scheme + ":" if url_scheme else "",
host,
self.script_name[:-1],
path.lstrip("/"),
diff --git a/tests/test_routing.py b/tests/test_routing.py
index 65d3ae8b0..0ba9bcf26 100644
--- a/tests/test_routing.py
+++ b/tests/test_routing.py
@@ -27,6 +27,8 @@ def test_basic_routing():
r.Rule("/", endpoint="index"),
r.Rule("/foo", endpoint="foo"),
r.Rule("/bar/", endpoint="bar"),
+ r.Rule("/ws", endpoint="ws", websocket=True),
+ r.Rule("/", endpoint="indexws", websocket=True),
]
)
adapter = map.bind("example.org", "/")
@@ -36,6 +38,9 @@ def test_basic_routing():
pytest.raises(r.RequestRedirect, lambda: adapter.match("/bar"))
pytest.raises(r.NotFound, lambda: adapter.match("/blub"))
+ adapter = map.bind("example.org", "/", url_scheme="ws")
+ assert adapter.match("/") == ("indexws", {})
+
adapter = map.bind("example.org", "/test")
with pytest.raises(r.RequestRedirect) as excinfo:
adapter.match("/bar")
@@ -61,6 +66,13 @@ def test_basic_routing():
adapter.match()
assert excinfo.value.new_url == "http://example.org/bar/?foo=bar"
+ adapter = map.bind("example.org", "/ws", url_scheme="wss")
+ assert adapter.match("/ws", websocket=True) == ("ws", {})
+ with pytest.raises(r.WebsocketMismatch):
+ adapter.match("/ws", websocket=False)
+ with pytest.raises(r.WebsocketMismatch):
+ adapter.match("/foo", websocket=True)
+
def test_merge_slashes_match():
url_map = r.Map(
@@ -192,6 +204,7 @@ def test_basic_building():
r.Rule("/bar/", endpoint="barf"),
r.Rule("/bar/", endpoint="barp"),
r.Rule("/hehe", endpoint="blah", subdomain="blah"),
+ r.Rule("/ws", endpoint="ws", websocket=True),
]
)
adapter = map.bind("example.org", "/", subdomain="blah")
@@ -223,6 +236,11 @@ def test_basic_building():
assert adapter.build("foo", {}) == "/foo"
assert adapter.build("foo", {}, force_external=True) == "//example.org/foo"
+ adapter = map.bind("example.org", url_scheme="ws")
+ assert adapter.build("ws", {}) == "ws://example.org/ws"
+ assert adapter.build("foo", {}, force_external=True) == "http://example.org/foo"
+ assert adapter.build("foo", {}) == "/foo"
+
def test_long_build():
long_args = dict(("v%d" % x, x) for x in range(10000))
@@ -1205,3 +1223,16 @@ def test_build_url_same_endpoint_multiple_hosts():
beta_case = m.bind("BeTa.ExAmPlE.CoM")
assert beta_case.build("index") == "/"
+
+
+def test_rule_websocket_methods():
+ with pytest.raises(ValueError):
+ r.Rule("/ws", endpoint="ws", websocket=True, methods=["post"])
+ with pytest.raises(ValueError):
+ r.Rule(
+ "/ws",
+ endpoint="ws",
+ websocket=True,
+ methods=["get", "head", "options", "post"],
+ )
+ r.Rule("/ws", endpoint="ws", websocket=True, methods=["get", "head", "options"])
From ecd0d755b67449d30b07a8f878adc9181d053101 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Tue, 4 Feb 2020 14:34:17 -0800
Subject: [PATCH 120/733] docs cleanup
---
CHANGES.rst | 4 +-
docs/routing.rst | 37 ++++++++-----
src/werkzeug/routing.py | 118 +++++++++++++++++++++-------------------
3 files changed, 87 insertions(+), 72 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 276e866ab..54ac12db3 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -96,8 +96,8 @@ Unreleased
quality tags. Instead the initial order is preserved. :issue:`1686`
- Added ``Map.lock_class`` attribute for alternative
implementations. :pr:`1702`
-- Support WebSocket rules (binding to WebSocket requests) in the
- routing systems. :pr:`1709`
+- Support matching and building WebSocket rules in the routing system,
+ for use by async frameworks. :pr:`1709`
Version 0.16.1
diff --git a/docs/routing.rst b/docs/routing.rst
index d3accb114..b2564e42a 100644
--- a/docs/routing.rst
+++ b/docs/routing.rst
@@ -234,25 +234,32 @@ WebSockets
.. versionadded:: 1.0
-With Werkzeug 1.0 onwards it is possible to mark a Rule as a websocket
-and only match it if the MapAdapter is created with a websocket
-bind. This functionality can be used as so::
+If a :class:`Rule` is created with ``websocket=True``, it will only
+match if the :class:`Map` is bound to a request with a ``url_scheme`` of
+``ws`` or ``wss``.
+
+.. note::
+
+ Werkzeug has no further WebSocket support beyond routing. This
+ functionality is mostly of use to ASGI projects.
+
+.. code-block:: python
url_map = Map([
- Rule("/", endpoint="index", websocket=True),
+ Rule("/ws", endpoint="comm", websocket=True),
])
- adapter = map.bind("example.org", "/", url_scheme="ws")
- assert adapter.match("/") == ("index", {})
+ adapter = map.bind("example.org", "/ws", url_scheme="ws")
+ assert adapter.match() == ("comm", {})
-If the only match is a WebSocket rule and the bind is http (or the
-only match is http and the bind is websocket) a
-:class:`WebsocketMismatch` (derives from :class:`BadRequest`)
-exception is raised.
+If the only match is a WebSocket rule and the bind is HTTP (or the
+only match is HTTP and the bind is WebSocket) a
+:exc:`WebsocketMismatch` (derives from
+:exc:`~werkzeug.exceptions.BadRequest`) exception is raised.
-As WebSocket urls have a different scheme, WebSocket Rules are always
-built with a scheme and host i.e. as if ``force_external = True``.
+As WebSocket URLs have a different scheme, rules are always built with a
+scheme and host, ``force_external=True`` is implied.
-.. note::
+.. code-block:: python
- Werkzeug has no further WebSocket support (beyond routing). This
- functionality is mostly of use to ASGI projects.
+ url = adapter.build("comm")
+ assert url == "ws://example.org/ws"
diff --git a/src/werkzeug/routing.py b/src/werkzeug/routing.py
index 8422b0c2f..8fa3c60a3 100644
--- a/src/werkzeug/routing.py
+++ b/src/werkzeug/routing.py
@@ -331,14 +331,12 @@ def __str__(self):
class WebsocketMismatch(BadRequest):
- """The only matched rule is either a websocket and the request is http
- or the rule is http and the request is a websocket."""
-
- pass
+ """The only matched rule is either a WebSocket and the request is
+ HTTP, or the rule is HTTP and the request is a WebSocket.
+ """
class ValidationError(ValueError):
-
"""Validation error. If a rule converter raises this exception the rule
does not match the current URL and the next URL is tried.
"""
@@ -585,21 +583,12 @@ class Rule(RuleFactory):
`MethodNotAllowed` rather than `NotFound`. If `GET` is present in the
list of methods and `HEAD` is not, `HEAD` is added automatically.
- .. versionchanged:: 0.6.1
- `HEAD` is now automatically added to the methods if `GET` is
- present. The reason for this is that existing code often did not
- work properly in servers not rewriting `HEAD` to `GET`
- automatically and it was not documented how `HEAD` should be
- treated. This was considered a bug in Werkzeug because of that.
-
`strict_slashes`
Override the `Map` setting for `strict_slashes` only for this rule. If
not specified the `Map` setting is used.
`merge_slashes`
- Override the ``Map`` setting for ``merge_slashes`` for this rule.
-
- .. versionadded:: 1.0
+ Override :attr:`Map.merge_slashes` for this rule.
`build_only`
Set this to True and the rule will never match but will create a URL
@@ -641,14 +630,21 @@ def foo_with_slug(adapter, id):
that the subdomain feature is disabled.
`websocket`
- If True (defaults to False) this represents a WebSocket, rather than
- a http route.
+ If ``True``, this rule is only matches for WebSocket (``ws://``,
+ ``wss://``) requests. By default, rules will only match for HTTP
+ requests.
- .. versionadded:: 0.7
- The `alias` and `host` parameters were added.
+ .. versionadded:: 1.0
+ Added ``websocket``.
.. versionadded:: 1.0
- The `websocket` parameter was added.
+ Added ``merge_slashes``.
+
+ .. versionadded:: 0.7
+ Added ``alias`` and ``host``.
+
+ .. versionchanged:: 0.6.1
+ ``HEAD`` is added to ``methods`` if ``GET`` is present.
"""
def __init__(
@@ -680,26 +676,22 @@ def __init__(
self.build_only = build_only
self.alias = alias
self.websocket = websocket
+
if methods is not None:
if isinstance(methods, str):
- raise TypeError("param `methods` should be `Iterable[str]`, not `str`")
- methods = set([x.upper() for x in methods])
+ raise TypeError("'methods' should be a list of strings.")
+
+ methods = {x.upper() for x in methods}
+
if "HEAD" not in methods and "GET" in methods:
methods.add("HEAD")
- if (
- websocket
- and methods is not None
- and len(methods - {"GET", "HEAD", "OPTIONS"}) > 0
- ):
- raise ValueError(
- "WebSocket Rules can only use 'GET', 'HEAD', or 'OPTIONS' methods"
- )
+ if websocket and methods - {"GET", "HEAD", "OPTIONS"}:
+ raise ValueError(
+ "WebSocket rules can only use 'GET', 'HEAD', and 'OPTIONS' methods."
+ )
- if methods is None:
- self.methods = None
- else:
- self.methods = methods
+ self.methods = methods
self.endpoint = endpoint
self.redirect_to = redirect_to
@@ -1389,6 +1381,10 @@ class Map(object):
enabled the `host` parameter to rules is used
instead of the `subdomain` one.
+ .. versionchanged:: 1.0
+ If ``url_scheme`` is ``ws`` or ``wss``, only WebSocket rules
+ will match.
+
.. versionchanged:: 1.0
Added ``merge_slashes``.
@@ -1514,18 +1510,18 @@ def bind(
no defined. If there is no `default_subdomain` you cannot use the
subdomain feature.
- .. versionadded:: 0.7
- `query_args` added
-
- .. versionadded:: 0.8
- `query_args` can now also be a string.
-
- .. versionadded:: 1.0
- `websocket` added
+ .. versionchanged:: 1.0
+ If ``url_scheme`` is ``ws`` or ``wss``, only WebSocket rules
+ will match.
.. versionchanged:: 0.15
``path_info`` defaults to ``'/'`` if ``None``.
+ .. versionchanged:: 0.8
+ ``query_args`` can be a string.
+
+ .. versionchanged:: 0.7
+ Added ``query_args``.
"""
server_name = server_name.lower()
if self.host_matching:
@@ -1783,9 +1779,10 @@ def match(
You can use the `RequestRedirect` instance as response-like object
similar to all other subclasses of `HTTPException`.
- - you receive a ``WebsocketMismatch`` exception if the only match is
- a websocket rule and the bind is to a http request, or if the match
- is a http rule and the bind is to a websocket request.
+ - you receive a ``WebsocketMismatch`` exception if the only
+ match is a WebSocket rule but the bind is an HTTP request, or
+ if the match is an HTTP rule but the bind is a WebSocket
+ request.
- you get a tuple in the form ``(endpoint, arguments)`` if there is
a match (unless `return_rule` is True, in which case you get a tuple
@@ -1833,15 +1830,21 @@ def match(
automatic redirects as string or dictionary. It's
currently not possible to use the query arguments
for URL matching.
+ :param websocket: Match WebSocket instead of HTTP requests. A
+ websocket request has a ``ws`` or ``wss``
+ :attr:`url_scheme`. This overrides that detection.
- .. versionadded:: 0.6
- `return_rule` was added.
+ .. versionadded:: 1.0
+ Added ``websocket``.
+
+ .. versionchanged:: 0.8
+ ``query_args`` can be a string.
.. versionadded:: 0.7
- `query_args` was added.
+ Added ``query_args``.
- .. versionchanged:: 0.8
- `query_args` can now also be a string.
+ .. versionadded:: 0.6
+ Added ``return_rule``.
"""
self.map.update()
if path_info is None:
@@ -1851,6 +1854,7 @@ def match(
if query_args is None:
query_args = self.query_args
method = (method or self.default_method).upper()
+
if websocket is None:
websocket = self.websocket
@@ -1863,6 +1867,7 @@ def match(
have_match_for = set()
websocket_mismatch = False
+
for rule in self.map._rules:
try:
rv = rule.match(path, method)
@@ -1884,6 +1889,7 @@ def match(
if rule.methods is not None and method not in rule.methods:
have_match_for.update(rule.methods)
continue
+
if rule.websocket != websocket:
websocket_mismatch = True
continue
@@ -1932,8 +1938,10 @@ def _handle_match(match):
if have_match_for:
raise MethodNotAllowed(valid_methods=list(have_match_for))
+
if websocket_mismatch:
raise WebsocketMismatch()
+
raise NotFound()
def test(self, path_info=None, method=None):
@@ -2169,16 +2177,16 @@ def build(
rv = self._partial_build(endpoint, values, method, append_unknown)
if rv is None:
raise BuildError(endpoint, values, method, self)
- domain_part, path, websocket = rv
+ domain_part, path, websocket = rv
host = self.get_host(domain_part)
- # Only build WebSocket routes with the scheme (as relative
- # WebSocket paths aren't useful and are misleading). In
- # addition if bound to a WebSocket ensure that http routes are
- # built with a http scheme (if required).
+ # Always build WebSocket routes with the scheme (browsers
+ # require full URLs). If bound to a WebSocket, ensure that HTTP
+ # routes are built with an HTTP scheme.
url_scheme = self.url_scheme
secure = url_scheme in {"https", "wss"}
+
if websocket:
force_external = True
url_scheme = "wss" if secure else "ws"
From 4249e01aed532774d6160eb7efbc4a71d94804d5 Mon Sep 17 00:00:00 2001
From: Brad Solomon
Date: Mon, 27 Jan 2020 10:12:34 -0500
Subject: [PATCH 121/733] document werkzeug logger
---
docs/utils.rst | 16 ++++++++++++++++
1 file changed, 16 insertions(+)
diff --git a/docs/utils.rst b/docs/utils.rst
index 65072fb49..bcc1af124 100644
--- a/docs/utils.rst
+++ b/docs/utils.rst
@@ -76,3 +76,19 @@ Security Helpers
.. autofunction:: pbkdf2_hex
.. autofunction:: pbkdf2_bin
+
+
+Logging
+=======
+
+Werkzeug uses standard Python :mod:`logging`. The logger is named
+``"werkzeug"``.
+
+.. code-block:: python
+
+ import logging
+ logger = logging.getLogger("werkzeug")
+
+If the logger level is not set, it will be set to :data:`~logging.INFO`
+on first use. If there is no handler for that level, a
+:class:`~logging.StreamHandler` is added.
From 85eaee9b41dc16283d3a2a619ac4cc7cbdb8e6e0 Mon Sep 17 00:00:00 2001
From: David Lord
Date: Wed, 5 Feb 2020 18:10:38 -0800
Subject: [PATCH 122/733] range request always returns 206 status
---
CHANGES.rst | 3 +++
src/werkzeug/wrappers/etag.py | 29 ++++++++++++++++-------------
tests/test_wrappers.py | 4 ++--
3 files changed, 21 insertions(+), 15 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 54ac12db3..ae5fba0aa 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -98,6 +98,9 @@ Unreleased
implementations. :pr:`1702`
- Support matching and building WebSocket rules in the routing system,
for use by async frameworks. :pr:`1709`
+- Range requests that span an entire file respond with 206 instead of
+ 200, to be more compliant with :rfc:`7233`. This may help serving
+ media to older browsers. :issue:`410, 1704`
Version 0.16.1
diff --git a/src/werkzeug/wrappers/etag.py b/src/werkzeug/wrappers/etag.py
index ac2860a05..460629bdb 100644
--- a/src/werkzeug/wrappers/etag.py
+++ b/src/werkzeug/wrappers/etag.py
@@ -142,28 +142,31 @@ def _process_range_request(self, environ, complete_length=None, accept_ranges=No
"""
from ..exceptions import RequestedRangeNotSatisfiable
- if accept_ranges is None:
- return False
- self.headers["Accept-Ranges"] = accept_ranges
- if not self._is_range_request_processable(environ) or complete_length is None:
+ if (
+ accept_ranges is None
+ or complete_length is None
+ or not self._is_range_request_processable(environ)
+ ):
return False
+
parsed_range = parse_range_header(environ.get("HTTP_RANGE"))
+
if parsed_range is None:
raise RequestedRangeNotSatisfiable(complete_length)
+
range_tuple = parsed_range.range_for_length(complete_length)
content_range_header = parsed_range.to_content_range_header(complete_length)
+
if range_tuple is None or content_range_header is None:
raise RequestedRangeNotSatisfiable(complete_length)
+
content_length = range_tuple[1] - range_tuple[0]
- # Be sure not to send 206 response
- # if requested range is the full content.
- if content_length != complete_length:
- self.headers["Content-Length"] = content_length
- self.content_range = content_range_header
- self.status_code = 206
- self._wrap_response(range_tuple[0], content_length)
- return True
- return False
+ self.headers["Content-Length"] = content_length
+ self.headers["Accept-Ranges"] = accept_ranges
+ self.content_range = content_range_header
+ self.status_code = 206
+ self._wrap_response(range_tuple[0], content_length)
+ return True
def make_conditional(
self, request_or_environ, accept_ranges=False, complete_length=None
diff --git a/tests/test_wrappers.py b/tests/test_wrappers.py
index 6973a1c57..7e21eda75 100644
--- a/tests/test_wrappers.py
+++ b/tests/test_wrappers.py
@@ -878,9 +878,9 @@ def test_range_request_with_complete_file():
response = wrappers.Response(wrap_file(env, f))
env["HTTP_RANGE"] = "bytes=0-%d" % (fsize - 1)
response.make_conditional(env, accept_ranges=True, complete_length=fsize)
- assert response.status_code == 200
+ assert response.status_code == 206
assert response.headers["Accept-Ranges"] == "bytes"
- assert "Content-Range" not in response.headers
+ assert response.headers["Content-Range"] == "bytes 0-%d/%d" % (fsize - 1, fsize)
assert response.headers["Content-Length"] == str(fsize)
assert response.data == fcontent
From a8b2df202d81b45f7aeef442a844603e50db9b0c Mon Sep 17 00:00:00 2001
From: David Lord
Date: Wed, 5 Feb 2020 21:24:01 -0800
Subject: [PATCH 123/733] SharedDataMiddleware adds utf-8 charset
---
CHANGES.rst | 5 +++++
src/werkzeug/middleware/shared_data.py | 24 +++++++++++++++---------
tests/middleware/test_shared_data.py | 4 ++++
3 files changed, 24 insertions(+), 9 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index ae5fba0aa..90d07e8f1 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -101,6 +101,11 @@ Unreleased
- Range requests that span an entire file respond with 206 instead of
200, to be more compliant with :rfc:`7233`. This may help serving
media to older browsers. :issue:`410, 1704`
+- The :class:`~middleware.shared_data.SharedDataMiddleware` default
+ ``fallback_mimetype`` is ``application/octet-stream``. If a filename
+ looks like a text mimetype, the ``utf-8`` charset is added to it.
+ This matches the behavior of :class:`~wrappers.BaseResponse` and
+ Flask's ``send_file()``. :issue:`1689`
Version 0.16.1
diff --git a/src/werkzeug/middleware/shared_data.py b/src/werkzeug/middleware/shared_data.py
index 5c000c978..ab4ff0ffc 100644
--- a/src/werkzeug/middleware/shared_data.py
+++ b/src/werkzeug/middleware/shared_data.py
@@ -24,6 +24,7 @@
from ..http import http_date
from ..http import is_resource_modified
from ..security import safe_join
+from ..utils import get_content_type
from ..wsgi import get_path_info
from ..wsgi import wrap_file
@@ -70,19 +71,24 @@ class SharedDataMiddleware(object):
module. If it's unable to figure out the charset it will fall back
to `fallback_mimetype`.
- .. versionchanged:: 0.5
- The cache timeout is configurable now.
-
- .. versionadded:: 0.6
- The `fallback_mimetype` parameter was added.
-
:param app: the application to wrap. If you don't want to wrap an
application you can pass it :exc:`NotFound`.
:param exports: a list or dict of exported files and folders.
:param disallow: a list of :func:`~fnmatch.fnmatch` rules.
- :param fallback_mimetype: the fallback mimetype for unknown files.
:param cache: enable or disable caching headers.
:param cache_timeout: the cache timeout in seconds for the headers.
+ :param fallback_mimetype: The fallback mimetype for unknown files.
+
+ .. versionchanged:: 1.0
+ The default ``fallback_mimetype`` is
+ ``application/octet-stream``. If a filename looks like a text
+ mimetype, the ``utf-8`` charset is added to it.
+
+ .. versionadded:: 0.6
+ Added ``fallback_mimetype``.
+
+ .. versionchanged:: 0.5
+ Added ``cache_timeout``.
"""
def __init__(
@@ -92,7 +98,7 @@ def __init__(
disallow=None,
cache=True,
cache_timeout=60 * 60 * 12,
- fallback_mimetype="text/plain",
+ fallback_mimetype="application/octet-stream",
):
self.app = app
self.exports = []
@@ -254,7 +260,7 @@ def __call__(self, environ, start_response):
return self.app(environ, start_response)
guessed_type = mimetypes.guess_type(real_filename)
- mime_type = guessed_type[0] or self.fallback_mimetype
+ mime_type = get_content_type(guessed_type[0] or self.fallback_mimetype, "utf-8")
f, mtime, file_size = file_loader()
headers = [("Date", http_date())]
diff --git a/tests/middleware/test_shared_data.py b/tests/middleware/test_shared_data.py
index fb685f77c..175cbd674 100644
--- a/tests/middleware/test_shared_data.py
+++ b/tests/middleware/test_shared_data.py
@@ -47,6 +47,10 @@ def null_application(environ, start_response):
app_iter, status, headers = run_wsgi_app(app, create_environ(p))
assert status == "200 OK"
+ if p.endswith(".txt"):
+ content_type = next(v for k, v in headers if k == "Content-Type")
+ assert content_type == "text/plain; charset=utf-8"
+
with closing(app_iter) as app_iter:
data = b"".join(app_iter).strip()
From dfde671ef969e27c7b14bd464688c009b34a7d2b Mon Sep 17 00:00:00 2001
From: David Lord
Date: Thu, 6 Feb 2020 12:18:19 -0800
Subject: [PATCH 124/733] release version 1.0.0
---
CHANGES.rst | 2 +-
src/werkzeug/__init__.py | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 90d07e8f1..425dea85d 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -3,7 +3,7 @@
Version 1.0.0
-------------
-Unreleased
+Released 2020-02-06
- Drop support for Python 3.4. (:issue:`1478`)
- Remove code that issued deprecation warnings in version 0.15.
diff --git a/src/werkzeug/__init__.py b/src/werkzeug/__init__.py
index ae4efeb75..202fcea91 100644
--- a/src/werkzeug/__init__.py
+++ b/src/werkzeug/__init__.py
@@ -17,4 +17,4 @@
from .wrappers import Request
from .wrappers import Response
-__version__ = "1.0.0rc1"
+__version__ = "1.0.0"
From d715fd2b7e7a4f522885609ef63f8af69b05098f Mon Sep 17 00:00:00 2001
From: pgjones
Date: Fri, 7 Feb 2020 12:58:10 +0000
Subject: [PATCH 125/733] Add a default argument (None) the the RequestRedirect
get_response
The base class supplies the default and is documented to do so, this
matches that API. It also helps as the environ argument isn't used.
---
CHANGES.rst | 9 +++++++++
src/werkzeug/routing.py | 2 +-
2 files changed, 10 insertions(+), 1 deletion(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 425dea85d..4768cd0ce 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,5 +1,14 @@
.. currentmodule:: werkzeug
+Version 1.0.1
+-------------
+
+Unreleased
+
+- Make the argument to ``RequestRedirect.get_response`` optional.
+ :issue:`1718`
+
+
Version 1.0.0
-------------
diff --git a/src/werkzeug/routing.py b/src/werkzeug/routing.py
index 8fa3c60a3..e090c6680 100644
--- a/src/werkzeug/routing.py
+++ b/src/werkzeug/routing.py
@@ -250,7 +250,7 @@ def __init__(self, new_url):
RoutingException.__init__(self, new_url)
self.new_url = new_url
- def get_response(self, environ):
+ def get_response(self, environ=None):
return redirect(self.new_url, self.code)
From b400adb330dd1f4295baaf5c1ece08935645ce81 Mon Sep 17 00:00:00 2001
From: pgjones
Date: Sat, 8 Feb 2020 11:04:08 +0000
Subject: [PATCH 126/733] Allow the scheme to be overridden when building
This is useful to both Flask and Quart as they currently override the
`url_scheme` attribute then call build before restoring it.
---
CHANGES.rst | 9 +++++++++
src/werkzeug/routing.py | 16 ++++++++++++----
tests/test_routing.py | 5 +++++
3 files changed, 26 insertions(+), 4 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 425dea85d..d49a1fa49 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,5 +1,14 @@
.. currentmodule:: werkzeug
+Version 2.0.0
+-------------
+
+Unreleased
+
+- Add a ``url_scheme`` argument to :meth:`~routing.MapAdapter.build`
+ to override the bound scheme. :pr:`1721`
+
+
Version 1.0.0
-------------
diff --git a/src/werkzeug/routing.py b/src/werkzeug/routing.py
index 8fa3c60a3..f94382081 100644
--- a/src/werkzeug/routing.py
+++ b/src/werkzeug/routing.py
@@ -2085,6 +2085,7 @@ def build(
method=None,
force_external=False,
append_unknown=True,
+ url_scheme=None,
):
"""Building URLs works pretty much the other way round. Instead of
`match` you call `build` and pass it the endpoint and a dict of
@@ -2137,9 +2138,6 @@ def build(
to specify the method you want to have an URL built for if you have
different methods for the same endpoint specified.
- .. versionadded:: 0.6
- the `append_unknown` parameter was added.
-
:param endpoint: the endpoint of the URL to build.
:param values: the values for the URL to build. Unhandled values are
appended to the URL as query parameters.
@@ -2151,6 +2149,14 @@ def build(
:param append_unknown: unknown parameters are appended to the generated
URL as query string argument. Disable this
if you want the builder to ignore those.
+ :param url_scheme: Scheme to use in place of the bound
+ :attr:`url_scheme`.
+
+ .. versionadded:: 2.0
+ Added the ``url_scheme`` parameter.
+
+ .. versionadded:: 0.6
+ Added the ``append_unknown`` parameter.
"""
self.map.update()
@@ -2181,10 +2187,12 @@ def build(
domain_part, path, websocket = rv
host = self.get_host(domain_part)
+ if url_scheme is None:
+ url_scheme = self.url_scheme
+
# Always build WebSocket routes with the scheme (browsers
# require full URLs). If bound to a WebSocket, ensure that HTTP
# routes are built with an HTTP scheme.
- url_scheme = self.url_scheme
secure = url_scheme in {"https", "wss"}
if websocket:
diff --git a/tests/test_routing.py b/tests/test_routing.py
index 0ba9bcf26..34f171c43 100644
--- a/tests/test_routing.py
+++ b/tests/test_routing.py
@@ -235,11 +235,16 @@ def test_basic_building():
adapter = map.bind("example.org", url_scheme="")
assert adapter.build("foo", {}) == "/foo"
assert adapter.build("foo", {}, force_external=True) == "//example.org/foo"
+ assert (
+ adapter.build("foo", {}, url_scheme="https", force_external=True)
+ == "https://example.org/foo"
+ )
adapter = map.bind("example.org", url_scheme="ws")
assert adapter.build("ws", {}) == "ws://example.org/ws"
assert adapter.build("foo", {}, force_external=True) == "http://example.org/foo"
assert adapter.build("foo", {}) == "/foo"
+ assert adapter.build("ws", {}, url_scheme="https") == "wss://example.org/ws"
def test_long_build():
From b5c5416a50f11d80ded7d0c2aec795b1c3533f3b Mon Sep 17 00:00:00 2001
From: pgjones
Date: Sat, 8 Feb 2020 21:17:04 +0000
Subject: [PATCH 127/733] Only allow a single Allow Origin (Access Control)
value
The relevant specification text is,
Rather than allowing a space-separated list of origins, it is
either a single origin or the string "null".[0]
and
Note: null should not be used[1]
it is clear that the previous HeaderSet usage was wrong. (Also note
that the value is case sensitive)[0].
[0]: https://www.w3.org/TR/cors/
[1]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Origin
---
CHANGES.rst | 1 +
src/werkzeug/wrappers/cors.py | 4 +---
2 files changed, 2 insertions(+), 3 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 4768cd0ce..68757de52 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -7,6 +7,7 @@ Unreleased
- Make the argument to ``RequestRedirect.get_response`` optional.
:issue:`1718`
+- Only allow a single access control allow origin value. :pr:`1723`
Version 1.0.0
diff --git a/src/werkzeug/wrappers/cors.py b/src/werkzeug/wrappers/cors.py
index 790e50e9f..502fcf17f 100644
--- a/src/werkzeug/wrappers/cors.py
+++ b/src/werkzeug/wrappers/cors.py
@@ -82,9 +82,7 @@ def access_control_allow_credentials(self, value):
access_control_allow_origin = header_property(
"Access-Control-Allow-Origin",
- load_func=parse_set_header,
- dump_func=dump_header,
- doc="The origins that may make cross origin requests.",
+ doc="The origin or '*' for any origin that may make cross origin requests.",
)
access_control_expose_headers = header_property(
From 88aa39239f4a03605cfbd57029cc87a5139788a4 Mon Sep 17 00:00:00 2001
From: lathamfell
Date: Mon, 10 Feb 2020 11:27:38 -0800
Subject: [PATCH 128/733] Add VSCode local settings dir to .gitignore
---
.gitignore | 1 +
1 file changed, 1 insertion(+)
diff --git a/.gitignore b/.gitignore
index d2d81e7f2..23c42f03c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -21,3 +21,4 @@ test_uwsgi_failed
.idea
.pytest_cache/
venv/
+.vscode
From b885fd9513156d428e8caf8b4b4c7b89d266edf4 Mon Sep 17 00:00:00 2001
From: lathamfell
Date: Mon, 10 Feb 2020 11:40:26 -0800
Subject: [PATCH 129/733] Include pytest-timeout in setup.py
---
setup.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/setup.py b/setup.py
index 157d884ab..de09ac3c7 100644
--- a/setup.py
+++ b/setup.py
@@ -57,6 +57,7 @@
"watchdog": ["watchdog"],
"dev": [
"pytest",
+ "pytest-timeout",
"coverage",
"tox",
"sphinx",
From d14c209a39f42c8626e4e60c75dbba6e73f5f4ec Mon Sep 17 00:00:00 2001
From: David Lord
Date: Mon, 10 Feb 2020 15:08:55 -0800
Subject: [PATCH 130/733] test client accepts multiple values for a header
Co-authored-by: Aaron Schuman
---
CHANGES.rst | 4 ++++
src/werkzeug/test.py | 8 +++++++-
tests/test_test.py | 10 ++++++++++
3 files changed, 21 insertions(+), 1 deletion(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 3f363c025..3880345ba 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -7,6 +7,10 @@ Unreleased
- Add a ``url_scheme`` argument to :meth:`~routing.MapAdapter.build`
to override the bound scheme. :pr:`1721`
+- When passing a ``Headers`` object to a test client method or
+ ``EnvironBuilder``, multiple values for a key are joined into one
+ comma separated value. This matches the HTTP spec on multi-value
+ headers. :issue:`1655`
Version 1.0.1
diff --git a/src/werkzeug/test.py b/src/werkzeug/test.py
index c5ce50a06..dc2220754 100644
--- a/src/werkzeug/test.py
+++ b/src/werkzeug/test.py
@@ -10,6 +10,7 @@
"""
import mimetypes
import sys
+from collections import defaultdict
from io import BytesIO
from itertools import chain
from random import random
@@ -740,8 +741,13 @@ def _path_encode(x):
result["CONTENT_LENGTH"] = str(content_length)
headers.set("Content-Length", content_length)
+ combined_headers = defaultdict(list)
+
for key, value in headers.to_wsgi_list():
- result["HTTP_%s" % key.upper().replace("-", "_")] = value
+ combined_headers["HTTP_%s" % key.upper().replace("-", "_")].append(value)
+
+ for key, values in combined_headers.items():
+ result[key] = ", ".join(values)
if self.environ_overrides:
result.update(self.environ_overrides)
diff --git a/tests/test_test.py b/tests/test_test.py
index 26a9b47df..445a0bbc6 100644
--- a/tests/test_test.py
+++ b/tests/test_test.py
@@ -20,6 +20,7 @@
from werkzeug._compat import iteritems
from werkzeug._compat import to_bytes
from werkzeug.datastructures import FileStorage
+from werkzeug.datastructures import Headers
from werkzeug.datastructures import MultiDict
from werkzeug.formparser import parse_form_data
from werkzeug.test import Client
@@ -224,6 +225,15 @@ def test_environ_builder_headers_content_type():
assert "CONTENT_TYPE" not in env
+def test_envrion_builder_multiple_headers():
+ h = Headers()
+ h.add("FOO", "bar")
+ h.add("FOO", "baz")
+ b = EnvironBuilder(headers=h)
+ env = b.get_environ()
+ assert env["HTTP_FOO"] == "bar, baz"
+
+
def test_environ_builder_paths():
b = EnvironBuilder(path="/foo", base_url="http://example.com/")
strict_eq(b.base_url, "http://example.com/")
From 84b2418ff1d7a5a4c5ef48fe59df7c5aa1898e7c Mon Sep 17 00:00:00 2001
From: pgjones
Date: Tue, 11 Feb 2020 21:43:23 +0000
Subject: [PATCH 131/733] Fix csp parsing
There may not be a content security policy header to parse, in which
case the value by default is None. Therefore rather than erroring this
change returns an empty ContentSecurityPolicy datastructure.
(This is the same logic as for Cache Control headers).
---
CHANGES.rst | 2 ++
src/werkzeug/http.py | 2 ++
2 files changed, 4 insertions(+)
diff --git a/CHANGES.rst b/CHANGES.rst
index 68757de52..6c77093cc 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -8,6 +8,8 @@ Unreleased
- Make the argument to ``RequestRedirect.get_response`` optional.
:issue:`1718`
- Only allow a single access control allow origin value. :pr:`1723`
+- Fix crash when trying to parse a non-existent Content Security
+ Policy header. :pr:`1731`
Version 1.0.0
diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py
index b428ceeb2..954b1f0ec 100644
--- a/src/werkzeug/http.py
+++ b/src/werkzeug/http.py
@@ -540,6 +540,8 @@ def parse_csp_header(value, on_update=None, cls=None):
if cls is None:
cls = ContentSecurityPolicy
+ if value is None:
+ return cls(None, on_update)
items = []
for policy in value.split(";"):
policy = policy.strip()
From 492b666266c9b67a695476ead616a06e1f92c0d0 Mon Sep 17 00:00:00 2001
From: Manuel Gundlach
Date: Thu, 20 Feb 2020 19:42:06 +0100
Subject: [PATCH 132/733] Update documentation of SharedDataMiddleware
Let the doc of SharedDataMiddleware reflect its move to middleware.shared_data
---
src/werkzeug/middleware/shared_data.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/werkzeug/middleware/shared_data.py b/src/werkzeug/middleware/shared_data.py
index ab4ff0ffc..b6e37641e 100644
--- a/src/werkzeug/middleware/shared_data.py
+++ b/src/werkzeug/middleware/shared_data.py
@@ -35,7 +35,7 @@ class SharedDataMiddleware(object):
environments or simple server setups. Usage is quite simple::
import os
- from werkzeug.wsgi import SharedDataMiddleware
+ from werkzeug.middleware.shared_data import SharedDataMiddleware
app = SharedDataMiddleware(app, {
'/static': os.path.join(os.path.dirname(__file__), 'static')
From e97a7363dab9b150bfdfcc00111e78bf5bd8d41c Mon Sep 17 00:00:00 2001
From: Manuel Gundlach
Date: Thu, 20 Feb 2020 19:45:47 +0100
Subject: [PATCH 133/733] Update tutorial
Let the tutorial reflect the move of SharedDataMiddleware to middleware.shared_data
---
docs/tutorial.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/tutorial.rst b/docs/tutorial.rst
index d90f986ad..9dfce24c2 100644
--- a/docs/tutorial.rst
+++ b/docs/tutorial.rst
@@ -127,7 +127,7 @@ if they are not used right away, to keep it from being confusing::
from werkzeug.wrappers import Request, Response
from werkzeug.routing import Map, Rule
from werkzeug.exceptions import HTTPException, NotFound
- from werkzeug.wsgi import SharedDataMiddleware
+ from werkzeug.middleware.shared_data import SharedDataMiddleware
from werkzeug.utils import redirect
from jinja2 import Environment, FileSystemLoader
From fd22f73dbf48c8f3f2b38b44ff6c7c40d2ca560c Mon Sep 17 00:00:00 2001
From: Io Mintz
Date: Mon, 24 Feb 2020 17:43:29 -0600
Subject: [PATCH 134/733] setup.py: docs requires sphinxcontrib-log-cabinet
---
setup.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/setup.py b/setup.py
index 157d884ab..a01bfbbd9 100644
--- a/setup.py
+++ b/setup.py
@@ -61,6 +61,7 @@
"tox",
"sphinx",
"pallets-sphinx-themes",
+ "sphinxcontrib-log-cabinet",
"sphinx-issues",
],
},
From c62f2bd4937ece43eeb00874d4ce8541999b50e0 Mon Sep 17 00:00:00 2001
From: northernSage
Date: Sun, 8 Mar 2020 16:08:49 -0300
Subject: [PATCH 135/733] change old urlparse (renamed in python 3) call to
werkzeug's url_parse
---
docs/tutorial.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/tutorial.rst b/docs/tutorial.rst
index d90f986ad..946b5fc81 100644
--- a/docs/tutorial.rst
+++ b/docs/tutorial.rst
@@ -123,7 +123,7 @@ if they are not used right away, to keep it from being confusing::
import os
import redis
- import urlparse
+ from werkzeug.urls import url_parse
from werkzeug.wrappers import Request, Response
from werkzeug.routing import Map, Rule
from werkzeug.exceptions import HTTPException, NotFound
From f248ed9b112e16286592eec16d3271aa1d5c67c4 Mon Sep 17 00:00:00 2001
From: northernSage
Date: Sun, 8 Mar 2020 16:11:46 -0300
Subject: [PATCH 136/733] change old comma-separated raise syntax to new ('as')
one
---
docs/tutorial.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/tutorial.rst b/docs/tutorial.rst
index 946b5fc81..12dc904f1 100644
--- a/docs/tutorial.rst
+++ b/docs/tutorial.rst
@@ -259,7 +259,7 @@ The way we will do it in this tutorial is by calling the method ``on_``
try:
endpoint, values = adapter.match()
return getattr(self, 'on_' + endpoint)(request, **values)
- except HTTPException, e:
+ except HTTPException as e:
return e
We bind the URL map to the current environment and get back a
From 37f2dbff7d973165ee085bd8a219182b23fe8d61 Mon Sep 17 00:00:00 2001
From: northernSage
Date: Sun, 8 Mar 2020 16:15:05 -0300
Subject: [PATCH 137/733] change urlparse call to werkzeug's url_parse
---
docs/tutorial.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/tutorial.rst b/docs/tutorial.rst
index 12dc904f1..60d2f729f 100644
--- a/docs/tutorial.rst
+++ b/docs/tutorial.rst
@@ -306,7 +306,7 @@ we need to write a function and a helper method. For URL validation this
is good enough::
def is_valid_url(url):
- parts = urlparse.urlparse(url)
+ parts = url_parse(url)
return parts.scheme in ('http', 'https')
For inserting the URL, all we need is this little method on our class::
From 89c43a73b1730c8634a8ea48f06ceccb534fb01c Mon Sep 17 00:00:00 2001
From: northernSage
Date: Tue, 10 Mar 2020 19:47:19 -0300
Subject: [PATCH 138/733] update reference code to import StringIO function
from io module
---
docs/quickstart.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/quickstart.rst b/docs/quickstart.rst
index 125787669..84cb17094 100644
--- a/docs/quickstart.rst
+++ b/docs/quickstart.rst
@@ -77,7 +77,7 @@ was transmitted in a POST/PUT request.
For testing purposes we can create a request object from supplied data
using the :meth:`~BaseRequest.from_values` method:
->>> from cStringIO import StringIO
+>>> from io import StringIO
>>> data = "name=this+is+encoded+form+data&another_key=another+one"
>>> request = Request.from_values(query_string='foo=bar&blah=blafasel',
... content_length=len(data), input_stream=StringIO(data),
From c5cce98338d736971eafa2c673ee912d347a623b Mon Sep 17 00:00:00 2001
From: David Lord
Date: Tue, 17 Mar 2020 07:57:37 -0700
Subject: [PATCH 139/733] skip watchdog tests on windows
---
tests/test_serving.py | 40 +++++++++++++++++++---------------------
1 file changed, 19 insertions(+), 21 deletions(-)
diff --git a/tests/test_serving.py b/tests/test_serving.py
index f605f7676..cd4283039 100644
--- a/tests/test_serving.py
+++ b/tests/test_serving.py
@@ -9,8 +9,8 @@
:license: BSD-3-Clause
"""
import os
+import platform
import socket
-import ssl
import subprocess
import sys
import textwrap
@@ -39,6 +39,15 @@
import httplib
+require_cryptography = pytest.mark.skipif(
+ cryptography is None, reason="cryptography not installed"
+)
+require_watchdog = pytest.mark.skipif(watchdog is None, reason="watchdog not installed")
+skip_windows = pytest.mark.skipif(
+ platform.system() == "Windows", reason="unreliable on Windows"
+)
+
+
def test_serving(dev_server):
server = dev_server("from werkzeug.testapp import test_app as app")
rv = requests.get("http://%s/?foo=bar&baz=blah" % server.addr).content
@@ -97,13 +106,7 @@ def app(environ, start_response):
assert "Internal Server Error" in r.text
-@pytest.mark.skipif(
- not hasattr(ssl, "SSLContext"),
- reason="Missing PEP 466 (Python 2.7.9+) or Python 3.",
-)
-@pytest.mark.skipif(
- cryptography is None, reason="cryptography is required for cert generation."
-)
+@require_cryptography
def test_stdlib_ssl_contexts(dev_server, tmpdir):
certificate, private_key = serving.make_ssl_devcert(str(tmpdir.mkdir("certs")))
@@ -126,7 +129,7 @@ def app(environ, start_response):
assert r.content == b"hello"
-@pytest.mark.skipif(cryptography is None, reason="cryptography is not installed.")
+@require_cryptography
def test_ssl_context_adhoc(dev_server):
server = dev_server(
"""
@@ -141,18 +144,15 @@ def app(environ, start_response):
assert r.content == b"hello"
-@pytest.mark.skipif(cryptography is None, reason="cryptography is not installed.")
+@require_cryptography
def test_make_ssl_devcert(tmpdir):
certificate, private_key = serving.make_ssl_devcert(str(tmpdir))
assert os.path.isfile(certificate)
assert os.path.isfile(private_key)
-@pytest.mark.skipif(watchdog is None, reason="Watchdog not installed.")
-@pytest.mark.xfail(
- sys.version_info.major == 2 and sys.platform == "win32",
- reason="TODO fix test for Python 2 on Windows",
-)
+@require_watchdog
+@skip_windows
def test_reloader_broken_imports(tmpdir, dev_server):
# We explicitly assert that the server reloads on change, even though in
# this case the import could've just been retried. This is to assert
@@ -200,7 +200,8 @@ def real_app(environ, start_response):
assert r.content == b"hello"
-@pytest.mark.skipif(watchdog is None, reason="Watchdog not installed.")
+@require_watchdog
+@skip_windows
def test_reloader_nested_broken_imports(tmpdir, dev_server):
real_app = tmpdir.mkdir("real_app")
real_app.join("__init__.py").write("from real_app.sub import real_app")
@@ -242,11 +243,8 @@ def real_app(environ, start_response):
assert r.content == b"hello"
-@pytest.mark.skipif(watchdog is None, reason="Watchdog not installed.")
-@pytest.mark.xfail(
- sys.version_info.major == 2 and sys.platform == "win32",
- reason="TODO fix test for Python 2 on Windows",
-)
+@require_watchdog
+@skip_windows
def test_reloader_reports_correct_file(tmpdir, dev_server):
real_app = tmpdir.join("real_app.py")
real_app.write(
From 88ec24d54318b9454841e99c833108e6a2d22824 Mon Sep 17 00:00:00 2001
From: pgjones
Date: Sat, 4 Jan 2020 21:59:00 +0000
Subject: [PATCH 140/733] Drop support for Python 2.7
This removes the official support for 2.7 without making any code
changes. It allows for code changes to be made.
---
.azure-pipelines.yml | 7 -------
CHANGES.rst | 1 +
setup.cfg | 3 ---
setup.py | 5 +----
tox.ini | 2 +-
5 files changed, 3 insertions(+), 15 deletions(-)
diff --git a/.azure-pipelines.yml b/.azure-pipelines.yml
index eabcaf768..2c3465e55 100644
--- a/.azure-pipelines.yml
+++ b/.azure-pipelines.yml
@@ -22,13 +22,6 @@ strategy:
python.version: '3.7'
Python 3.6 Linux:
python.version: '3.6'
- Python 3.5 Linux:
- python.version: '3.5'
- Python 2.7 Linux:
- python.version: '2.7'
- Python 2.7 Windows:
- python.version: '2.7'
- vmImage: windows-latest
Docs:
TOXENV: docs
hasTestResults: 'false'
diff --git a/CHANGES.rst b/CHANGES.rst
index 3880345ba..b4f5a2c96 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -5,6 +5,7 @@ Version 2.0.0
Unreleased
+- Drop support for Python 2 and 3.5. :pr:`1693`
- Add a ``url_scheme`` argument to :meth:`~routing.MapAdapter.build`
to override the bound scheme. :pr:`1721`
- When passing a ``Headers`` object to a test client method or
diff --git a/setup.cfg b/setup.cfg
index 79d060d6f..3348dc305 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -2,9 +2,6 @@
license_file = LICENSE.rst
long_description_content_type = text/x-rst
-[bdist_wheel]
-universal = true
-
[tool:pytest]
testpaths = tests
norecursedirs = tests/hypothesis
diff --git a/setup.py b/setup.py
index a01bfbbd9..e96707dac 100644
--- a/setup.py
+++ b/setup.py
@@ -33,10 +33,7 @@
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
- "Programming Language :: Python :: 2",
- "Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
@@ -52,7 +49,7 @@
packages=find_packages("src"),
package_dir={"": "src"},
include_package_data=True,
- python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
+ python_requires=">=3.6",
extras_require={
"watchdog": ["watchdog"],
"dev": [
diff --git a/tox.ini b/tox.ini
index f98cbb7c8..d3ff05216 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,6 +1,6 @@
[tox]
envlist =
- py{38,37,36,35,27,py3,py}
+ py{38,37,36,py3}
style
docs
coverage
From 9777c0821eb1a5a645185e35f7523a8725897634 Mon Sep 17 00:00:00 2001
From: pgjones
Date: Sun, 5 Jan 2020 18:47:03 +0000
Subject: [PATCH 141/733] Drop _compat for Python 2
This is possible now that Python 2 is not supported. The code that is
still required has been moved to the _internal module and renamed with
a leading `_` as per the other functions. As this change is targeting
Werkzeug 2.0 the compatibility with 3.5 is dropped, 2.0 will be Python
3.6+.
---
examples/plnt/utils.py | 7 +-
src/werkzeug/_compat.py | 228 --------------------
src/werkzeug/_internal.py | 88 ++++++--
src/werkzeug/_reloader.py | 20 +-
src/werkzeug/datastructures.py | 172 ++++++---------
src/werkzeug/debug/__init__.py | 5 +-
src/werkzeug/debug/repr.py | 26 +--
src/werkzeug/debug/tbtools.py | 55 ++---
src/werkzeug/exceptions.py | 32 ++-
src/werkzeug/formparser.py | 9 +-
src/werkzeug/http.py | 47 ++--
src/werkzeug/local.py | 17 +-
src/werkzeug/middleware/lint.py | 11 +-
src/werkzeug/middleware/shared_data.py | 7 +-
src/werkzeug/posixemulation.py | 6 +-
src/werkzeug/routing.py | 63 +++---
src/werkzeug/security.py | 32 ++-
src/werkzeug/serving.py | 72 ++-----
src/werkzeug/test.py | 41 ++--
src/werkzeug/urls.py | 109 ++++------
src/werkzeug/utils.py | 37 ++--
src/werkzeug/wrappers/base_request.py | 19 +-
src/werkzeug/wrappers/base_response.py | 31 ++-
src/werkzeug/wrappers/common_descriptors.py | 3 +-
src/werkzeug/wrappers/etag.py | 3 +-
src/werkzeug/wrappers/json.py | 3 +-
src/werkzeug/wsgi.py | 56 ++---
tests/conftest.py | 4 +-
tests/middleware/test_dispatcher.py | 6 +-
tests/middleware/test_shared_data.py | 4 +-
tests/test_datastructures.py | 113 +++-------
tests/test_debug.py | 20 +-
tests/test_exceptions.py | 7 +-
tests/test_formparser.py | 51 ++---
tests/test_http.py | 7 +-
tests/test_test.py | 10 +-
tests/test_urls.py | 17 +-
tests/test_utils.py | 3 +-
tests/test_wrappers.py | 8 +-
tests/test_wsgi.py | 85 ++++----
40 files changed, 518 insertions(+), 1016 deletions(-)
delete mode 100644 src/werkzeug/_compat.py
diff --git a/examples/plnt/utils.py b/examples/plnt/utils.py
index 936d22efd..0bf4cc708 100644
--- a/examples/plnt/utils.py
+++ b/examples/plnt/utils.py
@@ -13,7 +13,6 @@
from jinja2 import Environment
from jinja2 import FileSystemLoader
-from werkzeug._compat import unichr
from werkzeug.local import Local
from werkzeug.local import LocalManager
from werkzeug.routing import Map
@@ -96,15 +95,15 @@ def strip_tags(s):
def handle_match(m):
name = m.group(1)
if name in html_entities:
- return unichr(html_entities[name])
+ return chr(html_entities[name])
if name[:2] in ("#x", "#X"):
try:
- return unichr(int(name[2:], 16))
+ return chr(int(name[2:], 16))
except ValueError:
return u""
elif name.startswith("#"):
try:
- return unichr(int(name[1:]))
+ return chr(int(name[1:]))
except ValueError:
return u""
return u""
diff --git a/src/werkzeug/_compat.py b/src/werkzeug/_compat.py
deleted file mode 100644
index 1d8c81040..000000000
--- a/src/werkzeug/_compat.py
+++ /dev/null
@@ -1,228 +0,0 @@
-# flake8: noqa
-# This whole file is full of lint errors
-import functools
-import operator
-import sys
-
-try:
- import builtins
-except ImportError:
- import __builtin__ as builtins
-
-
-PY2 = sys.version_info[0] == 2
-WIN = sys.platform.startswith("win")
-
-_identity = lambda x: x
-
-if PY2:
- unichr = unichr
- text_type = unicode
- string_types = (str, unicode)
- integer_types = (int, long)
-
- iterkeys = lambda d, *args, **kwargs: d.iterkeys(*args, **kwargs)
- itervalues = lambda d, *args, **kwargs: d.itervalues(*args, **kwargs)
- iteritems = lambda d, *args, **kwargs: d.iteritems(*args, **kwargs)
-
- iterlists = lambda d, *args, **kwargs: d.iterlists(*args, **kwargs)
- iterlistvalues = lambda d, *args, **kwargs: d.iterlistvalues(*args, **kwargs)
-
- int_to_byte = chr
- iter_bytes = iter
-
- import collections as collections_abc
-
- exec("def reraise(tp, value, tb=None):\n raise tp, value, tb")
-
- def fix_tuple_repr(obj):
- def __repr__(self):
- cls = self.__class__
- return "%s(%s)" % (
- cls.__name__,
- ", ".join(
- "%s=%r" % (field, self[index])
- for index, field in enumerate(cls._fields)
- ),
- )
-
- obj.__repr__ = __repr__
- return obj
-
- def implements_iterator(cls):
- cls.next = cls.__next__
- del cls.__next__
- return cls
-
- def implements_to_string(cls):
- cls.__unicode__ = cls.__str__
- cls.__str__ = lambda x: x.__unicode__().encode("utf-8")
- return cls
-
- def native_string_result(func):
- def wrapper(*args, **kwargs):
- return func(*args, **kwargs).encode("utf-8")
-
- return functools.update_wrapper(wrapper, func)
-
- def implements_bool(cls):
- cls.__nonzero__ = cls.__bool__
- del cls.__bool__
- return cls
-
- from itertools import imap, izip, ifilter
-
- range_type = xrange
-
- from StringIO import StringIO
- from cStringIO import StringIO as BytesIO
-
- NativeStringIO = BytesIO
-
- def make_literal_wrapper(reference):
- return _identity
-
- def normalize_string_tuple(tup):
- """Normalizes a string tuple to a common type. Following Python 2
- rules, upgrades to unicode are implicit.
- """
- if any(isinstance(x, text_type) for x in tup):
- return tuple(to_unicode(x) for x in tup)
- return tup
-
- def try_coerce_native(s):
- """Try to coerce a unicode string to native if possible. Otherwise,
- leave it as unicode.
- """
- try:
- return to_native(s)
- except UnicodeError:
- return s
-
- wsgi_get_bytes = _identity
-
- def wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
- return s.decode(charset, errors)
-
- def wsgi_encoding_dance(s, charset="utf-8", errors="replace"):
- if isinstance(s, bytes):
- return s
- return s.encode(charset, errors)
-
- def to_bytes(x, charset=sys.getdefaultencoding(), errors="strict"):
- if x is None:
- return None
- if isinstance(x, (bytes, bytearray, buffer)):
- return bytes(x)
- if isinstance(x, unicode):
- return x.encode(charset, errors)
- raise TypeError("Expected bytes")
-
- def to_native(x, charset=sys.getdefaultencoding(), errors="strict"):
- if x is None or isinstance(x, str):
- return x
- return x.encode(charset, errors)
-
-
-else:
- unichr = chr
- text_type = str
- string_types = (str,)
- integer_types = (int,)
-
- iterkeys = lambda d, *args, **kwargs: iter(d.keys(*args, **kwargs))
- itervalues = lambda d, *args, **kwargs: iter(d.values(*args, **kwargs))
- iteritems = lambda d, *args, **kwargs: iter(d.items(*args, **kwargs))
-
- iterlists = lambda d, *args, **kwargs: iter(d.lists(*args, **kwargs))
- iterlistvalues = lambda d, *args, **kwargs: iter(d.listvalues(*args, **kwargs))
-
- int_to_byte = operator.methodcaller("to_bytes", 1, "big")
- iter_bytes = functools.partial(map, int_to_byte)
-
- import collections.abc as collections_abc
-
- def reraise(tp, value, tb=None):
- if value.__traceback__ is not tb:
- raise value.with_traceback(tb)
- raise value
-
- fix_tuple_repr = _identity
- implements_iterator = _identity
- implements_to_string = _identity
- implements_bool = _identity
- native_string_result = _identity
- imap = map
- izip = zip
- ifilter = filter
- range_type = range
-
- from io import StringIO, BytesIO
-
- NativeStringIO = StringIO
-
- _latin1_encode = operator.methodcaller("encode", "latin1")
-
- def make_literal_wrapper(reference):
- if isinstance(reference, text_type):
- return _identity
- return _latin1_encode
-
- def normalize_string_tuple(tup):
- """Ensures that all types in the tuple are either strings
- or bytes.
- """
- tupiter = iter(tup)
- is_text = isinstance(next(tupiter, None), text_type)
- for arg in tupiter:
- if isinstance(arg, text_type) != is_text:
- raise TypeError(
- "Cannot mix str and bytes arguments (got %s)" % repr(tup)
- )
- return tup
-
- try_coerce_native = _identity
- wsgi_get_bytes = _latin1_encode
-
- def wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
- return s.encode("latin1").decode(charset, errors)
-
- def wsgi_encoding_dance(s, charset="utf-8", errors="replace"):
- if isinstance(s, text_type):
- s = s.encode(charset)
- return s.decode("latin1", errors)
-
- def to_bytes(x, charset=sys.getdefaultencoding(), errors="strict"):
- if x is None:
- return None
- if isinstance(x, (bytes, bytearray, memoryview)): # noqa
- return bytes(x)
- if isinstance(x, str):
- return x.encode(charset, errors)
- raise TypeError("Expected bytes")
-
- def to_native(x, charset=sys.getdefaultencoding(), errors="strict"):
- if x is None or isinstance(x, str):
- return x
- return x.decode(charset, errors)
-
-
-def to_unicode(
- x, charset=sys.getdefaultencoding(), errors="strict", allow_none_charset=False
-):
- if x is None:
- return None
- if not isinstance(x, bytes):
- return text_type(x)
- if charset is None and allow_none_charset:
- return x
- return x.decode(charset, errors)
-
-
-try:
- from os import fspath
-except ImportError:
- # Python < 3.6
- # https://www.python.org/dev/peps/pep-0519/#backwards-compatibility
- def fspath(path):
- return path.__fspath__() if hasattr(path, "__fspath__") else path
diff --git a/src/werkzeug/_internal.py b/src/werkzeug/_internal.py
index 1d2eaf5b1..b57fb79ff 100644
--- a/src/werkzeug/_internal.py
+++ b/src/werkzeug/_internal.py
@@ -10,19 +10,15 @@
"""
import inspect
import logging
+import operator
import re
import string
+import sys
from datetime import date
from datetime import datetime
from itertools import chain
from weakref import WeakKeyDictionary
-from ._compat import int_to_byte
-from ._compat import integer_types
-from ._compat import iter_bytes
-from ._compat import range_type
-from ._compat import text_type
-
_logger = None
_signature_cache = WeakKeyDictionary()
@@ -32,8 +28,10 @@
).encode("ascii")
_cookie_quoting_map = {b",": b"\\054", b";": b"\\073", b'"': b'\\"', b"\\": b"\\\\"}
-for _i in chain(range_type(32), range_type(127, 256)):
- _cookie_quoting_map[int_to_byte(_i)] = ("\\%03o" % _i).encode("latin1")
+for _i in chain(range(32), range(127, 256)):
+ _cookie_quoting_map[_i.to_bytes(1, sys.byteorder)] = ("\\%03o" % _i).encode(
+ "latin1"
+ )
_octal_re = re.compile(br"\\[0-3][0-7][0-7]")
_quote_re = re.compile(br"[\\].")
@@ -52,6 +50,8 @@
flags=re.VERBOSE,
)
+_WIN = sys.platform.startswith("win")
+
class _Missing(object):
def __repr__(self):
@@ -64,6 +64,69 @@ def __reduce__(self):
_missing = _Missing()
+def _make_literal_wrapper(reference):
+ if isinstance(reference, str):
+ return lambda x: x
+ return operator.methodcaller("encode", "latin1")
+
+
+def _normalize_string_tuple(tup):
+ """Ensures that all types in the tuple are either strings or bytes."""
+ tupiter = iter(tup)
+ is_text = isinstance(next(tupiter, None), str)
+ for arg in tupiter:
+ if isinstance(arg, str) != is_text:
+ raise TypeError("Cannot mix str and bytes arguments (got %s)" % repr(tup))
+ return tup
+
+
+def _to_bytes(x, charset=sys.getdefaultencoding(), errors="strict"): # noqa
+ if x is None:
+ return None
+ if isinstance(x, (bytes, bytearray, memoryview)): # noqa
+ return bytes(x)
+ if isinstance(x, str):
+ return x.encode(charset, errors)
+ raise TypeError("Expected bytes")
+
+
+def _to_native(x, charset=sys.getdefaultencoding(), errors="strict"): # noqa
+ if x is None or isinstance(x, str):
+ return x
+ return x.decode(charset, errors)
+
+
+def _to_unicode(
+ x,
+ charset=sys.getdefaultencoding(), # noqa
+ errors="strict",
+ allow_none_charset=False,
+):
+ if x is None:
+ return None
+ if not isinstance(x, bytes):
+ return str(x)
+ if charset is None and allow_none_charset:
+ return x
+ return x.decode(charset, errors)
+
+
+def _reraise(tp, value, tb=None):
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+
+
+def _wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
+ return s.encode("latin1").decode(charset, errors)
+
+
+def _wsgi_encoding_dance(s, charset="utf-8", errors="replace"):
+ if isinstance(s, str):
+ s = s.encode(charset)
+ return s.decode("latin1", errors)
+
+
def _get_environ(obj):
env = getattr(obj, "environ", obj)
assert isinstance(env, dict), (
@@ -197,7 +260,7 @@ def _date_to_unix(arg):
"""
if isinstance(arg, datetime):
arg = arg.utctimetuple()
- elif isinstance(arg, integer_types + (float,)):
+ elif isinstance(arg, (int, float)):
return int(arg)
year, month, day, hour, minute, second = arg[:6]
days = date(year, month, 1).toordinal() - _epoch_ord + day - 1
@@ -265,7 +328,8 @@ def _cookie_quote(b):
_lookup = _cookie_quoting_map.get
_push = buf.extend
- for char in iter_bytes(b):
+ for char_int in b:
+ char = char_int.to_bytes(1, sys.byteorder)
if char not in _legal_cookie_chars:
all_legal = False
char = _lookup(char, char)
@@ -331,7 +395,7 @@ def _cookie_parse_impl(b):
def _encode_idna(domain):
# If we're given bytes, make sure they fit into ASCII
- if not isinstance(domain, text_type):
+ if not isinstance(domain, str):
domain.decode("ascii")
return domain
@@ -352,7 +416,7 @@ def _decode_idna(domain):
# If the input is a string try to encode it to ascii to
# do the idna decoding. if that fails because of an
# unicode error, then we already have a decoded idna domain
- if isinstance(domain, text_type):
+ if isinstance(domain, str):
try:
domain = domain.encode("ascii")
except UnicodeError:
diff --git a/src/werkzeug/_reloader.py b/src/werkzeug/_reloader.py
index c2a9c8e05..05ab6f688 100644
--- a/src/werkzeug/_reloader.py
+++ b/src/werkzeug/_reloader.py
@@ -5,9 +5,6 @@
import time
from itertools import chain
-from ._compat import iteritems
-from ._compat import PY2
-from ._compat import text_type
from ._internal import _log
@@ -133,7 +130,7 @@ def _find_common_roots(paths):
rv = set()
def _walk(node, path):
- for prefix, child in iteritems(node):
+ for prefix, child in iter(node.items()):
_walk(child, path + (prefix,))
if not node:
rv.add("/".join(path))
@@ -165,20 +162,7 @@ def restart_with_reloader(self):
_log("info", " * Restarting with %s" % self.name)
args = _get_args_for_reloading()
- # a weird bug on windows. sometimes unicode strings end up in the
- # environment and subprocess.call does not like this, encode them
- # to latin1 and continue.
- if os.name == "nt" and PY2:
- new_environ = {}
- for key, value in iteritems(os.environ):
- if isinstance(key, text_type):
- key = key.encode("iso-8859-1")
- if isinstance(value, text_type):
- value = value.encode("iso-8859-1")
- new_environ[key] = value
- else:
- new_environ = os.environ.copy()
-
+ new_environ = os.environ.copy()
new_environ["WERKZEUG_RUN_MAIN"] = "true"
exit_code = subprocess.call(args, env=new_environ, close_fds=False)
if exit_code != 3:
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index 1cda034fc..98e112f25 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -11,23 +11,16 @@
import codecs
import mimetypes
import re
+from collections.abc import Container
+from collections.abc import Iterable
+from collections.abc import MutableSet
from copy import deepcopy
+from io import BytesIO
from itertools import repeat
+from os import fspath
from . import exceptions
-from ._compat import BytesIO
-from ._compat import collections_abc
-from ._compat import fspath
-from ._compat import integer_types
-from ._compat import iteritems
-from ._compat import iterkeys
-from ._compat import iterlists
-from ._compat import itervalues
-from ._compat import make_literal_wrapper
-from ._compat import PY2
-from ._compat import string_types
-from ._compat import text_type
-from ._compat import to_native
+from ._internal import _make_literal_wrapper
from ._internal import _missing
from .filesystem import get_filesystem_encoding
@@ -41,10 +34,10 @@ def iter_multi_items(mapping):
without dropping any from more complex structures.
"""
if isinstance(mapping, MultiDict):
- for item in iteritems(mapping, multi=True):
+ for item in iter(mapping.items(multi=True)):
yield item
elif isinstance(mapping, dict):
- for key, value in iteritems(mapping):
+ for key, value in iter(mapping.items()):
if isinstance(value, (tuple, list)):
for v in value:
yield key, v
@@ -55,44 +48,6 @@ def iter_multi_items(mapping):
yield item
-def native_itermethods(names):
- if not PY2:
- return lambda x: x
-
- def setviewmethod(cls, name):
- viewmethod_name = "view%s" % name
- repr_name = "view_%s" % name
-
- def viewmethod(self, *a, **kw):
- return ViewItems(self, name, repr_name, *a, **kw)
-
- viewmethod.__name__ = viewmethod_name
- viewmethod.__doc__ = "`%s()` object providing a view on %s" % (
- viewmethod_name,
- name,
- )
- setattr(cls, viewmethod_name, viewmethod)
-
- def setitermethod(cls, name):
- itermethod = getattr(cls, name)
- setattr(cls, "iter%s" % name, itermethod)
-
- def listmethod(self, *a, **kw):
- return list(itermethod(self, *a, **kw))
-
- listmethod.__name__ = name
- listmethod.__doc__ = "Like :py:meth:`iter%s`, but returns a list." % name
- setattr(cls, name, listmethod)
-
- def wrap(cls):
- for name in names:
- setitermethod(cls, name)
- setviewmethod(cls, name)
- return cls
-
- return wrap
-
-
class ImmutableListMixin(object):
"""Makes a :class:`list` immutable.
@@ -176,7 +131,7 @@ def __reduce_ex__(self, protocol):
return type(self), (dict(self),)
def _iter_hashitems(self):
- return iteritems(self)
+ return iter(self.items())
def __hash__(self):
if self._hash_cache is not None:
@@ -215,10 +170,10 @@ class ImmutableMultiDictMixin(ImmutableDictMixin):
"""
def __reduce_ex__(self, protocol):
- return type(self), (list(iteritems(self, multi=True)),)
+ return type(self), (list(iter(self.items(multi=True))),)
def _iter_hashitems(self):
- return iteritems(self, multi=True)
+ return iter(self.items(multi=True))
def add(self, key, value):
is_immutable(self)
@@ -358,7 +313,6 @@ def __iter__(self):
return iter(self.__get_items())
-@native_itermethods(["keys", "values", "items", "lists", "listvalues"])
class MultiDict(TypeConversionDict):
"""A :class:`MultiDict` is a dictionary subclass customized to deal with
multiple values for the same key which is for example used by the parsing
@@ -402,10 +356,10 @@ class MultiDict(TypeConversionDict):
def __init__(self, mapping=None):
if isinstance(mapping, MultiDict):
- dict.__init__(self, ((k, l[:]) for k, l in iterlists(mapping)))
+ dict.__init__(self, ((k, l[:]) for k, l in iter(mapping.lists())))
elif isinstance(mapping, dict):
tmp = {}
- for key, value in iteritems(mapping):
+ for key, value in iter(mapping.items()):
if isinstance(value, (tuple, list)):
if len(value) == 0:
continue
@@ -549,7 +503,7 @@ def items(self, multi=False):
contain pairs for the first value of each key.
"""
- for key, values in iteritems(dict, self):
+ for key, values in iter(dict.items(self)):
if multi:
for value in values:
yield key, value
@@ -560,17 +514,17 @@ def lists(self):
"""Return a iterator of ``(key, values)`` pairs, where values is the list
of all values associated with the key."""
- for key, values in iteritems(dict, self):
+ for key, values in iter(dict.items(self)):
yield key, list(values)
def keys(self):
- return iterkeys(dict, self)
+ return iter(dict.keys(self))
__iter__ = keys
def values(self):
"""Returns an iterator of the first value on every key's value list."""
- for values in itervalues(dict, self):
+ for values in iter(dict.values(self)):
yield values[0]
def listvalues(self):
@@ -582,7 +536,7 @@ def listvalues(self):
True
"""
- return itervalues(dict, self)
+ return iter(dict.values(self))
def copy(self):
"""Return a shallow copy of this object."""
@@ -603,7 +557,7 @@ def to_dict(self, flat=True):
:return: a :class:`dict`
"""
if flat:
- return dict(iteritems(self))
+ return dict(iter(self.items()))
return dict(self.lists())
def update(self, other_dict):
@@ -689,7 +643,7 @@ def __deepcopy__(self, memo):
return self.deepcopy(memo=memo)
def __repr__(self):
- return "%s(%r)" % (self.__class__.__name__, list(iteritems(self, multi=True)))
+ return "%s(%r)" % (self.__class__.__name__, list(iter(self.items(multi=True))))
class _omd_bucket(object):
@@ -724,7 +678,6 @@ def unlink(self, omd):
omd._last_bucket = self.prev
-@native_itermethods(["keys", "values", "items", "lists", "listvalues"])
class OrderedMultiDict(MultiDict):
"""Works like a regular :class:`MultiDict` but preserves the
order of the fields. To convert the ordered multi dict into a
@@ -751,8 +704,8 @@ def __eq__(self, other):
if not isinstance(other, MultiDict):
return NotImplemented
if isinstance(other, OrderedMultiDict):
- iter1 = iteritems(self, multi=True)
- iter2 = iteritems(other, multi=True)
+ iter1 = iter(self.items(multi=True))
+ iter2 = iter(other.items(multi=True))
try:
for k1, v1 in iter1:
k2, v2 = next(iter2)
@@ -767,7 +720,7 @@ def __eq__(self, other):
return False
if len(self) != len(other):
return False
- for key, values in iterlists(self):
+ for key, values in iter(self.lists()):
if other.getlist(key) != values:
return False
return True
@@ -778,10 +731,10 @@ def __ne__(self, other):
return not self.__eq__(other)
def __reduce_ex__(self, protocol):
- return type(self), (list(iteritems(self, multi=True)),)
+ return type(self), (list(iter(self.items(multi=True))),)
def __getstate__(self):
- return list(iteritems(self, multi=True))
+ return list(iter(self.items(multi=True)))
def __setstate__(self, values):
dict.clear(self)
@@ -801,12 +754,12 @@ def __delitem__(self, key):
self.pop(key)
def keys(self):
- return (key for key, value in iteritems(self))
+ return (key for key, value in iter(self.items()))
__iter__ = keys
def values(self):
- return (value for key, value in iteritems(self))
+ return (value for key, value in iter(self.items()))
def items(self, multi=False):
ptr = self._first_bucket
@@ -832,7 +785,7 @@ def lists(self):
ptr = ptr.next
def listvalues(self):
- for _key, values in iterlists(self):
+ for _key, values in iter(self.lists()):
yield values
def add(self, key, value):
@@ -910,12 +863,11 @@ def _options_header_vkw(value, kw):
def _unicodify_header_value(value):
if isinstance(value, bytes):
value = value.decode("latin-1")
- if not isinstance(value, text_type):
- value = text_type(value)
+ if not isinstance(value, str):
+ value = str(value)
return value
-@native_itermethods(["keys", "values", "items"])
class Headers(object):
"""An object that stores some headers. It has a dict-like interface
but is ordered and can store the same keys multiple times.
@@ -959,11 +911,11 @@ def __init__(self, defaults=None):
def __getitem__(self, key, _get_mode=False):
if not _get_mode:
- if isinstance(key, integer_types):
+ if isinstance(key, int):
return self._list[key]
elif isinstance(key, slice):
return self.__class__(self._list[key])
- if not isinstance(key, string_types):
+ if not isinstance(key, str):
raise exceptions.BadRequestKeyError(key)
ikey = key.lower()
for k, v in self._list:
@@ -1073,11 +1025,11 @@ def items(self, lower=False):
yield key, value
def keys(self, lower=False):
- for key, _ in iteritems(self, lower):
+ for key, _ in iter(self.items(lower)):
yield key
def values(self):
- for _, value in iteritems(self):
+ for _, value in iter(self.items()):
yield value
def extend(self, *args, **kwargs):
@@ -1105,7 +1057,7 @@ def extend(self, *args, **kwargs):
self.add(key, value)
def __delitem__(self, key, _index_operation=True):
- if _index_operation and isinstance(key, (integer_types, slice)):
+ if _index_operation and isinstance(key, (int, slice)):
del self._list[key]
return
key = key.lower()
@@ -1133,7 +1085,7 @@ def pop(self, key=None, default=_missing):
"""
if key is None:
return self._list.pop()
- if isinstance(key, integer_types):
+ if isinstance(key, int):
return self._list.pop(key)
try:
rv = self[key]
@@ -1189,7 +1141,7 @@ def add(self, _key, _value, **kw):
self._list.append((_key, _value))
def _validate_value(self, value):
- if not isinstance(value, text_type):
+ if not isinstance(value, str):
raise TypeError("Value should be unicode.")
if u"\n" in value or u"\r" in value:
raise ValueError(
@@ -1297,15 +1249,15 @@ def setlistdefault(self, key, default):
def __setitem__(self, key, value):
"""Like :meth:`set` but also supports index/slice based setting."""
- if isinstance(key, (slice, integer_types)):
- if isinstance(key, integer_types):
+ if isinstance(key, (slice, int)):
+ if isinstance(key, int):
value = [value]
value = [
(_unicodify_header_value(k), _unicodify_header_value(v))
for (k, v) in value
]
[self._validate_value(v) for (k, v) in value]
- if isinstance(key, integer_types):
+ if isinstance(key, int):
self._list[key] = value[0]
else:
self._list[key] = value
@@ -1335,7 +1287,7 @@ def update(self, *args, **kwargs):
for key in mapping.keys():
self.setlist(key, mapping.getlist(key))
elif isinstance(mapping, dict):
- for key, value in iteritems(mapping):
+ for key, value in mapping.items():
if isinstance(value, (list, tuple)):
self.setlist(key, value)
else:
@@ -1344,7 +1296,7 @@ def update(self, *args, **kwargs):
for key, value in mapping:
self.set(key, value)
- for key, value in iteritems(kwargs):
+ for key, value in kwargs.items():
if isinstance(value, (list, tuple)):
self.setlist(key, value)
else:
@@ -1358,8 +1310,6 @@ def to_wsgi_list(self):
:return: list
"""
- if PY2:
- return [(to_native(k), v.encode("latin1")) for k, v in self]
return list(self)
def copy(self):
@@ -1455,7 +1405,7 @@ def __eq__(self, other):
def __getitem__(self, key, _get_mode=False):
# _get_mode is a no-op for this class as there is no index but
# used because get() calls it.
- if not isinstance(key, string_types):
+ if not isinstance(key, str):
raise KeyError(key)
key = key.upper().replace("-", "_")
if key in ("CONTENT_TYPE", "CONTENT_LENGTH"):
@@ -1468,7 +1418,7 @@ def __len__(self):
return len(list(iter(self)))
def __iter__(self):
- for key, value in iteritems(self.environ):
+ for key, value in iter(self.environ.items()):
if key.startswith("HTTP_") and key not in (
"HTTP_CONTENT_TYPE",
"HTTP_CONTENT_LENGTH",
@@ -1484,7 +1434,6 @@ def copy(self):
raise TypeError("cannot create %r copies" % self.__class__.__name__)
-@native_itermethods(["keys", "values", "items", "lists", "listvalues"])
class CombinedMultiDict(ImmutableMultiDictMixin, MultiDict):
"""A read only :class:`MultiDict` that you can pass multiple :class:`MultiDict`
instances as sequence and it will combine the return values of all wrapped
@@ -1550,7 +1499,7 @@ def _keys_impl(self):
"""
rv = set()
for d in self.dicts:
- rv.update(iterkeys(d))
+ rv.update(iter(d.keys()))
return rv
def keys(self):
@@ -1561,7 +1510,7 @@ def keys(self):
def items(self, multi=False):
found = set()
for d in self.dicts:
- for key, value in iteritems(d, multi):
+ for key, value in iter(d.items(multi)):
if multi:
yield key, value
elif key not in found:
@@ -1569,15 +1518,15 @@ def items(self, multi=False):
yield key, value
def values(self):
- for _key, value in iteritems(self):
+ for _key, value in iter(self.items()):
yield value
def lists(self):
rv = {}
for d in self.dicts:
- for key, values in iterlists(d):
+ for key, values in iter(d.lists()):
rv.setdefault(key, []).extend(values)
- return iteritems(rv)
+ return iter(rv.items())
def listvalues(self):
return (x[1] for x in self.lists())
@@ -1644,7 +1593,7 @@ def add_file(self, name, file, filename=None, content_type=None):
if isinstance(file, FileStorage):
value = file
else:
- if isinstance(file, string_types):
+ if isinstance(file, str):
if filename is None:
filename = file
file = open(file, "rb")
@@ -1701,7 +1650,7 @@ class ImmutableOrderedMultiDict(ImmutableMultiDictMixin, OrderedMultiDict):
"""
def _iter_hashitems(self):
- return enumerate(iteritems(self, multi=True))
+ return enumerate(iter(self.items(multi=True)))
def copy(self):
"""Return a shallow mutable copy of this object. Keep in mind that
@@ -1714,7 +1663,6 @@ def __copy__(self):
return self
-@native_itermethods(["values"])
class Accept(ImmutableList):
"""An :class:`Accept` object is just a list subclass for lists of
``(value, quality)`` tuples. It is automatically sorted by specificity
@@ -1778,7 +1726,7 @@ def __getitem__(self, key):
to get the quality for the item. If the item is not in the list, the
returned quality is ``0``.
"""
- if isinstance(key, string_types):
+ if isinstance(key, str):
return self.quality(key)
return list.__getitem__(self, key)
@@ -1815,7 +1763,7 @@ def index(self, key):
This used to raise :exc:`IndexError`, which was inconsistent
with the list API.
"""
- if isinstance(key, string_types):
+ if isinstance(key, str):
for idx, (item, _quality) in enumerate(self):
if self._value_matches(key, item):
return idx
@@ -2289,7 +2237,7 @@ def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, dict.__repr__(self))
-class HeaderSet(collections_abc.MutableSet):
+class HeaderSet(MutableSet):
"""Similar to the :class:`ETags` class this implements a set-like structure.
Unlike :class:`ETags` this is case insensitive and used for vary, allow, and
content-language headers.
@@ -2439,7 +2387,7 @@ def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self._headers)
-class ETags(collections_abc.Container, collections_abc.Iterable):
+class ETags(Container, Iterable):
"""A set that can be used to check if one etag is present in a collection
of etags.
"""
@@ -2847,7 +2795,7 @@ def to_header(self):
value, allow_token=key not in self._require_quoting
),
)
- for key, value in iteritems(d)
+ for key, value in iter(d.items())
]
),
)
@@ -2983,14 +2931,14 @@ def __init__(
# special filenames with angular brackets.
if filename is None:
filename = getattr(stream, "name", None)
- s = make_literal_wrapper(filename)
+ s = _make_literal_wrapper(filename)
if filename and filename[0] == s("<") and filename[-1] == s(">"):
filename = None
- # On Python 3 we want to make sure the filename is always unicode.
+ # We want to make sure the filename is always unicode.
# This might not be if the name attribute is bytes due to the
# file being opened from the bytes API.
- if not PY2 and isinstance(filename, bytes):
+ if isinstance(filename, bytes):
filename = filename.decode(get_filesystem_encoding(), "replace")
self.filename = filename
@@ -3060,7 +3008,7 @@ def save(self, dst, buffer_size=16384):
close_dst = False
dst = fspath(dst)
- if isinstance(dst, string_types):
+ if isinstance(dst, str):
dst = open(dst, "wb")
close_dst = True
diff --git a/src/werkzeug/debug/__init__.py b/src/werkzeug/debug/__init__.py
index f9f6e8531..575fa5b00 100644
--- a/src/werkzeug/debug/__init__.py
+++ b/src/werkzeug/debug/__init__.py
@@ -22,7 +22,6 @@
from os.path import basename
from os.path import join
-from .._compat import text_type
from .._internal import _log
from ..http import parse_cookie
from ..security import gen_salt
@@ -37,7 +36,7 @@
def hash_pin(pin):
- if isinstance(pin, text_type):
+ if isinstance(pin, str):
pin = pin.encode("utf-8", "replace")
return hashlib.md5(pin + b"shittysalt").hexdigest()[:12]
@@ -188,7 +187,7 @@ def get_pin_and_cookie_name(app):
for bit in chain(probably_public_bits, private_bits):
if not bit:
continue
- if isinstance(bit, text_type):
+ if isinstance(bit, str):
bit = bit.encode("utf-8")
h.update(bit)
h.update(b"cookiesalt")
diff --git a/src/werkzeug/debug/repr.py b/src/werkzeug/debug/repr.py
index d7a7285ca..cb245ceca 100644
--- a/src/werkzeug/debug/repr.py
+++ b/src/werkzeug/debug/repr.py
@@ -19,11 +19,6 @@
from collections import deque
from traceback import format_exception_only
-from .._compat import integer_types
-from .._compat import iteritems
-from .._compat import PY2
-from .._compat import string_types
-from .._compat import text_type
from ..utils import escape
@@ -143,10 +138,7 @@ def proxy(self, obj, recursive):
def regex_repr(self, obj):
pattern = repr(obj.pattern)
- if PY2:
- pattern = pattern.decode("string-escape", "ignore")
- else:
- pattern = codecs.decode(pattern, "unicode-escape", "ignore")
+ pattern = codecs.decode(pattern, "unicode-escape", "ignore")
if pattern[:1] == "u":
pattern = "ur" + pattern[1:]
else:
@@ -175,7 +167,7 @@ def string_repr(self, obj, limit=70):
# if the repr looks like a standard string, add subclass info if needed
if r[0] in "'\"" or (r[0] in "ub" and r[1] in "'\""):
- return _add_subclass_info(out, obj, (bytes, text_type))
+ return _add_subclass_info(out, obj, (bytes, str))
# otherwise, assume the repr distinguishes the subclass already
return out
@@ -185,7 +177,7 @@ def dict_repr(self, d, recursive, limit=5):
return _add_subclass_info(u"{...}", d, dict)
buf = ["{"]
have_extended_section = False
- for idx, (key, value) in enumerate(iteritems(d)):
+ for idx, (key, value) in enumerate(iter(d.items())):
if idx:
buf.append(", ")
if idx == limit - 1:
@@ -203,16 +195,14 @@ def dict_repr(self, d, recursive, limit=5):
def object_repr(self, obj):
r = repr(obj)
- if PY2:
- r = r.decode("utf-8", "replace")
return u'%s' % escape(r)
def dispatch_repr(self, obj, recursive):
if obj is helper:
return u'%r' % helper
- if isinstance(obj, (integer_types, float, complex)):
+ if isinstance(obj, (int, float, complex)):
return u'%r' % obj
- if isinstance(obj, string_types) or isinstance(obj, bytes):
+ if isinstance(obj, str) or isinstance(obj, bytes):
return self.string_repr(obj)
if isinstance(obj, RegexType):
return self.regex_repr(obj)
@@ -235,8 +225,6 @@ def fallback_repr(self):
info = "".join(format_exception_only(*sys.exc_info()[:2]))
except Exception: # pragma: no cover
info = "?"
- if PY2:
- info = info.decode("utf-8", "ignore")
return u'<broken repr (%s)>' u"" % escape(
info.strip()
)
@@ -261,8 +249,8 @@ def dump_object(self, obj):
if isinstance(obj, dict):
title = "Contents of"
items = []
- for key, value in iteritems(obj):
- if not isinstance(key, string_types):
+ for key, value in obj.items():
+ if not isinstance(key, str):
items = None
break
items.append((key, self.repr(value)))
diff --git a/src/werkzeug/debug/tbtools.py b/src/werkzeug/debug/tbtools.py
index b34403dce..da4f68c15 100644
--- a/src/werkzeug/debug/tbtools.py
+++ b/src/werkzeug/debug/tbtools.py
@@ -18,13 +18,9 @@
import traceback
from tokenize import TokenError
-from .._compat import PY2
-from .._compat import range_type
-from .._compat import reraise
-from .._compat import string_types
-from .._compat import text_type
-from .._compat import to_native
-from .._compat import to_unicode
+from .._internal import _reraise
+from .._internal import _to_native
+from .._internal import _to_unicode
from ..filesystem import get_filesystem_encoding
from ..utils import cached_property
from ..utils import escape
@@ -192,8 +188,8 @@ def get_current_traceback(
"""
exc_type, exc_value, tb = sys.exc_info()
if ignore_system_exceptions and exc_type in system_exceptions:
- reraise(exc_type, exc_value, tb)
- for _ in range_type(skip):
+ _reraise(exc_type, exc_value, tb)
+ for _ in range(skip):
if tb.tb_next is None:
break
tb = tb.tb_next
@@ -249,8 +245,6 @@ def __init__(self, exc_type, exc_value, tb):
while True:
self.groups.append(Group(exc_type, exc_value, tb))
memo.add(id(exc_value))
- if PY2:
- break
exc_value = exc_value.__cause__ or exc_value.__context__
if exc_value is None or id(exc_value) in memo:
break
@@ -281,7 +275,7 @@ def log(self, logfile=None):
if logfile is None:
logfile = sys.stderr
tb = self.plaintext.rstrip() + u"\n"
- logfile.write(to_native(tb, "utf-8", "replace"))
+ logfile.write(_to_native(tb, "utf-8", "replace"))
def paste(self):
"""Create a paste and return the paste id."""
@@ -367,17 +361,16 @@ def __init__(self, exc_type, exc_value, tb):
self.exc_type = exc_type
self.exc_value = exc_value
self.info = None
- if not PY2:
- if exc_value.__cause__ is not None:
- self.info = (
- u"The above exception was the direct cause of the"
- u" following exception"
- )
- elif exc_value.__context__ is not None:
- self.info = (
- u"During handling of the above exception, another"
- u" exception occurred"
- )
+ if exc_value.__cause__ is not None:
+ self.info = (
+ u"The above exception was the direct cause of the"
+ u" following exception"
+ )
+ elif exc_value.__context__ is not None:
+ self.info = (
+ u"During handling of the above exception, another"
+ u" exception occurred"
+ )
self.frames = []
while tb is not None:
@@ -421,7 +414,7 @@ def exception(self):
"""String representation of the exception."""
buf = traceback.format_exception_only(self.exc_type, self.exc_value)
rv = "".join(buf).strip()
- return to_unicode(rv, "utf-8", "replace")
+ return _to_unicode(rv, "utf-8", "replace")
def render(self, mark_lib=True):
out = []
@@ -463,7 +456,7 @@ def __init__(self, exc_type, exc_value, tb):
# if it's a file on the file system resolve the real filename.
if os.path.isfile(fn):
fn = os.path.realpath(fn)
- self.filename = to_unicode(fn, get_filesystem_encoding())
+ self.filename = _to_unicode(fn, get_filesystem_encoding())
self.module = self.globals.get("__name__", self.locals.get("__name__"))
self.loader = self.globals.get("__loader__", self.locals.get("__loader__"))
self.code = tb.tb_frame.f_code
@@ -472,7 +465,7 @@ def __init__(self, exc_type, exc_value, tb):
self.hide = self.locals.get("__traceback_hide__", False)
info = self.locals.get("__traceback_info__")
if info is not None:
- info = to_unicode(info, "utf-8", "replace")
+ info = _to_unicode(info, "utf-8", "replace")
self.info = info
def render(self, mark_lib=True):
@@ -549,9 +542,7 @@ def get_annotated_lines(self):
def eval(self, code, mode="single"):
"""Evaluate code in the context of the frame."""
- if isinstance(code, string_types):
- if PY2 and isinstance(code, text_type): # noqa
- code = UTF8_COOKIE + code.encode("utf-8")
+ if isinstance(code, str):
code = compile(code, "", mode)
return eval(code, self.globals, self.locals)
@@ -574,14 +565,14 @@ def sourcelines(self):
if source is None:
try:
with open(
- to_native(self.filename, get_filesystem_encoding()), mode="rb"
+ _to_native(self.filename, get_filesystem_encoding()), mode="rb"
) as f:
source = f.read()
except IOError:
return []
# already unicode? return right away
- if isinstance(source, text_type):
+ if isinstance(source, str):
return source.splitlines()
# yes. it should be ascii, but we don't want to reject too many
@@ -599,7 +590,7 @@ def sourcelines(self):
break
# on broken cookies we fall back to utf-8 too
- charset = to_native(charset)
+ charset = _to_native(charset)
try:
codecs.lookup(charset)
except LookupError:
diff --git a/src/werkzeug/exceptions.py b/src/werkzeug/exceptions.py
index 82e99c2e5..2215e07c9 100644
--- a/src/werkzeug/exceptions.py
+++ b/src/werkzeug/exceptions.py
@@ -60,15 +60,10 @@ def application(environ, start_response):
import sys
from datetime import datetime
-from ._compat import implements_to_string
-from ._compat import integer_types
-from ._compat import iteritems
-from ._compat import text_type
from ._internal import _get_environ
from .utils import escape
-@implements_to_string
class HTTPException(Exception):
"""Baseclass for all HTTP exceptions. This exception can be called as WSGI
application to render a default error page or you can catch the subclasses
@@ -146,19 +141,16 @@ def get_description(self, environ=None):
def get_body(self, environ=None):
"""Get the HTML body."""
- return text_type(
- (
- u'\n'
- u"%(code)s %(name)s\n"
- u"