--- /dev/null
+"""Classes representing releases and distributions retrieved from indexes.
+
+A project (= unique name) can have several releases (= versions) and
+each release can have several distributions (= sdist and bdists).
+
+Release objects contain metadata-related information (see PEP 376);
+distribution objects contain download-related information.
+"""
+
+import re
+import hashlib
+import tempfile
+import urllib.request
+import urllib.parse
+import urllib.error
+from shutil import unpack_archive
+
+from packaging.errors import IrrationalVersionError
+from packaging.version import (suggest_normalized_version, NormalizedVersion,
+ get_version_predicate)
+from packaging.metadata import Metadata
+from packaging.pypi.errors import (HashDoesNotMatch, UnsupportedHashName,
+ CantParseArchiveName)
+
+
+__all__ = ['ReleaseInfo', 'DistInfo', 'ReleasesList', 'get_infos_from_url']
+
+EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz .egg".split()
+MD5_HASH = re.compile(r'^.*#md5=([a-f0-9]+)$')
+DIST_TYPES = ['bdist', 'sdist']
+
+
+class IndexReference:
+ """Mixin used to store the index reference"""
+ def set_index(self, index=None):
+ self._index = index
+
+
+class ReleaseInfo(IndexReference):
+ """Represent a release of a project (a project with a specific version).
+    The release contains the metadata information related to this specific
+    version, and is also a container for distribution-related information.
+
+ See the DistInfo class for more information about distributions.
+ """
+
+ def __init__(self, name, version, metadata=None, hidden=False,
+ index=None, **kwargs):
+ """
+ :param name: the name of the distribution
+ :param version: the version of the distribution
+ :param metadata: the metadata fields of the release.
+ :type metadata: dict
+ :param kwargs: optional arguments for a new distribution.
+ """
+ self.set_index(index)
+ self.name = name
+ self._version = None
+ self.version = version
+ if metadata:
+ self.metadata = Metadata(mapping=metadata)
+ else:
+ self.metadata = None
+ self.dists = {}
+ self.hidden = hidden
+
+ if 'dist_type' in kwargs:
+ dist_type = kwargs.pop('dist_type')
+ self.add_distribution(dist_type, **kwargs)
+
+ def set_version(self, version):
+ try:
+ self._version = NormalizedVersion(version)
+ except IrrationalVersionError:
+ suggestion = suggest_normalized_version(version)
+ if suggestion:
+ self.version = suggestion
+ else:
+ raise IrrationalVersionError(version)
+
+ def get_version(self):
+ return self._version
+
+ version = property(get_version, set_version)
+
+ def fetch_metadata(self):
+ """If the metadata is not set, use the indexes to get it"""
+ if not self.metadata:
+ self._index.get_metadata(self.name, str(self.version))
+ return self.metadata
+
+ @property
+ def is_final(self):
+ """proxy to version.is_final"""
+ return self.version.is_final
+
+ def fetch_distributions(self):
+ if self.dists is None:
+ self._index.get_distributions(self.name, str(self.version))
+ if self.dists is None:
+ self.dists = {}
+ return self.dists
+
+ def add_distribution(self, dist_type='sdist', python_version=None,
+ **params):
+ """Add distribution informations to this release.
+ If distribution information is already set for this distribution type,
+ add the given url paths to the distribution. This can be useful while
+ some of them fails to download.
+
+ :param dist_type: the distribution type (eg. "sdist", "bdist", etc.)
+ :param params: the fields to be passed to the distribution object
+ (see the :class:DistInfo constructor).
+ """
+ if dist_type not in DIST_TYPES:
+ raise ValueError(dist_type)
+ if dist_type in self.dists:
+ self.dists[dist_type].add_url(**params)
+ else:
+ self.dists[dist_type] = DistInfo(self, dist_type,
+ index=self._index, **params)
+ if python_version:
+ self.dists[dist_type].python_version = python_version
+
+ def get_distribution(self, dist_type=None, prefer_source=True):
+ """Return a distribution.
+
+        If dist_type is set, look up this distribution type; this just
+        acts as an alias of __getitem__.
+
+        If prefer_source is True, search first for a source distribution,
+        and fall back to any existing distribution.
+ """
+ if len(self.dists) == 0:
+ raise LookupError
+ if dist_type:
+ return self[dist_type]
+ if prefer_source:
+ if "sdist" in self.dists:
+ dist = self["sdist"]
+ else:
+                dist = next(iter(self.dists.values()))
+ return dist
+
+ def unpack(self, path=None, prefer_source=True):
+ """Unpack the distribution to the given path.
+
+        If no destination is given, create a temporary location.
+
+ Returns the location of the extracted files (root).
+ """
+ return self.get_distribution(prefer_source=prefer_source)\
+ .unpack(path=path)
+
+ def download(self, temp_path=None, prefer_source=True):
+ """Download the distribution, using the requirements.
+
+        If more than one distribution matches the requirements, use the
+        latest version.
+        Download the distribution and put it in temp_path. If no temp_path
+        is given, create and return one.
+
+ Returns the complete absolute path to the downloaded archive.
+ """
+ return self.get_distribution(prefer_source=prefer_source)\
+ .download(path=temp_path)
+
+ def set_metadata(self, metadata):
+ if not self.metadata:
+ self.metadata = Metadata()
+ self.metadata.update(metadata)
+
+ def __getitem__(self, item):
+ """distributions are available using release["sdist"]"""
+ return self.dists[item]
+
+ def _check_is_comparable(self, other):
+ if not isinstance(other, ReleaseInfo):
+ raise TypeError("cannot compare %s and %s"
+ % (type(self).__name__, type(other).__name__))
+ elif self.name != other.name:
+ raise TypeError("cannot compare %s and %s"
+ % (self.name, other.name))
+
+ def __repr__(self):
+ return "<%s %s>" % (self.name, self.version)
+
+ def __eq__(self, other):
+ self._check_is_comparable(other)
+ return self.version == other.version
+
+ def __lt__(self, other):
+ self._check_is_comparable(other)
+ return self.version < other.version
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __gt__(self, other):
+ return not (self.__lt__(other) or self.__eq__(other))
+
+ def __le__(self, other):
+ return self.__eq__(other) or self.__lt__(other)
+
+ def __ge__(self, other):
+ return self.__eq__(other) or self.__gt__(other)
+
+ # See http://docs.python.org/reference/datamodel#object.__hash__
+ __hash__ = object.__hash__
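+
+# A minimal doctest-style sketch of ReleaseInfo (the project name and the
+# url below are hypothetical):
+#
+#   >>> release = ReleaseInfo("FooBar", "1.1", dist_type="sdist",
+#   ...                       url="http://example.org/FooBar-1.1.tar.gz")
+#   >>> release.name, str(release.version)
+#   ('FooBar', '1.1')
+#   >>> release["sdist"].is_source
+#   True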
+
+
+class DistInfo(IndexReference):
+ """Represents a distribution retrieved from an index (sdist, bdist, ...)
+ """
+
+ def __init__(self, release, dist_type=None, url=None, hashname=None,
+ hashval=None, is_external=True, python_version=None,
+ index=None):
+ """Create a new instance of DistInfo.
+
+        :param release: the release this distribution belongs to.
+ :param dist_type: the type of the dist (eg. source, bin-*, etc.)
+ :param url: URL where we found this distribution
+ :param hashname: the name of the hash we want to use. Refer to the
+ hashlib.new documentation for more information.
+ :param hashval: the hash value.
+ :param is_external: we need to know if the provided url comes from
+ an index browsing, or from an external resource.
+
+ """
+ self.set_index(index)
+ self.release = release
+ self.dist_type = dist_type
+ self.python_version = python_version
+ self._unpacked_dir = None
+ # set the downloaded path to None by default. The goal here
+ # is to not download distributions multiple times
+ self.downloaded_location = None
+        # We store urls in a list of dicts, because we need a bit more
+        # information than the simple URL. It will be used later to find
+        # the right url to use.
+        # We have two _url* attributes: _url and urls. urls contains the
+        # list of dicts for the different urls, and _url contains the
+        # chosen url, so that the selection process does not run multiple
+        # times.
+ self.urls = []
+ self._url = None
+ self.add_url(url, hashname, hashval, is_external)
+
+ def add_url(self, url=None, hashname=None, hashval=None, is_external=True):
+ """Add a new url to the list of urls"""
+ if hashname is not None:
+ try:
+ hashlib.new(hashname)
+ except ValueError:
+ raise UnsupportedHashName(hashname)
+ if url not in [u['url'] for u in self.urls]:
+ self.urls.append({
+ 'url': url,
+ 'hashname': hashname,
+ 'hashval': hashval,
+ 'is_external': is_external,
+ })
+ # reset the url selection process
+ self._url = None
+
+ @property
+ def url(self):
+ """Pick up the right url for the list of urls in self.urls"""
+ # We return internal urls over externals.
+ # If there is more than one internal or external, return the first
+ # one.
+ if self._url is None:
+ if len(self.urls) > 1:
+                internals_urls = [u for u in self.urls
+                                  if not u['is_external']]
+ if len(internals_urls) >= 1:
+ self._url = internals_urls[0]
+ if self._url is None:
+ self._url = self.urls[0]
+ return self._url
+
+ @property
+ def is_source(self):
+ """return if the distribution is a source one or not"""
+ return self.dist_type == 'sdist'
+
+ def download(self, path=None):
+ """Download the distribution to a path, and return it.
+
+        If a path is given, use it; otherwise, generate a new one.
+ Return the download location.
+ """
+ if path is None:
+ path = tempfile.mkdtemp()
+
+        # if we have not downloaded it yet, do so.
+ if self.downloaded_location is None:
+ url = self.url['url']
+ archive_name = urllib.parse.urlparse(url)[2].split('/')[-1]
+ filename, headers = urllib.request.urlretrieve(url,
+ path + "/" + archive_name)
+ self.downloaded_location = filename
+ self._check_md5(filename)
+ return self.downloaded_location
+
+ def unpack(self, path=None):
+ """Unpack the distribution to the given path.
+
+        If no destination is given, create a temporary location.
+
+ Returns the location of the extracted files (root).
+ """
+ if not self._unpacked_dir:
+ if path is None:
+ path = tempfile.mkdtemp()
+
+ filename = self.download(path)
+ unpack_archive(filename, path)
+ self._unpacked_dir = path
+
+ return path
+
+ def _check_md5(self, filename):
+ """Check that the md5 checksum of the given file matches the one in
+ url param"""
+ hashname = self.url['hashname']
+ expected_hashval = self.url['hashval']
+ if None not in (expected_hashval, hashname):
+ with open(filename, 'rb') as f:
+ hashval = hashlib.new(hashname)
+ hashval.update(f.read())
+
+ if hashval.hexdigest() != expected_hashval:
+ raise HashDoesNotMatch("got %s instead of %s"
+ % (hashval.hexdigest(), expected_hashval))
+
+ def __repr__(self):
+ if self.release is None:
+ return "<? ? %s>" % self.dist_type
+
+ return "<%s %s %s>" % (
+ self.release.name, self.release.version, self.dist_type or "")
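+
+# A sketch of the url selection logic (standalone use with release=None;
+# both urls are hypothetical):
+#
+#   >>> dist = DistInfo(None, 'sdist',
+#   ...                 url="http://mirror.example.org/FooBar-1.1.tar.gz")
+#   >>> dist.add_url(url="http://index.example.org/FooBar-1.1.tar.gz",
+#   ...              is_external=False)
+#   >>> dist.url['is_external']   # internal urls win over external ones
+#   False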
+
+
+class ReleasesList(IndexReference):
+ """A container of Release.
+
+ Provides useful methods and facilities to sort and filter releases.
+ """
+ def __init__(self, name, releases=None, contains_hidden=False, index=None):
+ self.set_index(index)
+ self.releases = []
+ self.name = name
+ self.contains_hidden = contains_hidden
+ if releases:
+ self.add_releases(releases)
+
+ def fetch_releases(self):
+ self._index.get_releases(self.name)
+ return self.releases
+
+ def filter(self, predicate):
+ """Filter and return a subset of releases matching the given predicate.
+ """
+ return ReleasesList(self.name, [release for release in self.releases
+ if predicate.match(release.version)],
+ index=self._index)
+
+ def get_last(self, requirements, prefer_final=None):
+ """Return the "last" release, that satisfy the given predicates.
+
+ "last" is defined by the version number of the releases, you also could
+ set prefer_final parameter to True or False to change the order results
+ """
+ predicate = get_version_predicate(requirements)
+ releases = self.filter(predicate)
+ if len(releases) == 0:
+ return None
+ releases.sort_releases(prefer_final, reverse=True)
+ return releases[0]
+
+ def add_releases(self, releases):
+ """Add releases in the release list.
+
+ :param: releases is a list of ReleaseInfo objects.
+ """
+ for r in releases:
+ self.add_release(release=r)
+
+ def add_release(self, version=None, dist_type='sdist', release=None,
+ **dist_args):
+ """Add a release to the list.
+
+        The release can be passed in the `release` parameter, in which case
+        it will be crawled to extract the useful information if necessary;
+        alternatively, the release information can be passed directly in
+        the `version` and `dist_type` arguments.
+
+        Other keyword arguments can be provided, and will be forwarded to
+        the distribution creation (eg. the arguments of the DistInfo
+        constructor).
+ """
+ if release:
+ if release.name.lower() != self.name.lower():
+ raise ValueError("%s is not the same project as %s" %
+ (release.name, self.name))
+ version = str(release.version)
+
+ if version not in self.get_versions():
+            # append only if it does not already exist
+ self.releases.append(release)
+ for dist in release.dists.values():
+ for url in dist.urls:
+ self.add_release(version, dist.dist_type, **url)
+ else:
+ matches = [r for r in self.releases
+ if str(r.version) == version and r.name == self.name]
+ if not matches:
+ release = ReleaseInfo(self.name, version, index=self._index)
+ self.releases.append(release)
+ else:
+ release = matches[0]
+
+ release.add_distribution(dist_type=dist_type, **dist_args)
+
+ def sort_releases(self, prefer_final=False, reverse=True, *args, **kwargs):
+ """Sort the results with the given properties.
+
+ The `prefer_final` argument can be used to specify if final
+        distributions (eg. not dev, beta or alpha) would be preferred or not.
+
+ Results can be inverted by using `reverse`.
+
+ Any other parameter provided will be forwarded to the sorted call. You
+ cannot redefine the key argument of "sorted" here, as it is used
+ internally to sort the releases.
+ """
+
+ sort_by = []
+ if prefer_final:
+ sort_by.append("is_final")
+ sort_by.append("version")
+
+ self.releases.sort(
+ key=lambda i: tuple(getattr(i, arg) for arg in sort_by),
+ reverse=reverse, *args, **kwargs)
+
+ def get_release(self, version):
+ """Return a release from its version."""
+ matches = [r for r in self.releases if str(r.version) == version]
+ if len(matches) != 1:
+ raise KeyError(version)
+ return matches[0]
+
+ def get_versions(self):
+ """Return a list of releases versions contained"""
+ return [str(r.version) for r in self.releases]
+
+ def __getitem__(self, key):
+ return self.releases[key]
+
+ def __len__(self):
+ return len(self.releases)
+
+ def __repr__(self):
+ string = 'Project "%s"' % self.name
+ if self.get_versions():
+ string += ' versions: %s' % ', '.join(self.get_versions())
+ return '<%s>' % string
+
+
+def get_infos_from_url(url, probable_dist_name=None, is_external=True):
+ """Get useful informations from an URL.
+
+    Return a dict with the keys name, version, url, hashname, hashval,
+    is_external and dist_type.
+
+ :param url: complete url of the distribution
+ :param probable_dist_name: A probable name of the project.
+    :param is_external: Tell if the url comes from an index or from
+                        an external URL.
+ """
+ # if the url contains a md5 hash, get it.
+ md5_hash = None
+ match = MD5_HASH.match(url)
+ if match is not None:
+ md5_hash = match.group(1)
+ # remove the hash
+ url = url.replace("#md5=%s" % md5_hash, "")
+
+ # parse the archive name to find dist name and version
+ archive_name = urllib.parse.urlparse(url)[2].split('/')[-1]
+ extension_matched = False
+ # remove the extension from the name
+ for ext in EXTENSIONS:
+ if archive_name.endswith(ext):
+ archive_name = archive_name[:-len(ext)]
+ extension_matched = True
+
+    name, version = split_archive_name(archive_name, probable_dist_name)
+    if extension_matched:
+ return {'name': name,
+ 'version': version,
+ 'url': url,
+ 'hashname': "md5",
+ 'hashval': md5_hash,
+ 'is_external': is_external,
+ 'dist_type': 'sdist'}
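+
+# For example (the url is hypothetical):
+#
+#   >>> infos = get_infos_from_url("http://example.org/FooBar-1.1.tar.gz")
+#   >>> infos['name'], infos['version'], infos['dist_type']
+#   ('foobar', '1.1', 'sdist')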
+
+
+def split_archive_name(archive_name, probable_name=None):
+ """Split an archive name into two parts: name and version.
+
+ Return the tuple (name, version)
+ """
+    # Try to determine which part is the name and which is the version
+    # using the "-" separator. Take the larger part to be the version
+    # number, then reduce if this does not work.
+    def eager_split(archive, maxsplit=2):
+        # split using the "-" separator
+        splits = archive.rsplit("-", maxsplit)
+        name = splits[0]
+        version = "-".join(splits[1:])
+        if version.startswith("-"):
+            version = version[1:]
+        if suggest_normalized_version(version) is None and maxsplit >= 0:
+            # we didn't get a good version number: recurse!
+            return eager_split(archive, maxsplit - 1)
+        else:
+            return name, version
+ if probable_name is not None:
+ probable_name = probable_name.lower()
+ name = None
+ if probable_name is not None and probable_name in archive_name:
+        # we get the name from probable_name, if given.
+        name = probable_name
+        # strip the name prefix (lstrip would strip a set of characters,
+        # not a prefix)
+        version = archive_name[len(name):].lstrip('-')
+ else:
+ name, version = eager_split(archive_name)
+
+ version = suggest_normalized_version(version)
+ if version is not None and name != "":
+ return name.lower(), version
+ else:
+ raise CantParseArchiveName(archive_name)
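+
+# For instance, assuming suggest_normalized_version rejects "ldap-2.3.12"
+# but accepts "2.3.12", the eager split backtracks on the "-" separator:
+#
+#   >>> split_archive_name("python-ldap-2.3.12")
+#   ('python-ldap', '2.3.12')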
--- /dev/null
+"""Spider using the screen-scraping "simple" PyPI API.
+
+This module contains the class Crawler, a simple spider that
+can be used to find and retrieve distributions from a project index
+(like the Python Package Index), using its so-called simple API (see
+reference implementation available at http://pypi.python.org/simple/).
+"""
+
+import base64
+import http.client
+import re
+import socket
+import sys
+import urllib.request
+import urllib.parse
+import urllib.error
+import os
+
+from fnmatch import translate
+from functools import wraps
+from packaging import logger
+from packaging.metadata import Metadata
+from packaging.version import get_version_predicate
+from packaging import __version__ as packaging_version
+from packaging.pypi.base import BaseClient
+from packaging.pypi.dist import (ReleasesList, EXTENSIONS,
+ get_infos_from_url, MD5_HASH)
+from packaging.pypi.errors import (PackagingPyPIError, DownloadError,
+ UnableToDownload, CantParseArchiveName,
+ ReleaseNotFound, ProjectNotFound)
+from packaging.pypi.mirrors import get_mirrors
+
+__all__ = ['Crawler', 'DEFAULT_SIMPLE_INDEX_URL']
+
+# -- Constants -----------------------------------------------
+DEFAULT_SIMPLE_INDEX_URL = "http://a.pypi.python.org/simple/"
+DEFAULT_HOSTS = ("*",)
+SOCKET_TIMEOUT = 15
+USER_AGENT = "Python-urllib/%s packaging/%s" % (
+ sys.version[:3], packaging_version)
+
+# -- Regexps -------------------------------------------------
+EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$')
+HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
+URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):', re.I).match
+
+# This pattern matches a character entity reference (a decimal numeric
+# reference, a hexadecimal numeric reference, or a named reference).
+ENTITY_SUB = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub
+REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
+
+
+def socket_timeout(timeout=SOCKET_TIMEOUT):
+ """Decorator to add a socket timeout when requesting pages on PyPI.
+ """
+ def wrapper(func):
+ @wraps(func)
+ def wrapped(self, *args, **kwargs):
+ old_timeout = socket.getdefaulttimeout()
+ if hasattr(self, "_timeout"):
+ timeout = self._timeout
+ socket.setdefaulttimeout(timeout)
+ try:
+ return func(self, *args, **kwargs)
+ finally:
+ socket.setdefaulttimeout(old_timeout)
+ return wrapped
+ return wrapper
+
+
+def with_mirror_support():
+ """Decorator that makes the mirroring support easier"""
+ def wrapper(func):
+ @wraps(func)
+ def wrapped(self, *args, **kwargs):
+ try:
+ return func(self, *args, **kwargs)
+ except DownloadError:
+ # if an error occurs, try with the next index_url
+ if self._mirrors_tries >= self._mirrors_max_tries:
+ try:
+ self._switch_to_next_mirror()
+ except KeyError:
+ raise UnableToDownload("Tried all mirrors")
+ else:
+ self._mirrors_tries += 1
+ self._projects.clear()
+ return wrapped(self, *args, **kwargs)
+ return wrapped
+ return wrapper
+
+
+class Crawler(BaseClient):
+ """Provides useful tools to request the Python Package Index simple API.
+
+ You can specify both mirrors and mirrors_url, but mirrors_url will only be
+ used if mirrors is set to None.
+
+ :param index_url: the url of the simple index to search on.
+ :param prefer_final: if the version is not mentioned, and the last
+ version is not a "final" one (alpha, beta, etc.),
+ pick up the last final version.
+ :param prefer_source: if the distribution type is not mentioned, pick up
+ the source one if available.
+    :param follow_externals: tell if following external links is needed or
+                             not. Default is False.
+    :param hosts: a list of hosts allowed to be processed while using
+                  follow_externals=True. Default behavior is to follow all
+                  hosts.
+    :param mirrors_url: the url to look on for DNS records giving mirror
+                        addresses.
+    :param mirrors: a list of mirrors (see PEP 381).
+    :param timeout: time in seconds before considering that a url has
+                    timed out.
+    :param mirrors_max_tries: number of times to try requesting information
+                              on mirrors before switching.
+ """
+
+ def __init__(self, index_url=DEFAULT_SIMPLE_INDEX_URL, prefer_final=False,
+ prefer_source=True, hosts=DEFAULT_HOSTS,
+ follow_externals=False, mirrors_url=None, mirrors=None,
+ timeout=SOCKET_TIMEOUT, mirrors_max_tries=0):
+ super(Crawler, self).__init__(prefer_final, prefer_source)
+ self.follow_externals = follow_externals
+
+ # mirroring attributes.
+ parsed = urllib.parse.urlparse(index_url)
+ self.scheme = parsed[0]
+ if self.scheme == 'file':
+ ender = os.path.sep
+ else:
+ ender = '/'
+ if not index_url.endswith(ender):
+ index_url += ender
+ # if no mirrors are defined, use the method described in PEP 381.
+ if mirrors is None:
+ mirrors = get_mirrors(mirrors_url)
+ self._mirrors = set(mirrors)
+ self._mirrors_used = set()
+ self.index_url = index_url
+ self._mirrors_max_tries = mirrors_max_tries
+ self._mirrors_tries = 0
+ self._timeout = timeout
+
+ # create a regexp to match all given hosts
+ self._allowed_hosts = re.compile('|'.join(map(translate, hosts))).match
+
+        # we keep an index of pages we have processed, in order to avoid
+        # scanning them multiple times (eg. if several pages point to the
+        # same one)
+ self._processed_urls = []
+ self._projects = {}
+
+ @with_mirror_support()
+ def search_projects(self, name=None, **kwargs):
+ """Search the index for projects containing the given name.
+
+ Return a list of names.
+ """
+        if '*' not in name:
+            name = "%s%s%s" % ('*.?', name, '*.?')
+ name = name.replace('*', '[^<]*') # avoid matching end tag
+ pattern = ('<a[^>]*>(%s)</a>' % name).encode('utf-8')
+ projectname = re.compile(pattern, re.I)
+ matching_projects = []
+
+ with self._open_url(self.index_url) as index:
+ index_content = index.read()
+
+ for match in projectname.finditer(index_content):
+ project_name = match.group(1).decode('utf-8')
+ matching_projects.append(self._get_project(project_name))
+ return matching_projects
+
+ def get_releases(self, requirements, prefer_final=None,
+ force_update=False):
+ """Search for releases and return a ReleasesList object containing
+ the results.
+ """
+ predicate = get_version_predicate(requirements)
+ if predicate.name.lower() in self._projects and not force_update:
+ return self._projects.get(predicate.name.lower())
+ prefer_final = self._get_prefer_final(prefer_final)
+ logger.debug('Reading info on PyPI about %s', predicate.name)
+ self._process_index_page(predicate.name)
+
+ if predicate.name.lower() not in self._projects:
+ raise ProjectNotFound
+
+ releases = self._projects.get(predicate.name.lower())
+ releases.sort_releases(prefer_final=prefer_final)
+ return releases
+
+ def get_release(self, requirements, prefer_final=None):
+ """Return only one release that fulfill the given requirements"""
+ predicate = get_version_predicate(requirements)
+ release = self.get_releases(predicate, prefer_final)\
+ .get_last(predicate)
+ if not release:
+ raise ReleaseNotFound("No release matches the given criterias")
+ return release
+
+ def get_distributions(self, project_name, version):
+ """Return the distributions found on the index for the specific given
+ release"""
+ # as the default behavior of get_release is to return a release
+ # containing the distributions, just alias it.
+ return self.get_release("%s (%s)" % (project_name, version))
+
+ def get_metadata(self, project_name, version):
+ """Return the metadatas from the simple index.
+
+ Currently, download one archive, extract it and use the PKG-INFO file.
+ """
+ release = self.get_distributions(project_name, version)
+ if not release.metadata:
+ location = release.get_distribution().unpack()
+ pkg_info = os.path.join(location, 'PKG-INFO')
+ release.metadata = Metadata(pkg_info)
+ return release
+
+ def _switch_to_next_mirror(self):
+ """Switch to the next mirror (eg. point self.index_url to the next
+ mirror url.
+
+ Raise a KeyError if all mirrors have been tried.
+ """
+ self._mirrors_used.add(self.index_url)
+ index_url = self._mirrors.pop()
+ # XXX use urllib.parse for a real check of missing scheme part
+ if not index_url.startswith(("http://", "https://", "file://")):
+ index_url = "http://%s" % index_url
+
+ if not index_url.endswith("/simple"):
+ index_url = "%s/simple/" % index_url
+
+ self.index_url = index_url
+
+ def _is_browsable(self, url):
+ """Tell if the given URL can be browsed or not.
+
+ It uses the follow_externals and the hosts list to tell if the given
+ url is browsable or not.
+ """
+ # if _index_url is contained in the given URL, we are browsing the
+ # index, and it's always "browsable".
+        # local files are always considered browsable resources
+ if self.index_url in url or urllib.parse.urlparse(url)[0] == "file":
+ return True
+ elif self.follow_externals:
+ if self._allowed_hosts(urllib.parse.urlparse(url)[1]): # 1 is netloc
+ return True
+ else:
+ return False
+ return False
+
+ def _is_distribution(self, link):
+ """Tell if the given URL matches to a distribution name or not.
+ """
+ #XXX find a better way to check that links are distributions
+ # Using a regexp ?
+ for ext in EXTENSIONS:
+ if ext in link:
+ return True
+ return False
+
+    def _register_release(self, release=None, release_info=None):
+ """Register a new release.
+
+        Either a release or a dict of release_info can be provided; the
+        preferred way (eg. the quicker) is the dict one.
+
+ Return the list of existing releases for the given project.
+ """
+        # Check if the project already has a list of releases (referring to
+ # the project name). If not, create a new release list.
+ # Then, add the release to the list.
+        if release_info is None:
+            release_info = {}
+        if release:
+            name = release.name
+        else:
+            name = release_info['name']
+ if name.lower() not in self._projects:
+ self._projects[name.lower()] = ReleasesList(name, index=self._index)
+
+ if release:
+ self._projects[name.lower()].add_release(release=release)
+ else:
+ name = release_info.pop('name')
+ version = release_info.pop('version')
+ dist_type = release_info.pop('dist_type')
+ self._projects[name.lower()].add_release(version, dist_type,
+ **release_info)
+ return self._projects[name.lower()]
+
+ def _process_url(self, url, project_name=None, follow_links=True):
+ """Process an url and search for distributions packages.
+
+ For each URL found, if it's a download, creates a PyPIdistribution
+ object. If it's a homepage and we can follow links, process it too.
+
+ :param url: the url to process
+ :param project_name: the project name we are searching for.
+        :param follow_links: tell if we want to follow the links we find
+                             (eg. run this method recursively on them);
+                             links are not followed more than one level
+                             deep.
+ """
+ with self._open_url(url) as f:
+ base_url = f.url
+ if url not in self._processed_urls:
+ self._processed_urls.append(url)
+ link_matcher = self._get_link_matcher(url)
+ for link, is_download in link_matcher(f.read().decode(), base_url):
+ if link not in self._processed_urls:
+ if self._is_distribution(link) or is_download:
+ self._processed_urls.append(link)
+ # it's a distribution, so create a dist object
+ try:
+ infos = get_infos_from_url(link, project_name,
+ is_external=self.index_url not in url)
+ except CantParseArchiveName as e:
+ logger.warning(
+ "version has not been parsed: %s", e)
+ else:
+ self._register_release(release_info=infos)
+ else:
+ if self._is_browsable(link) and follow_links:
+ self._process_url(link, project_name,
+ follow_links=False)
+
+ def _get_link_matcher(self, url):
+ """Returns the right link matcher function of the given url
+ """
+ if self.index_url in url:
+ return self._simple_link_matcher
+ else:
+ return self._default_link_matcher
+
+ def _get_full_url(self, url, base_url):
+ return urllib.parse.urljoin(base_url, self._htmldecode(url))
+
+ def _simple_link_matcher(self, content, base_url):
+ """Yield all links with a rel="download" or rel="homepage".
+
+ This matches the simple index requirements for matching links.
+        If follow_externals is set to False, don't yield the external
+        urls.
+
+ :param content: the content of the page we want to parse
+ :param base_url: the url of this page.
+ """
+ for match in HREF.finditer(content):
+ url = self._get_full_url(match.group(1), base_url)
+ if MD5_HASH.match(url):
+ yield (url, True)
+
+ for match in REL.finditer(content):
+ # search for rel links.
+ tag, rel = match.groups()
+ rels = [s.strip() for s in rel.lower().split(',')]
+ if 'homepage' in rels or 'download' in rels:
+ for match in HREF.finditer(tag):
+ url = self._get_full_url(match.group(1), base_url)
+ if 'download' in rels or self._is_browsable(url):
+ # yield a list of (url, is_download)
+ yield (url, 'download' in rels)
+
+ def _default_link_matcher(self, content, base_url):
+ """Yield all links found on the page.
+ """
+ for match in HREF.finditer(content):
+ url = self._get_full_url(match.group(1), base_url)
+ if self._is_browsable(url):
+ yield (url, False)
+
+ @with_mirror_support()
+ def _process_index_page(self, name):
+ """Find and process a PyPI page for the given project name.
+
+ :param name: the name of the project to find the page
+ """
+ # Browse and index the content of the given PyPI page.
+ if self.scheme == 'file':
+ ender = os.path.sep
+ else:
+ ender = '/'
+ url = self.index_url + name + ender
+ self._process_url(url, name)
+
+ @socket_timeout()
+ def _open_url(self, url):
+ """Open a urllib2 request, handling HTTP authentication, and local
+ files support.
+
+ """
+ scheme, netloc, path, params, query, frag = urllib.parse.urlparse(url)
+
+ # authentication stuff
+ if scheme in ('http', 'https'):
+ auth, host = urllib.parse.splituser(netloc)
+ else:
+ auth = None
+
+ # add index.html automatically for filesystem paths
+ if scheme == 'file':
+ if url.endswith(os.path.sep):
+ url += "index.html"
+
+ # add authorization headers if auth is provided
+ if auth:
+ auth = "Basic " + \
+ urllib.parse.unquote(auth).encode('base64').strip()
+ new_url = urllib.parse.urlunparse((
+ scheme, host, path, params, query, frag))
+ request = urllib.request.Request(new_url)
+ request.add_header("Authorization", auth)
+ else:
+ request = urllib.request.Request(url)
+ request.add_header('User-Agent', USER_AGENT)
+ try:
+ fp = urllib.request.urlopen(request)
+ except (ValueError, http.client.InvalidURL) as v:
+ msg = ' '.join([str(arg) for arg in v.args])
+ raise PackagingPyPIError('%s %s' % (url, msg))
+ except urllib.error.HTTPError as v:
+ return v
+ except urllib.error.URLError as v:
+ raise DownloadError("Download error for %s: %s" % (url, v.reason))
+ except http.client.BadStatusLine as v:
+ raise DownloadError('%s returned a bad status line. '
+ 'The server might be down, %s' % (url, v.line))
+ except http.client.HTTPException as v:
+ raise DownloadError("Download error for %s: %s" % (url, v))
+ except socket.timeout:
+ raise DownloadError("The server timeouted")
+
+ if auth:
+ # Put authentication info back into request URL if same host,
+ # so that links found on the page will work
+ s2, h2, path2, param2, query2, frag2 = \
+ urllib.parse.urlparse(fp.url)
+ if s2 == scheme and h2 == host:
+ fp.url = urllib.parse.urlunparse(
+ (s2, netloc, path2, param2, query2, frag2))
+ return fp
+
+ def _decode_entity(self, match):
+ what = match.group(1)
+ if what.startswith('#x'):
+ what = int(what[2:], 16)
+ elif what.startswith('#'):
+ what = int(what[1:])
+ else:
+ from html.entities import name2codepoint
+ what = name2codepoint.get(what, match.group(0))
+ return chr(what)
+
+ def _htmldecode(self, text):
+ """Decode HTML entities in the given text."""
+ return ENTITY_SUB(self._decode_entity, text)
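+
+# An end-to-end usage sketch (assumes network access to the index; the
+# project name and version predicate are hypothetical):
+#
+#   >>> client = Crawler(prefer_final=True)
+#   >>> release = client.get_release("FooBar (<=1.2)")
+#   >>> archive = release.download()   # path to the downloaded archive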
logging.raiseExceptions = old_raise_exceptions
+class FakeHandler:
+
+ def __init__(self, identifier, called):
+ for method in ('acquire', 'flush', 'close', 'release'):
+ setattr(self, method, self.record_call(identifier, method, called))
+
+ def record_call(self, identifier, method_name, called):
+ def inner():
+ called.append('{} - {}'.format(identifier, method_name))
+ return inner
+
+
+class RecordingHandler(logging.NullHandler):
+
+ def __init__(self, *args, **kwargs):
+ super(RecordingHandler, self).__init__(*args, **kwargs)
+ self.records = []
+
+ def handle(self, record):
+ """Keep track of all the emitted records."""
+ self.records.append(record)
+
+
+class ShutdownTest(BaseTest):
+
+ """Test suite for the shutdown method."""
+
+ def setUp(self):
+ super(ShutdownTest, self).setUp()
+ self.called = []
+
+ raise_exceptions = logging.raiseExceptions
+ self.addCleanup(setattr, logging, 'raiseExceptions', raise_exceptions)
+
+ def raise_error(self, error):
+ def inner():
+ raise error()
+ return inner
+
+ def test_no_failure(self):
+ # create some fake handlers
+ handler0 = FakeHandler(0, self.called)
+ handler1 = FakeHandler(1, self.called)
+ handler2 = FakeHandler(2, self.called)
+
+ # create live weakref to those handlers
+ handlers = map(logging.weakref.ref, [handler0, handler1, handler2])
+
+ logging.shutdown(handlerList=list(handlers))
+
+ expected = ['2 - acquire', '2 - flush', '2 - close', '2 - release',
+ '1 - acquire', '1 - flush', '1 - close', '1 - release',
+ '0 - acquire', '0 - flush', '0 - close', '0 - release']
+ self.assertEqual(expected, self.called)
+
+ def _test_with_failure_in_method(self, method, error):
+ handler = FakeHandler(0, self.called)
+ setattr(handler, method, self.raise_error(error))
+ handlers = [logging.weakref.ref(handler)]
+
+ logging.shutdown(handlerList=list(handlers))
+
+ self.assertEqual('0 - release', self.called[-1])
+
+ def test_with_ioerror_in_acquire(self):
+ self._test_with_failure_in_method('acquire', IOError)
+
+ def test_with_ioerror_in_flush(self):
+ self._test_with_failure_in_method('flush', IOError)
+
+ def test_with_ioerror_in_close(self):
+ self._test_with_failure_in_method('close', IOError)
+
+ def test_with_valueerror_in_acquire(self):
+ self._test_with_failure_in_method('acquire', ValueError)
+
+ def test_with_valueerror_in_flush(self):
+ self._test_with_failure_in_method('flush', ValueError)
+
+ def test_with_valueerror_in_close(self):
+ self._test_with_failure_in_method('close', ValueError)
+
+ def test_with_other_error_in_acquire_without_raise(self):
+ logging.raiseExceptions = False
+ self._test_with_failure_in_method('acquire', IndexError)
+
+ def test_with_other_error_in_flush_without_raise(self):
+ logging.raiseExceptions = False
+ self._test_with_failure_in_method('flush', IndexError)
+
+ def test_with_other_error_in_close_without_raise(self):
+ logging.raiseExceptions = False
+ self._test_with_failure_in_method('close', IndexError)
+
+ def test_with_other_error_in_acquire_with_raise(self):
+ logging.raiseExceptions = True
+ self.assertRaises(IndexError, self._test_with_failure_in_method,
+ 'acquire', IndexError)
+
+ def test_with_other_error_in_flush_with_raise(self):
+ logging.raiseExceptions = True
+ self.assertRaises(IndexError, self._test_with_failure_in_method,
+ 'flush', IndexError)
+
+ def test_with_other_error_in_close_with_raise(self):
+ logging.raiseExceptions = True
+ self.assertRaises(IndexError, self._test_with_failure_in_method,
+ 'close', IndexError)
+
+
+class ModuleLevelMiscTest(BaseTest):
+
+ """Test suite for some module level methods."""
+
+ def test_disable(self):
+ old_disable = logging.root.manager.disable
+ # confirm our assumptions are correct
+ self.assertEqual(old_disable, 0)
+ self.addCleanup(logging.disable, old_disable)
+
+ logging.disable(83)
+ self.assertEqual(logging.root.manager.disable, 83)
+
+ def _test_log(self, method, level=None):
+ called = []
+ patch(self, logging, 'basicConfig',
+ lambda *a, **kw: called.append((a, kw)))
+
+ recording = RecordingHandler()
+ logging.root.addHandler(recording)
+
+ log_method = getattr(logging, method)
+ if level is not None:
+ log_method(level, "test me: %r", recording)
+ else:
+ log_method("test me: %r", recording)
+
+ self.assertEqual(len(recording.records), 1)
+ record = recording.records[0]
+ self.assertEqual(record.getMessage(), "test me: %r" % recording)
+
+ expected_level = level if level is not None else getattr(logging, method.upper())
+ self.assertEqual(record.levelno, expected_level)
+
+ # basicConfig was not called!
+ self.assertEqual(called, [])
+
+ def test_log(self):
+ self._test_log('log', logging.ERROR)
+
+ def test_debug(self):
+ self._test_log('debug')
+
+ def test_info(self):
+ self._test_log('info')
+
+ def test_warning(self):
+ self._test_log('warning')
+
+ def test_error(self):
+ self._test_log('error')
+
+ def test_critical(self):
+ self._test_log('critical')
+
+ def test_set_logger_class(self):
+ self.assertRaises(TypeError, logging.setLoggerClass, object)
+
+ class MyLogger(logging.Logger):
+ pass
+
+ logging.setLoggerClass(MyLogger)
+ self.assertEqual(logging.getLoggerClass(), MyLogger)
+
+ logging.setLoggerClass(logging.Logger)
+ self.assertEqual(logging.getLoggerClass(), logging.Logger)
+
+class LogRecordTest(BaseTest):
+ def test_str_rep(self):
+ r = logging.makeLogRecord({})
+ s = str(r)
+ self.assertTrue(s.startswith('<LogRecord: '))
+ self.assertTrue(s.endswith('>'))
+
+ def test_dict_arg(self):
+ h = RecordingHandler()
+ r = logging.getLogger()
+ r.addHandler(h)
+ d = {'less' : 'more' }
+ logging.warning('less is %(less)s', d)
+ self.assertIs(h.records[0].args, d)
+ self.assertEqual(h.records[0].message, 'less is more')
+ r.removeHandler(h)
+ h.close()
+
+ def test_multiprocessing(self):
+ r = logging.makeLogRecord({})
+ self.assertEqual(r.processName, 'MainProcess')
+ try:
+ import multiprocessing as mp
+ r = logging.makeLogRecord({})
+ self.assertEqual(r.processName, mp.current_process().name)
+ except ImportError:
+ pass
+
+ def test_optional(self):
+ r = logging.makeLogRecord({})
+ NOT_NONE = self.assertIsNotNone
+ if threading:
+ NOT_NONE(r.thread)
+ NOT_NONE(r.threadName)
+ NOT_NONE(r.process)
+ NOT_NONE(r.processName)
+ log_threads = logging.logThreads
+ log_processes = logging.logProcesses
+ log_multiprocessing = logging.logMultiprocessing
+ try:
+ logging.logThreads = False
+ logging.logProcesses = False
+ logging.logMultiprocessing = False
+ r = logging.makeLogRecord({})
+ NONE = self.assertIsNone
+ NONE(r.thread)
+ NONE(r.threadName)
+ NONE(r.process)
+ NONE(r.processName)
+ finally:
+ logging.logThreads = log_threads
+ logging.logProcesses = log_processes
+ logging.logMultiprocessing = log_multiprocessing
+
+class BasicConfigTest(unittest.TestCase):
+
+ """Test suite for logging.basicConfig."""
+
+ def setUp(self):
+ super(BasicConfigTest, self).setUp()
+ self.handlers = logging.root.handlers
+ self.saved_handlers = logging._handlers.copy()
+ self.saved_handler_list = logging._handlerList[:]
+ self.original_logging_level = logging.root.level
+ self.addCleanup(self.cleanup)
+ logging.root.handlers = []
+
+ def tearDown(self):
+ for h in logging.root.handlers[:]:
+ logging.root.removeHandler(h)
+ h.close()
+ super(BasicConfigTest, self).tearDown()
+
+ def cleanup(self):
+ setattr(logging.root, 'handlers', self.handlers)
+ logging._handlers.clear()
+ logging._handlers.update(self.saved_handlers)
+ logging._handlerList[:] = self.saved_handler_list
+ logging.root.level = self.original_logging_level
+
+ def test_no_kwargs(self):
+ logging.basicConfig()
+
+ # handler defaults to a StreamHandler to sys.stderr
+ self.assertEqual(len(logging.root.handlers), 1)
+ handler = logging.root.handlers[0]
+ self.assertIsInstance(handler, logging.StreamHandler)
+ self.assertEqual(handler.stream, sys.stderr)
+
+ formatter = handler.formatter
+ # format defaults to logging.BASIC_FORMAT
+ self.assertEqual(formatter._style._fmt, logging.BASIC_FORMAT)
+ # datefmt defaults to None
+ self.assertIsNone(formatter.datefmt)
+ # style defaults to %
+ self.assertIsInstance(formatter._style, logging.PercentStyle)
+
+        # level is not explicitly set
+ self.assertEqual(logging.root.level, self.original_logging_level)
+
+ def test_filename(self):
+ logging.basicConfig(filename='test.log')
+
+ self.assertEqual(len(logging.root.handlers), 1)
+ handler = logging.root.handlers[0]
+ self.assertIsInstance(handler, logging.FileHandler)
+
+ expected = logging.FileHandler('test.log', 'a')
+ self.addCleanup(expected.close)
+ self.assertEqual(handler.stream.mode, expected.stream.mode)
+ self.assertEqual(handler.stream.name, expected.stream.name)
+
+ def test_filemode(self):
+ logging.basicConfig(filename='test.log', filemode='wb')
+
+ handler = logging.root.handlers[0]
+ expected = logging.FileHandler('test.log', 'wb')
+ self.addCleanup(expected.close)
+ self.assertEqual(handler.stream.mode, expected.stream.mode)
+
+ def test_stream(self):
+ stream = io.StringIO()
+ self.addCleanup(stream.close)
+ logging.basicConfig(stream=stream)
+
+ self.assertEqual(len(logging.root.handlers), 1)
+ handler = logging.root.handlers[0]
+ self.assertIsInstance(handler, logging.StreamHandler)
+ self.assertEqual(handler.stream, stream)
+
+ def test_format(self):
+ logging.basicConfig(format='foo')
+
+ formatter = logging.root.handlers[0].formatter
+ self.assertEqual(formatter._style._fmt, 'foo')
+
+ def test_datefmt(self):
+ logging.basicConfig(datefmt='bar')
+
+ formatter = logging.root.handlers[0].formatter
+ self.assertEqual(formatter.datefmt, 'bar')
+
+ def test_style(self):
+ logging.basicConfig(style='$')
+
+ formatter = logging.root.handlers[0].formatter
+ self.assertIsInstance(formatter._style, logging.StringTemplateStyle)
+
+ def test_level(self):
+ old_level = logging.root.level
+ self.addCleanup(logging.root.setLevel, old_level)
+
+ logging.basicConfig(level=57)
+ self.assertEqual(logging.root.level, 57)
+ # Test that second call has no effect
+ logging.basicConfig(level=58)
+ self.assertEqual(logging.root.level, 57)
+
+ def test_incompatible(self):
+ assertRaises = self.assertRaises
+ handlers = [logging.StreamHandler()]
+ stream = sys.stderr
+ assertRaises(ValueError, logging.basicConfig, filename='test.log',
+ stream=stream)
+ assertRaises(ValueError, logging.basicConfig, filename='test.log',
+ handlers=handlers)
+ assertRaises(ValueError, logging.basicConfig, stream=stream,
+ handlers=handlers)
+
+ def test_handlers(self):
+ handlers = [
+ logging.StreamHandler(),
+ logging.StreamHandler(sys.stdout),
+ logging.StreamHandler(),
+ ]
+ f = logging.Formatter()
+ handlers[2].setFormatter(f)
+ logging.basicConfig(handlers=handlers)
+ self.assertIs(handlers[0], logging.root.handlers[0])
+ self.assertIs(handlers[1], logging.root.handlers[1])
+ self.assertIs(handlers[2], logging.root.handlers[2])
+ self.assertIsNotNone(handlers[0].formatter)
+ self.assertIsNotNone(handlers[1].formatter)
+ self.assertIs(handlers[2].formatter, f)
+ self.assertIs(handlers[0].formatter, handlers[1].formatter)
+
+ def _test_log(self, method, level=None):
+ # logging.root has no handlers so basicConfig should be called
+ called = []
+
+ old_basic_config = logging.basicConfig
+ def my_basic_config(*a, **kw):
+ old_basic_config()
+ old_level = logging.root.level
+ logging.root.setLevel(100) # avoid having messages in stderr
+ self.addCleanup(logging.root.setLevel, old_level)
+ called.append((a, kw))
+
+ patch(self, logging, 'basicConfig', my_basic_config)
+
+ log_method = getattr(logging, method)
+ if level is not None:
+ log_method(level, "test me")
+ else:
+ log_method("test me")
+
+ # basicConfig was called with no arguments
+ self.assertEqual(called, [((), {})])
+
+ def test_log(self):
+ self._test_log('log', logging.WARNING)
+
+ def test_debug(self):
+ self._test_log('debug')
+
+ def test_info(self):
+ self._test_log('info')
+
+ def test_warning(self):
+ self._test_log('warning')
+
+ def test_error(self):
+ self._test_log('error')
+
+ def test_critical(self):
+ self._test_log('critical')
+
+
+class LoggerAdapterTest(unittest.TestCase):
+
+ def setUp(self):
+ super(LoggerAdapterTest, self).setUp()
+ old_handler_list = logging._handlerList[:]
+
+ self.recording = RecordingHandler()
+ self.logger = logging.root
+ self.logger.addHandler(self.recording)
+ self.addCleanup(self.logger.removeHandler, self.recording)
+ self.addCleanup(self.recording.close)
+
+ def cleanup():
+ logging._handlerList[:] = old_handler_list
+
+ self.addCleanup(cleanup)
+ self.addCleanup(logging.shutdown)
+ self.adapter = logging.LoggerAdapter(logger=self.logger, extra=None)
+
+ def test_exception(self):
+ msg = 'testing exception: %r'
+ exc = None
+ try:
+ 1 / 0
+ except ZeroDivisionError as e:
+ exc = e
+ self.adapter.exception(msg, self.recording)
+
+ self.assertEqual(len(self.recording.records), 1)
+ record = self.recording.records[0]
+ self.assertEqual(record.levelno, logging.ERROR)
+ self.assertEqual(record.msg, msg)
+ self.assertEqual(record.args, (self.recording,))
+ self.assertEqual(record.exc_info,
+ (exc.__class__, exc, exc.__traceback__))
+
+ def test_critical(self):
+ msg = 'critical test! %r'
+ self.adapter.critical(msg, self.recording)
+
+ self.assertEqual(len(self.recording.records), 1)
+ record = self.recording.records[0]
+ self.assertEqual(record.levelno, logging.CRITICAL)
+ self.assertEqual(record.msg, msg)
+ self.assertEqual(record.args, (self.recording,))
+
+ def test_is_enabled_for(self):
+ old_disable = self.adapter.logger.manager.disable
+ self.adapter.logger.manager.disable = 33
+ self.addCleanup(setattr, self.adapter.logger.manager, 'disable',
+ old_disable)
+ self.assertFalse(self.adapter.isEnabledFor(32))
+
+ def test_has_handlers(self):
+ self.assertTrue(self.adapter.hasHandlers())
+
+ for handler in self.logger.handlers:
+ self.logger.removeHandler(handler)
+
+ self.assertFalse(self.logger.hasHandlers())
+ self.assertFalse(self.adapter.hasHandlers())
+
+
+class LoggerTest(BaseTest):
+
+ def setUp(self):
+ super(LoggerTest, self).setUp()
+ self.recording = RecordingHandler()
+ self.logger = logging.Logger(name='blah')
+ self.logger.addHandler(self.recording)
+ self.addCleanup(self.logger.removeHandler, self.recording)
+ self.addCleanup(self.recording.close)
+ self.addCleanup(logging.shutdown)
+
+ def test_set_invalid_level(self):
+ self.assertRaises(TypeError, self.logger.setLevel, object())
+
+ def test_exception(self):
+ msg = 'testing exception: %r'
+ exc = None
+ try:
+ 1 / 0
+ except ZeroDivisionError as e:
+ exc = e
+ self.logger.exception(msg, self.recording)
+
+ self.assertEqual(len(self.recording.records), 1)
+ record = self.recording.records[0]
+ self.assertEqual(record.levelno, logging.ERROR)
+ self.assertEqual(record.msg, msg)
+ self.assertEqual(record.args, (self.recording,))
+ self.assertEqual(record.exc_info,
+ (exc.__class__, exc, exc.__traceback__))
+
+ def test_log_invalid_level_with_raise(self):
+ old_raise = logging.raiseExceptions
+        self.addCleanup(setattr, logging, 'raiseExceptions', old_raise)
+
+ logging.raiseExceptions = True
+ self.assertRaises(TypeError, self.logger.log, '10', 'test message')
+
+ def test_log_invalid_level_no_raise(self):
+ old_raise = logging.raiseExceptions
+        self.addCleanup(setattr, logging, 'raiseExceptions', old_raise)
+
+ logging.raiseExceptions = False
+ self.logger.log('10', 'test message') # no exception happens
+
+ def test_find_caller_with_stack_info(self):
+ called = []
+ patch(self, logging.traceback, 'print_stack',
+ lambda f, file: called.append(file.getvalue()))
+
+ self.logger.findCaller(stack_info=True)
+
+ self.assertEqual(len(called), 1)
+ self.assertEqual('Stack (most recent call last):\n', called[0])
+
+ def test_make_record_with_extra_overwrite(self):
+ name = 'my record'
+ level = 13
+ fn = lno = msg = args = exc_info = func = sinfo = None
+ rv = logging._logRecordFactory(name, level, fn, lno, msg, args,
+ exc_info, func, sinfo)
+
+ for key in ('message', 'asctime') + tuple(rv.__dict__.keys()):
+ extra = {key: 'some value'}
+ self.assertRaises(KeyError, self.logger.makeRecord, name, level,
+ fn, lno, msg, args, exc_info,
+ extra=extra, sinfo=sinfo)
+
+ def test_make_record_with_extra_no_overwrite(self):
+ name = 'my record'
+ level = 13
+ fn = lno = msg = args = exc_info = func = sinfo = None
+ extra = {'valid_key': 'some value'}
+ result = self.logger.makeRecord(name, level, fn, lno, msg, args,
+ exc_info, extra=extra, sinfo=sinfo)
+ self.assertIn('valid_key', result.__dict__)
+
+ def test_has_handlers(self):
+ self.assertTrue(self.logger.hasHandlers())
+
+ for handler in self.logger.handlers:
+ self.logger.removeHandler(handler)
+ self.assertFalse(self.logger.hasHandlers())
+
+ def test_has_handlers_no_propagate(self):
+ child_logger = logging.getLogger('blah.child')
+ child_logger.propagate = False
+ self.assertFalse(child_logger.hasHandlers())
+
+ def test_is_enabled_for(self):
+ old_disable = self.logger.manager.disable
+ self.logger.manager.disable = 23
+ self.addCleanup(setattr, self.logger.manager, 'disable', old_disable)
+ self.assertFalse(self.logger.isEnabledFor(22))
+
+ def test_root_logger_aliases(self):
+ root = logging.getLogger()
+ self.assertIs(root, logging.root)
+ self.assertIs(root, logging.getLogger(None))
+ self.assertIs(root, logging.getLogger(''))
+ self.assertIs(root, logging.getLogger('foo').root)
+ self.assertIs(root, logging.getLogger('foo.bar').root)
+ self.assertIs(root, logging.getLogger('foo').parent)
+
+ self.assertIsNot(root, logging.getLogger('\0'))
+ self.assertIsNot(root, logging.getLogger('foo.bar').parent)
+
+ def test_invalid_names(self):
+ self.assertRaises(TypeError, logging.getLogger, any)
+ self.assertRaises(TypeError, logging.getLogger, b'foo')
+
+
class BaseFileTest(BaseTest):
"Base class for handler tests that write log files"
finally:
fp.close()
+ @unittest.skipUnless(hasattr(posix, 'truncate'), "test needs posix.truncate()")
+ def test_truncate(self):
+ with open(support.TESTFN, 'w') as fp:
+ fp.write('test')
+ fp.flush()
+ posix.truncate(support.TESTFN, 0)
+
+ @unittest.skipUnless(hasattr(posix, 'fexecve'), "test needs posix.fexecve()")
+ @unittest.skipUnless(hasattr(os, 'fork'), "test needs os.fork()")
+ @unittest.skipUnless(hasattr(os, 'waitpid'), "test needs os.waitpid()")
+ def test_fexecve(self):
+ fp = os.open(sys.executable, os.O_RDONLY)
+ try:
+ pid = os.fork()
+ if pid == 0:
+ os.chdir(os.path.split(sys.executable)[0])
+ posix.fexecve(fp, [sys.executable, '-c', 'pass'], os.environ)
+ else:
+ self.assertEqual(os.waitpid(pid, 0), (pid, 0))
+ finally:
+ os.close(fp)
+
+ @unittest.skipUnless(hasattr(posix, 'waitid'), "test needs posix.waitid()")
+ @unittest.skipUnless(hasattr(os, 'fork'), "test needs os.fork()")
+ def test_waitid(self):
+ pid = os.fork()
+ if pid == 0:
+ os.chdir(os.path.split(sys.executable)[0])
+ posix.execve(sys.executable, [sys.executable, '-c', 'pass'], os.environ)
+ else:
+ res = posix.waitid(posix.P_PID, pid, posix.WEXITED)
+ self.assertEqual(pid, res.si_pid)
+
+ @unittest.skipUnless(hasattr(posix, 'lockf'), "test needs posix.lockf()")
+ def test_lockf(self):
+ fd = os.open(support.TESTFN, os.O_WRONLY | os.O_CREAT)
+ try:
+ os.write(fd, b'test')
+ os.lseek(fd, 0, os.SEEK_SET)
+ posix.lockf(fd, posix.F_LOCK, 4)
+ # section is locked
+ posix.lockf(fd, posix.F_ULOCK, 4)
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'pread'), "test needs posix.pread()")
+ def test_pread(self):
+ fd = os.open(support.TESTFN, os.O_RDWR | os.O_CREAT)
+ try:
+ os.write(fd, b'test')
+ os.lseek(fd, 0, os.SEEK_SET)
+ self.assertEqual(b'es', posix.pread(fd, 2, 1))
+            # the first pread() shouldn't disturb the file offset
+ self.assertEqual(b'te', posix.read(fd, 2))
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'pwrite'), "test needs posix.pwrite()")
+ def test_pwrite(self):
+ fd = os.open(support.TESTFN, os.O_RDWR | os.O_CREAT)
+ try:
+ os.write(fd, b'test')
+ os.lseek(fd, 0, os.SEEK_SET)
+ posix.pwrite(fd, b'xx', 1)
+ self.assertEqual(b'txxt', posix.read(fd, 4))
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'posix_fallocate'),
+ "test needs posix.posix_fallocate()")
+ def test_posix_fallocate(self):
+ fd = os.open(support.TESTFN, os.O_WRONLY | os.O_CREAT)
+ try:
+ posix.posix_fallocate(fd, 0, 10)
+ except OSError as inst:
+ # issue10812, ZFS doesn't appear to support posix_fallocate,
+ # so skip Solaris-based since they are likely to have ZFS.
+ if inst.errno != errno.EINVAL or not sys.platform.startswith("sunos"):
+ raise
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'posix_fadvise'),
+ "test needs posix.posix_fadvise()")
+ def test_posix_fadvise(self):
+ fd = os.open(support.TESTFN, os.O_RDONLY)
+ try:
+ posix.posix_fadvise(fd, 0, 0, posix.POSIX_FADV_WILLNEED)
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'futimes'), "test needs posix.futimes()")
+ def test_futimes(self):
+ now = time.time()
+ fd = os.open(support.TESTFN, os.O_RDONLY)
+ try:
+ posix.futimes(fd, None)
+ posix.futimes(fd)
+ self.assertRaises(TypeError, posix.futimes, fd, (None, None))
+ self.assertRaises(TypeError, posix.futimes, fd, (now, None))
+ self.assertRaises(TypeError, posix.futimes, fd, (None, now))
+ posix.futimes(fd, (int(now), int(now)))
+ posix.futimes(fd, (now, now))
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'lutimes'), "test needs posix.lutimes()")
+ def test_lutimes(self):
+ now = time.time()
+ posix.lutimes(support.TESTFN, None)
+ self.assertRaises(TypeError, posix.lutimes, support.TESTFN, (None, None))
+ self.assertRaises(TypeError, posix.lutimes, support.TESTFN, (now, None))
+ self.assertRaises(TypeError, posix.lutimes, support.TESTFN, (None, now))
+ posix.lutimes(support.TESTFN, (int(now), int(now)))
+ posix.lutimes(support.TESTFN, (now, now))
+ posix.lutimes(support.TESTFN)
+
+ @unittest.skipUnless(hasattr(posix, 'futimens'), "test needs posix.futimens()")
+ def test_futimens(self):
+ now = time.time()
+ fd = os.open(support.TESTFN, os.O_RDONLY)
+ try:
+ self.assertRaises(TypeError, posix.futimens, fd, (None, None), (None, None))
+ self.assertRaises(TypeError, posix.futimens, fd, (now, 0), None)
+ self.assertRaises(TypeError, posix.futimens, fd, None, (now, 0))
+ posix.futimens(fd, (int(now), int((now - int(now)) * 1e9)),
+ (int(now), int((now - int(now)) * 1e9)))
+ posix.futimens(fd)
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'writev'), "test needs posix.writev()")
+ def test_writev(self):
+ fd = os.open(support.TESTFN, os.O_RDWR | os.O_CREAT)
+ try:
+ os.writev(fd, (b'test1', b'tt2', b't3'))
+ os.lseek(fd, 0, os.SEEK_SET)
+ self.assertEqual(b'test1tt2t3', posix.read(fd, 10))
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'readv'), "test needs posix.readv()")
+ def test_readv(self):
+ fd = os.open(support.TESTFN, os.O_RDWR | os.O_CREAT)
+ try:
+ os.write(fd, b'test1tt2t3')
+ os.lseek(fd, 0, os.SEEK_SET)
+ buf = [bytearray(i) for i in [5, 3, 2]]
+ self.assertEqual(posix.readv(fd, buf), 10)
+ self.assertEqual([b'test1', b'tt2', b't3'], [bytes(i) for i in buf])
+ finally:
+ os.close(fd)
+
def test_dup(self):
if hasattr(posix, 'dup'):
fp = open(support.TESTFN)