Upgrade distlib to 0.2.0
dstufft committed Dec 17, 2014
1 parent 32e729f commit 320a07f
Showing 17 changed files with 268 additions and 90 deletions.
3 changes: 3 additions & 0 deletions CHANGES.txt
@@ -120,6 +120,9 @@
* Reduce the verbosity of the pip command by default. (:pull:`2175`,
:pull:`2177`, :pull:`2178`)

* Fixed :issue:`2031` - Respect sys.executable on OSX when installing from
Wheels.


**1.5.6 (2014-05-16)**

2 changes: 1 addition & 1 deletion pip/_vendor/distlib/__init__.py
@@ -6,7 +6,7 @@
#
import logging

__version__ = '0.1.9'
__version__ = '0.2.0'

class DistlibException(Exception):
pass
72 changes: 55 additions & 17 deletions pip/_vendor/distlib/compat.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013 Vinay Sajip.
# Copyright (C) 2013-2014 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
@@ -86,35 +86,73 @@ class CertificateError(ValueError):
pass


def _dnsname_to_pat(dn):
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
http://tools.ietf.org/html/rfc6125#section-6.4.3
"""
pats = []
for frag in dn.split(r'.'):
if frag == '*':
# When '*' is a fragment by itself, it matches a non-empty
# dotless fragment.
pats.append('[^.]+')
else:
# Otherwise, '*' matches any dotless fragment.
frag = re.escape(frag)
pats.append(frag.replace(r'\*', '[^.]*'))
return re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
if not dn:
return False

parts = dn.split('.')
leftmost, remainder = parts[0], parts[1:]

wildcards = leftmost.count('*')
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
# than one wildcard per fragment. A survey of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
"too many wildcards in certificate DNS name: " + repr(dn))

# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()

# RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which
# the wildcard character comprises a label other than the left-most label.
if leftmost == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
# RFC 6125, section 6.4.3, subitem 3.
# The client SHOULD NOT attempt to match a presented identifier
# where the wildcard character is embedded within an A-label or
# U-label of an internationalized domain name.
pats.append(re.escape(leftmost))
else:
# Otherwise, '*' matches any dotless string, e.g. www*
pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))

# add the remaining fragments, ignore any wildcards
for frag in remainder:
pats.append(re.escape(frag))

pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
return pat.match(hostname)
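
Not part of the commit: a rough sketch of how the new _dnsname_match
behaves, assuming the vendored module imports as
pip._vendor.distlib.compat (inputs are hypothetical).

from pip._vendor.distlib.compat import CertificateError, _dnsname_match

# A lone '*' label matches exactly one non-empty, dotless label.
assert _dnsname_match('*.example.com', 'www.example.com')
assert not _dnsname_match('*.example.com', 'a.b.example.com')

# Wildcards are not honoured when either side involves an IDNA A-label
# (xn-- prefix); the pattern is then treated literally and fails here.
assert not _dnsname_match('x*.example.com', 'xn--foo.example.com')

# More than max_wildcards wildcards in the left-most label is rejected.
try:
    _dnsname_match('a*b*.example.com', 'ab.example.com')
except CertificateError:
    pass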


def match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 rules
are mostly followed, but IP addresses are not accepted for *hostname*.
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
rules are followed, but IP addresses are not accepted for *hostname*.
CertificateError is raised on failure. On success, the function
returns nothing.
"""
if not cert:
raise ValueError("empty or no certificate")
raise ValueError("empty or no certificate, match_hostname needs a "
"SSL socket or SSL context with either "
"CERT_OPTIONAL or CERT_REQUIRED")
dnsnames = []
san = cert.get('subjectAltName', ())
for key, value in san:
if key == 'DNS':
if _dnsname_to_pat(value).match(hostname):
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if not dnsnames:
@@ -125,7 +163,7 @@ def match_hostname(cert, hostname):
# XXX according to RFC 2818, the most specific Common Name
# must be used.
if key == 'commonName':
if _dnsname_to_pat(value).match(hostname):
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if len(dnsnames) > 1:
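
Also not part of the commit: hypothetical use of the updated
match_hostname with a cert dict shaped like the output of
SSLSocket.getpeercert().

from pip._vendor.distlib.compat import CertificateError, match_hostname

cert = {'subjectAltName': (('DNS', '*.python.org'), ('DNS', 'python.org'))}
match_hostname(cert, 'www.python.org')   # returns None on success

try:
    match_hostname(cert, 'www.example.org')
except CertificateError as exc:
    print(exc)  # hostname doesn't match either subjectAltName entry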
6 changes: 4 additions & 2 deletions pip/_vendor/distlib/database.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2013 The Python Software Foundation.
# Copyright (C) 2012-2014 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""PEP 376 implementation."""
@@ -334,6 +334,8 @@ def __init__(self, metadata):
self.digest = None
self.extras = None # additional features requested
self.context = None # environment marker overrides
self.download_urls = set()
self.digests = {}

@property
def source_url(self):
@@ -925,9 +927,9 @@ def parse_requires_path(req_path):
requires = None
elif path.endswith('.egg-info'):
if os.path.isdir(path):
path = os.path.join(path, 'PKG-INFO')
req_path = os.path.join(path, 'requires.txt')
requires = parse_requires_path(req_path)
path = os.path.join(path, 'PKG-INFO')
metadata = Metadata(path=path, scheme='legacy')
else:
raise DistlibException('path must end with .egg-info or .egg, '
73 changes: 56 additions & 17 deletions pip/_vendor/distlib/locators.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2013 Vinay Sajip.
# Copyright (C) 2012-2014 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
@@ -290,9 +290,9 @@ def _get_digest(self, info):

def _update_version_data(self, result, info):
"""
Update a result dictionary (the final result from _get_project) with a dictionary for a
specific version, whih typically holds information gleaned from a filename or URL for an
archive for the distribution.
Update a result dictionary (the final result from _get_project) with a
dictionary for a specific version, which typically holds information
gleaned from a filename or URL for an archive for the distribution.
"""
name = info.pop('name')
version = info.pop('version')
@@ -302,9 +302,12 @@ def _update_version_data(self, result, info):
else:
dist = make_dist(name, version, scheme=self.scheme)
md = dist.metadata
dist.digest = self._get_digest(info)
dist.digest = digest = self._get_digest(info)
url = info['url']
result['digests'][url] = digest
if md.source_url != info['url']:
md.source_url = self.prefer_url(md.source_url, info['url'])
md.source_url = self.prefer_url(md.source_url, url)
result['urls'].setdefault(version, set()).add(url)
dist.locator = self
result[version] = dist

@@ -350,9 +353,18 @@ def locate(self, requirement, prereleases=False):
slist = sorted(slist, key=scheme.key)
if slist:
logger.debug('sorted list: %s', slist)
result = versions[slist[-1]]
if result and r.extras:
result.extras = r.extras
version = slist[-1]
result = versions[version]
if result:
if r.extras:
result.extras = r.extras
result.download_urls = versions.get('urls', {}).get(version, set())
d = {}
sd = versions.get('digests', {})
for url in result.download_urls:
if url in sd:
d[url] = sd[url]
result.digests = d
self.matcher = None
return result
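
Not part of the commit: with these changes, every _get_project result
carries 'urls' and 'digests' keys alongside the per-version
Distribution entries, and locate() copies the matching subset onto the
distribution it returns. A hypothetical sketch (performs network
access against PyPI via the default locator):

from pip._vendor.distlib.locators import locate

dist = locate('requests')
if dist is not None:
    print(dist.download_urls)  # set of archive URLs for the chosen version
    print(dist.digests)        # {url: digest} restricted to those URLs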

@@ -380,7 +392,7 @@ def get_distribution_names(self):
return set(self.client.list_packages())

def _get_project(self, name):
result = {}
result = {'urls': {}, 'digests': {}}
versions = self.client.package_releases(name, True)
for v in versions:
urls = self.client.release_urls(name, v)
@@ -398,12 +410,17 @@ def _get_project(self, name):
dist.digest = self._get_digest(info)
dist.locator = self
result[v] = dist
for info in urls:
url = info['url']
digest = self._get_digest(info)
result['urls'].setdefault(v, set()).add(url)
result['digests'][url] = digest
return result

class PyPIJSONLocator(Locator):
"""
This locator uses PyPI's JSON interface. It's very limited in functionality
nad probably not worth using.
and probably not worth using.
"""
def __init__(self, url, **kwargs):
super(PyPIJSONLocator, self).__init__(**kwargs)
@@ -416,7 +433,7 @@ def get_distribution_names(self):
raise NotImplementedError('Not available from this locator')

def _get_project(self, name):
result = {}
result = {'urls': {}, 'digests': {}}
url = urljoin(self.base_url, '%s/json' % quote(name))
try:
resp = self.opener.open(url)
@@ -437,6 +454,10 @@ def _get_project(self, name):
dist.digest = self._get_digest(info)
dist.locator = self
result[md.version] = dist
for info in urls:
url = info['url']
result['urls'].setdefault(md.version, set()).add(url)
result['digests'][url] = digest
except Exception as e:
logger.exception('JSON fetch failed: %s', e)
return result
@@ -567,7 +588,7 @@ def _wait_threads(self):
self._threads = []

def _get_project(self, name):
result = {}
result = {'urls': {}, 'digests': {}}
with self._gplock:
self.result = result
self.project_name = name
@@ -774,7 +795,7 @@ def should_include(self, filename, parent):
return filename.endswith(self.downloadable_extensions)

def _get_project(self, name):
result = {}
result = {'urls': {}, 'digests': {}}
for root, dirs, files in os.walk(self.base_dir):
for fn in files:
if self.should_include(fn, root):
@@ -822,7 +843,7 @@ def get_distribution_names(self):
raise NotImplementedError('Not available from this locator')

def _get_project(self, name):
result = {}
result = {'urls': {}, 'digests': {}}
data = get_project_data(name)
if data:
for info in data.get('files', []):
@@ -843,6 +864,7 @@ def _get_project(self, name):
md.dependencies = info.get('requirements', {})
dist.exports = info.get('exports', {})
result[dist.version] = dist
result['urls'].setdefault(dist.version, set()).add(info['url'])
return result

class DistPathLocator(Locator):
@@ -865,7 +887,10 @@ def _get_project(self, name):
if dist is None:
result = {}
else:
result = { dist.version: dist }
result = {
dist.version: dist,
'urls': {dist.version: set([dist.source_url])}
}
return result


@@ -907,7 +932,20 @@ def _get_project(self, name):
d = locator.get_project(name)
if d:
if self.merge:
files = result.get('urls', {})
digests = result.get('digests', {})
# next line could overwrite result['urls'], result['digests']
result.update(d)
df = result.get('urls')
if files and df:
for k, v in files.items():
if k in df:
df[k] |= v
else:
df[k] = v
dd = result.get('digests')
if digests and dd:
dd.update(digests)
else:
# See issue #18. If any dists are found and we're looking
# for specific constraints, we only return something if
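
Not part of the commit: the merge logic above sketched on plain dicts,
with hypothetical URLs. result.update(d) would clobber the 'urls' and
'digests' sub-dicts populated by an earlier locator, so the per-version
URL sets are unioned and the saved digests re-applied afterwards.

result = {'urls': {'1.0': {'http://a/p-1.0.zip'}},
          'digests': {'http://a/p-1.0.zip': ('md5', '11')}}
d = {'urls': {'1.0': {'http://b/p-1.0.zip'}},
     'digests': {'http://b/p-1.0.zip': ('md5', '22')}}

files, digests = result['urls'], result['digests']
result.update(d)             # overwrites result['urls'] and result['digests']
df = result['urls']
for k, v in files.items():
    df[k] = (df[k] | v) if k in df else v
result['digests'].update(digests)
assert result['urls']['1.0'] == {'http://a/p-1.0.zip', 'http://b/p-1.0.zip'}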
@@ -1071,7 +1109,8 @@ def try_to_replace(self, provider, other, problems):
unmatched.add(s)
if unmatched:
# can't replace other with provider
problems.add(('cantreplace', provider, other, unmatched))
problems.add(('cantreplace', provider, other,
frozenset(unmatched)))
result = False
else:
# can replace other with provider
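
Not part of the commit: why the frozenset conversion matters. problems
is itself a set, and a tuple containing a plain (mutable, unhashable)
set cannot be added to one.

problems = set()
unmatched = {'spam (>= 1.0)'}
try:
    problems.add(('cantreplace', 'provider', 'other', unmatched))
except TypeError as exc:
    print(exc)  # unhashable type: 'set'
problems.add(('cantreplace', 'provider', 'other', frozenset(unmatched)))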
25 changes: 14 additions & 11 deletions pip/_vendor/distlib/manifest.py
@@ -147,9 +147,10 @@ def process_directive(self, directive):

elif action == 'exclude':
for pattern in patterns:
if not self._exclude_pattern(pattern, anchor=True):
logger.warning('no previously-included files '
'found matching %r', pattern)
found = self._exclude_pattern(pattern, anchor=True)
#if not found:
# logger.warning('no previously-included files '
# 'found matching %r', pattern)

elif action == 'global-include':
for pattern in patterns:
Expand All @@ -159,10 +160,11 @@ def process_directive(self, directive):

elif action == 'global-exclude':
for pattern in patterns:
if not self._exclude_pattern(pattern, anchor=False):
logger.warning('no previously-included files '
'matching %r found anywhere in '
'distribution', pattern)
found = self._exclude_pattern(pattern, anchor=False)
#if not found:
# logger.warning('no previously-included files '
# 'matching %r found anywhere in '
# 'distribution', pattern)

elif action == 'recursive-include':
for pattern in patterns:
Expand All @@ -172,10 +174,11 @@ def process_directive(self, directive):

elif action == 'recursive-exclude':
for pattern in patterns:
if not self._exclude_pattern(pattern, prefix=thedir):
logger.warning('no previously-included files '
'matching %r found under directory %r',
pattern, thedir)
found = self._exclude_pattern(pattern, prefix=thedir)
#if not found:
# logger.warning('no previously-included files '
# 'matching %r found under directory %r',
# pattern, thedir)

elif action == 'graft':
if not self._include_pattern(None, prefix=dirpattern):
