venv/lib/python2.7/site-packages/pip/wheel.py @ 0:d67268158946 draft

planemo upload commit a3f181f5f126803c654b3a66dd4e83a48f7e203b
author bcclaywell
date Mon, 12 Oct 2015 17:43:33 -0400
"""
Support for installing and building the "wheel" binary package format.
"""
from __future__ import absolute_import

import compileall
import csv
import functools
import hashlib
import logging
import os
import re
import shutil
import stat
import sys
import warnings

from base64 import urlsafe_b64encode
from email.parser import Parser

from pip._vendor.six import StringIO

from pip.exceptions import InvalidWheelFilename, UnsupportedWheel
from pip.locations import distutils_scheme
from pip import pep425tags
from pip.utils import (call_subprocess, normalize_path, make_path_relative,
                       captured_stdout)
from pip.utils.logging import indent_log
from pip._vendor.distlib.scripts import ScriptMaker
from pip._vendor import pkg_resources
from pip._vendor.six.moves import configparser


wheel_ext = '.whl'

VERSION_COMPATIBLE = (1, 0)


logger = logging.getLogger(__name__)


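# Note: the "sha256=<unpadded urlsafe base64>" digest format produced by
# rehash() below is the same hash encoding that wheel RECORD files use, so
# its return value can be written straight back into RECORD rows (see the
# RECORD rewriting at the end of move_wheel_files()).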
def rehash(path, algo='sha256', blocksize=1 << 20):
    """Return (hash, length) for path using hashlib.new(algo)"""
    h = hashlib.new(algo)
    length = 0
    with open(path, 'rb') as f:
        block = f.read(blocksize)
        while block:
            length += len(block)
            h.update(block)
            block = f.read(blocksize)
    digest = 'sha256=' + urlsafe_b64encode(
        h.digest()
    ).decode('latin1').rstrip('=')
    return (digest, length)


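# The csv module wants binary-mode files on Python 2 but text-mode files
# opened with newline='' on Python 3; open_for_csv() papers over that
# difference so RECORD can be read and written the same way on both.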
def open_for_csv(name, mode):
    if sys.version_info[0] < 3:
        nl = {}
        bin = 'b'
    else:
        nl = {'newline': ''}
        bin = ''
    return open(name, mode + bin, **nl)


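# Wheels store console scripts with a literal "#!python" placeholder shebang;
# fix_script() rewrites that placeholder to point at the interpreter doing
# the installation.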
def fix_script(path):
    """Replace #!python with #!/path/to/python
    Return True if file was changed."""
    # XXX RECORD hashes will need to be updated
    if os.path.isfile(path):
        with open(path, 'rb') as script:
            firstline = script.readline()
            if not firstline.startswith(b'#!python'):
                return False
            exename = sys.executable.encode(sys.getfilesystemencoding())
            firstline = b'#!' + exename + os.linesep.encode("ascii")
            rest = script.read()
        with open(path, 'wb') as script:
            script.write(firstline)
            script.write(rest)
        return True

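# Matches a wheel's metadata directory, e.g. "pip-7.1.0.dist-info" gives
# name "pip" and ver "7.1.0" (the version is illustrative).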
dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?)
                                \.dist-info$""", re.VERBOSE)


def root_is_purelib(name, wheeldir):
    """
    Return True if the extracted wheel in wheeldir should go into purelib.
    """
    name_folded = name.replace("-", "_")
    for item in os.listdir(wheeldir):
        match = dist_info_re.match(item)
        if match and match.group('name') == name_folded:
            with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel:
                for line in wheel:
                    line = line.lower().rstrip()
                    if line == "root-is-purelib: true":
                        return True
    return False


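# entry_points.txt inside the .dist-info directory is INI-style, for example
# (illustrative names):
#
#   [console_scripts]
#   mytool = mypkg.cli:main
#
# get_entrypoints() returns the console_scripts and gui_scripts sections as
# two dicts mapping script name -> "module:attr" string.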
def get_entrypoints(filename):
    if not os.path.exists(filename):
        return {}, {}

    # This is done because you can pass a string to entry_points wrappers
    # which means that they may or may not be valid INI files. The attempt
    # here is to strip leading and trailing whitespace in order to make them
    # valid INI files.
    with open(filename) as fp:
        data = StringIO()
        for line in fp:
            data.write(line.strip())
            data.write("\n")
        data.seek(0)

    cp = configparser.RawConfigParser()
    cp.readfp(data)

    console = {}
    gui = {}
    if cp.has_section('console_scripts'):
        console = dict(cp.items('console_scripts'))
    if cp.has_section('gui_scripts'):
        gui = dict(cp.items('gui_scripts'))
    return console, gui


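# move_wheel_files() is the wheel install routine: it copies the unpacked
# wheel's contents into the target scheme, rewrites script shebangs,
# generates console/GUI entry-point wrappers via distlib's ScriptMaker, and
# finally rewrites the RECORD file to reflect the installed paths.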
def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
                     pycompile=True, scheme=None, isolated=False):
    """Install a wheel"""

    if not scheme:
        scheme = distutils_scheme(
            name, user=user, home=home, root=root, isolated=isolated
        )

    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}
    changed = set()
    generated = []

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def normpath(src, p):
        return make_path_relative(src, p).replace(os.path.sep, '/')

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

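    # clobber() copies a tree from the unpacked wheel into dest. When called
    # on the wheel root (is_base=True) it skips the <name>.data/ tree (noted
    # in data_dirs and installed into the proper scheme paths later) and
    # records the .dist-info directory in info_dir.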
    def clobber(source, dest, is_base, fixer=None, filter=None):
        if not os.path.exists(dest):  # common for the 'include' path
            os.makedirs(dest)

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (is_base and
                        s.endswith('.dist-info') and
                        # is self.req.project_name case preserving?
                        s.lower().startswith(
                            req.project_name.replace('-', '_').lower())):
                    assert not info_dir, 'Multiple .dist-info directories'
                    info_dir.append(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                if not os.path.exists(destdir):
                    os.makedirs(destdir)

                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)

                # Copy over the metadata for the file, currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))

                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (
                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                    )
                    os.chmod(destfile, permissions)

                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

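    # The subdirectories of <name>.data/ are named after install scheme keys
    # ('scripts', 'data', 'headers', 'purelib', 'platlib'), which is why
    # scheme[subdir] below resolves each one to its destination.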
    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = set(('', ))

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Simplify the script and fix the fact that the default script swallows
    # every single stack trace.
    # See https://bitbucket.org/pypa/distlib/issue/34/
    # See https://bitbucket.org/pypa/distlib/issue/33/
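    # Presumably entry is a distlib ExportEntry, so for a spec like
    # "pkg.module:func" entry.prefix is "pkg.module" and entry.suffix is
    # "func"; those are the values substituted into script_template below.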
    def _get_script_text(entry):
        return maker.script_template % {
            "module": entry.prefix,
            "import_name": entry.suffix.split(".")[0],
            "func": entry.suffix,
        }

    maker._get_script_text = _get_script_text
    maker.script_template = """# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
"""

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata
    # 2.0 is available.
    #
    # To add to the level of hack in this section of code: in order to support
    # ensurepip, this code will look for an ``ENSUREPIP_OPTIONS`` environment
    # variable which controls which versioned scripts get installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this option applies whenever ENSUREPIP_OPTIONS is set to
    #     anything other than altinstall.
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'pip = ' + pip_script
            generated.extend(maker.make(spec))

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            spec = 'pip%s = %s' % (sys.version[:1], pip_script)
            generated.extend(maker.make(spec))

        spec = 'pip%s = %s' % (sys.version[:3], pip_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'easy_install = ' + easy_install_script
            generated.extend(maker.make(spec))

        spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    if len(console) > 0:
        generated.extend(
            maker.make_multiple(['%s = %s' % kv for kv in console.items()])
        )
    if len(gui) > 0:
        generated.extend(
            maker.make_multiple(
                ['%s = %s' % kv for kv in gui.items()],
                {'gui': True}
            )
        )

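    # Rewrite RECORD: each row is (path, hash, size). Paths are remapped from
    # their wheel-relative form to the installed location, hashes are
    # recomputed for any scripts whose #! line was changed, and the generated
    # wrapper scripts are appended with fresh hashes.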
    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            writer = csv.writer(record_out)
            for row in reader:
                row[0] = installed.pop(row[0], row[0])
                if row[0] in changed:
                    row[1], row[2] = rehash(row[0])
                writer.writerow(row)
            for f in generated:
                h, l = rehash(f)
                writer.writerow((f, h, l))
            for f in installed:
                writer.writerow((installed[f], '', ''))
    shutil.move(temp_record, record)


def _unique(fn):
    @functools.wraps(fn)
    def unique(*args, **kw):
        seen = set()
        for item in fn(*args, **kw):
            if item not in seen:
                seen.add(item)
                yield item
    return unique


# TODO: this goes somewhere besides the wheel module
@_unique
def uninstallation_paths(dist):
    """
    Yield all the uninstallation paths for dist based on RECORD-without-.pyc

    Yield paths to all the files in RECORD. For each .py file in RECORD, add
    the .pyc in the same directory.

    UninstallPathSet.add() takes care of the __pycache__ .pyc.
    """
    from pip.utils import FakeFile  # circular import
    r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
    for row in r:
        path = os.path.join(dist.location, row[0])
        yield path
        if path.endswith('.py'):
            dn, fn = os.path.split(path)
            base = fn[:-3]
            path = os.path.join(dn, base + '.pyc')
            yield path


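# The WHEEL metadata file parsed below uses email-header syntax, e.g.
# "Wheel-Version: 1.0"; only the Wheel-Version field is consulted here.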
def wheel_version(source_dir):
    """
    Return the Wheel-Version of an extracted wheel, if possible.

    Otherwise, return False if we couldn't parse / extract it.
    """
    try:
        dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]

        wheel_data = dist.get_metadata('WHEEL')
        wheel_data = Parser().parsestr(wheel_data)

        version = wheel_data['Wheel-Version'].strip()
        version = tuple(map(int, version.split('.')))
        return version
    except:
        return False


def check_compatibility(version, name):
    """
    Raises errors or warns if called with an incompatible Wheel-Version.

    Pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g. 2.0 > 1.1); and warn when
    installing a version only a minor version ahead (e.g. 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    """
    if not version:
        raise UnsupportedWheel(
            "%s is in an unsupported or invalid wheel" % name
        )
    if version[0] > VERSION_COMPATIBLE[0]:
        raise UnsupportedWheel(
            "%s's Wheel-Version (%s) is not compatible with this version "
            "of pip" % (name, '.'.join(map(str, version)))
        )
    elif version > VERSION_COMPATIBLE:
        logger.warning(
            'Installing from a newer Wheel-Version (%s)',
            '.'.join(map(str, version)),
        )


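# Illustrative example of the filename parsing done by Wheel below:
# "simple-0.1-py2.py3-none-any.whl" yields name "simple", version "0.1",
# pyversions ['py2', 'py3'], abis ['none'] and plats ['any'], giving the
# file_tags {('py2', 'none', 'any'), ('py3', 'none', 'any')}.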
class Wheel(object):
    """A wheel file"""

    # TODO: maybe move the install code into this class

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))
        ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE
    )

    def __init__(self, filename):
        """
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        wheel_info = self.wheel_file_re.match(filename)
        if not wheel_info:
            raise InvalidWheelFilename(
                "%s is not a valid wheel filename." % filename
            )
        self.filename = filename
        self.name = wheel_info.group('name').replace('_', '-')
        # we'll assume "_" means "-" due to wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.version = wheel_info.group('ver').replace('_', '-')
        self.pyversions = wheel_info.group('pyver').split('.')
        self.abis = wheel_info.group('abi').split('.')
        self.plats = wheel_info.group('plat').split('.')

        # All the tag combinations from this file
        self.file_tags = set(
            (x, y, z) for x in self.pyversions
            for y in self.abis for z in self.plats
        )

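    # pep425tags.supported_tags is ordered from most to least preferred, so a
    # smaller index from support_index_min() means a better match for the
    # running interpreter.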
    def support_index_min(self, tags=None):
        """
        Return the lowest index that one of the wheel's file_tag combinations
        achieves in the supported_tags list e.g. if there are 8 supported
        tags, and one of the file tags is first in the list, then return 0.
        Returns None if the wheel is not supported.
        """
        if tags is None:  # for mock
            tags = pep425tags.supported_tags
        indexes = [tags.index(c) for c in self.file_tags if c in tags]
        return min(indexes) if indexes else None

    def supported(self, tags=None):
        """Is this wheel supported on this system?"""
        if tags is None:  # for mock
            tags = pep425tags.supported_tags
        return bool(set(tags).intersection(self.file_tags))


class WheelBuilder(object):
    """Build wheels from a RequirementSet."""

    def __init__(self, requirement_set, finder, wheel_dir, build_options=None,
                 global_options=None):
        self.requirement_set = requirement_set
        self.finder = finder
        self.wheel_dir = normalize_path(wheel_dir)
        self.build_options = build_options or []
        self.global_options = global_options or []

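    # _build_one() shells out to "python -c ... bdist_wheel -d <wheel_dir>",
    # using a small exec shim that imports setuptools first (so bdist_wheel
    # is available even if setup.py only imports distutils) and normalizes
    # CRLF line endings in setup.py before exec'ing it.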
    def _build_one(self, req):
        """Build one wheel."""

        base_args = [
            sys.executable, '-c',
            "import setuptools;__file__=%r;"
            "exec(compile(open(__file__).read().replace('\\r\\n', '\\n'), "
            "__file__, 'exec'))" % req.setup_py
        ] + list(self.global_options)

        logger.info('Running setup.py bdist_wheel for %s', req.name)
        logger.info('Destination directory: %s', self.wheel_dir)
        wheel_args = base_args + ['bdist_wheel', '-d', self.wheel_dir] \
            + self.build_options
        try:
            call_subprocess(wheel_args, cwd=req.source_dir, show_stdout=False)
            return True
        except:
            logger.error('Failed building wheel for %s', req.name)
            return False

    def build(self):
        """Build wheels."""

        # unpack and construct the requirement set
        self.requirement_set.prepare_files(self.finder)

        reqset = self.requirement_set.requirements.values()

        buildset = []
        for req in reqset:
            if req.is_wheel:
                logger.info(
                    'Skipping %s, due to already being wheel.', req.name,
                )
            elif req.editable:
                logger.info(
                    'Skipping %s, due to being editable', req.name,
                )
            else:
                buildset.append(req)

        if not buildset:
            return True

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for req in buildset]),
        )
        with indent_log():
            build_success, build_failure = [], []
            for req in buildset:
                if self._build_one(req):
                    build_success.append(req)
                else:
                    build_failure.append(req)

        # notify success/failure
        if build_success:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_success]),
            )
        if build_failure:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failure]),
            )
        # Return True if all builds were successful
        return len(build_failure) == 0