1"""setuptools.command.egg_info
2
3Create a distribution's .egg-info directory and contents"""
4
5from distutils.filelist import FileList as _FileList
6from distutils.errors import DistutilsInternalError
7from distutils.util import convert_path
8from distutils import log
9import distutils.errors
10import distutils.filelist
11import os
12import re
13import sys
14import io
15import warnings
16import time
17import collections
18
19from setuptools.extern import six
20from setuptools.extern.six.moves import map
21
22from setuptools import Command
23from setuptools.command.sdist import sdist
24from setuptools.command.sdist import walk_revctrl
25from setuptools.command.setopt import edit_config
26from setuptools.command import bdist_egg
27from pkg_resources import (
28    parse_requirements, safe_name, parse_version,
29    safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
30import setuptools.unicode_utils as unicode_utils
31from setuptools.glob import glob
32
33from setuptools.extern import packaging


def translate_pattern(glob):
    """
    Translate a file path glob like '*.txt' into a regular expression.
    This differs from fnmatch.translate, which allows wildcards to match
    directory separators. It also knows about '**/', which matches any
    number of directories.
    """
    pat = ''

    # This will split on '/' within [character classes]. This is deliberate.
    chunks = glob.split(os.path.sep)

    sep = re.escape(os.sep)
    valid_char = '[^%s]' % (sep,)

    for c, chunk in enumerate(chunks):
        last_chunk = c == len(chunks) - 1

        # Chunks that are a literal ** are globstars. They match anything.
        if chunk == '**':
            if last_chunk:
                # Match anything if this is the last component
                pat += '.*'
            else:
                # Match '(name/)*'
                pat += '(?:%s+%s)*' % (valid_char, sep)
            continue  # Break here as the whole path component has been handled

        # Find any special characters in the remainder
        i = 0
        chunk_len = len(chunk)
        while i < chunk_len:
            char = chunk[i]
            if char == '*':
                # Match any number of name characters
                pat += valid_char + '*'
            elif char == '?':
                # Match a name character
                pat += valid_char
            elif char == '[':
                # Character class
                inner_i = i + 1
                # Skip initial !/] chars
                if inner_i < chunk_len and chunk[inner_i] == '!':
                    inner_i = inner_i + 1
                if inner_i < chunk_len and chunk[inner_i] == ']':
                    inner_i = inner_i + 1

                # Loop till the closing ] is found
                while inner_i < chunk_len and chunk[inner_i] != ']':
                    inner_i = inner_i + 1

                if inner_i >= chunk_len:
                    # Got to the end of the string without finding a closing ]
                    # Do not treat this as a matching group, but as a literal [
                    pat += re.escape(char)
                else:
                    # Grab the insides of the [brackets]
                    inner = chunk[i + 1:inner_i]
                    char_class = ''

                    # Class negation
                    if inner[0] == '!':
                        char_class = '^'
                        inner = inner[1:]

                    char_class += re.escape(inner)
                    pat += '[%s]' % (char_class,)

                    # Skip to the end ]
                    i = inner_i
            else:
                pat += re.escape(char)
            i += 1

        # Join each chunk with the dir separator
        if not last_chunk:
            pat += sep

    pat += r'\Z'
    return re.compile(pat, flags=re.MULTILINE | re.DOTALL)
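
# Illustrative sketch (not part of setuptools): assuming a POSIX os.sep of
# '/', translate_pattern() keeps '*' and '?' within a single path component
# while '**' is allowed to span directories.  Kept as comments so the
# module's import-time behavior is unchanged:
#
#     pat = translate_pattern('docs/**/*.txt')
#     assert pat.match('docs/api/index.txt')        # '**' spans directories
#     assert not pat.match('docs/api/index.rst')    # extension must match
#     assert not translate_pattern('*.txt').match('docs/notes.txt')  # '*' stops at '/'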


class egg_info(Command):
    description = "create a distribution's .egg-info directory"

    user_options = [
        ('egg-base=', 'e', "directory containing .egg-info directories"
                           " (default: top of the source tree)"),
        ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
        ('tag-build=', 'b', "Specify explicit tag to add to version number"),
        ('no-date', 'D', "Don't include date stamp [default]"),
    ]

    boolean_options = ['tag-date']
    negative_opt = {
        'no-date': 'tag-date',
    }

    def initialize_options(self):
        self.egg_name = None
        self.egg_version = None
        self.egg_base = None
        self.egg_info = None
        self.tag_build = None
        self.tag_date = 0
        self.broken_egg_info = False
        self.vtags = None

    ####################################
    # allow the 'tag_svn_revision' to be detected and
    # set, supporting sdists built on older Setuptools.
    @property
    def tag_svn_revision(self):
        pass

    @tag_svn_revision.setter
    def tag_svn_revision(self, value):
        pass
    ####################################

    def save_version_info(self, filename):
        """
        Materialize the value of date into the
        build tag. Install build keys in a deterministic order
        to avoid arbitrary reordering on subsequent builds.
        """
        egg_info = collections.OrderedDict()
        # follow the order these keys would have been added
        # when PYTHONHASHSEED=0
        egg_info['tag_build'] = self.tags()
        egg_info['tag_date'] = 0
        edit_config(filename, dict(egg_info=egg_info))
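
    # Illustrative sketch (not from the setuptools docs): after
    # save_version_info('setup.cfg') with tag_build='.dev' and tag_date
    # left unset, the edited file would carry a section along these lines,
    # with the keys always written in this order:
    #
    #     [egg_info]
    #     tag_build = .dev
    #     tag_date = 0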

    def finalize_options(self):
        self.egg_name = safe_name(self.distribution.get_name())
        self.vtags = self.tags()
        self.egg_version = self.tagged_version()

        parsed_version = parse_version(self.egg_version)

        try:
            is_version = isinstance(parsed_version, packaging.version.Version)
            spec = (
                "%s==%s" if is_version else "%s===%s"
            )
            list(
                parse_requirements(spec % (self.egg_name, self.egg_version))
            )
        except ValueError:
            raise distutils.errors.DistutilsOptionError(
                "Invalid distribution name or version syntax: %s-%s" %
                (self.egg_name, self.egg_version)
            )

        if self.egg_base is None:
            dirs = self.distribution.package_dir
            self.egg_base = (dirs or {}).get('', os.curdir)

        self.ensure_dirname('egg_base')
        self.egg_info = to_filename(self.egg_name) + '.egg-info'
        if self.egg_base != os.curdir:
            self.egg_info = os.path.join(self.egg_base, self.egg_info)
        if '-' in self.egg_name:
            self.check_broken_egg_info()

        # Set package version for the benefit of dumber commands
        # (e.g. sdist, bdist_wininst, etc.)
        #
        self.distribution.metadata.version = self.egg_version

        # If we bootstrapped around the lack of a PKG-INFO, as might be the
        # case in a fresh checkout, make sure that any special tags get added
        # to the version info
        #
        pd = self.distribution._patched_dist
        if pd is not None and pd.key == self.egg_name.lower():
            pd._version = self.egg_version
            pd._parsed_version = parse_version(self.egg_version)
            self.distribution._patched_dist = None

    def write_or_delete_file(self, what, filename, data, force=False):
        """Write `data` to `filename` or delete if empty

        If `data` is non-empty, this routine is the same as ``write_file()``.
        If `data` is empty but not ``None``, this is the same as calling
        ``delete_file(filename)``.  If `data` is ``None``, then this is a
        no-op unless `filename` exists, in which case a warning is issued
        about the orphaned file (if `force` is false), or the file is
        deleted (if `force` is true).
        """
        if data:
            self.write_file(what, filename, data)
        elif os.path.exists(filename):
            if data is None and not force:
                log.warn(
                    "%s not set in setup(), but %s exists", what, filename
                )
                return
            else:
                self.delete_file(filename)

    def write_file(self, what, filename, data):
        """Write `data` to `filename` (if not a dry run) after announcing it

        `what` is used in a log message to identify what is being written
        to the file.
        """
        log.info("writing %s to %s", what, filename)
        if six.PY3:
            data = data.encode("utf-8")
        if not self.dry_run:
            with open(filename, 'wb') as f:
                f.write(data)

    def delete_file(self, filename):
        """Delete `filename` (if not a dry run) after announcing it"""
        log.info("deleting %s", filename)
        if not self.dry_run:
            os.unlink(filename)

    def tagged_version(self):
        version = self.distribution.get_version()
        # egg_info may be called more than once for a distribution,
        # in which case the version string already contains all tags.
        if self.vtags and version.endswith(self.vtags):
            return safe_version(version)
        return safe_version(version + self.vtags)

    def run(self):
        self.mkpath(self.egg_info)
        installer = self.distribution.fetch_build_egg
        for ep in iter_entry_points('egg_info.writers'):
            ep.require(installer=installer)
            writer = ep.resolve()
            writer(self, ep.name, os.path.join(self.egg_info, ep.name))

        # Get rid of native_libs.txt if it was put there by older bdist_egg
        nl = os.path.join(self.egg_info, "native_libs.txt")
        if os.path.exists(nl):
            self.delete_file(nl)

        self.find_sources()

    def tags(self):
        version = ''
        if self.tag_build:
            version += self.tag_build
        if self.tag_date:
            version += time.strftime("-%Y%m%d")
        return version
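
    # Illustrative note (not in setuptools itself): tags() just concatenates
    # the configured pieces.  With tag_build='.dev1' and tag_date=1 on
    # 2005-05-28 it returns '.dev1-20050528'; tagged_version() then appends
    # that to the base version and normalizes the result via safe_version().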

    def find_sources(self):
        """Generate SOURCES.txt manifest file"""
        manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
        mm = manifest_maker(self.distribution)
        mm.manifest = manifest_filename
        mm.run()
        self.filelist = mm.filelist

    def check_broken_egg_info(self):
        bei = self.egg_name + '.egg-info'
        if self.egg_base != os.curdir:
            bei = os.path.join(self.egg_base, bei)
        if os.path.exists(bei):
            log.warn(
                "-" * 78 + '\n'
                "Note: Your current .egg-info directory has a '-' in its name;"
                '\nthis will not work correctly with "setup.py develop".\n\n'
                'Please rename %s to %s to correct this problem.\n' + '-' * 78,
                bei, self.egg_info
            )
            self.broken_egg_info = self.egg_info
            self.egg_info = bei  # make it work for now
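
# Illustrative usage (not part of this module): the egg_info command above is
# what runs for an invocation such as
#
#     python setup.py egg_info --tag-build=.dev --tag-date
#
# which writes <name>.egg-info/ next to setup.py (or under --egg-base) and
# stamps the version with the requested build tag and date.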


class FileList(_FileList):
    # Implementations of the various MANIFEST.in commands

    def process_template_line(self, line):
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words.  'action' is always
        # defined: it's the first word of the line.  Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dir_pattern).
        (action, patterns, dir, dir_pattern) = self._parse_template_line(line)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            self.debug_print("include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.include(pattern):
                    log.warn("warning: no files found matching '%s'", pattern)

        elif action == 'exclude':
            self.debug_print("exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.exclude(pattern):
                    log.warn(("warning: no previously-included files "
                              "found matching '%s'"), pattern)

        elif action == 'global-include':
            self.debug_print("global-include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.global_include(pattern):
                    log.warn(("warning: no files found matching '%s' "
                              "anywhere in distribution"), pattern)

        elif action == 'global-exclude':
            self.debug_print("global-exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.global_exclude(pattern):
                    log.warn(("warning: no previously-included files matching "
                              "'%s' found anywhere in distribution"),
                             pattern)

        elif action == 'recursive-include':
            self.debug_print("recursive-include %s %s" %
                             (dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.recursive_include(dir, pattern):
                    log.warn(("warning: no files found matching '%s' "
                              "under directory '%s'"),
                             pattern, dir)

        elif action == 'recursive-exclude':
            self.debug_print("recursive-exclude %s %s" %
                             (dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.recursive_exclude(dir, pattern):
                    log.warn(("warning: no previously-included files matching "
                              "'%s' found under directory '%s'"),
                             pattern, dir)

        elif action == 'graft':
            self.debug_print("graft " + dir_pattern)
            if not self.graft(dir_pattern):
                log.warn("warning: no directories found matching '%s'",
                         dir_pattern)

        elif action == 'prune':
            self.debug_print("prune " + dir_pattern)
            if not self.prune(dir_pattern):
                log.warn(("no previously-included directories found "
                          "matching '%s'"), dir_pattern)

        else:
            raise DistutilsInternalError(
                "this cannot happen: invalid action '%s'" % action)
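
    # Illustrative sketch (not part of setuptools): a MANIFEST.in template fed
    # line by line through process_template_line() above might read:
    #
    #     include README.rst *.cfg
    #     recursive-include mypkg *.py *.json
    #     graft docs
    #     prune docs/_build
    #     global-exclude *.py[cod]
    #
    # where 'mypkg' and 'docs' stand in for a project's own directories.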

    def _remove_files(self, predicate):
        """
        Remove all files from the file list that match the predicate.
        Return True if any matching files were removed
        """
        found = False
        for i in range(len(self.files) - 1, -1, -1):
            if predicate(self.files[i]):
                self.debug_print(" removing " + self.files[i])
                del self.files[i]
                found = True
        return found

    def include(self, pattern):
        """Include files that match 'pattern'."""
        found = [f for f in glob(pattern) if not os.path.isdir(f)]
        self.extend(found)
        return bool(found)

    def exclude(self, pattern):
        """Exclude files that match 'pattern'."""
        match = translate_pattern(pattern)
        return self._remove_files(match.match)

    def recursive_include(self, dir, pattern):
        """
        Include all files anywhere in 'dir/' that match the pattern.
        """
        full_pattern = os.path.join(dir, '**', pattern)
        found = [f for f in glob(full_pattern, recursive=True)
                 if not os.path.isdir(f)]
        self.extend(found)
        return bool(found)

    def recursive_exclude(self, dir, pattern):
        """
        Exclude any file anywhere in 'dir/' that matches the pattern.
        """
        match = translate_pattern(os.path.join(dir, '**', pattern))
        return self._remove_files(match.match)

    def graft(self, dir):
        """Include all files from 'dir/'."""
        found = [
            item
            for match_dir in glob(dir)
            for item in distutils.filelist.findall(match_dir)
        ]
        self.extend(found)
        return bool(found)

    def prune(self, dir):
        """Filter out files from 'dir/'."""
        match = translate_pattern(os.path.join(dir, '**'))
        return self._remove_files(match.match)

    def global_include(self, pattern):
        """
        Include all files anywhere in the current directory that match the
        pattern. This is very inefficient on large file trees.
        """
        if self.allfiles is None:
            self.findall()
        match = translate_pattern(os.path.join('**', pattern))
        found = [f for f in self.allfiles if match.match(f)]
        self.extend(found)
        return bool(found)

    def global_exclude(self, pattern):
        """
        Exclude all files anywhere that match the pattern.
        """
        match = translate_pattern(os.path.join('**', pattern))
        return self._remove_files(match.match)

    def append(self, item):
        if item.endswith('\r'):  # Fix older sdists built on Windows
            item = item[:-1]
        path = convert_path(item)

        if self._safe_path(path):
            self.files.append(path)

    def extend(self, paths):
        self.files.extend(filter(self._safe_path, paths))

    def _repair(self):
        """
        Replace self.files with only safe paths

        Because some owners of FileList manipulate the underlying
        ``files`` attribute directly, this method must be called to
        repair those paths.
        """
        self.files = list(filter(self._safe_path, self.files))

    def _safe_path(self, path):
        enc_warn = "'%s' not %s encodable -- skipping"

        # To avoid accidental transcoding errors, convert to unicode first
        u_path = unicode_utils.filesys_decode(path)
        if u_path is None:
            log.warn("'%s' in unexpected encoding -- skipping" % path)
            return False

        # Must ensure utf-8 encodability
        utf8_path = unicode_utils.try_encode(u_path, "utf-8")
        if utf8_path is None:
            log.warn(enc_warn, path, 'utf-8')
            return False

        try:
            # accept if either representation exists on the filesystem
            if os.path.exists(u_path) or os.path.exists(utf8_path):
                return True
        # this will catch any encode errors raised while checking u_path
        except UnicodeEncodeError:
            log.warn(enc_warn, path, sys.getfilesystemencoding())


class manifest_maker(sdist):
    template = "MANIFEST.in"

    def initialize_options(self):
        self.use_defaults = 1
        self.prune = 1
        self.manifest_only = 1
        self.force_manifest = 1

    def finalize_options(self):
        pass

    def run(self):
        self.filelist = FileList()
        if not os.path.exists(self.manifest):
            self.write_manifest()  # it must exist so it'll get in the list
        self.add_defaults()
        if os.path.exists(self.template):
            self.read_template()
        self.prune_file_list()
        self.filelist.sort()
        self.filelist.remove_duplicates()
        self.write_manifest()

    def _manifest_normalize(self, path):
        path = unicode_utils.filesys_decode(path)
        return path.replace(os.sep, '/')

    def write_manifest(self):
        """
        Write the file list in 'self.filelist' to the manifest file
        named by 'self.manifest'.
        """
        self.filelist._repair()

        # _repair() guarantees the paths are encodable, but does not convert
        # them to unicode; _manifest_normalize() does that below
        files = [self._manifest_normalize(f) for f in self.filelist.files]
        msg = "writing manifest file '%s'" % self.manifest
        self.execute(write_file, (self.manifest, files), msg)

    def warn(self, msg):
        if not self._should_suppress_warning(msg):
            sdist.warn(self, msg)

    @staticmethod
    def _should_suppress_warning(msg):
        """
        suppress missing-file warnings from sdist
        """
        return re.match(r"standard file .*not found", msg)

    def add_defaults(self):
        sdist.add_defaults(self)
        self.filelist.append(self.template)
        self.filelist.append(self.manifest)
        rcfiles = list(walk_revctrl())
        if rcfiles:
            self.filelist.extend(rcfiles)
        elif os.path.exists(self.manifest):
            self.read_manifest()
        ei_cmd = self.get_finalized_command('egg_info')
        self.filelist.graft(ei_cmd.egg_info)

    def prune_file_list(self):
        build = self.get_finalized_command('build')
        base_dir = self.distribution.get_fullname()
        self.filelist.prune(build.build_base)
        self.filelist.prune(base_dir)
        sep = re.escape(os.sep)
        self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
                                      is_regex=1)


def write_file(filename, contents):
    """Create a file with the specified name and write 'contents' (a
    sequence of strings without line terminators) to it.
    """
    contents = "\n".join(contents)

    # assuming the contents have been vetted for utf-8 encoding
    contents = contents.encode("utf-8")

    with open(filename, "wb") as f:  # always write POSIX-style manifest
        f.write(contents)


def write_pkg_info(cmd, basename, filename):
    log.info("writing %s", filename)
    if not cmd.dry_run:
        metadata = cmd.distribution.metadata
        metadata.version, oldver = cmd.egg_version, metadata.version
        metadata.name, oldname = cmd.egg_name, metadata.name

        try:
            # write unescaped data to PKG-INFO, so older pkg_resources
            # can still parse it
            metadata.write_pkg_info(cmd.egg_info)
        finally:
            metadata.name, metadata.version = oldname, oldver

        safe = getattr(cmd.distribution, 'zip_safe', None)

        bdist_egg.write_safety_flag(cmd.egg_info, safe)


def warn_depends_obsolete(cmd, basename, filename):
    if os.path.exists(filename):
        log.warn(
            "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
            "Use the install_requires/extras_require setup() args instead."
        )


def _write_requirements(stream, reqs):
    lines = yield_lines(reqs or ())
    append_cr = lambda line: line + '\n'
    lines = map(append_cr, lines)
    stream.writelines(lines)


def write_requirements(cmd, basename, filename):
    dist = cmd.distribution
    data = six.StringIO()
    _write_requirements(data, dist.install_requires)
    extras_require = dist.extras_require or {}
    for extra in sorted(extras_require):
        data.write('\n[{extra}]\n'.format(**vars()))
        _write_requirements(data, extras_require[extra])
    cmd.write_or_delete_file("requirements", filename, data.getvalue())
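
# Illustrative sketch (not part of setuptools): given
# install_requires=['six'] and extras_require={'tests': ['pytest']},
# write_requirements() above produces a requires.txt along the lines of:
#
#     six
#
#     [tests]
#     pytest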


def write_setup_requirements(cmd, basename, filename):
    data = io.StringIO()
    _write_requirements(data, cmd.distribution.setup_requires)
    cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())


def write_toplevel_names(cmd, basename, filename):
    pkgs = dict.fromkeys(
        [
            k.split('.', 1)[0]
            for k in cmd.distribution.iter_distribution_names()
        ]
    )
    cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')


def overwrite_arg(cmd, basename, filename):
    write_arg(cmd, basename, filename, True)


def write_arg(cmd, basename, filename, force=False):
    argname = os.path.splitext(basename)[0]
    value = getattr(cmd.distribution, argname, None)
    if value is not None:
        value = '\n'.join(value) + '\n'
    cmd.write_or_delete_file(argname, filename, value, force)


def write_entries(cmd, basename, filename):
    ep = cmd.distribution.entry_points

    if isinstance(ep, six.string_types) or ep is None:
        data = ep
    elif ep is not None:
        data = []
        for section, contents in sorted(ep.items()):
            if not isinstance(contents, six.string_types):
                contents = EntryPoint.parse_group(section, contents)
                contents = '\n'.join(sorted(map(str, contents.values())))
            data.append('[%s]\n%s\n\n' % (section, contents))
        data = ''.join(data)

    cmd.write_or_delete_file('entry points', filename, data, True)
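
# Illustrative sketch (not part of setuptools): an entry_points setting such
# as {'console_scripts': ['mycmd = mypkg.cli:main']} is rendered by
# write_entries() roughly as:
#
#     [console_scripts]
#     mycmd = mypkg.cli:main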


def get_pkg_info_revision():
    """
    Get a -r### off of PKG-INFO Version in case this is an sdist of
    a subversion revision.
    """
    warnings.warn("get_pkg_info_revision is deprecated.", DeprecationWarning)
    if os.path.exists('PKG-INFO'):
        with io.open('PKG-INFO') as f:
            for line in f:
                match = re.match(r"Version:.*-r(\d+)\s*$", line)
                if match:
                    return int(match.group(1))
    return 0