Diffstat (limited to 'Lib/setuptools')
-rw-r--r--  Lib/setuptools/__init__.py | 64
-rwxr-xr-x  Lib/setuptools/archive_util.py | 200
-rwxr-xr-x  Lib/setuptools/cli.exe | bin 6144 -> 0 bytes
-rw-r--r--  Lib/setuptools/command/__init__.py | 19
-rwxr-xr-x  Lib/setuptools/command/alias.py | 79
-rw-r--r--  Lib/setuptools/command/bdist_egg.py | 449
-rwxr-xr-x  Lib/setuptools/command/bdist_rpm.py | 37
-rw-r--r--  Lib/setuptools/command/build_ext.py | 285
-rw-r--r--  Lib/setuptools/command/build_py.py | 192
-rwxr-xr-x  Lib/setuptools/command/develop.py | 116
-rwxr-xr-x  Lib/setuptools/command/easy_install.py | 1555
-rwxr-xr-x  Lib/setuptools/command/egg_info.py | 365
-rw-r--r--  Lib/setuptools/command/install.py | 101
-rwxr-xr-x  Lib/setuptools/command/install_egg_info.py | 81
-rw-r--r--  Lib/setuptools/command/install_lib.py | 76
-rwxr-xr-x  Lib/setuptools/command/install_scripts.py | 56
-rwxr-xr-x  Lib/setuptools/command/rotate.py | 57
-rwxr-xr-x  Lib/setuptools/command/saveopts.py | 24
-rwxr-xr-x  Lib/setuptools/command/sdist.py | 163
-rwxr-xr-x  Lib/setuptools/command/setopt.py | 158
-rw-r--r--  Lib/setuptools/command/test.py | 119
-rwxr-xr-x  Lib/setuptools/command/upload.py | 178
-rw-r--r--  Lib/setuptools/depends.py | 239
-rw-r--r--  Lib/setuptools/dist.py | 798
-rw-r--r--  Lib/setuptools/extension.py | 35
-rwxr-xr-x  Lib/setuptools/gui.exe | bin 6144 -> 0 bytes
-rwxr-xr-x  Lib/setuptools/package_index.py | 674
-rwxr-xr-x  Lib/setuptools/sandbox.py | 203
-rwxr-xr-x  Lib/setuptools/site-patch.py | 74
-rw-r--r--  Lib/setuptools/tests/__init__.py | 364
-rwxr-xr-x  Lib/setuptools/tests/api_tests.txt | 330
-rw-r--r--  Lib/setuptools/tests/test_resources.py | 483
32 files changed, 0 insertions, 7574 deletions
diff --git a/Lib/setuptools/__init__.py b/Lib/setuptools/__init__.py
deleted file mode 100644
index 3921ce2ee9..0000000000
--- a/Lib/setuptools/__init__.py
+++ /dev/null
@@ -1,64 +0,0 @@
-"""Extensions to the 'distutils' for large or complex distributions"""
-from setuptools.extension import Extension, Library
-from setuptools.dist import Distribution, Feature, _get_unpatched
-import distutils.core, setuptools.command
-from setuptools.depends import Require
-from distutils.core import Command as _Command
-from distutils.util import convert_path
-import os.path
-
-__version__ = '0.7a1'
-__all__ = [
- 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
- 'find_packages'
-]
-
-bootstrap_install_from = None
-
-def find_packages(where='.', exclude=()):
- """Return a list all Python packages found within directory 'where'
-
- 'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it
- will be converted to the appropriate local path syntax. 'exclude' is a
- sequence of package names to exclude; '*' can be used as a wildcard in the
- names, such that 'foo.*' will exclude all subpackages of 'foo' (but not
- 'foo' itself).
- """
- out = []
- stack=[(convert_path(where), '')]
- while stack:
- where,prefix = stack.pop(0)
- for name in os.listdir(where):
- fn = os.path.join(where,name)
- if (os.path.isdir(fn) and
- os.path.isfile(os.path.join(fn,'__init__.py'))
- ):
- out.append(prefix+name); stack.append((fn,prefix+name+'.'))
- for pat in exclude:
- from fnmatch import fnmatchcase
- out = [item for item in out if not fnmatchcase(item,pat)]
- return out
-
-setup = distutils.core.setup
-
-_Command = _get_unpatched(_Command)
-
-class Command(_Command):
- __doc__ = _Command.__doc__
-
- command_consumes_arguments = False
-
- def __init__(self, dist, **kw):
- # Add support for keyword arguments
- _Command.__init__(self,dist)
- for k,v in kw.items():
- setattr(self,k,v)
-
- def reinitialize_command(self, command, reinit_subcommands=0, **kw):
- cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
- for k,v in kw.items():
- setattr(cmd,k,v) # update command with keywords
- return cmd
-
-import distutils.core
-distutils.core.Command = Command # we can't patch distutils.cmd, alas
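For context, a minimal usage sketch of the find_packages() helper deleted above; the project name and the src/foo layout are hypothetical, and the exclude pattern drops 'foo.tests' but keeps 'foo' itself, as the docstring describes:

    from setuptools import setup, find_packages

    # Hypothetical layout: src/foo/__init__.py, src/foo/bar/__init__.py,
    # src/foo/tests/__init__.py
    packages = find_packages(where='src', exclude=('foo.tests',))
    # -> ['foo', 'foo.bar']; 'foo.tests' is filtered out via fnmatchcase()

    setup(
        name='example',            # hypothetical project metadata
        version='0.1',
        package_dir={'': 'src'},
        packages=packages,
    )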
diff --git a/Lib/setuptools/archive_util.py b/Lib/setuptools/archive_util.py
deleted file mode 100755
index dd9c684d14..0000000000
--- a/Lib/setuptools/archive_util.py
+++ /dev/null
@@ -1,200 +0,0 @@
-"""Utilities for extracting common archive formats"""
-
-
-__all__ = [
- "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
- "UnrecognizedFormat", "extraction_drivers", "unpack_directory",
-]
-
-import zipfile, tarfile, os, shutil
-from pkg_resources import ensure_directory
-from distutils.errors import DistutilsError
-
-class UnrecognizedFormat(DistutilsError):
- """Couldn't recognize the archive type"""
-
-def default_filter(src,dst):
- """The default progress/filter callback; returns True for all files"""
- return dst
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-def unpack_archive(filename, extract_dir, progress_filter=default_filter,
- drivers=None
-):
- """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
-
- `progress_filter` is a function taking two arguments: a source path
- internal to the archive ('/'-separated), and a filesystem path where it
- will be extracted. The callback must return the desired extract path
- (which may be the same as the one passed in), or else ``None`` to skip
- that file or directory. The callback can thus be used to report on the
- progress of the extraction, as well as to filter the items extracted or
- alter their extraction paths.
-
- `drivers`, if supplied, must be a non-empty sequence of functions with the
- same signature as this function (minus the `drivers` argument), that raise
- ``UnrecognizedFormat`` if they do not support extracting the designated
- archive type. The `drivers` are tried in sequence until one is found that
- does not raise an error, or until all are exhausted (in which case
- ``UnrecognizedFormat`` is raised). If you do not supply a sequence of
- drivers, the module's ``extraction_drivers`` constant will be used, which
- means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that
- order.
- """
- for driver in drivers or extraction_drivers:
- try:
- driver(filename, extract_dir, progress_filter)
- except UnrecognizedFormat:
- continue
- else:
- return
- else:
- raise UnrecognizedFormat(
- "Not a recognized archive type: %s" % filename
- )
-
-
-
-
-
-
-
-def unpack_directory(filename, extract_dir, progress_filter=default_filter):
- """"Unpack" a directory, using the same interface as for archives
-
- Raises ``UnrecognizedFormat`` if `filename` is not a directory
- """
- if not os.path.isdir(filename):
- raise UnrecognizedFormat("%s is not a directory" % (filename,))
-
- paths = {filename:('',extract_dir)}
- for base, dirs, files in os.walk(filename):
- src,dst = paths[base]
- for d in dirs:
- paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d)
- for f in files:
- name = src+f
- target = os.path.join(dst,f)
- target = progress_filter(src+f, target)
- if not target:
- continue # skip non-files
- ensure_directory(target)
- f = os.path.join(base,f)
- shutil.copyfile(f, target)
- shutil.copystat(f, target)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
- """Unpack zip `filename` to `extract_dir`
-
- Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
- by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation
- of the `progress_filter` argument.
- """
-
- if not zipfile.is_zipfile(filename):
- raise UnrecognizedFormat("%s is not a zip file" % (filename,))
-
- z = zipfile.ZipFile(filename)
- try:
- for info in z.infolist():
- name = info.filename
-
- # don't extract absolute paths or ones with .. in them
- if name.startswith('/') or '..' in name:
- continue
-
- target = os.path.join(extract_dir, *name.split('/'))
- target = progress_filter(name, target)
- if not target:
- continue
- if name.endswith('/'):
- # directory
- ensure_directory(target)
- else:
- # file
- ensure_directory(target)
- data = z.read(info.filename)
- f = open(target,'wb')
- try:
- f.write(data)
- finally:
- f.close()
- del data
- finally:
- z.close()
-
-
-def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
- """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
-
- Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
- by ``tarfile.open()``). See ``unpack_archive()`` for an explanation
- of the `progress_filter` argument.
- """
-
- try:
- tarobj = tarfile.open(filename)
- except tarfile.TarError:
- raise UnrecognizedFormat(
- "%s is not a compressed or uncompressed tar file" % (filename,)
- )
-
- try:
- tarobj.chown = lambda *args: None # don't do any chowning!
- for member in tarobj:
- if member.isfile() or member.isdir():
- name = member.name
- # don't extract absolute paths or ones with .. in them
- if not name.startswith('/') and '..' not in name:
- dst = os.path.join(extract_dir, *name.split('/'))
- dst = progress_filter(name, dst)
- if dst:
- if dst.endswith(os.sep):
- dst = dst[:-1]
- tarobj._extract_member(member,dst) # XXX Ugh
- return True
- finally:
- tarobj.close()
-
-
-
-
-extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
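As an illustration of the progress_filter hook documented in unpack_archive() above, a minimal sketch (the archive name and target directory are hypothetical): returning None skips an entry, returning a path accepts or redirects it.

    from setuptools.archive_util import unpack_archive, UnrecognizedFormat

    def report_and_filter(src, dst):
        # 'src' is the '/'-separated path inside the archive,
        # 'dst' the filesystem path it would be extracted to.
        if src.endswith('.pyc'):
            return None              # skip compiled files
        print('extracting %s -> %s' % (src, dst))
        return dst                   # accept (or redirect) everything else

    try:
        unpack_archive('example.zip', 'build', progress_filter=report_and_filter)
    except UnrecognizedFormat as err:
        print('not a recognized archive type: %s' % err)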
diff --git a/Lib/setuptools/cli.exe b/Lib/setuptools/cli.exe
deleted file mode 100755
index fc8333965e..0000000000
--- a/Lib/setuptools/cli.exe
+++ /dev/null
Binary files differ
diff --git a/Lib/setuptools/command/__init__.py b/Lib/setuptools/command/__init__.py
deleted file mode 100644
index bff53e744d..0000000000
--- a/Lib/setuptools/command/__init__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-__all__ = [
- 'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
- 'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
- 'sdist', 'setopt', 'test', 'upload', 'install_egg_info', 'install_scripts',
-]
-
-import sys
-if sys.version>='2.5':
- # In Python 2.5 and above, distutils includes its own upload command
- __all__.remove('upload')
-
-
-from distutils.command.bdist import bdist
-
-if 'egg' not in bdist.format_commands:
- bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
- bdist.format_commands.append('egg')
-
-del bdist, sys
diff --git a/Lib/setuptools/command/alias.py b/Lib/setuptools/command/alias.py
deleted file mode 100755
index 1df474a34b..0000000000
--- a/Lib/setuptools/command/alias.py
+++ /dev/null
@@ -1,79 +0,0 @@
-import distutils, os
-from setuptools import Command
-from distutils.util import convert_path
-from distutils import log
-from distutils.errors import *
-from setuptools.command.setopt import edit_config, option_base, config_file
-
-def shquote(arg):
- """Quote an argument for later parsing by shlex.split()"""
- for c in '"', "'", "\\", "#":
- if c in arg: return repr(arg)
- if arg.split()<>[arg]:
- return repr(arg)
- return arg
-
-
-class alias(option_base):
- """Define a shortcut that invokes one or more commands"""
-
- description = "define a shortcut to invoke one or more commands"
- command_consumes_arguments = True
-
- user_options = [
- ('remove', 'r', 'remove (unset) the alias'),
- ] + option_base.user_options
-
- boolean_options = option_base.boolean_options + ['remove']
-
- def initialize_options(self):
- option_base.initialize_options(self)
- self.args = None
- self.remove = None
-
- def finalize_options(self):
- option_base.finalize_options(self)
- if self.remove and len(self.args)<>1:
- raise DistutilsOptionError(
- "Must specify exactly one argument (the alias name) when "
- "using --remove"
- )
-
- def run(self):
- aliases = self.distribution.get_option_dict('aliases')
-
- if not self.args:
- print "Command Aliases"
- print "---------------"
- for alias in aliases:
- print "setup.py alias", format_alias(alias, aliases)
- return
-
- elif len(self.args)==1:
- alias, = self.args
- if self.remove:
- command = None
- elif alias in aliases:
- print "setup.py alias", format_alias(alias, aliases)
- return
- else:
- print "No alias definition found for %r" % alias
- return
- else:
- alias = self.args[0]
- command = ' '.join(map(shquote,self.args[1:]))
-
- edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run)
-
-
-def format_alias(name, aliases):
- source, command = aliases[name]
- if source == config_file('global'):
- source = '--global-config '
- elif source == config_file('user'):
- source = '--user-config '
- elif source == config_file('local'):
- source = ''
- else:
- source = '--filename=%r' % source
- return source+name+' '+command
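The alias command above ultimately just calls edit_config() to record an entry under the 'aliases' section of a config file; a minimal sketch of the equivalent direct call (the alias name, command string, and 'setup.cfg' target are hypothetical):

    from setuptools.command.setopt import edit_config

    # After this, "setup.py daily" expands to
    # "setup.py egg_info --tag-build=dev sdist".
    edit_config('setup.cfg', {'aliases': {'daily': 'egg_info --tag-build=dev sdist'}})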
diff --git a/Lib/setuptools/command/bdist_egg.py b/Lib/setuptools/command/bdist_egg.py
deleted file mode 100644
index 617d88d139..0000000000
--- a/Lib/setuptools/command/bdist_egg.py
+++ /dev/null
@@ -1,449 +0,0 @@
-"""setuptools.command.bdist_egg
-
-Build .egg distributions"""
-
-# This module should be kept compatible with Python 2.3
-import sys, os, marshal
-from setuptools import Command
-from distutils.dir_util import remove_tree, mkpath
-from distutils.sysconfig import get_python_version, get_python_lib
-from distutils import log
-from pkg_resources import get_build_platform, Distribution
-from types import CodeType
-from setuptools.extension import Library
-
-def write_stub(resource, pyfile):
- f = open(pyfile,'w')
- f.write('\n'.join([
- "def __bootstrap__():",
- " global __bootstrap__, __loader__, __file__",
- " import sys, pkg_resources, imp",
- " __file__ = pkg_resources.resource_filename(__name__,%r)"
- % resource,
- " del __bootstrap__, __loader__",
- " imp.load_dynamic(__name__,__file__)",
- "__bootstrap__()",
- "" # terminal \n
- ]))
- f.close()
-
-# stub __init__.py for packages distributed without one
-NS_PKG_STUB = '__import__("pkg_resources").declare_namespace(__name__)'
-
-
-
-
-
-
-
-
-
-
-class bdist_egg(Command):
-
- description = "create an \"egg\" distribution"
-
- user_options = [
- ('bdist-dir=', 'b',
- "temporary directory for creating the distribution"),
- ('plat-name=', 'p',
- "platform name to embed in generated filenames "
- "(default: %s)" % get_build_platform()),
- ('exclude-source-files', None,
- "remove all .py files from the generated egg"),
- ('keep-temp', 'k',
- "keep the pseudo-installation tree around after " +
- "creating the distribution archive"),
- ('dist-dir=', 'd',
- "directory to put final built distributions in"),
- ('skip-build', None,
- "skip rebuilding everything (for testing/debugging)"),
- ]
-
- boolean_options = [
- 'keep-temp', 'skip-build', 'exclude-source-files'
- ]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- def initialize_options (self):
- self.bdist_dir = None
- self.plat_name = None
- self.keep_temp = 0
- self.dist_dir = None
- self.skip_build = 0
- self.egg_output = None
- self.exclude_source_files = None
-
-
- def finalize_options(self):
- ei_cmd = self.get_finalized_command("egg_info")
- self.egg_info = ei_cmd.egg_info
-
- if self.bdist_dir is None:
- bdist_base = self.get_finalized_command('bdist').bdist_base
- self.bdist_dir = os.path.join(bdist_base, 'egg')
-
- if self.plat_name is None:
- self.plat_name = get_build_platform()
-
- self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
-
- if self.egg_output is None:
-
- # Compute filename of the output egg
- basename = Distribution(
- None, None, ei_cmd.egg_name, ei_cmd.egg_version,
- get_python_version(),
- self.distribution.has_ext_modules() and self.plat_name
- ).egg_name()
-
- self.egg_output = os.path.join(self.dist_dir, basename+'.egg')
-
-
-
-
-
-
-
-
- def do_install_data(self):
- # Hack for packages that install data to install's --install-lib
- self.get_finalized_command('install').install_lib = self.bdist_dir
-
- site_packages = os.path.normcase(os.path.realpath(get_python_lib()))
- old, self.distribution.data_files = self.distribution.data_files,[]
-
- for item in old:
- if isinstance(item,tuple) and len(item)==2:
- if os.path.isabs(item[0]):
- realpath = os.path.realpath(item[0])
- normalized = os.path.normcase(realpath)
- if normalized==site_packages or normalized.startswith(
- site_packages+os.sep
- ):
- item = realpath[len(site_packages)+1:], item[1]
- # XXX else: raise ???
- self.distribution.data_files.append(item)
-
- try:
- log.info("installing package data to %s" % self.bdist_dir)
- self.call_command('install_data', force=0, root=None)
- finally:
- self.distribution.data_files = old
-
-
- def get_outputs(self):
- return [self.egg_output]
-
-
- def call_command(self,cmdname,**kw):
- """Invoke reinitialized command `cmdname` with keyword args"""
- for dirname in INSTALL_DIRECTORY_ATTRS:
- kw.setdefault(dirname,self.bdist_dir)
- kw.setdefault('skip_build',self.skip_build)
- kw.setdefault('dry_run', self.dry_run)
- cmd = self.reinitialize_command(cmdname, **kw)
- self.run_command(cmdname)
- return cmd
-
-
- def run(self):
- # Generate metadata first
- self.run_command("egg_info")
-
- # We run install_lib before install_data, because some data hacks
- # pull their data path from the install_lib command.
- log.info("installing library code to %s" % self.bdist_dir)
- instcmd = self.get_finalized_command('install')
- old_root = instcmd.root; instcmd.root = None
- cmd = self.call_command('install_lib', warn_dir=0)
- instcmd.root = old_root
-
- all_outputs, ext_outputs = self.get_ext_outputs()
- self.stubs = []
- to_compile = []
- for (p,ext_name) in enumerate(ext_outputs):
- filename,ext = os.path.splitext(ext_name)
- pyfile = os.path.join(self.bdist_dir, filename + '.py')
- self.stubs.append(pyfile)
- log.info("creating stub loader for %s" % ext_name)
- if not self.dry_run:
- write_stub(os.path.basename(ext_name), pyfile)
- to_compile.append(pyfile)
- ext_outputs[p] = ext_name.replace(os.sep,'/')
-
- to_compile.extend(self.make_init_files())
- if to_compile:
- cmd.byte_compile(to_compile)
-
- if self.distribution.data_files:
- self.do_install_data()
-
- # Make the EGG-INFO directory
- archive_root = self.bdist_dir
- egg_info = os.path.join(archive_root,'EGG-INFO')
- self.mkpath(egg_info)
- if self.distribution.scripts:
- script_dir = os.path.join(egg_info, 'scripts')
- log.info("installing scripts to %s" % script_dir)
- self.call_command('install_scripts',install_dir=script_dir,no_ep=1)
-
- native_libs = os.path.join(self.egg_info,"native_libs.txt")
- if all_outputs:
- log.info("writing %s" % native_libs)
- if not self.dry_run:
- libs_file = open(native_libs, 'wt')
- libs_file.write('\n'.join(all_outputs))
- libs_file.write('\n')
- libs_file.close()
- elif os.path.isfile(native_libs):
- log.info("removing %s" % native_libs)
- if not self.dry_run:
- os.unlink(native_libs)
-
- for filename in os.listdir(self.egg_info):
- path = os.path.join(self.egg_info,filename)
- if os.path.isfile(path):
- self.copy_file(path,os.path.join(egg_info,filename))
-
- write_safety_flag(
- os.path.join(archive_root,'EGG-INFO'), self.zip_safe()
- )
-
- if os.path.exists(os.path.join(self.egg_info,'depends.txt')):
- log.warn(
- "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
- "Use the install_requires/extras_require setup() args instead."
- )
-
- if self.exclude_source_files:
- self.zap_pyfiles()
-
- # Make the archive
- make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
- dry_run=self.dry_run)
- if not self.keep_temp:
- remove_tree(self.bdist_dir, dry_run=self.dry_run)
-
- # Add to 'Distribution.dist_files' so that the "upload" command works
- getattr(self.distribution,'dist_files',[]).append(
- ('bdist_egg',get_python_version(),self.egg_output))
-
- def zap_pyfiles(self):
- log.info("Removing .py files from temporary directory")
- for base,dirs,files in walk_egg(self.bdist_dir):
- for name in files:
- if name.endswith('.py'):
- path = os.path.join(base,name)
- log.debug("Deleting %s", path)
- os.unlink(path)
-
- def zip_safe(self):
- safe = getattr(self.distribution,'zip_safe',None)
- if safe is not None:
- return safe
- log.warn("zip_safe flag not set; analyzing archive contents...")
- return analyze_egg(self.bdist_dir, self.stubs)
-
- def make_init_files(self):
- """Create missing package __init__ files"""
- init_files = []
- for base,dirs,files in walk_egg(self.bdist_dir):
- if base==self.bdist_dir:
- # don't put an __init__ in the root
- continue
- for name in files:
- if name.endswith('.py'):
- if '__init__.py' not in files:
- pkg = base[len(self.bdist_dir)+1:].replace(os.sep,'.')
- if self.distribution.has_contents_for(pkg):
- log.warn("Creating missing __init__.py for %s",pkg)
- filename = os.path.join(base,'__init__.py')
- if not self.dry_run:
- f = open(filename,'w'); f.write(NS_PKG_STUB)
- f.close()
- init_files.append(filename)
- break
- else:
- # not a package, don't traverse to subdirectories
- dirs[:] = []
-
- return init_files
-
- def get_ext_outputs(self):
- """Get a list of relative paths to C extensions in the output distro"""
-
- all_outputs = []
- ext_outputs = []
-
- paths = {self.bdist_dir:''}
- for base, dirs, files in os.walk(self.bdist_dir):
- for filename in files:
- if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
- all_outputs.append(paths[base]+filename)
- for filename in dirs:
- paths[os.path.join(base,filename)] = paths[base]+filename+'/'
-
- if self.distribution.has_ext_modules():
- build_cmd = self.get_finalized_command('build_ext')
- for ext in build_cmd.extensions:
- if isinstance(ext,Library):
- continue
- fullname = build_cmd.get_ext_fullname(ext.name)
- filename = build_cmd.get_ext_filename(fullname)
- if not os.path.basename(filename).startswith('dl-'):
- if os.path.exists(os.path.join(self.bdist_dir,filename)):
- ext_outputs.append(filename)
-
- return all_outputs, ext_outputs
-
-
-NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
-
-
-
-
-
-
-
-
-
-
-
-
-def walk_egg(egg_dir):
- """Walk an unpacked egg's contents, skipping the metadata directory"""
- walker = os.walk(egg_dir)
- base,dirs,files = walker.next()
- if 'EGG-INFO' in dirs:
- dirs.remove('EGG-INFO')
- yield base,dirs,files
- for bdf in walker:
- yield bdf
-
-def analyze_egg(egg_dir, stubs):
- # check for existing flag in EGG-INFO
- for flag,fn in safety_flags.items():
- if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)):
- return flag
-
- safe = True
- for base, dirs, files in walk_egg(egg_dir):
- for name in files:
- if name.endswith('.py') or name.endswith('.pyw'):
- continue
- elif name.endswith('.pyc') or name.endswith('.pyo'):
- # always scan, even if we already know we're not safe
- safe = scan_module(egg_dir, base, name, stubs) and safe
- return safe
-
-def write_safety_flag(egg_dir, safe):
- # Write or remove zip safety flag file(s)
- for flag,fn in safety_flags.items():
- fn = os.path.join(egg_dir, fn)
- if os.path.exists(fn):
- if safe is None or bool(safe)<>flag:
- os.unlink(fn)
- elif safe is not None and bool(safe)==flag:
- open(fn,'w').close()
-
-safety_flags = {
- True: 'zip-safe',
- False: 'not-zip-safe',
-}
-
-def scan_module(egg_dir, base, name, stubs):
- """Check whether module possibly uses unsafe-for-zipfile stuff"""
-
- filename = os.path.join(base,name)
- if filename[:-1] in stubs:
- return True # Extension module
- pkg = base[len(egg_dir)+1:].replace(os.sep,'.')
- module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0]
- f = open(filename,'rb'); f.read(8) # skip magic & date
- code = marshal.load(f); f.close()
- safe = True
- symbols = dict.fromkeys(iter_symbols(code))
- for bad in ['__file__', '__path__']:
- if bad in symbols:
- log.warn("%s: module references %s", module, bad)
- safe = False
- if 'inspect' in symbols:
- for bad in [
- 'getsource', 'getabsfile', 'getsourcefile', 'getfile'
- 'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
- 'getinnerframes', 'getouterframes', 'stack', 'trace'
- ]:
- if bad in symbols:
- log.warn("%s: module MAY be using inspect.%s", module, bad)
- safe = False
- if '__name__' in symbols and '__main__' in symbols and '.' not in module:
- if get_python_version()>="2.4":
- log.warn("%s: top-level module may be 'python -m' script", module)
- safe = False
- return safe
-
-def iter_symbols(code):
- """Yield names and strings used by `code` and its nested code objects"""
- for name in code.co_names: yield name
- for const in code.co_consts:
- if isinstance(const,basestring):
- yield const
- elif isinstance(const,CodeType):
- for name in iter_symbols(const):
- yield name
-
-# Attribute names of options for commands that might need to be convinced to
-# install to the egg build directory
-
-INSTALL_DIRECTORY_ATTRS = [
- 'install_lib', 'install_dir', 'install_data', 'install_base'
-]
-
-def make_zipfile (zip_filename, base_dir, verbose=0, dry_run=0, compress=None):
- """Create a zip file from all the files under 'base_dir'. The output
- zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
- Python module (if available) or the InfoZIP "zip" utility (if installed
- and found on the default search path). If neither tool is available,
- raises DistutilsExecError. Returns the name of the output zip file.
- """
- import zipfile
- mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
- log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
-
- def visit (z, dirname, names):
- for name in names:
- path = os.path.normpath(os.path.join(dirname, name))
- if os.path.isfile(path):
- p = path[len(base_dir)+1:]
- if not dry_run:
- z.write(path, p)
- log.debug("adding '%s'" % p)
-
- if compress is None:
- compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits
-
- compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)]
- if not dry_run:
- z = zipfile.ZipFile(zip_filename, "w", compression=compression)
- os.path.walk(base_dir, visit, z)
- z.close()
- else:
- os.path.walk(base_dir, visit, None)
-
- return zip_filename
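A short sketch of the setup() side of the zip_safe() logic above: when the distribution declares the flag explicitly, bdist_egg writes the corresponding EGG-INFO marker and skips the analyze_egg() scan (the project metadata here is hypothetical).

    from setuptools import setup

    setup(
        name='example',
        version='0.1',
        packages=['example'],
        zip_safe=False,   # write_safety_flag() emits EGG-INFO/not-zip-safe
    )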
diff --git a/Lib/setuptools/command/bdist_rpm.py b/Lib/setuptools/command/bdist_rpm.py
deleted file mode 100755
index 00e07acc37..0000000000
--- a/Lib/setuptools/command/bdist_rpm.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# This is just a kludge so that bdist_rpm doesn't guess wrong about the
-# distribution name and version, if the egg_info command is going to alter
-# them, and another kludge to allow you to build old-style non-egg RPMs
-
-from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm
-
-class bdist_rpm(_bdist_rpm):
-
- def initialize_options(self):
- _bdist_rpm.initialize_options(self)
- self.no_egg = None
-
- def run(self):
- self.run_command('egg_info') # ensure distro name is up-to-date
- _bdist_rpm.run(self)
-
- def _make_spec_file(self):
- version = self.distribution.get_version()
- rpmversion = version.replace('-','_')
- spec = _bdist_rpm._make_spec_file(self)
- line23 = '%define version '+version
- line24 = '%define version '+rpmversion
- spec = [
- line.replace(
- "Source0: %{name}-%{version}.tar",
- "Source0: %{name}-%{unmangled_version}.tar"
- ).replace(
- "setup.py install ",
- "setup.py install --single-version-externally-managed "
- ).replace(
- "%setup",
- "%setup -n %{name}-%{unmangled_version}"
- ).replace(line23,line24)
- for line in spec
- ]
- spec.insert(spec.index(line24)+1, "%define unmangled_version "+version)
- return spec
diff --git a/Lib/setuptools/command/build_ext.py b/Lib/setuptools/command/build_ext.py
deleted file mode 100644
index f8551fb49f..0000000000
--- a/Lib/setuptools/command/build_ext.py
+++ /dev/null
@@ -1,285 +0,0 @@
-from distutils.command.build_ext import build_ext as _du_build_ext
-try:
- # Attempt to use Pyrex for building extensions, if available
- from Pyrex.Distutils.build_ext import build_ext as _build_ext
-except ImportError:
- _build_ext = _du_build_ext
-
-import os, sys
-from distutils.file_util import copy_file
-from setuptools.extension import Library
-from distutils.ccompiler import new_compiler
-from distutils.sysconfig import customize_compiler, get_config_var
-get_config_var("LDSHARED") # make sure _config_vars is initialized
-from distutils.sysconfig import _config_vars
-from distutils import log
-from distutils.errors import *
-
-have_rtld = False
-use_stubs = False
-libtype = 'shared'
-
-if sys.platform == "darwin":
- use_stubs = True
-elif os.name != 'nt':
- try:
- from dl import RTLD_NOW
- have_rtld = True
- use_stubs = True
- except ImportError:
- pass
-
-def if_dl(s):
- if have_rtld:
- return s
- return ''
-
-
-
-
-
-
-class build_ext(_build_ext):
- def run(self):
- """Build extensions in build directory, then copy if --inplace"""
- old_inplace, self.inplace = self.inplace, 0
- _build_ext.run(self)
- self.inplace = old_inplace
- if old_inplace:
- self.copy_extensions_to_source()
-
- def copy_extensions_to_source(self):
- build_py = self.get_finalized_command('build_py')
- for ext in self.extensions:
- fullname = self.get_ext_fullname(ext.name)
- filename = self.get_ext_filename(fullname)
- modpath = fullname.split('.')
- package = '.'.join(modpath[:-1])
- package_dir = build_py.get_package_dir(package)
- dest_filename = os.path.join(package_dir,os.path.basename(filename))
- src_filename = os.path.join(self.build_lib,filename)
-
- # Always copy, even if source is older than destination, to ensure
- # that the right extensions for the current Python/platform are
- # used.
- copy_file(
- src_filename, dest_filename, verbose=self.verbose,
- dry_run=self.dry_run
- )
- if ext._needs_stub:
- self.write_stub(package_dir or os.curdir, ext, True)
-
-
- if _build_ext is not _du_build_ext:
- # Workaround for problems using some Pyrex versions w/SWIG and/or 2.4
- def swig_sources(self, sources, *otherargs):
- # first do any Pyrex processing
- sources = _build_ext.swig_sources(self, sources) or sources
- # Then do any actual SWIG stuff on the remainder
- return _du_build_ext.swig_sources(self, sources, *otherargs)
-
-
-
- def get_ext_filename(self, fullname):
- filename = _build_ext.get_ext_filename(self,fullname)
- ext = self.ext_map[fullname]
- if isinstance(ext,Library):
- fn, ext = os.path.splitext(filename)
- return self.shlib_compiler.library_filename(fn,libtype)
- elif use_stubs and ext._links_to_dynamic:
- d,fn = os.path.split(filename)
- return os.path.join(d,'dl-'+fn)
- else:
- return filename
-
- def initialize_options(self):
- _build_ext.initialize_options(self)
- self.shlib_compiler = None
- self.shlibs = []
- self.ext_map = {}
-
- def finalize_options(self):
- _build_ext.finalize_options(self)
- self.extensions = self.extensions or []
- self.check_extensions_list(self.extensions)
- self.shlibs = [ext for ext in self.extensions
- if isinstance(ext,Library)]
- if self.shlibs:
- self.setup_shlib_compiler()
- for ext in self.extensions:
- fullname = ext._full_name = self.get_ext_fullname(ext.name)
- self.ext_map[fullname] = ext
- ltd = ext._links_to_dynamic = \
- self.shlibs and self.links_to_dynamic(ext) or False
- ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library)
- filename = ext._file_name = self.get_ext_filename(fullname)
- libdir = os.path.dirname(os.path.join(self.build_lib,filename))
- if ltd and libdir not in ext.library_dirs:
- ext.library_dirs.append(libdir)
- if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
- ext.runtime_library_dirs.append(os.curdir)
-
-
-
- def setup_shlib_compiler(self):
- compiler = self.shlib_compiler = new_compiler(
- compiler=self.compiler, dry_run=self.dry_run, force=self.force
- )
- if sys.platform == "darwin":
- tmp = _config_vars.copy()
- try:
- # XXX Help! I don't have any idea whether these are right...
- _config_vars['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup"
- _config_vars['CCSHARED'] = " -dynamiclib"
- _config_vars['SO'] = ".dylib"
- customize_compiler(compiler)
- finally:
- _config_vars.clear()
- _config_vars.update(tmp)
- else:
- customize_compiler(compiler)
-
- if self.include_dirs is not None:
- compiler.set_include_dirs(self.include_dirs)
- if self.define is not None:
- # 'define' option is a list of (name,value) tuples
- for (name,value) in self.define:
- compiler.define_macro(name, value)
- if self.undef is not None:
- for macro in self.undef:
- compiler.undefine_macro(macro)
- if self.libraries is not None:
- compiler.set_libraries(self.libraries)
- if self.library_dirs is not None:
- compiler.set_library_dirs(self.library_dirs)
- if self.rpath is not None:
- compiler.set_runtime_library_dirs(self.rpath)
- if self.link_objects is not None:
- compiler.set_link_objects(self.link_objects)
-
- # hack so distutils' build_extension() builds a library instead
- compiler.link_shared_object = link_shared_object.__get__(compiler)
-
-
-
- def get_export_symbols(self, ext):
- if isinstance(ext,Library):
- return ext.export_symbols
- return _build_ext.get_export_symbols(self,ext)
-
- def build_extension(self, ext):
- _compiler = self.compiler
- try:
- if isinstance(ext,Library):
- self.compiler = self.shlib_compiler
- _build_ext.build_extension(self,ext)
- if ext._needs_stub:
- self.write_stub(
- self.get_finalized_command('build_py').build_lib, ext
- )
- finally:
- self.compiler = _compiler
-
- def links_to_dynamic(self, ext):
- """Return true if 'ext' links to a dynamic lib in the same package"""
- # XXX this should check to ensure the lib is actually being built
- # XXX as dynamic, and not just using a locally-found version or a
- # XXX static-compiled version
- libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
- pkg = '.'.join(ext._full_name.split('.')[:-1]+[''])
- for libname in ext.libraries:
- if pkg+libname in libnames: return True
- return False
-
- def get_outputs(self):
- outputs = _build_ext.get_outputs(self)
- optimize = self.get_finalized_command('build_py').optimize
- for ext in self.extensions:
- if ext._needs_stub:
- base = os.path.join(self.build_lib, *ext._full_name.split('.'))
- outputs.append(base+'.py')
- outputs.append(base+'.pyc')
- if optimize:
- outputs.append(base+'.pyo')
- return outputs
-
- def write_stub(self, output_dir, ext, compile=False):
- log.info("writing stub loader for %s to %s",ext._full_name, output_dir)
- stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py'
- if compile and os.path.exists(stub_file):
- raise DistutilsError(stub_file+" already exists! Please delete.")
- if not self.dry_run:
- f = open(stub_file,'w')
- f.write('\n'.join([
- "def __bootstrap__():",
- " global __bootstrap__, __file__, __loader__",
- " import sys, os, pkg_resources, imp"+if_dl(", dl"),
- " __file__ = pkg_resources.resource_filename(__name__,%r)"
- % os.path.basename(ext._file_name),
- " del __bootstrap__",
- " if '__loader__' in globals():",
- " del __loader__",
- if_dl(" old_flags = sys.getdlopenflags()"),
- " old_dir = os.getcwd()",
- " try:",
- " os.chdir(os.path.dirname(__file__))",
- if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
- " imp.load_dynamic(__name__,__file__)",
- " finally:",
- if_dl(" sys.setdlopenflags(old_flags)"),
- " os.chdir(old_dir)",
- "__bootstrap__()",
- "" # terminal \n
- ]))
- f.close()
- if compile:
- from distutils.util import byte_compile
- byte_compile([stub_file], optimize=0,
- force=True, dry_run=self.dry_run)
- optimize = self.get_finalized_command('install_lib').optimize
- if optimize > 0:
- byte_compile([stub_file], optimize=optimize,
- force=True, dry_run=self.dry_run)
- if os.path.exists(stub_file) and not self.dry_run:
- os.unlink(stub_file)
-
-
-if use_stubs or os.name=='nt':
- # Build shared libraries
- #
- def link_shared_object(self, objects, output_libname, output_dir=None,
- libraries=None, library_dirs=None, runtime_library_dirs=None,
- export_symbols=None, debug=0, extra_preargs=None,
- extra_postargs=None, build_temp=None, target_lang=None
- ): self.link(
- self.SHARED_LIBRARY, objects, output_libname,
- output_dir, libraries, library_dirs, runtime_library_dirs,
- export_symbols, debug, extra_preargs, extra_postargs,
- build_temp, target_lang
- )
-else:
- # Build static libraries everywhere else
- libtype = 'static'
-
- def link_shared_object(self, objects, output_libname, output_dir=None,
- libraries=None, library_dirs=None, runtime_library_dirs=None,
- export_symbols=None, debug=0, extra_preargs=None,
- extra_postargs=None, build_temp=None, target_lang=None
- ):
- # XXX we need to either disallow these attrs on Library instances,
- # or warn/abort here if set, or something...
- #libraries=None, library_dirs=None, runtime_library_dirs=None,
- #export_symbols=None, extra_preargs=None, extra_postargs=None,
- #build_temp=None
-
- assert output_dir is None # distutils build_ext doesn't pass this
- output_dir,filename = os.path.split(output_libname)
- basename, ext = os.path.splitext(filename)
- if self.library_filename("x").startswith('lib'):
- # strip 'lib' prefix; this is kludgy if some platform uses
- # a different prefix
- basename = basename[3:]
-
- self.create_static_lib(
- objects, basename, output_dir, debug, target_lang
- )
diff --git a/Lib/setuptools/command/build_py.py b/Lib/setuptools/command/build_py.py
deleted file mode 100644
index 77a9b236c2..0000000000
--- a/Lib/setuptools/command/build_py.py
+++ /dev/null
@@ -1,192 +0,0 @@
-import os.path, sys, fnmatch
-from distutils.command.build_py import build_py as _build_py
-from distutils.util import convert_path
-from glob import glob
-
-class build_py(_build_py):
- """Enhanced 'build_py' command that includes data files with packages
-
- The data files are specified via a 'package_data' argument to 'setup()'.
- See 'setuptools.dist.Distribution' for more details.
-
- Also, this version of the 'build_py' command allows you to specify both
- 'py_modules' and 'packages' in the same setup operation.
- """
- def finalize_options(self):
- _build_py.finalize_options(self)
- self.package_data = self.distribution.package_data
- self.exclude_package_data = self.distribution.exclude_package_data or {}
- if 'data_files' in self.__dict__: del self.__dict__['data_files']
-
- def run(self):
- """Build modules, packages, and copy data files to build directory"""
- if not self.py_modules and not self.packages:
- return
-
- if self.py_modules:
- self.build_modules()
-
- if self.packages:
- self.build_packages()
- self.build_package_data()
-
- # Only compile actual .py files, using our base class' idea of what our
- # output files are.
- self.byte_compile(_build_py.get_outputs(self, include_bytecode=0))
-
- def __getattr__(self,attr):
- if attr=='data_files': # lazily compute data files
- self.data_files = files = self._get_data_files(); return files
- return _build_py.__getattr__(self,attr)
-
- def _get_data_files(self):
- """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
- self.analyze_manifest()
- data = []
- for package in self.packages or ():
- # Locate package source directory
- src_dir = self.get_package_dir(package)
-
- # Compute package build directory
- build_dir = os.path.join(*([self.build_lib] + package.split('.')))
-
- # Length of path to strip from found files
- plen = len(src_dir)+1
-
- # Strip directory from globbed filenames
- filenames = [
- file[plen:] for file in self.find_data_files(package, src_dir)
- ]
- data.append( (package, src_dir, build_dir, filenames) )
- return data
-
- def find_data_files(self, package, src_dir):
- """Return filenames for package's data files in 'src_dir'"""
- globs = (self.package_data.get('', [])
- + self.package_data.get(package, []))
- files = self.manifest_files.get(package, [])[:]
- for pattern in globs:
- # Each pattern has to be converted to a platform-specific path
- files.extend(glob(os.path.join(src_dir, convert_path(pattern))))
- return self.exclude_data_files(package, src_dir, files)
-
- def build_package_data(self):
- """Copy data files into build directory"""
- lastdir = None
- for package, src_dir, build_dir, filenames in self.data_files:
- for filename in filenames:
- target = os.path.join(build_dir, filename)
- self.mkpath(os.path.dirname(target))
- self.copy_file(os.path.join(src_dir, filename), target)
-
-
- def analyze_manifest(self):
- self.manifest_files = mf = {}
- if not self.distribution.include_package_data:
- return
- src_dirs = {}
- for package in self.packages or ():
- # Locate package source directory
- src_dirs[assert_relative(self.get_package_dir(package))] = package
-
- self.run_command('egg_info')
- ei_cmd = self.get_finalized_command('egg_info')
- for path in ei_cmd.filelist.files:
- if path.endswith('.py'):
- continue
- d,f = os.path.split(assert_relative(path))
- prev = None
- while d and d!=prev and d not in src_dirs:
- prev = d
- d, df = os.path.split(d)
- f = os.path.join(df, f)
- if d in src_dirs:
- mf.setdefault(src_dirs[d],[]).append(path)
-
-
- def get_data_files(self): pass # kludge 2.4 for lazy computation
-
- if sys.version<"2.4": # Python 2.4 already has this code
- def get_outputs(self, include_bytecode=1):
- """Return complete list of files copied to the build directory
-
- This includes both '.py' files and data files, as well as '.pyc'
- and '.pyo' files if 'include_bytecode' is true. (This method is
- needed for the 'install_lib' command to do its job properly, and to
- generate a correct installation manifest.)
- """
- return _build_py.get_outputs(self, include_bytecode) + [
- os.path.join(build_dir, filename)
- for package, src_dir, build_dir,filenames in self.data_files
- for filename in filenames
- ]
-
- def check_package(self, package, package_dir):
- """Check namespace packages' __init__ for declare_namespace"""
- try:
- return self.packages_checked[package]
- except KeyError:
- pass
-
- init_py = _build_py.check_package(self, package, package_dir)
- self.packages_checked[package] = init_py
-
- if not init_py or not self.distribution.namespace_packages:
- return init_py
-
- for pkg in self.distribution.namespace_packages:
- if pkg==package or pkg.startswith(package+'.'):
- break
- else:
- return init_py
-
- f = open(init_py,'rU')
- if 'declare_namespace' not in f.read():
- from distutils.errors import DistutilsError
- raise DistutilsError(
- "Namespace package problem: %s is a namespace package, but its\n"
- "__init__.py does not call declare_namespace()! Please fix it.\n"
- '(See the setuptools manual under "Namespace Packages" for '
- "details.)\n" % (package,)
- )
- f.close()
- return init_py
-
- def initialize_options(self):
- self.packages_checked={}
- _build_py.initialize_options(self)
-
-
-
-
-
-
-
- def exclude_data_files(self, package, src_dir, files):
- """Filter filenames for package's data files in 'src_dir'"""
- globs = (self.exclude_package_data.get('', [])
- + self.exclude_package_data.get(package, []))
- bad = []
- for pattern in globs:
- bad.extend(
- fnmatch.filter(
- files, os.path.join(src_dir, convert_path(pattern))
- )
- )
- bad = dict.fromkeys(bad)
- return [f for f in files if f not in bad]
-
-
-def assert_relative(path):
- if not os.path.isabs(path):
- return path
- from distutils.errors import DistutilsSetupError
- raise DistutilsSetupError(
-"""Error: setup script specifies an absolute path:
-
- %s
-
-setup() arguments must *always* be /-separated paths relative to the
-setup.py directory, *never* absolute paths.
-""" % path
- )
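For reference, a minimal sketch of the package_data / exclude_package_data setup() arguments consumed by the enhanced build_py command above (the package name and glob patterns are hypothetical); patterns under the '' key apply to every package:

    from setuptools import setup

    setup(
        name='example',
        version='0.1',
        packages=['example'],
        package_data={
            '': ['*.txt'],                                   # applies to all packages
            'example': ['templates/*.html', 'data/*.json'],  # globs relative to the package dir
        },
        exclude_package_data={'example': ['data/private.json']},
    )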
diff --git a/Lib/setuptools/command/develop.py b/Lib/setuptools/command/develop.py
deleted file mode 100755
index 7ab5b23838..0000000000
--- a/Lib/setuptools/command/develop.py
+++ /dev/null
@@ -1,116 +0,0 @@
-from setuptools.command.easy_install import easy_install
-from distutils.util import convert_path
-from pkg_resources import Distribution, PathMetadata, normalize_path
-from distutils import log
-from distutils.errors import *
-import sys, os
-
-class develop(easy_install):
- """Set up package for development"""
-
- description = "install package in 'development mode'"
-
- user_options = easy_install.user_options + [
- ("uninstall", "u", "Uninstall this source package"),
- ]
-
- boolean_options = easy_install.boolean_options + ['uninstall']
-
- command_consumes_arguments = False # override base
-
- def run(self):
- if self.uninstall:
- self.multi_version = True
- self.uninstall_link()
- else:
- self.install_for_development()
- self.warn_deprecated_options()
-
- def initialize_options(self):
- self.uninstall = None
- easy_install.initialize_options(self)
-
-
-
-
-
-
-
-
-
-
- def finalize_options(self):
- ei = self.get_finalized_command("egg_info")
- if ei.broken_egg_info:
- raise DistutilsError(
- "Please rename %r to %r before using 'develop'"
- % (ei.egg_info, ei.broken_egg_info)
- )
- self.args = [ei.egg_name]
- easy_install.finalize_options(self)
- self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link')
- self.egg_base = ei.egg_base
- self.egg_path = os.path.abspath(ei.egg_base)
-
- # Make a distribution for the package's source
- self.dist = Distribution(
- normalize_path(self.egg_path),
- PathMetadata(self.egg_path, os.path.abspath(ei.egg_info)),
- project_name = ei.egg_name
- )
-
- def install_for_development(self):
- # Ensure metadata is up-to-date
- self.run_command('egg_info')
-
- # Build extensions in-place
- self.reinitialize_command('build_ext', inplace=1)
- self.run_command('build_ext')
-
- self.install_site_py() # ensure that target dir is site-safe
-
- # create an .egg-link in the installation dir, pointing to our egg
- log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
- if not self.dry_run:
- f = open(self.egg_link,"w")
- f.write(self.egg_path)
- f.close()
-
- # postprocess the installed distro, fixing up .pth, installing scripts,
- # and handling requirements
- self.process_distribution(None, self.dist)
-
- def uninstall_link(self):
- if os.path.exists(self.egg_link):
- log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
- contents = [line.rstrip() for line in file(self.egg_link)]
- if contents != [self.egg_path]:
- log.warn("Link points to %s: uninstall aborted", contents)
- return
- if not self.dry_run:
- os.unlink(self.egg_link)
- if not self.dry_run:
- self.update_pth(self.dist) # remove any .pth link to us
- if self.distribution.scripts:
- # XXX should also check for entry point scripts!
- log.warn("Note: you must uninstall or replace scripts manually!")
-
-
- def install_egg_scripts(self, dist):
- if dist is not self.dist:
- # Installing a dependency, so fall back to normal behavior
- return easy_install.install_egg_scripts(self,dist)
-
- # create wrapper scripts in the script dir, pointing to dist.scripts
-
- # new-style...
- self.install_wrapper_scripts(dist)
-
- # ...and old-style
- for script_name in self.distribution.scripts or []:
- script_path = os.path.abspath(convert_path(script_name))
- script_name = os.path.basename(script_path)
- f = open(script_path,'rU')
- script_text = f.read()
- f.close()
- self.install_script(dist, script_name, script_text, script_path)
diff --git a/Lib/setuptools/command/easy_install.py b/Lib/setuptools/command/easy_install.py
deleted file mode 100755
index 3ddcec441f..0000000000
--- a/Lib/setuptools/command/easy_install.py
+++ /dev/null
@@ -1,1555 +0,0 @@
-#!python
-"""\
-Easy Install
-------------
-
-A tool for doing automatic download/extract/build of distutils-based Python
-packages. For detailed documentation, see the accompanying EasyInstall.txt
-file, or visit the `EasyInstall home page`__.
-
-__ http://peak.telecommunity.com/DevCenter/EasyInstall
-"""
-import sys, os.path, zipimport, shutil, tempfile, zipfile, re, stat, random
-from glob import glob
-from setuptools import Command
-from setuptools.sandbox import run_setup
-from distutils import log, dir_util
-from distutils.sysconfig import get_python_lib
-from distutils.errors import DistutilsArgError, DistutilsOptionError, \
- DistutilsError
-from setuptools.archive_util import unpack_archive
-from setuptools.package_index import PackageIndex, parse_bdist_wininst
-from setuptools.package_index import URL_SCHEME
-from setuptools.command import bdist_egg, egg_info
-from pkg_resources import *
-sys_executable = os.path.normpath(sys.executable)
-
-__all__ = [
- 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
- 'main', 'get_exe_prefixes',
-]
-
-def samefile(p1,p2):
- if hasattr(os.path,'samefile') and (
- os.path.exists(p1) and os.path.exists(p2)
- ):
- return os.path.samefile(p1,p2)
- return (
- os.path.normpath(os.path.normcase(p1)) ==
- os.path.normpath(os.path.normcase(p2))
- )
-
-class easy_install(Command):
- """Manage a download/build/install process"""
- description = "Find/get/install Python packages"
- command_consumes_arguments = True
-
- user_options = [
- ('prefix=', None, "installation prefix"),
- ("zip-ok", "z", "install package as a zipfile"),
- ("multi-version", "m", "make apps have to require() a version"),
- ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
- ("install-dir=", "d", "install package to DIR"),
- ("script-dir=", "s", "install scripts to DIR"),
- ("exclude-scripts", "x", "Don't install scripts"),
- ("always-copy", "a", "Copy all needed packages to install dir"),
- ("index-url=", "i", "base URL of Python Package Index"),
- ("find-links=", "f", "additional URL(s) to search for packages"),
- ("delete-conflicting", "D", "no longer needed; don't use this"),
- ("ignore-conflicts-at-my-risk", None,
- "no longer needed; don't use this"),
- ("build-directory=", "b",
- "download/extract/build in DIR; keep the results"),
- ('optimize=', 'O',
- "also compile with optimization: -O1 for \"python -O\", "
- "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
- ('record=', None,
- "filename in which to record list of installed files"),
- ('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
- ('site-dirs=','S',"list of directories where .pth files work"),
- ('editable', 'e', "Install specified packages in editable form"),
- ('no-deps', 'N', "don't install dependencies"),
- ('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
- ]
- boolean_options = [
- 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
- 'delete-conflicting', 'ignore-conflicts-at-my-risk', 'editable',
- 'no-deps',
- ]
- negative_opt = {'always-unzip': 'zip-ok'}
- create_index = PackageIndex
-
-
- def initialize_options(self):
- self.zip_ok = None
- self.install_dir = self.script_dir = self.exclude_scripts = None
- self.index_url = None
- self.find_links = None
- self.build_directory = None
- self.args = None
- self.optimize = self.record = None
- self.upgrade = self.always_copy = self.multi_version = None
- self.editable = self.no_deps = self.allow_hosts = None
- self.root = self.prefix = self.no_report = None
-
- # Options not specifiable via command line
- self.package_index = None
- self.pth_file = None
- self.delete_conflicting = None
- self.ignore_conflicts_at_my_risk = None
- self.site_dirs = None
- self.installed_projects = {}
- self.sitepy_installed = False
- # Always read easy_install options, even if we are subclassed, or have
- # an independent instance created. This ensures that defaults will
- # always come from the standard configuration file(s)' "easy_install"
- # section, even if this is a "develop" or "install" command, or some
- # other embedding.
- self._dry_run = None
- self.verbose = self.distribution.verbose
- self.distribution._set_command_options(
- self, self.distribution.get_option_dict('easy_install')
- )
-
- def delete_blockers(self, blockers):
- for filename in blockers:
- if os.path.exists(filename) or os.path.islink(filename):
- log.info("Deleting %s", filename)
- if not self.dry_run:
- if os.path.isdir(filename) and not os.path.islink(filename):
- rmtree(filename)
- else:
- os.unlink(filename)
-
- def finalize_options(self):
- self._expand('install_dir','script_dir','build_directory','site_dirs')
- # If a non-default installation directory was specified, default the
- # script directory to match it.
- if self.script_dir is None:
- self.script_dir = self.install_dir
-
- # Let install_dir get set by install_lib command, which in turn
- # gets its info from the install command, and takes into account
- # --prefix and --home and all that other crud.
- self.set_undefined_options('install_lib',
- ('install_dir','install_dir')
- )
- # Likewise, set default script_dir from 'install_scripts.install_dir'
- self.set_undefined_options('install_scripts',
- ('install_dir', 'script_dir')
- )
- # default --record from the install command
- self.set_undefined_options('install', ('record', 'record'))
- normpath = map(normalize_path, sys.path)
- self.all_site_dirs = get_site_dirs()
- if self.site_dirs is not None:
- site_dirs = [
- os.path.expanduser(s.strip()) for s in self.site_dirs.split(',')
- ]
- for d in site_dirs:
- if not os.path.isdir(d):
- log.warn("%s (in --site-dirs) does not exist", d)
- elif normalize_path(d) not in normpath:
- raise DistutilsOptionError(
- d+" (in --site-dirs) is not on sys.path"
- )
- else:
- self.all_site_dirs.append(normalize_path(d))
- self.check_site_dir()
- self.index_url = self.index_url or "http://www.python.org/pypi"
- self.shadow_path = self.all_site_dirs[:]
- for path_item in self.install_dir, normalize_path(self.script_dir):
- if path_item not in self.shadow_path:
- self.shadow_path.insert(0, path_item)
-
- if self.allow_hosts is not None:
- hosts = [s.strip() for s in self.allow_hosts.split(',')]
- else:
- hosts = ['*']
-
- if self.package_index is None:
- self.package_index = self.create_index(
- self.index_url, search_path = self.shadow_path, hosts=hosts
- )
- self.local_index = Environment(self.shadow_path)
-
- if self.find_links is not None:
- if isinstance(self.find_links, basestring):
- self.find_links = self.find_links.split()
- else:
- self.find_links = []
-
- self.package_index.add_find_links(self.find_links)
- self.set_undefined_options('install_lib', ('optimize','optimize'))
- if not isinstance(self.optimize,int):
- try:
- self.optimize = int(self.optimize)
- if not (0 <= self.optimize <= 2): raise ValueError
- except ValueError:
- raise DistutilsOptionError("--optimize must be 0, 1, or 2")
-
- if self.delete_conflicting and self.ignore_conflicts_at_my_risk:
- raise DistutilsOptionError(
- "Can't use both --delete-conflicting and "
- "--ignore-conflicts-at-my-risk at the same time"
- )
- if self.editable and not self.build_directory:
- raise DistutilsArgError(
- "Must specify a build directory (-b) when using --editable"
- )
- if not self.args:
- raise DistutilsArgError(
- "No urls, filenames, or requirements specified (see --help)")
-
- self.outputs = []
-
- def run(self):
- if self.verbose<>self.distribution.verbose:
- log.set_verbosity(self.verbose)
- try:
- for spec in self.args:
- self.easy_install(spec, not self.no_deps)
- if self.record:
- outputs = self.outputs
- if self.root: # strip any package prefix
- root_len = len(self.root)
- for counter in xrange(len(outputs)):
- outputs[counter] = outputs[counter][root_len:]
- from distutils import file_util
- self.execute(
- file_util.write_file, (self.record, outputs),
- "writing list of installed files to '%s'" %
- self.record
- )
- self.warn_deprecated_options()
- finally:
- log.set_verbosity(self.distribution.verbose)
-
- def pseudo_tempname(self):
- """Return a pseudo-tempname base in the install directory.
- This code is intentionally naive; if a malicious party can write to
- the target directory you're already in deep doodoo.
- """
- try:
- pid = os.getpid()
- except:
- pid = random.randint(0,sys.maxint)
- return os.path.join(self.install_dir, "test-easy-install-%s" % pid)
-
- def warn_deprecated_options(self):
- if self.delete_conflicting or self.ignore_conflicts_at_my_risk:
- log.warn(
- "Note: The -D, --delete-conflicting and"
- " --ignore-conflicts-at-my-risk no longer have any purpose"
- " and should not be used."
- )
-
- def check_site_dir(self):
- """Verify that self.install_dir is .pth-capable dir, if needed"""
-
- instdir = normalize_path(self.install_dir)
- pth_file = os.path.join(instdir,'easy-install.pth')
-
- # Is it a configured, PYTHONPATH, implicit, or explicit site dir?
- is_site_dir = instdir in self.all_site_dirs
-
- if not is_site_dir:
- # No? Then directly test whether it does .pth file processing
- is_site_dir = self.check_pth_processing()
- else:
- # make sure we can write to target dir
- testfile = self.pseudo_tempname()+'.write-test'
- test_exists = os.path.exists(testfile)
- try:
- if test_exists: os.unlink(testfile)
- open(testfile,'w').close()
- os.unlink(testfile)
- except (OSError,IOError):
- self.cant_write_to_target()
-
- if not is_site_dir and not self.multi_version:
- # Can't install non-multi to non-site dir
- raise DistutilsError(self.no_default_version_msg())
-
- if is_site_dir:
- if self.pth_file is None:
- self.pth_file = PthDistributions(pth_file)
- else:
- self.pth_file = None
-
- PYTHONPATH = os.environ.get('PYTHONPATH','').split(os.pathsep)
- if instdir not in map(normalize_path, filter(None,PYTHONPATH)):
- # only PYTHONPATH dirs need a site.py, so pretend it's there
- self.sitepy_installed = True
-
- self.install_dir = instdir
-
-
- def cant_write_to_target(self):
- msg = """can't create or remove files in install directory
-
-The following error occurred while trying to add or remove files in the
-installation directory:
-
- %s
-
-The installation directory you specified (via --install-dir, --prefix, or
-the distutils default setting) was:
-
- %s
-""" % (sys.exc_info()[1], self.install_dir,)
-
- if not os.path.exists(self.install_dir):
- msg += """
-This directory does not currently exist. Please create it and try again, or
-choose a different installation directory (using the -d or --install-dir
-option).
-"""
- else:
- msg += """
-Perhaps your account does not have write access to this directory? If the
-installation directory is a system-owned directory, you may need to sign in
-as the administrator or "root" account. If you do not have administrative
-access to this machine, you may wish to choose a different installation
-directory, preferably one that is listed in your PYTHONPATH environment
-variable.
-
-For information on other options, you may wish to consult the
-documentation at:
-
- http://peak.telecommunity.com/EasyInstall.html
-
-Please make the appropriate changes for your system and try again.
-"""
- raise DistutilsError(msg)
-
-
-
-
- def check_pth_processing(self):
- """Empirically verify whether .pth files are supported in inst. dir"""
- instdir = self.install_dir
- log.info("Checking .pth file support in %s", instdir)
- pth_file = self.pseudo_tempname()+".pth"
- ok_file = pth_file+'.ok'
- ok_exists = os.path.exists(ok_file)
- try:
- if ok_exists: os.unlink(ok_file)
- f = open(pth_file,'w')
- except (OSError,IOError):
- self.cant_write_to_target()
- else:
- try:
- f.write("import os;open(%r,'w').write('OK')\n" % (ok_file,))
- f.close(); f=None
- executable = sys.executable
- if os.name=='nt':
- dirname,basename = os.path.split(executable)
- alt = os.path.join(dirname,'pythonw.exe')
- if basename.lower()=='python.exe' and os.path.exists(alt):
- # use pythonw.exe to avoid opening a console window
- executable = alt
- if ' ' in executable: executable='"%s"' % executable
- from distutils.spawn import spawn
- spawn([executable,'-E','-c','pass'],0)
-
- if os.path.exists(ok_file):
- log.info(
- "TEST PASSED: %s appears to support .pth files",
- instdir
- )
- return True
- finally:
- if f: f.close()
- if os.path.exists(ok_file): os.unlink(ok_file)
- if os.path.exists(pth_file): os.unlink(pth_file)
- if not self.multi_version:
- log.warn("TEST FAILED: %s does NOT support .pth files", instdir)
- return False
-
- def install_egg_scripts(self, dist):
- """Write all the scripts for `dist`, unless scripts are excluded"""
-
- self.install_wrapper_scripts(dist)
- if self.exclude_scripts or not dist.metadata_isdir('scripts'):
- return
-
- for script_name in dist.metadata_listdir('scripts'):
- self.install_script(
- dist, script_name,
- dist.get_metadata('scripts/'+script_name).replace('\r','\n')
- )
-
- def add_output(self, path):
- if os.path.isdir(path):
- for base, dirs, files in os.walk(path):
- for filename in files:
- self.outputs.append(os.path.join(base,filename))
- else:
- self.outputs.append(path)
-
- def not_editable(self, spec):
- if self.editable:
- raise DistutilsArgError(
- "Invalid argument %r: you can't use filenames or URLs "
- "with --editable (except via the --find-links option)."
- % (spec,)
- )
-
- def check_editable(self,spec):
- if not self.editable:
- return
-
- if os.path.exists(os.path.join(self.build_directory, spec.key)):
- raise DistutilsArgError(
- "%r already exists in %s; can't do a checkout there" %
- (spec.key, self.build_directory)
- )
-
-
-
- def easy_install(self, spec, deps=False):
- tmpdir = tempfile.mkdtemp(prefix="easy_install-")
- download = None
- self.install_site_py()
-
- try:
- if not isinstance(spec,Requirement):
- if URL_SCHEME(spec):
- # It's a url, download it to tmpdir and process
- self.not_editable(spec)
- download = self.package_index.download(spec, tmpdir)
- return self.install_item(None, download, tmpdir, deps, True)
-
- elif os.path.exists(spec):
- # Existing file or directory, just process it directly
- self.not_editable(spec)
- return self.install_item(None, spec, tmpdir, deps, True)
- else:
- spec = parse_requirement_arg(spec)
-
- self.check_editable(spec)
- dist = self.package_index.fetch_distribution(
- spec, tmpdir, self.upgrade, self.editable, not self.always_copy
- )
-
- if dist is None:
- msg = "Could not find suitable distribution for %r" % spec
- if self.always_copy:
- msg+=" (--always-copy skips system and development eggs)"
- raise DistutilsError(msg)
- elif dist.precedence==DEVELOP_DIST:
- # .egg-info dists don't need installing, just process deps
- self.process_distribution(spec, dist, deps, "Using")
- return dist
- else:
- return self.install_item(spec, dist.location, tmpdir, deps)
-
- finally:
- if os.path.exists(tmpdir):
- rmtree(tmpdir)
-
- def install_item(self, spec, download, tmpdir, deps, install_needed=False):
-
-        # Installation is also needed if the file is in tmpdir or is not an egg
- install_needed = install_needed or os.path.dirname(download) == tmpdir
- install_needed = install_needed or not download.endswith('.egg')
-
- log.info("Processing %s", os.path.basename(download))
-
- if install_needed or self.always_copy:
- dists = self.install_eggs(spec, download, tmpdir)
- for dist in dists:
- self.process_distribution(spec, dist, deps)
- else:
- dists = [self.check_conflicts(self.egg_distribution(download))]
- self.process_distribution(spec, dists[0], deps, "Using")
-
- if spec is not None:
- for dist in dists:
- if dist in spec:
- return dist
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- def process_distribution(self, requirement, dist, deps=True, *info):
- self.update_pth(dist)
- self.package_index.add(dist)
- self.local_index.add(dist)
- self.install_egg_scripts(dist)
- self.installed_projects[dist.key] = dist
- log.warn(self.installation_report(requirement, dist, *info))
- if not deps and not self.always_copy:
- return
- elif requirement is not None and dist.key != requirement.key:
- log.warn("Skipping dependencies for %s", dist)
- return # XXX this is not the distribution we were looking for
- elif requirement is None or dist not in requirement:
- # if we wound up with a different version, resolve what we've got
- distreq = dist.as_requirement()
- requirement = requirement or distreq
- requirement = Requirement(
- distreq.project_name, distreq.specs, requirement.extras
- )
- if dist.has_metadata('dependency_links.txt'):
- self.package_index.add_find_links(
- dist.get_metadata_lines('dependency_links.txt')
- )
- log.info("Processing dependencies for %s", requirement)
- try:
- distros = WorkingSet([]).resolve(
- [requirement], self.local_index, self.easy_install
- )
- except DistributionNotFound, e:
- raise DistutilsError(
- "Could not find required distribution %s" % e.args
- )
- except VersionConflict, e:
- raise DistutilsError(
- "Installed distribution %s conflicts with requirement %s"
- % e.args
- )
- if self.always_copy:
- # Force all the relevant distros to be copied or activated
- for dist in distros:
- if dist.key not in self.installed_projects:
- self.easy_install(dist.as_requirement())
-
- def should_unzip(self, dist):
- if self.zip_ok is not None:
- return not self.zip_ok
- if dist.has_metadata('not-zip-safe'):
- return True
- if not dist.has_metadata('zip-safe'):
- return True
- return False
-
- def maybe_move(self, spec, dist_filename, setup_base):
- dst = os.path.join(self.build_directory, spec.key)
- if os.path.exists(dst):
- log.warn(
- "%r already exists in %s; build directory %s will not be kept",
- spec.key, self.build_directory, setup_base
- )
- return setup_base
- if os.path.isdir(dist_filename):
- setup_base = dist_filename
- else:
- if os.path.dirname(dist_filename)==setup_base:
- os.unlink(dist_filename) # get it out of the tmp dir
- contents = os.listdir(setup_base)
- if len(contents)==1:
- dist_filename = os.path.join(setup_base,contents[0])
- if os.path.isdir(dist_filename):
- # if the only thing there is a directory, move it instead
- setup_base = dist_filename
- ensure_directory(dst); shutil.move(setup_base, dst)
- return dst
-
- def install_wrapper_scripts(self, dist):
- if not self.exclude_scripts:
- for args in get_script_args(dist):
- self.write_script(*args)
-
-
-
-
-
-
- def install_script(self, dist, script_name, script_text, dev_path=None):
- """Generate a legacy script wrapper and install it"""
- spec = str(dist.as_requirement())
-
- if dev_path:
- script_text = get_script_header(script_text) + (
- "# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r\n"
- "__requires__ = %(spec)r\n"
- "from pkg_resources import require; require(%(spec)r)\n"
- "del require\n"
- "__file__ = %(dev_path)r\n"
- "execfile(__file__)\n"
- ) % locals()
- else:
- script_text = get_script_header(script_text) + (
- "# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r\n"
- "__requires__ = %(spec)r\n"
- "import pkg_resources\n"
- "pkg_resources.run_script(%(spec)r, %(script_name)r)\n"
- ) % locals()
-
- self.write_script(script_name, script_text)
-
- def write_script(self, script_name, contents, mode="t", blockers=()):
- """Write an executable file to the scripts directory"""
- self.delete_blockers( # clean up old .py/.pyw w/o a script
- [os.path.join(self.script_dir,x) for x in blockers])
- log.info("Installing %s script to %s", script_name, self.script_dir)
- target = os.path.join(self.script_dir, script_name)
- self.add_output(target)
-
- if not self.dry_run:
- ensure_directory(target)
- f = open(target,"w"+mode)
- f.write(contents)
- f.close()
- try:
- os.chmod(target,0755)
- except (AttributeError, os.error):
- pass
-
- def install_eggs(self, spec, dist_filename, tmpdir):
- # .egg dirs or files are already built, so just return them
- if dist_filename.lower().endswith('.egg'):
- return [self.install_egg(dist_filename, tmpdir)]
- elif dist_filename.lower().endswith('.exe'):
- return [self.install_exe(dist_filename, tmpdir)]
-
- # Anything else, try to extract and build
- setup_base = tmpdir
- if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
- unpack_archive(dist_filename, tmpdir, self.unpack_progress)
- elif os.path.isdir(dist_filename):
- setup_base = os.path.abspath(dist_filename)
-
- if (setup_base.startswith(tmpdir) # something we downloaded
- and self.build_directory and spec is not None
- ):
- setup_base = self.maybe_move(spec, dist_filename, setup_base)
-
- # Find the setup.py file
- setup_script = os.path.join(setup_base, 'setup.py')
-
- if not os.path.exists(setup_script):
- setups = glob(os.path.join(setup_base, '*', 'setup.py'))
- if not setups:
- raise DistutilsError(
- "Couldn't find a setup script in %s" % dist_filename
- )
- if len(setups)>1:
- raise DistutilsError(
- "Multiple setup scripts in %s" % dist_filename
- )
- setup_script = setups[0]
-
- # Now run it, and return the result
- if self.editable:
- log.warn(self.report_editable(spec, setup_script))
- return []
- else:
- return self.build_and_install(setup_script, setup_base)
-
- def egg_distribution(self, egg_path):
- if os.path.isdir(egg_path):
- metadata = PathMetadata(egg_path,os.path.join(egg_path,'EGG-INFO'))
- else:
- metadata = EggMetadata(zipimport.zipimporter(egg_path))
- return Distribution.from_filename(egg_path,metadata=metadata)
-
- def install_egg(self, egg_path, tmpdir):
- destination = os.path.join(self.install_dir,os.path.basename(egg_path))
- destination = os.path.abspath(destination)
- if not self.dry_run:
- ensure_directory(destination)
-
- dist = self.egg_distribution(egg_path)
- self.check_conflicts(dist)
- if not samefile(egg_path, destination):
- if os.path.isdir(destination) and not os.path.islink(destination):
- dir_util.remove_tree(destination, dry_run=self.dry_run)
- elif os.path.exists(destination):
- self.execute(os.unlink,(destination,),"Removing "+destination)
- uncache_zipdir(destination)
- if os.path.isdir(egg_path):
- if egg_path.startswith(tmpdir):
- f,m = shutil.move, "Moving"
- else:
- f,m = shutil.copytree, "Copying"
- elif self.should_unzip(dist):
- self.mkpath(destination)
- f,m = self.unpack_and_compile, "Extracting"
- elif egg_path.startswith(tmpdir):
- f,m = shutil.move, "Moving"
- else:
- f,m = shutil.copy2, "Copying"
-
- self.execute(f, (egg_path, destination),
- (m+" %s to %s") %
- (os.path.basename(egg_path),os.path.dirname(destination)))
-
- self.add_output(destination)
- return self.egg_distribution(destination)
-
- def install_exe(self, dist_filename, tmpdir):
- # See if it's valid, get data
- cfg = extract_wininst_cfg(dist_filename)
- if cfg is None:
- raise DistutilsError(
- "%s is not a valid distutils Windows .exe" % dist_filename
- )
- # Create a dummy distribution object until we build the real distro
- dist = Distribution(None,
- project_name=cfg.get('metadata','name'),
- version=cfg.get('metadata','version'), platform="win32"
- )
-
- # Convert the .exe to an unpacked egg
- egg_path = dist.location = os.path.join(tmpdir, dist.egg_name()+'.egg')
- egg_tmp = egg_path+'.tmp'
- egg_info = os.path.join(egg_tmp, 'EGG-INFO')
- pkg_inf = os.path.join(egg_info, 'PKG-INFO')
- ensure_directory(pkg_inf) # make sure EGG-INFO dir exists
- dist._provider = PathMetadata(egg_tmp, egg_info) # XXX
- self.exe_to_egg(dist_filename, egg_tmp)
-
- # Write EGG-INFO/PKG-INFO
- if not os.path.exists(pkg_inf):
- f = open(pkg_inf,'w')
- f.write('Metadata-Version: 1.0\n')
- for k,v in cfg.items('metadata'):
- if k<>'target_version':
- f.write('%s: %s\n' % (k.replace('_','-').title(), v))
- f.close()
- script_dir = os.path.join(egg_info,'scripts')
- self.delete_blockers( # delete entry-point scripts to avoid duping
- [os.path.join(script_dir,args[0]) for args in get_script_args(dist)]
- )
- # Build .egg file from tmpdir
- bdist_egg.make_zipfile(
- egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run
- )
- # install the .egg
- return self.install_egg(egg_path, tmpdir)
-
- def exe_to_egg(self, dist_filename, egg_tmp):
- """Extract a bdist_wininst to the directories an egg would use"""
- # Check for .pth file and set up prefix translations
- prefixes = get_exe_prefixes(dist_filename)
- to_compile = []
- native_libs = []
- top_level = {}
-
- def process(src,dst):
- for old,new in prefixes:
- if src.startswith(old):
- src = new+src[len(old):]
- parts = src.split('/')
- dst = os.path.join(egg_tmp, *parts)
- dl = dst.lower()
- if dl.endswith('.pyd') or dl.endswith('.dll'):
- top_level[os.path.splitext(parts[0])[0]] = 1
- native_libs.append(src)
- elif dl.endswith('.py') and old!='SCRIPTS/':
- top_level[os.path.splitext(parts[0])[0]] = 1
- to_compile.append(dst)
- return dst
- if not src.endswith('.pth'):
- log.warn("WARNING: can't process %s", src)
- return None
-
- # extract, tracking .pyd/.dll->native_libs and .py -> to_compile
- unpack_archive(dist_filename, egg_tmp, process)
- stubs = []
- for res in native_libs:
- if res.lower().endswith('.pyd'): # create stubs for .pyd's
- parts = res.split('/')
- resource, parts[-1] = parts[-1], parts[-1][:-1]
- pyfile = os.path.join(egg_tmp, *parts)
- to_compile.append(pyfile); stubs.append(pyfile)
- bdist_egg.write_stub(resource, pyfile)
-
- self.byte_compile(to_compile) # compile .py's
- bdist_egg.write_safety_flag(os.path.join(egg_tmp,'EGG-INFO'),
- bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag
-
- for name in 'top_level','native_libs':
- if locals()[name]:
- txt = os.path.join(egg_tmp, 'EGG-INFO', name+'.txt')
- if not os.path.exists(txt):
- open(txt,'w').write('\n'.join(locals()[name])+'\n')
-
- def check_conflicts(self, dist):
- """Verify that there are no conflicting "old-style" packages"""
-
- return dist # XXX temporarily disable until new strategy is stable
- from imp import find_module, get_suffixes
- from glob import glob
-
- blockers = []
- names = dict.fromkeys(dist._get_metadata('top_level.txt')) # XXX private attr
-
- exts = {'.pyc':1, '.pyo':1} # get_suffixes() might leave one out
- for ext,mode,typ in get_suffixes():
- exts[ext] = 1
-
- for path,files in expand_paths([self.install_dir]+self.all_site_dirs):
- for filename in files:
- base,ext = os.path.splitext(filename)
- if base in names:
- if not ext:
- # no extension, check for package
- try:
- f, filename, descr = find_module(base, [path])
- except ImportError:
- continue
- else:
- if f: f.close()
- if filename not in blockers:
- blockers.append(filename)
- elif ext in exts and base!='site': # XXX ugh
- blockers.append(os.path.join(path,filename))
- if blockers:
- self.found_conflicts(dist, blockers)
-
- return dist
-
- def found_conflicts(self, dist, blockers):
- if self.delete_conflicting:
- log.warn("Attempting to delete conflicting packages:")
- return self.delete_blockers(blockers)
-
- msg = """\
--------------------------------------------------------------------------
-CONFLICT WARNING:
-
-The following modules or packages have the same names as modules or
-packages being installed, and will appear *before* the installed packages in
-Python's search path. You MUST remove all of the relevant files and
-directories before you will be able to use the package(s) you are
-installing:
-
- %s
-
-""" % '\n '.join(blockers)
-
- if self.ignore_conflicts_at_my_risk:
- msg += """\
-(Note: you can run EasyInstall on '%s' with the
---delete-conflicting option to attempt deletion of the above files
-and/or directories.)
-""" % dist.project_name
- else:
- msg += """\
-Note: you can attempt this installation again with EasyInstall, and use
-either the --delete-conflicting (-D) option or the
---ignore-conflicts-at-my-risk option, to either delete the above files
-and directories, or to ignore the conflicts, respectively. Note that if
-you ignore the conflicts, the installed package(s) may not work.
-"""
- msg += """\
--------------------------------------------------------------------------
-"""
- sys.stderr.write(msg)
- sys.stderr.flush()
- if not self.ignore_conflicts_at_my_risk:
- raise DistutilsError("Installation aborted due to conflicts")
-
- def installation_report(self, req, dist, what="Installed"):
- """Helpful installation message for display to package users"""
- msg = "\n%(what)s %(eggloc)s%(extras)s"
- if self.multi_version and not self.no_report:
- msg += """
-
-Because this distribution was installed with --multi-version or --install-dir,
-before you can import modules from this package in an application, you
-will need to 'import pkg_resources' and then use a 'require()' call
-similar to one of these examples, in order to select the desired version:
-
- pkg_resources.require("%(name)s") # latest installed version
- pkg_resources.require("%(name)s==%(version)s") # this exact version
- pkg_resources.require("%(name)s>=%(version)s") # this version or higher
-"""
- if self.install_dir not in map(normalize_path,sys.path):
- msg += """
-
-Note also that the installation directory must be on sys.path at runtime for
-this to work. (e.g. by being the application's script directory, by being on
-PYTHONPATH, or by being added to sys.path by your code.)
-"""
- eggloc = dist.location
- name = dist.project_name
- version = dist.version
- extras = '' # TODO: self.report_extras(req, dist)
- return msg % locals()
-
- def report_editable(self, spec, setup_script):
- dirname = os.path.dirname(setup_script)
- python = sys.executable
- return """\nExtracted editable version of %(spec)s to %(dirname)s
-
-If it uses setuptools in its setup script, you can activate it in
-"development" mode by going to that directory and running::
-
- %(python)s setup.py develop
-
-See the setuptools documentation for the "develop" command for more info.
-""" % locals()
-
- def run_setup(self, setup_script, setup_base, args):
- sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
- sys.modules.setdefault('distutils.command.egg_info', egg_info)
-
- args = list(args)
- if self.verbose>2:
- v = 'v' * (self.verbose - 1)
- args.insert(0,'-'+v)
- elif self.verbose<2:
- args.insert(0,'-q')
- if self.dry_run:
- args.insert(0,'-n')
- log.info(
- "Running %s %s", setup_script[len(setup_base)+1:], ' '.join(args)
- )
- try:
- run_setup(setup_script, args)
- except SystemExit, v:
- raise DistutilsError("Setup script exited with %s" % (v.args[0],))
-
- def build_and_install(self, setup_script, setup_base):
- args = ['bdist_egg', '--dist-dir']
- dist_dir = tempfile.mkdtemp(
- prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script)
- )
- try:
- args.append(dist_dir)
- self.run_setup(setup_script, setup_base, args)
- all_eggs = Environment([dist_dir])
- eggs = []
- for key in all_eggs:
- for dist in all_eggs[key]:
- eggs.append(self.install_egg(dist.location, setup_base))
- if not eggs and not self.dry_run:
- log.warn("No eggs found in %s (setup script problem?)",
- dist_dir)
- return eggs
- finally:
- rmtree(dist_dir)
- log.set_verbosity(self.verbose) # restore our log verbosity
-
- def update_pth(self,dist):
- if self.pth_file is None:
- return
-
- for d in self.pth_file[dist.key]: # drop old entries
- if self.multi_version or d.location != dist.location:
- log.info("Removing %s from easy-install.pth file", d)
- self.pth_file.remove(d)
- if d.location in self.shadow_path:
- self.shadow_path.remove(d.location)
-
- if not self.multi_version:
- if dist.location in self.pth_file.paths:
- log.info(
- "%s is already the active version in easy-install.pth",
- dist
- )
- else:
- log.info("Adding %s to easy-install.pth file", dist)
- self.pth_file.add(dist) # add new entry
- if dist.location not in self.shadow_path:
- self.shadow_path.append(dist.location)
-
- if not self.dry_run:
-
- self.pth_file.save()
-
- if dist.key=='setuptools':
- # Ensure that setuptools itself never becomes unavailable!
- # XXX should this check for latest version?
- filename = os.path.join(self.install_dir,'setuptools.pth')
- if os.path.islink(filename): os.unlink(filename)
- f = open(filename, 'wt')
- f.write(self.pth_file.make_relative(dist.location)+'\n')
- f.close()
-
- def unpack_progress(self, src, dst):
- # Progress filter for unpacking
- log.debug("Unpacking %s to %s", src, dst)
- return dst # only unpack-and-compile skips files for dry run
-
- def unpack_and_compile(self, egg_path, destination):
- to_compile = []
-
- def pf(src,dst):
- if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
- to_compile.append(dst)
- self.unpack_progress(src,dst)
- return not self.dry_run and dst or None
-
- unpack_archive(egg_path, destination, pf)
- self.byte_compile(to_compile)
-
-
- def byte_compile(self, to_compile):
- from distutils.util import byte_compile
- try:
- # try to make the byte compile messages quieter
- log.set_verbosity(self.verbose - 1)
-
- byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)
- if self.optimize:
- byte_compile(
- to_compile, optimize=self.optimize, force=1,
- dry_run=self.dry_run
- )
- finally:
- log.set_verbosity(self.verbose) # restore original verbosity
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- def no_default_version_msg(self):
- return """bad install directory or PYTHONPATH
-
-You are attempting to install a package to a directory that is not
-on PYTHONPATH and which Python does not read ".pth" files from. The
-installation directory you specified (via --install-dir, --prefix, or
-the distutils default setting) was:
-
- %s
-
-and your PYTHONPATH environment variable currently contains:
-
- %r
-
-Here are some of your options for correcting the problem:
-
-* You can choose a different installation directory, i.e., one that is
- on PYTHONPATH or supports .pth files
-
-* You can add the installation directory to the PYTHONPATH environment
- variable. (It must then also be on PYTHONPATH whenever you run
- Python and want to use the package(s) you are installing.)
-
-* You can set up the installation directory to support ".pth" files by
- using one of the approaches described here:
-
- http://peak.telecommunity.com/EasyInstall.html#custom-installation-locations
-
-Please make the appropriate changes for your system and try again.""" % (
- self.install_dir, os.environ.get('PYTHONPATH','')
- )
-
-
-
-
-
-
-
-
-
-
- def install_site_py(self):
- """Make sure there's a site.py in the target dir, if needed"""
-
- if self.sitepy_installed:
- return # already did it, or don't need to
-
- sitepy = os.path.join(self.install_dir, "site.py")
- source = resource_string("setuptools", "site-patch.py")
- current = ""
-
- if os.path.exists(sitepy):
- log.debug("Checking existing site.py in %s", self.install_dir)
- current = open(sitepy,'rb').read()
- if not current.startswith('def __boot():'):
- raise DistutilsError(
- "%s is not a setuptools-generated site.py; please"
- " remove it." % sitepy
- )
-
- if current != source:
- log.info("Creating %s", sitepy)
- if not self.dry_run:
- ensure_directory(sitepy)
- f = open(sitepy,'wb')
- f.write(source)
- f.close()
- self.byte_compile([sitepy])
-
- self.sitepy_installed = True
-
-
-
-
-
-
-
-
-
-
-
-
- INSTALL_SCHEMES = dict(
- posix = dict(
- install_dir = '$base/lib/python$py_version_short/site-packages',
- script_dir = '$base/bin',
- ),
- )
-
- DEFAULT_SCHEME = dict(
- install_dir = '$base/Lib/site-packages',
- script_dir = '$base/Scripts',
- )
-
- def _expand(self, *attrs):
- config_vars = self.get_finalized_command('install').config_vars
-
- if self.prefix:
- # Set default install_dir/scripts from --prefix
- config_vars = config_vars.copy()
- config_vars['base'] = self.prefix
- scheme = self.INSTALL_SCHEMES.get(os.name,self.DEFAULT_SCHEME)
- for attr,val in scheme.items():
- if getattr(self,attr,None) is None:
- setattr(self,attr,val)
-
- from distutils.util import subst_vars
- for attr in attrs:
- val = getattr(self, attr)
- if val is not None:
- val = subst_vars(val, config_vars)
- if os.name == 'posix':
- val = os.path.expanduser(val)
- setattr(self, attr, val)
-
-
-
-
-
-
-
-
-
-def get_site_dirs():
- # return a list of 'site' dirs
- sitedirs = filter(None,os.environ.get('PYTHONPATH','').split(os.pathsep))
- prefixes = [sys.prefix]
- if sys.exec_prefix != sys.prefix:
- prefixes.append(sys.exec_prefix)
- for prefix in prefixes:
- if prefix:
- if sys.platform in ('os2emx', 'riscos'):
- sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
- elif os.sep == '/':
- sitedirs.extend([os.path.join(prefix,
- "lib",
- "python" + sys.version[:3],
- "site-packages"),
- os.path.join(prefix, "lib", "site-python")])
- else:
- sitedirs.extend(
- [prefix, os.path.join(prefix, "lib", "site-packages")]
- )
- if sys.platform == 'darwin':
- # for framework builds *only* we add the standard Apple
- # locations. Currently only per-user, but /Library and
- # /Network/Library could be added too
- if 'Python.framework' in prefix:
- home = os.environ.get('HOME')
- if home:
- sitedirs.append(
- os.path.join(home,
- 'Library',
- 'Python',
- sys.version[:3],
- 'site-packages'))
- for plat_specific in (0,1):
- site_lib = get_python_lib(plat_specific)
- if site_lib not in sitedirs: sitedirs.append(site_lib)
-
- sitedirs = map(normalize_path, sitedirs)
- return sitedirs
-
-
-def expand_paths(inputs):
- """Yield sys.path directories that might contain "old-style" packages"""
-
- seen = {}
-
- for dirname in inputs:
- dirname = normalize_path(dirname)
- if dirname in seen:
- continue
-
- seen[dirname] = 1
- if not os.path.isdir(dirname):
- continue
-
- files = os.listdir(dirname)
- yield dirname, files
-
- for name in files:
- if not name.endswith('.pth'):
- # We only care about the .pth files
- continue
- if name in ('easy-install.pth','setuptools.pth'):
- # Ignore .pth files that we control
- continue
-
- # Read the .pth file
- f = open(os.path.join(dirname,name))
- lines = list(yield_lines(f))
- f.close()
-
- # Yield existing non-dupe, non-import directory lines from it
- for line in lines:
- if not line.startswith("import"):
- line = normalize_path(line.rstrip())
- if line not in seen:
- seen[line] = 1
- if not os.path.isdir(line):
- continue
- yield line, os.listdir(line)
-
-
-def extract_wininst_cfg(dist_filename):
- """Extract configuration data from a bdist_wininst .exe
-
- Returns a ConfigParser.RawConfigParser, or None
- """
- f = open(dist_filename,'rb')
- try:
- endrec = zipfile._EndRecData(f)
- if endrec is None:
- return None
-
- prepended = (endrec[9] - endrec[5]) - endrec[6]
- if prepended < 12: # no wininst data here
- return None
- f.seek(prepended-12)
-
- import struct, StringIO, ConfigParser
- tag, cfglen, bmlen = struct.unpack("<iii",f.read(12))
- if tag not in (0x1234567A, 0x1234567B):
- return None # not a valid tag
-
- f.seek(prepended-(12+cfglen+bmlen))
- cfg = ConfigParser.RawConfigParser({'version':'','target_version':''})
- try:
- cfg.readfp(StringIO.StringIO(f.read(cfglen).split(chr(0),1)[0]))
- except ConfigParser.Error:
- return None
- if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
- return None
- return cfg
-
- finally:
- f.close()
-
-
-
-
-
-
-
-
-def get_exe_prefixes(exe_filename):
- """Get exe->egg path translations for a given .exe file"""
-
- prefixes = [
- ('PURELIB/', ''),
- ('PLATLIB/', ''),
- ('SCRIPTS/', 'EGG-INFO/scripts/')
- ]
- z = zipfile.ZipFile(exe_filename)
- try:
- for info in z.infolist():
- name = info.filename
- parts = name.split('/')
- if len(parts)==3 and parts[2]=='PKG-INFO':
- if parts[1].endswith('.egg-info'):
- prefixes.insert(0,('/'.join(parts[:2]), 'EGG-INFO/'))
- break
- if len(parts)<>2 or not name.endswith('.pth'):
- continue
- if name.endswith('-nspkg.pth'):
- continue
- if parts[0] in ('PURELIB','PLATLIB'):
- for pth in yield_lines(z.read(name)):
- pth = pth.strip().replace('\\','/')
- if not pth.startswith('import'):
- prefixes.append((('%s/%s/' % (parts[0],pth)), ''))
- finally:
- z.close()
-
- prefixes.sort(); prefixes.reverse()
- return prefixes
-
-
-def parse_requirement_arg(spec):
- try:
- return Requirement.parse(spec)
- except ValueError:
- raise DistutilsError(
- "Not a URL, existing file, or requirement spec: %r" % (spec,)
- )
-
-class PthDistributions(Environment):
- """A .pth file with Distribution paths in it"""
-
- dirty = False
-
- def __init__(self, filename):
- self.filename = filename
- self.basedir = normalize_path(os.path.dirname(self.filename))
- self._load(); Environment.__init__(self, [], None, None)
- for path in yield_lines(self.paths):
- map(self.add, find_distributions(path, True))
-
- def _load(self):
- self.paths = []
- saw_import = False
- seen = {}
- if os.path.isfile(self.filename):
- for line in open(self.filename,'rt'):
- if line.startswith('import'):
- saw_import = True
- continue
- path = line.rstrip()
- self.paths.append(path)
- if not path.strip() or path.strip().startswith('#'):
- continue
- # skip non-existent paths, in case somebody deleted a package
- # manually, and duplicate paths as well
- path = self.paths[-1] = normalize_path(
- os.path.join(self.basedir,path)
- )
- if not os.path.exists(path) or path in seen:
- self.paths.pop() # skip it
- self.dirty = True # we cleaned up, so we're dirty now :)
- continue
- seen[path] = 1
-
- if self.paths and not saw_import:
- self.dirty = True # ensure anything we touch has import wrappers
- while self.paths and not self.paths[-1].strip():
- self.paths.pop()
-
- def save(self):
- """Write changed .pth file back to disk"""
- if not self.dirty:
- return
-
- data = '\n'.join(map(self.make_relative,self.paths))
- if data:
- log.debug("Saving %s", self.filename)
- data = (
- "import sys; sys.__plen = len(sys.path)\n"
- "%s\n"
- "import sys; new=sys.path[sys.__plen:];"
- " del sys.path[sys.__plen:];"
- " p=getattr(sys,'__egginsert',0); sys.path[p:p]=new;"
- " sys.__egginsert = p+len(new)\n"
- ) % data
-
- if os.path.islink(self.filename):
- os.unlink(self.filename)
- f = open(self.filename,'wb')
- f.write(data); f.close()
-
- elif os.path.exists(self.filename):
- log.debug("Deleting empty %s", self.filename)
- os.unlink(self.filename)
-
- self.dirty = False
-
- def add(self,dist):
- """Add `dist` to the distribution map"""
- if dist.location not in self.paths:
- self.paths.append(dist.location); self.dirty = True
- Environment.add(self,dist)
-
- def remove(self,dist):
- """Remove `dist` from the distribution map"""
- while dist.location in self.paths:
- self.paths.remove(dist.location); self.dirty = True
- Environment.remove(self,dist)
-
-
- def make_relative(self,path):
- if normalize_path(os.path.dirname(path))==self.basedir:
- return os.path.basename(path)
- return path
-
-
-def get_script_header(script_text, executable=sys_executable):
- """Create a #! line, getting options (if any) from script_text"""
- from distutils.command.build_scripts import first_line_re
- first, rest = (script_text+'\n').split('\n',1)
- match = first_line_re.match(first)
- options = ''
- if match:
- script_text = rest
- options = match.group(1) or ''
- if options:
- options = ' '+options
- return "#!%(executable)s%(options)s\n" % locals()
-
-
-def auto_chmod(func, arg, exc):
- if func is os.remove and os.name=='nt':
- os.chmod(arg, stat.S_IWRITE)
- return func(arg)
- exc = sys.exc_info()
- raise exc[0], (exc[1][0], exc[1][1] + (" %s %s" % (func,arg)))
-
-
-def uncache_zipdir(path):
- """Ensure that the zip directory cache doesn't have stale info for path"""
- from zipimport import _zip_directory_cache as zdc
- if path in zdc:
- del zdc[path]
- else:
- path = normalize_path(path)
- for p in zdc:
- if normalize_path(p)==path:
- del zdc[p]
- return
-
-
-def get_script_args(dist, executable=sys_executable):
- """Yield write_script() argument tuples for a distribution's entrypoints"""
- spec = str(dist.as_requirement())
- header = get_script_header("", executable)
- for group in 'console_scripts', 'gui_scripts':
- for name,ep in dist.get_entry_map(group).items():
- script_text = (
- "# EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r\n"
- "__requires__ = %(spec)r\n"
- "import sys\n"
- "from pkg_resources import load_entry_point\n"
- "\n"
- "sys.exit(\n"
- " load_entry_point(%(spec)r, %(group)r, %(name)r)()\n"
- ")\n"
- ) % locals()
- if sys.platform=='win32':
- # On Windows, add a .py extension and an .exe launcher
- if group=='gui_scripts':
- ext, launcher = '-script.pyw', 'gui.exe'
- old = ['.pyw']
- new_header = re.sub('(?i)python.exe','pythonw.exe',header)
- else:
- ext, launcher = '-script.py', 'cli.exe'
- old = ['.py','.pyc','.pyo']
-                    new_header = re.sub('(?i)pythonw.exe','python.exe',header)
-
- if os.path.exists(new_header[2:-1]):
- hdr = new_header
- else:
- hdr = header
- yield (name+ext, hdr+script_text, 't', [name+x for x in old])
- yield (
- name+'.exe', resource_string('setuptools', launcher),
- 'b' # write in binary mode
- )
- else:
- # On other platforms, we assume the right thing to do is to
- # just write the stub with no extension.
- yield (name, header+script_text)
-
-def rmtree(path, ignore_errors=False, onerror=auto_chmod):
- """Recursively delete a directory tree.
-
- This code is taken from the Python 2.4 version of 'shutil', because
- the 2.3 version doesn't really work right.
- """
- if ignore_errors:
- def onerror(*args):
- pass
- elif onerror is None:
- def onerror(*args):
- raise
- names = []
- try:
- names = os.listdir(path)
- except os.error, err:
- onerror(os.listdir, path, sys.exc_info())
- for name in names:
- fullname = os.path.join(path, name)
- try:
- mode = os.lstat(fullname).st_mode
- except os.error:
- mode = 0
- if stat.S_ISDIR(mode):
- rmtree(fullname, ignore_errors, onerror)
- else:
- try:
- os.remove(fullname)
- except os.error, err:
- onerror(os.remove, fullname, sys.exc_info())
- try:
- os.rmdir(path)
- except os.error:
- onerror(os.rmdir, path, sys.exc_info())
-
-
-
-
-
-
-
-def main(argv=None, **kw):
- from setuptools import setup
- from setuptools.dist import Distribution
- import distutils.core
-
- USAGE = """\
-usage: %(script)s [options] requirement_or_url ...
- or: %(script)s --help
-"""
-
- def gen_usage (script_name):
- script = os.path.basename(script_name)
- return USAGE % vars()
-
- def with_ei_usage(f):
- old_gen_usage = distutils.core.gen_usage
- try:
- distutils.core.gen_usage = gen_usage
- return f()
- finally:
- distutils.core.gen_usage = old_gen_usage
-
- class DistributionWithoutHelpCommands(Distribution):
- def _show_help(self,*args,**kw):
- with_ei_usage(lambda: Distribution._show_help(self,*args,**kw))
-
- if argv is None:
- argv = sys.argv[1:]
-
- with_ei_usage(lambda:
- setup(
- script_args = ['-q','easy_install', '-v']+argv,
- script_name = sys.argv[0] or 'easy_install',
- distclass=DistributionWithoutHelpCommands, **kw
- )
- )
diff --git a/Lib/setuptools/command/egg_info.py b/Lib/setuptools/command/egg_info.py
deleted file mode 100755
index b68fb39e12..0000000000
--- a/Lib/setuptools/command/egg_info.py
+++ /dev/null
@@ -1,365 +0,0 @@
-"""setuptools.command.egg_info
-
-Create a distribution's .egg-info directory and contents"""
-
-# This module should be kept compatible with Python 2.3
-import os, re
-from setuptools import Command
-from distutils.errors import *
-from distutils import log
-from setuptools.command.sdist import sdist
-from distutils import file_util
-from distutils.util import convert_path
-from distutils.filelist import FileList
-from pkg_resources import parse_requirements, safe_name, parse_version, \
- safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename
-from sdist import walk_revctrl
-
-class egg_info(Command):
- description = "create a distribution's .egg-info directory"
-
- user_options = [
- ('egg-base=', 'e', "directory containing .egg-info directories"
- " (default: top of the source tree)"),
- ('tag-svn-revision', 'r',
- "Add subversion revision ID to version number"),
- ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
- ('tag-build=', 'b', "Specify explicit tag to add to version number"),
- ]
-
- boolean_options = ['tag-date','tag-svn-revision']
-
- def initialize_options (self):
- self.egg_name = None
- self.egg_version = None
- self.egg_base = None
- self.egg_info = None
- self.tag_build = None
- self.tag_svn_revision = 0
- self.tag_date = 0
- self.broken_egg_info = False
-
- def finalize_options (self):
- self.egg_name = safe_name(self.distribution.get_name())
- self.egg_version = self.tagged_version()
-
- try:
- list(
- parse_requirements('%s==%s' % (self.egg_name,self.egg_version))
- )
- except ValueError:
- raise DistutilsOptionError(
- "Invalid distribution name or version syntax: %s-%s" %
- (self.egg_name,self.egg_version)
- )
-
- if self.egg_base is None:
- dirs = self.distribution.package_dir
- self.egg_base = (dirs or {}).get('',os.curdir)
-
- self.ensure_dirname('egg_base')
- self.egg_info = to_filename(self.egg_name)+'.egg-info'
- if self.egg_base != os.curdir:
- self.egg_info = os.path.join(self.egg_base, self.egg_info)
- if '-' in self.egg_name: self.check_broken_egg_info()
-
- # Set package version for the benefit of dumber commands
- # (e.g. sdist, bdist_wininst, etc.)
- #
- self.distribution.metadata.version = self.egg_version
-
- # If we bootstrapped around the lack of a PKG-INFO, as might be the
- # case in a fresh checkout, make sure that any special tags get added
- # to the version info
- #
- pd = self.distribution._patched_dist
- if pd is not None and pd.key==self.egg_name.lower():
- pd._version = self.egg_version
- pd._parsed_version = parse_version(self.egg_version)
- self.distribution._patched_dist = None
-
-
-
- def write_or_delete_file(self, what, filename, data, force=False):
- """Write `data` to `filename` or delete if empty
-
- If `data` is non-empty, this routine is the same as ``write_file()``.
- If `data` is empty but not ``None``, this is the same as calling
-        ``delete_file(filename)``. If `data` is ``None``, then this is a no-op
- unless `filename` exists, in which case a warning is issued about the
- orphaned file (if `force` is false), or deleted (if `force` is true).
- """
- if data:
- self.write_file(what, filename, data)
- elif os.path.exists(filename):
- if data is None and not force:
- log.warn(
- "%s not set in setup(), but %s exists", what, filename
- )
- return
- else:
- self.delete_file(filename)
-
- def write_file(self, what, filename, data):
- """Write `data` to `filename` (if not a dry run) after announcing it
-
- `what` is used in a log message to identify what is being written
- to the file.
- """
- log.info("writing %s to %s", what, filename)
- if not self.dry_run:
- f = open(filename, 'wb')
- f.write(data)
- f.close()
-
- def delete_file(self, filename):
- """Delete `filename` (if not a dry run) after announcing it"""
- log.info("deleting %s", filename)
- if not self.dry_run:
- os.unlink(filename)
-
-
-
-
- def run(self):
- self.mkpath(self.egg_info)
- installer = self.distribution.fetch_build_egg
- for ep in iter_entry_points('egg_info.writers'):
- writer = ep.load(installer=installer)
- writer(self, ep.name, os.path.join(self.egg_info,ep.name))
- self.find_sources()
-
- def tagged_version(self):
- version = self.distribution.get_version()
- if self.tag_build:
- version+=self.tag_build
- if self.tag_svn_revision and (
- os.path.exists('.svn') or os.path.exists('PKG-INFO')
- ): version += '-r%s' % self.get_svn_revision()
- if self.tag_date:
- import time; version += time.strftime("-%Y%m%d")
- return safe_version(version)
-
- def get_svn_revision(self):
- revision = 0
- urlre = re.compile('url="([^"]+)"')
- revre = re.compile('committed-rev="(\d+)"')
- for base,dirs,files in os.walk(os.curdir):
- if '.svn' not in dirs:
- dirs[:] = []
- continue # no sense walking uncontrolled subdirs
- dirs.remove('.svn')
- f = open(os.path.join(base,'.svn','entries'))
- data = f.read()
- f.close()
- dirurl = urlre.search(data).group(1) # get repository URL
- if base==os.curdir:
- base_url = dirurl+'/' # save the root url
- elif not dirurl.startswith(base_url):
- dirs[:] = []
- continue # not part of the same svn tree, skip it
- for match in revre.finditer(data):
- revision = max(revision, int(match.group(1)))
- return str(revision or get_pkg_info_revision())
-
- def find_sources(self):
- """Generate SOURCES.txt manifest file"""
- manifest_filename = os.path.join(self.egg_info,"SOURCES.txt")
- mm = manifest_maker(self.distribution)
- mm.manifest = manifest_filename
- mm.run()
- self.filelist = mm.filelist
-
- def check_broken_egg_info(self):
- bei = self.egg_name+'.egg-info'
- if self.egg_base != os.curdir:
- bei = os.path.join(self.egg_base, bei)
- if os.path.exists(bei):
- log.warn(
- "-"*78+'\n'
- "Note: Your current .egg-info directory has a '-' in its name;"
- '\nthis will not work correctly with "setup.py develop".\n\n'
- 'Please rename %s to %s to correct this problem.\n'+'-'*78,
- bei, self.egg_info
- )
- self.broken_egg_info = self.egg_info
- self.egg_info = bei # make it work for now
-
-class FileList(FileList):
- """File list that accepts only existing, platform-independent paths"""
-
- def append(self, item):
- path = convert_path(item)
- if os.path.exists(path):
- self.files.append(path)
-
-
-
-
-
-
-
-
-
-
-
-class manifest_maker(sdist):
-
- template = "MANIFEST.in"
-
- def initialize_options (self):
- self.use_defaults = 1
- self.prune = 1
- self.manifest_only = 1
- self.force_manifest = 1
-
- def finalize_options(self):
- pass
-
- def run(self):
- self.filelist = FileList()
- if not os.path.exists(self.manifest):
- self.write_manifest() # it must exist so it'll get in the list
- self.filelist.findall()
- self.add_defaults()
- if os.path.exists(self.template):
- self.read_template()
- self.prune_file_list()
- self.filelist.sort()
- self.filelist.remove_duplicates()
- self.write_manifest()
-
- def write_manifest (self):
- """Write the file list in 'self.filelist' (presumably as filled in
- by 'add_defaults()' and 'read_template()') to the manifest file
- named by 'self.manifest'.
- """
- files = self.filelist.files
- if os.sep!='/':
- files = [f.replace(os.sep,'/') for f in files]
- self.execute(file_util.write_file, (self.manifest, files),
- "writing manifest file '%s'" % self.manifest)
-
-
-
-
-
- def add_defaults(self):
- sdist.add_defaults(self)
- self.filelist.append(self.template)
- self.filelist.append(self.manifest)
- rcfiles = list(walk_revctrl())
- if rcfiles:
- self.filelist.extend(rcfiles)
- elif os.path.exists(self.manifest):
- self.read_manifest()
- ei_cmd = self.get_finalized_command('egg_info')
- self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)
-
- def prune_file_list (self):
- build = self.get_finalized_command('build')
- base_dir = self.distribution.get_fullname()
- self.filelist.exclude_pattern(None, prefix=build.build_base)
- self.filelist.exclude_pattern(None, prefix=base_dir)
- sep = re.escape(os.sep)
- self.filelist.exclude_pattern(sep+r'(RCS|CVS|\.svn)'+sep, is_regex=1)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-def write_pkg_info(cmd, basename, filename):
- log.info("writing %s", filename)
- if not cmd.dry_run:
- metadata = cmd.distribution.metadata
- metadata.version, oldver = cmd.egg_version, metadata.version
- metadata.name, oldname = cmd.egg_name, metadata.name
- try:
- # write unescaped data to PKG-INFO, so older pkg_resources
- # can still parse it
- metadata.write_pkg_info(cmd.egg_info)
- finally:
- metadata.name, metadata.version = oldname, oldver
-
- safe = getattr(cmd.distribution,'zip_safe',None)
- import bdist_egg; bdist_egg.write_safety_flag(cmd.egg_info, safe)
-
-def warn_depends_obsolete(cmd, basename, filename):
- if os.path.exists(filename):
- log.warn(
- "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
- "Use the install_requires/extras_require setup() args instead."
- )
-
-
-def write_requirements(cmd, basename, filename):
- dist = cmd.distribution
- data = ['\n'.join(yield_lines(dist.install_requires or ()))]
- for extra,reqs in (dist.extras_require or {}).items():
- data.append('\n\n[%s]\n%s' % (extra, '\n'.join(yield_lines(reqs))))
- cmd.write_or_delete_file("requirements", filename, ''.join(data))
-
-def write_toplevel_names(cmd, basename, filename):
- pkgs = dict.fromkeys(
- [k.split('.',1)[0]
- for k in cmd.distribution.iter_distribution_names()
- ]
- )
- cmd.write_file("top-level names", filename, '\n'.join(pkgs)+'\n')
-
-
-
-def overwrite_arg(cmd, basename, filename):
- write_arg(cmd, basename, filename, True)
-
-def write_arg(cmd, basename, filename, force=False):
- argname = os.path.splitext(basename)[0]
- value = getattr(cmd.distribution, argname, None)
- if value is not None:
- value = '\n'.join(value)+'\n'
- cmd.write_or_delete_file(argname, filename, value, force)
-
-def write_entries(cmd, basename, filename):
- ep = cmd.distribution.entry_points
-
- if isinstance(ep,basestring) or ep is None:
- data = ep
- elif ep is not None:
- data = []
- for section, contents in ep.items():
- if not isinstance(contents,basestring):
- contents = EntryPoint.parse_group(section, contents)
- contents = '\n'.join(map(str,contents.values()))
- data.append('[%s]\n%s\n\n' % (section,contents))
- data = ''.join(data)
-
- cmd.write_or_delete_file('entry points', filename, data, True)
-
-def get_pkg_info_revision():
- # See if we can get a -r### off of PKG-INFO, in case this is an sdist of
- # a subversion revision
- #
- if os.path.exists('PKG-INFO'):
- f = open('PKG-INFO','rU')
- for line in f:
- match = re.match(r"Version:.*-r(\d+)\s*$", line)
- if match:
- return int(match.group(1))
- return 0
diff --git a/Lib/setuptools/command/install.py b/Lib/setuptools/command/install.py
deleted file mode 100644
index bfb9af5aee..0000000000
--- a/Lib/setuptools/command/install.py
+++ /dev/null
@@ -1,101 +0,0 @@
-import setuptools, sys
-from distutils.command.install import install as _install
-from distutils.errors import DistutilsArgError
-
-class install(_install):
- """Use easy_install to install the package, w/dependencies"""
-
- user_options = _install.user_options + [
- ('old-and-unmanageable', None, "Try not to use this!"),
- ('single-version-externally-managed', None,
- "used by system package builders to create 'flat' eggs"),
- ]
- boolean_options = _install.boolean_options + [
- 'old-and-unmanageable', 'single-version-externally-managed',
- ]
- new_commands = [
- ('install_egg_info', lambda self: True),
- ('install_scripts', lambda self: True),
- ]
- _nc = dict(new_commands)
- sub_commands = [
- cmd for cmd in _install.sub_commands if cmd[0] not in _nc
- ] + new_commands
-
- def initialize_options(self):
- _install.initialize_options(self)
- self.old_and_unmanageable = None
- self.single_version_externally_managed = None
- self.no_compile = None # make DISTUTILS_DEBUG work right!
-
- def finalize_options(self):
- _install.finalize_options(self)
- if self.root:
- self.single_version_externally_managed = True
- elif self.single_version_externally_managed:
- if not self.root and not self.record:
- raise DistutilsArgError(
- "You must specify --record or --root when building system"
- " packages"
- )
-
- def handle_extra_path(self):
- # We always ignore extra_path, because we install as .egg or .egg-info
- self.path_file = None
- self.extra_dirs = ''
-
- def run(self):
- # Explicit request for old-style install? Just do it
- if self.old_and_unmanageable or self.single_version_externally_managed:
- return _install.run(self)
-
- # Attempt to detect whether we were called from setup() or by another
- # command. If we were called by setup(), our caller will be the
- # 'run_command' method in 'distutils.dist', and *its* caller will be
- # the 'run_commands' method. If we were called any other way, our
- # immediate caller *might* be 'run_command', but it won't have been
- # called by 'run_commands'. This is slightly kludgy, but seems to
- # work.
- #
- caller = sys._getframe(2)
- caller_module = caller.f_globals.get('__name__','')
- caller_name = caller.f_code.co_name
-
- if caller_module != 'distutils.dist' or caller_name!='run_commands':
- # We weren't called from the command line or setup(), so we
- # should run in backward-compatibility mode to support bdist_*
- # commands.
- _install.run(self)
- else:
- self.do_egg_install()
-
-
-
-
-
-
-
-
-
-
-
-
- def do_egg_install(self):
-
- from setuptools.command.easy_install import easy_install
-
- cmd = easy_install(
- self.distribution, args="x", root=self.root, record=self.record,
- )
- cmd.ensure_finalized() # finalize before bdist_egg munges install cmd
-
- self.run_command('bdist_egg')
- args = [self.distribution.get_command_obj('bdist_egg').egg_output]
-
- if setuptools.bootstrap_install_from:
- # Bootstrap self-installation of setuptools
- args.insert(0, setuptools.bootstrap_install_from)
-
- cmd.args = args
- cmd.run()
- setuptools.bootstrap_install_from = None
diff --git a/Lib/setuptools/command/install_egg_info.py b/Lib/setuptools/command/install_egg_info.py
deleted file mode 100755
index 193e91a0bb..0000000000
--- a/Lib/setuptools/command/install_egg_info.py
+++ /dev/null
@@ -1,81 +0,0 @@
-from setuptools import Command
-from setuptools.archive_util import unpack_archive
-from distutils import log, dir_util
-import os, shutil, pkg_resources
-
-class install_egg_info(Command):
- """Install an .egg-info directory for the package"""
-
- description = "Install an .egg-info directory for the package"
-
- user_options = [
- ('install-dir=', 'd', "directory to install to"),
- ]
-
- def initialize_options(self):
- self.install_dir = None
-
- def finalize_options(self):
- self.set_undefined_options('install_lib',('install_dir','install_dir'))
- ei_cmd = self.get_finalized_command("egg_info")
- basename = pkg_resources.Distribution(
- None, None, ei_cmd.egg_name, ei_cmd.egg_version
- ).egg_name()+'.egg-info'
- self.source = ei_cmd.egg_info
- self.target = os.path.join(self.install_dir, basename)
- self.outputs = [self.target]
-
- def run(self):
- self.run_command('egg_info')
- target = self.target
- if os.path.isdir(self.target) and not os.path.islink(self.target):
- dir_util.remove_tree(self.target, dry_run=self.dry_run)
- elif os.path.exists(self.target):
- self.execute(os.unlink,(self.target,),"Removing "+self.target)
- if not self.dry_run:
- pkg_resources.ensure_directory(self.target)
- self.execute(self.copytree, (),
- "Copying %s to %s" % (self.source, self.target)
- )
- self.install_namespaces()
-
- def get_outputs(self):
- return self.outputs
-
- def copytree(self):
- # Copy the .egg-info tree to site-packages
- def skimmer(src,dst):
- # filter out source-control directories; note that 'src' is always
- # a '/'-separated path, regardless of platform. 'dst' is a
- # platform-specific path.
- for skip in '.svn/','CVS/':
- if src.startswith(skip) or '/'+skip in src:
- return None
- self.outputs.append(dst)
- log.debug("Copying %s to %s", src, dst)
- return dst
- unpack_archive(self.source, self.target, skimmer)
-
- def install_namespaces(self):
- nsp = (self.distribution.namespace_packages or [])[:]
- if not nsp: return
- nsp.sort() # set up shorter names first
- filename,ext = os.path.splitext(self.target)
- filename += '-nspkg.pth'; self.outputs.append(filename)
- log.info("Installing %s",filename)
- if not self.dry_run:
- f = open(filename,'wb')
- for pkg in nsp:
- pth = tuple(pkg.split('.'))
- f.write(
- "import sys,new,os; "
- "p = os.path.join(sys._getframe(1).f_locals['sitedir'], "
- "*%(pth)r); "
- "ie = os.path.exists(os.path.join(p,'__init__.py')); "
- "m = not ie and "
- "sys.modules.setdefault(%(pkg)r,new.module(%(pkg)r)); "
- "mp = (m or []) and m.__dict__.setdefault('__path__',[]); "
- "(p not in mp) and mp.append(p)\n"
- % locals()
- )
- f.close()
diff --git a/Lib/setuptools/command/install_lib.py b/Lib/setuptools/command/install_lib.py
deleted file mode 100644
index 96c8dfeb51..0000000000
--- a/Lib/setuptools/command/install_lib.py
+++ /dev/null
@@ -1,76 +0,0 @@
-from distutils.command.install_lib import install_lib as _install_lib
-import os
-
-class install_lib(_install_lib):
- """Don't add compiled flags to filenames of non-Python files"""
-
- def _bytecode_filenames (self, py_filenames):
- bytecode_files = []
- for py_file in py_filenames:
- if not py_file.endswith('.py'):
- continue
- if self.compile:
- bytecode_files.append(py_file + "c")
- if self.optimize > 0:
- bytecode_files.append(py_file + "o")
-
- return bytecode_files
-
- def run(self):
- self.build()
- outfiles = self.install()
- if outfiles is not None:
- # always compile, in case we have any extension stubs to deal with
- self.byte_compile(outfiles)
-
- def get_exclusions(self):
- exclude = {}
- nsp = self.distribution.namespace_packages
-
- if (nsp and self.get_finalized_command('install')
- .single_version_externally_managed
- ):
- for pkg in nsp:
- parts = pkg.split('.')
- while parts:
- pkgdir = os.path.join(self.install_dir, *parts)
- for f in '__init__.py', '__init__.pyc', '__init__.pyo':
- exclude[os.path.join(pkgdir,f)] = 1
- parts.pop()
- return exclude
-
- def copy_tree(
- self, infile, outfile,
- preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
- ):
- assert preserve_mode and preserve_times and not preserve_symlinks
- exclude = self.get_exclusions()
-
- if not exclude:
- return _install_lib.copy_tree(self, infile, outfile)
-
- # Exclude namespace package __init__.py* files from the output
-
- from setuptools.archive_util import unpack_directory
- from distutils import log
-
- outfiles = []
-
- def pf(src, dst):
- if dst in exclude:
- log.warn("Skipping installation of %s (namespace package)",dst)
- return False
-
- log.info("copying %s -> %s", src, os.path.dirname(dst))
- outfiles.append(dst)
- return dst
-
- unpack_directory(infile, outfile, pf)
- return outfiles
-
- def get_outputs(self):
- outputs = _install_lib.get_outputs(self)
- exclude = self.get_exclusions()
- if exclude:
- return [f for f in outputs if f not in exclude]
- return outputs
diff --git a/Lib/setuptools/command/install_scripts.py b/Lib/setuptools/command/install_scripts.py
deleted file mode 100755
index 69558bf9a2..0000000000
--- a/Lib/setuptools/command/install_scripts.py
+++ /dev/null
@@ -1,56 +0,0 @@
-from distutils.command.install_scripts import install_scripts \
- as _install_scripts
-from easy_install import get_script_args, sys_executable
-from pkg_resources import Distribution, PathMetadata, ensure_directory
-import os
-from distutils import log
-
-class install_scripts(_install_scripts):
- """Do normal script install, plus any egg_info wrapper scripts"""
-
- def initialize_options(self):
- _install_scripts.initialize_options(self)
- self.no_ep = False
-
- def run(self):
- self.run_command("egg_info")
- if self.distribution.scripts:
- _install_scripts.run(self) # run first to set up self.outfiles
- else:
- self.outfiles = []
- if self.no_ep:
- # don't install entry point scripts into .egg file!
- return
-
- ei_cmd = self.get_finalized_command("egg_info")
- dist = Distribution(
- ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
- ei_cmd.egg_name, ei_cmd.egg_version,
- )
- bs_cmd = self.get_finalized_command('build_scripts')
- executable = getattr(bs_cmd,'executable',sys_executable)
-
- for args in get_script_args(dist, executable):
- self.write_script(*args)
-
-
-
-
-
-
-
- def write_script(self, script_name, contents, mode="t", *ignored):
- """Write an executable file to the scripts directory"""
- log.info("Installing %s script to %s", script_name, self.install_dir)
- target = os.path.join(self.install_dir, script_name)
- self.outfiles.append(target)
-
- if not self.dry_run:
- ensure_directory(target)
- f = open(target,"w"+mode)
- f.write(contents)
- f.close()
- try:
- os.chmod(target,0755)
- except (AttributeError, os.error):
- pass
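
Stripped of the command bookkeeping (dry_run handling, outfiles tracking, ensure_directory), write_script() above is "write the text, then try to make it executable". A minimal sketch with hypothetical arguments:

    import os

    def write_script(install_dir, script_name, contents, mode="t"):
        # mirrors the method above; the real one also calls ensure_directory()
        target = os.path.join(install_dir, script_name)
        f = open(target, "w" + mode)
        f.write(contents)
        f.close()
        try:
            os.chmod(target, 0755)    # best effort; skipped where chmod is absent
        except (AttributeError, os.error):
            pass
        return target
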
diff --git a/Lib/setuptools/command/rotate.py b/Lib/setuptools/command/rotate.py
deleted file mode 100755
index 8aab312ce1..0000000000
--- a/Lib/setuptools/command/rotate.py
+++ /dev/null
@@ -1,57 +0,0 @@
-import distutils, os
-from setuptools import Command
-from distutils.util import convert_path
-from distutils import log
-from distutils.errors import *
-
-class rotate(Command):
- """Delete older distributions"""
-
- description = "delete older distributions, keeping N newest files"
- user_options = [
- ('match=', 'm', "patterns to match (required)"),
- ('dist-dir=', 'd', "directory where the distributions are"),
- ('keep=', 'k', "number of matching distributions to keep"),
- ]
-
- boolean_options = []
-
- def initialize_options(self):
- self.match = None
- self.dist_dir = None
- self.keep = None
-
- def finalize_options(self):
- if self.match is None:
- raise DistutilsOptionError(
- "Must specify one or more (comma-separated) match patterns "
- "(e.g. '.zip' or '.egg')"
- )
- if self.keep is None:
- raise DistutilsOptionError("Must specify number of files to keep")
- try:
- self.keep = int(self.keep)
- except ValueError:
- raise DistutilsOptionError("--keep must be an integer")
- if isinstance(self.match, basestring):
- self.match = [
- convert_path(p.strip()) for p in self.match.split(',')
- ]
- self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
-
- def run(self):
- self.run_command("egg_info")
- from glob import glob
- for pattern in self.match:
- pattern = self.distribution.get_name()+'*'+pattern
- files = glob(os.path.join(self.dist_dir,pattern))
- files = [(os.path.getmtime(f),f) for f in files]
- files.sort()
- files.reverse()
-
- log.info("%d file(s) matching %s", len(files), pattern)
- files = files[self.keep:]
- for (t,f) in files:
- log.info("Deleting %s", f)
- if not self.dry_run:
- os.unlink(f)
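
In short, rotate globs '<project>*<pattern>' in the dist directory, sorts by modification time, and unlinks everything past the newest --keep entries. A hypothetical invocation and the equivalent standalone logic:

    # python setup.py rotate --match=.egg --keep=2
    import glob, os

    def rotate(dist_dir, project_name, pattern, keep):
        files = glob.glob(os.path.join(dist_dir, project_name + '*' + pattern))
        files = [(os.path.getmtime(f), f) for f in files]
        files.sort()
        files.reverse()                  # newest first
        for mtime, f in files[keep:]:    # everything older than the newest `keep`
            os.unlink(f)
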
diff --git a/Lib/setuptools/command/saveopts.py b/Lib/setuptools/command/saveopts.py
deleted file mode 100755
index 9c58d72e44..0000000000
--- a/Lib/setuptools/command/saveopts.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import distutils, os
-from setuptools import Command
-from setuptools.command.setopt import edit_config, option_base
-
-class saveopts(option_base):
- """Save command-line options to a file"""
-
- description = "save supplied options to setup.cfg or other config file"
-
- def run(self):
- dist = self.distribution
- commands = dist.command_options.keys()
- settings = {}
-
- for cmd in commands:
-
- if cmd=='saveopts':
- continue # don't save our own options!
-
- for opt,(src,val) in dist.get_option_dict(cmd).items():
- if src=="command line":
- settings.setdefault(cmd,{})[opt] = val
-
- edit_config(self.filename, settings, self.dry_run)
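
saveopts persists only settings whose recorded source is the command line, and skips its own options. A hypothetical session and the resulting config:

    # python setup.py build_ext --inplace saveopts
    #
    # saveopts collects  {'build_ext': {'inplace': 1}}  and edit_config()
    # then writes to setup.cfg:
    #
    #   [build_ext]
    #   inplace = 1
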
diff --git a/Lib/setuptools/command/sdist.py b/Lib/setuptools/command/sdist.py
deleted file mode 100755
index 829cd3c57e..0000000000
--- a/Lib/setuptools/command/sdist.py
+++ /dev/null
@@ -1,163 +0,0 @@
-from distutils.command.sdist import sdist as _sdist
-from distutils.util import convert_path
-import os, re, sys, pkg_resources
-
-entities = [
- ("&lt;","<"), ("&gt;", ">"), ("&quot;", '"'), ("&apos;", "'"),
- ("&amp;", "&")
-]
-
-def unescape(data):
- for old,new in entities:
- data = data.replace(old,new)
- return data
-
-def re_finder(pattern, postproc=None):
- def find(dirname, filename):
- f = open(filename,'rU')
- data = f.read()
- f.close()
- for match in pattern.finditer(data):
- path = match.group(1)
- if postproc:
- path = postproc(path)
- yield joinpath(dirname,path)
- return find
-
-def joinpath(prefix,suffix):
- if not prefix:
- return suffix
- return os.path.join(prefix,suffix)
-
-
-
-
-
-
-
-
-
-
-
-def walk_revctrl(dirname=''):
- """Find all files under revision control"""
- for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
- for item in ep.load()(dirname):
- yield item
-
-def _default_revctrl(dirname=''):
- for path, finder in finders:
- path = joinpath(dirname,path)
- if os.path.isfile(path):
- for path in finder(dirname,path):
- if os.path.isfile(path):
- yield path
- elif os.path.isdir(path):
- for item in _default_revctrl(path):
- yield item
-
-def externals_finder(dirname, filename):
- """Find any 'svn:externals' directories"""
- found = False
- f = open(filename,'rb')
- for line in iter(f.readline, ''): # can't use direct iter!
- parts = line.split()
- if len(parts)==2:
- kind,length = parts
- data = f.read(int(length))
- if kind=='K' and data=='svn:externals':
- found = True
- elif kind=='V' and found:
- f.close()
- break
- else:
- f.close()
- return
-
- for line in data.splitlines():
- parts = line.split()
- if parts:
- yield joinpath(dirname, parts[0])
-
-
-finders = [
- (convert_path('CVS/Entries'),
- re_finder(re.compile(r"^\w?/([^/]+)/", re.M))),
- (convert_path('.svn/entries'),
- re_finder(
- re.compile(r'name="([^"]+)"(?![^>]+deleted="true")', re.I),
- unescape
- )
- ),
- (convert_path('.svn/dir-props'), externals_finder),
-]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-class sdist(_sdist):
- """Smart sdist that finds anything supported by revision control"""
-
- user_options = [
- ('formats=', None,
- "formats for source distribution (comma-separated list)"),
- ('keep-temp', 'k',
- "keep the distribution tree around after creating " +
- "archive file(s)"),
- ('dist-dir=', 'd',
- "directory to put the source distribution archive(s) in "
- "[default: dist]"),
- ]
-
- negative_opt = {}
-
- def run(self):
- self.run_command('egg_info')
- ei_cmd = self.get_finalized_command('egg_info')
- self.filelist = ei_cmd.filelist
- self.filelist.append(os.path.join(ei_cmd.egg_info,'SOURCES.txt'))
-
- self.check_metadata()
- self.make_distribution()
-
- dist_files = getattr(self.distribution,'dist_files',[])
- for file in self.archive_files:
- data = ('sdist', '', file)
- if data not in dist_files:
- dist_files.append(data)
-
- def read_template(self):
- try:
- _sdist.read_template(self)
- except:
- # grody hack to close the template file (MANIFEST.in)
- # this prevents easy_install's attempt at deleting the file from
- # dying and thus masking the real error
- sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close()
- raise
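
Because walk_revctrl() consults the 'setuptools.file_finders' entry point group, any installed plugin can contribute files to the sdist manifest. A hypothetical plugin (all names here are illustrative):

    # setup.py of a hypothetical VCS-support plugin
    from setuptools import setup

    setup(
        name='example-vcs-finder',
        version='0.1',
        py_modules=['example_finder'],
        entry_points={
            'setuptools.file_finders': [
                'examplevcs = example_finder:find_files',
            ],
        },
    )

    # example_finder.py -- called as ep.load()(dirname) by walk_revctrl()
    import os

    def find_files(dirname=''):
        """Yield version-controlled paths relative to `dirname`."""
        listing = os.path.join(dirname or '.', '.examplevcs-manifest')
        if os.path.isfile(listing):
            for line in open(listing):
                yield os.path.join(dirname, line.strip())
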
diff --git a/Lib/setuptools/command/setopt.py b/Lib/setuptools/command/setopt.py
deleted file mode 100755
index e0c1058f38..0000000000
--- a/Lib/setuptools/command/setopt.py
+++ /dev/null
@@ -1,158 +0,0 @@
-import distutils, os
-from setuptools import Command
-from distutils.util import convert_path
-from distutils import log
-from distutils.errors import *
-
-__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
-
-
-def config_file(kind="local"):
- """Get the filename of the distutils, local, global, or per-user config
-
- `kind` must be one of "local", "global", or "user"
- """
- if kind=='local':
- return 'setup.cfg'
- if kind=='global':
- return os.path.join(
- os.path.dirname(distutils.__file__),'distutils.cfg'
- )
- if kind=='user':
- dot = os.name=='posix' and '.' or ''
- return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
- raise ValueError(
- "config_file() type must be 'local', 'global', or 'user'", kind
- )
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-def edit_config(filename, settings, dry_run=False):
- """Edit a configuration file to include `settings`
-
- `settings` is a dictionary of dictionaries or ``None`` values, keyed by
- command/section name. A ``None`` value means to delete the entire section,
- while a dictionary lists settings to be changed or deleted in that section.
- A setting of ``None`` means to delete that setting.
- """
- from ConfigParser import RawConfigParser
- log.debug("Reading configuration from %s", filename)
- opts = RawConfigParser()
- opts.read([filename])
- for section, options in settings.items():
- if options is None:
- log.info("Deleting section [%s] from %s", section, filename)
- opts.remove_section(section)
- else:
- if not opts.has_section(section):
- log.debug("Adding new section [%s] to %s", section, filename)
- opts.add_section(section)
- for option,value in options.items():
- if value is None:
- log.debug("Deleting %s.%s from %s",
- section, option, filename
- )
- opts.remove_option(section,option)
- if not opts.options(section):
- log.info("Deleting empty [%s] section from %s",
- section, filename)
- opts.remove_section(section)
- else:
- log.debug(
- "Setting %s.%s to %r in %s",
- section, option, value, filename
- )
- opts.set(section,option,value)
-
- log.info("Writing %s", filename)
- if not dry_run:
-        f = open(filename, 'w')
-        opts.write(f)
-        f.close()
-
-class option_base(Command):
- """Abstract base class for commands that mess with config files"""
-
- user_options = [
- ('global-config', 'g',
- "save options to the site-wide distutils.cfg file"),
- ('user-config', 'u',
- "save options to the current user's pydistutils.cfg file"),
- ('filename=', 'f',
- "configuration file to use (default=setup.cfg)"),
- ]
-
- boolean_options = [
- 'global-config', 'user-config',
- ]
-
- def initialize_options(self):
- self.global_config = None
- self.user_config = None
- self.filename = None
-
- def finalize_options(self):
- filenames = []
- if self.global_config:
- filenames.append(config_file('global'))
- if self.user_config:
- filenames.append(config_file('user'))
- if self.filename is not None:
- filenames.append(self.filename)
- if not filenames:
- filenames.append(config_file('local'))
- if len(filenames)>1:
- raise DistutilsOptionError(
- "Must specify only one configuration file option",
- filenames
- )
- self.filename, = filenames
-
-
-
-
-class setopt(option_base):
- """Save command-line options to a file"""
-
- description = "set an option in setup.cfg or another config file"
-
- user_options = [
- ('command=', 'c', 'command to set an option for'),
- ('option=', 'o', 'option to set'),
- ('set-value=', 's', 'value of the option'),
- ('remove', 'r', 'remove (unset) the value'),
- ] + option_base.user_options
-
- boolean_options = option_base.boolean_options + ['remove']
-
- def initialize_options(self):
- option_base.initialize_options(self)
- self.command = None
- self.option = None
- self.set_value = None
- self.remove = None
-
- def finalize_options(self):
- option_base.finalize_options(self)
- if self.command is None or self.option is None:
- raise DistutilsOptionError("Must specify --command *and* --option")
- if self.set_value is None and not self.remove:
- raise DistutilsOptionError("Must specify --set-value or --remove")
-
- def run(self):
- edit_config(
- self.filename, {
- self.command: {self.option.replace('-','_'):self.set_value}
- },
- self.dry_run
- )
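
The setopt command is a thin wrapper over edit_config(); a hypothetical command line and the equivalent direct calls (assuming this vintage of setuptools is importable):

    # python setup.py setopt -c easy_install -o find_links -s http://example.com/downloads
    from setuptools.command.setopt import edit_config

    edit_config(
        'setup.cfg',
        {'easy_install': {'find_links': 'http://example.com/downloads'}},
    )

    # and the --remove counterpart, which deletes the option (and the section,
    # if it ends up empty):
    edit_config('setup.cfg', {'easy_install': {'find_links': None}})
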
diff --git a/Lib/setuptools/command/test.py b/Lib/setuptools/command/test.py
deleted file mode 100644
index 01fca35f09..0000000000
--- a/Lib/setuptools/command/test.py
+++ /dev/null
@@ -1,119 +0,0 @@
-from setuptools import Command
-from distutils.errors import DistutilsOptionError
-import sys
-from pkg_resources import *
-from unittest import TestLoader, main
-
-class ScanningLoader(TestLoader):
-
- def loadTestsFromModule(self, module):
- """Return a suite of all tests cases contained in the given module
-
- If the module is a package, load tests from all the modules in it.
- If the module has an ``additional_tests`` function, call it and add
- the return value to the tests.
- """
- tests = []
- if module.__name__!='setuptools.tests.doctest': # ugh
- tests.append(TestLoader.loadTestsFromModule(self,module))
-
- if hasattr(module, "additional_tests"):
- tests.append(module.additional_tests())
-
- if hasattr(module, '__path__'):
- for file in resource_listdir(module.__name__, ''):
- if file.endswith('.py') and file!='__init__.py':
- submodule = module.__name__+'.'+file[:-3]
- else:
- if resource_exists(
- module.__name__, file+'/__init__.py'
- ):
- submodule = module.__name__+'.'+file
- else:
- continue
- tests.append(self.loadTestsFromName(submodule))
-
- if len(tests)!=1:
- return self.suiteClass(tests)
- else:
- return tests[0] # don't create a nested suite for only one return
-
-
-class test(Command):
-
- """Command to run unit tests after in-place build"""
-
- description = "run unit tests after in-place build"
-
- user_options = [
- ('test-module=','m', "Run 'test_suite' in specified module"),
- ('test-suite=','s',
- "Test suite to run (e.g. 'some_module.test_suite')"),
- ]
-
- def initialize_options(self):
- self.test_suite = None
- self.test_module = None
- self.test_loader = None
-
-
- def finalize_options(self):
-
- if self.test_suite is None:
- if self.test_module is None:
- self.test_suite = self.distribution.test_suite
- else:
- self.test_suite = self.test_module+".test_suite"
- elif self.test_module:
- raise DistutilsOptionError(
- "You may specify a module or a suite, but not both"
- )
-
- self.test_args = [self.test_suite]
-
- if self.verbose:
- self.test_args.insert(0,'--verbose')
- if self.test_loader is None:
- self.test_loader = getattr(self.distribution,'test_loader',None)
- if self.test_loader is None:
- self.test_loader = "setuptools.command.test:ScanningLoader"
-
-
-
- def run(self):
- # Ensure metadata is up-to-date
- self.run_command('egg_info')
-
- # Build extensions in-place
- self.reinitialize_command('build_ext', inplace=1)
- self.run_command('build_ext')
-
- if self.distribution.tests_require:
- self.distribution.fetch_build_eggs(self.distribution.tests_require)
-
- if self.test_suite:
- cmd = ' '.join(self.test_args)
- if self.dry_run:
- self.announce('skipping "unittest %s" (dry run)' % cmd)
- else:
- self.announce('running "unittest %s"' % cmd)
- self.run_tests()
-
-
- def run_tests(self):
- import unittest
- old_path = sys.path[:]
- ei_cmd = self.get_finalized_command("egg_info")
- path_item = normalize_path(ei_cmd.egg_base)
- metadata = PathMetadata(
- path_item, normalize_path(ei_cmd.egg_info)
- )
- dist = Distribution(path_item, metadata, project_name=ei_cmd.egg_name)
- working_set.add(dist)
- require(str(dist.as_requirement()))
- loader_ep = EntryPoint.parse("x="+self.test_loader)
- loader_class = loader_ep.load(require=False)
- unittest.main(
- None, None, [unittest.__file__]+self.test_args,
- testLoader = loader_class()
- )
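
To use the command above, a project names its suite in setup() and runs 'python setup.py test'. A hypothetical setup.py (project and requirement names are placeholders):

    from setuptools import setup, find_packages

    setup(
        name='Example',
        version='0.1',
        packages=find_packages(),
        test_suite='example.tests',        # dotted name handed to unittest
        tests_require=['SomeTestDep'],     # fetched as eggs before the run, if missing
    )

    # python setup.py test
    # python setup.py test --test-module=example.tests.test_api
    #   (the latter runs example.tests.test_api.test_suite)
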
diff --git a/Lib/setuptools/command/upload.py b/Lib/setuptools/command/upload.py
deleted file mode 100755
index 644c400aaa..0000000000
--- a/Lib/setuptools/command/upload.py
+++ /dev/null
@@ -1,178 +0,0 @@
-"""distutils.command.upload
-
-Implements the Distutils 'upload' subcommand (upload package to PyPI)."""
-
-from distutils.errors import *
-from distutils.core import Command
-from distutils.spawn import spawn
-from distutils import log
-from md5 import md5
-import os
-import socket
-import platform
-import ConfigParser
-import httplib
-import base64
-import urlparse
-import cStringIO as StringIO
-
-class upload(Command):
-
- description = "upload binary package to PyPI"
-
- DEFAULT_REPOSITORY = 'http://www.python.org/pypi'
-
- user_options = [
- ('repository=', 'r',
- "url of repository [default: %s]" % DEFAULT_REPOSITORY),
- ('show-response', None,
- 'display full response text from server'),
- ('sign', 's',
- 'sign files to upload using gpg'),
- ('identity=', 'i', 'GPG identity used to sign files'),
- ]
- boolean_options = ['show-response', 'sign']
-
- def initialize_options(self):
- self.username = ''
- self.password = ''
- self.repository = ''
- self.show_response = 0
- self.sign = False
- self.identity = None
-
- def finalize_options(self):
- if self.identity and not self.sign:
- raise DistutilsOptionError(
- "Must use --sign for --identity to have meaning"
- )
- if os.environ.has_key('HOME'):
- rc = os.path.join(os.environ['HOME'], '.pypirc')
- if os.path.exists(rc):
- self.announce('Using PyPI login from %s' % rc)
- config = ConfigParser.ConfigParser({
- 'username':'',
- 'password':'',
- 'repository':''})
- config.read(rc)
- if not self.repository:
- self.repository = config.get('server-login', 'repository')
- if not self.username:
- self.username = config.get('server-login', 'username')
- if not self.password:
- self.password = config.get('server-login', 'password')
- if not self.repository:
- self.repository = self.DEFAULT_REPOSITORY
-
- def run(self):
- if not self.distribution.dist_files:
- raise DistutilsOptionError("No dist file created in earlier command")
- for command, pyversion, filename in self.distribution.dist_files:
- self.upload_file(command, pyversion, filename)
-
- def upload_file(self, command, pyversion, filename):
- # Sign if requested
- if self.sign:
- gpg_args = ["gpg", "--detach-sign", "-a", filename]
- if self.identity:
- gpg_args[2:2] = ["--local-user", self.identity]
- spawn(gpg_args,
- dry_run=self.dry_run)
-
- # Fill in the data
- content = open(filename,'rb').read()
- basename = os.path.basename(filename)
- comment = ''
- if command=='bdist_egg' and self.distribution.has_ext_modules():
- comment = "built on %s" % platform.platform(terse=1)
- data = {
- ':action':'file_upload',
- 'protcol_version':'1',
- 'name':self.distribution.get_name(),
- 'version':self.distribution.get_version(),
- 'content':(basename,content),
- 'filetype':command,
- 'pyversion':pyversion,
- 'md5_digest':md5(content).hexdigest(),
- }
- if command == 'bdist_rpm':
- dist, version, id = platform.dist()
- if dist:
- comment = 'built for %s %s' % (dist, version)
- elif command == 'bdist_dumb':
- comment = 'built for %s' % platform.platform(terse=1)
- data['comment'] = comment
-
- if self.sign:
- data['gpg_signature'] = (os.path.basename(filename) + ".asc",
- open(filename+".asc").read())
-
- # set up the authentication
- auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip()
-
- # Build up the MIME payload for the POST data
- boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
- sep_boundary = '\n--' + boundary
- end_boundary = sep_boundary + '--'
- body = StringIO.StringIO()
- for key, value in data.items():
- # handle multiple entries for the same name
- if type(value) != type([]):
- value = [value]
- for value in value:
- if type(value) is tuple:
- fn = ';filename="%s"' % value[0]
- value = value[1]
- else:
- fn = ""
- value = str(value)
- body.write(sep_boundary)
- body.write('\nContent-Disposition: form-data; name="%s"'%key)
- body.write(fn)
- body.write("\n\n")
- body.write(value)
- if value and value[-1] == '\r':
- body.write('\n') # write an extra newline (lurve Macs)
- body.write(end_boundary)
- body.write("\n")
- body = body.getvalue()
-
- self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)
-
- # build the Request
- # We can't use urllib2 since we need to send the Basic
- # auth right with the first request
- schema, netloc, url, params, query, fragments = \
- urlparse.urlparse(self.repository)
- assert not params and not query and not fragments
- if schema == 'http':
- http = httplib.HTTPConnection(netloc)
- elif schema == 'https':
- http = httplib.HTTPSConnection(netloc)
- else:
- raise AssertionError, "unsupported schema "+schema
-
- data = ''
- loglevel = log.INFO
- try:
- http.connect()
- http.putrequest("POST", url)
- http.putheader('Content-type',
- 'multipart/form-data; boundary=%s'%boundary)
- http.putheader('Content-length', str(len(body)))
- http.putheader('Authorization', auth)
- http.endheaders()
- http.send(body)
- except socket.error, e:
- self.announce(e.msg, log.ERROR)
- return
-
- r = http.getresponse()
- if r.status == 200:
- self.announce('Server response (%s): %s' % (r.status, r.reason),
- log.INFO)
- else:
- self.announce('Upload failed (%s): %s' % (r.status, r.reason),
- log.ERROR)
- if self.show_response:
- print '-'*75, r.read(), '-'*75
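
Credentials and the target repository come from the [server-login] section of ~/.pypirc, as read in finalize_options() above. A hypothetical file and invocation (all values are placeholders):

    # ~/.pypirc
    [server-login]
    repository = http://www.python.org/pypi
    username = example_user
    password = not_a_real_password

    # after building a distribution in the same run:
    # python setup.py bdist_egg upload --sign --identity="Example User"
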
diff --git a/Lib/setuptools/depends.py b/Lib/setuptools/depends.py
deleted file mode 100644
index 68d81945ef..0000000000
--- a/Lib/setuptools/depends.py
+++ /dev/null
@@ -1,239 +0,0 @@
-from __future__ import generators
-import sys, imp, marshal
-from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
-from distutils.version import StrictVersion, LooseVersion
-
-__all__ = [
- 'Require', 'find_module', 'get_module_constant', 'extract_constant'
-]
-
-class Require:
- """A prerequisite to building or installing a distribution"""
-
- def __init__(self,name,requested_version,module,homepage='',
- attribute=None,format=None
- ):
-
- if format is None and requested_version is not None:
- format = StrictVersion
-
- if format is not None:
- requested_version = format(requested_version)
- if attribute is None:
- attribute = '__version__'
-
- self.__dict__.update(locals())
- del self.self
-
-
- def full_name(self):
- """Return full package/distribution name, w/version"""
- if self.requested_version is not None:
- return '%s-%s' % (self.name,self.requested_version)
- return self.name
-
-
- def version_ok(self,version):
- """Is 'version' sufficiently up-to-date?"""
- return self.attribute is None or self.format is None or \
- str(version)<>"unknown" and version >= self.requested_version
-
-
- def get_version(self, paths=None, default="unknown"):
-
- """Get version number of installed module, 'None', or 'default'
-
- Search 'paths' for module. If not found, return 'None'. If found,
- return the extracted version attribute, or 'default' if no version
- attribute was specified, or the value cannot be determined without
- importing the module. The version is formatted according to the
- requirement's version format (if any), unless it is 'None' or the
- supplied 'default'.
- """
-
- if self.attribute is None:
- try:
- f,p,i = find_module(self.module,paths)
- if f: f.close()
- return default
- except ImportError:
- return None
-
- v = get_module_constant(self.module,self.attribute,default,paths)
-
- if v is not None and v is not default and self.format is not None:
- return self.format(v)
-
- return v
-
-
- def is_present(self,paths=None):
- """Return true if dependency is present on 'paths'"""
- return self.get_version(paths) is not None
-
-
- def is_current(self,paths=None):
- """Return true if dependency is present and up-to-date on 'paths'"""
- version = self.get_version(paths)
- if version is None:
- return False
- return self.version_ok(version)
-
-
-def _iter_code(code):
-
- """Yield '(op,arg)' pair for each operation in code object 'code'"""
-
- from array import array
- from dis import HAVE_ARGUMENT, EXTENDED_ARG
-
- bytes = array('b',code.co_code)
- eof = len(code.co_code)
-
- ptr = 0
- extended_arg = 0
-
- while ptr<eof:
-
- op = bytes[ptr]
-
- if op>=HAVE_ARGUMENT:
-
- arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg
- ptr += 3
-
- if op==EXTENDED_ARG:
- extended_arg = arg * 65536L
- continue
-
- else:
- arg = None
- ptr += 1
-
- yield op,arg
-
-
-
-
-
-
-
-
-
-
-def find_module(module, paths=None):
- """Just like 'imp.find_module()', but with package support"""
-
- parts = module.split('.')
-
- while parts:
- part = parts.pop(0)
- f, path, (suffix,mode,kind) = info = imp.find_module(part, paths)
-
- if kind==PKG_DIRECTORY:
- parts = parts or ['__init__']
- paths = [path]
-
- elif parts:
- raise ImportError("Can't find %r in %s" % (parts,module))
-
- return info
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-def get_module_constant(module, symbol, default=-1, paths=None):
-
- """Find 'module' by searching 'paths', and extract 'symbol'
-
- Return 'None' if 'module' does not exist on 'paths', or it does not define
- 'symbol'. If the module defines 'symbol' as a constant, return the
- constant. Otherwise, return 'default'."""
-
- try:
- f, path, (suffix,mode,kind) = find_module(module,paths)
- except ImportError:
- # Module doesn't exist
- return None
-
- try:
- if kind==PY_COMPILED:
- f.read(8) # skip magic & date
- code = marshal.load(f)
- elif kind==PY_FROZEN:
- code = imp.get_frozen_object(module)
- elif kind==PY_SOURCE:
- code = compile(f.read(), path, 'exec')
- else:
- # Not something we can parse; we'll have to import it. :(
- if module not in sys.modules:
- imp.load_module(module,f,path,(suffix,mode,kind))
- return getattr(sys.modules[module],symbol,None)
-
- finally:
- if f:
- f.close()
-
- return extract_constant(code,symbol,default)
-
-
-
-
-
-
-
-
-def extract_constant(code,symbol,default=-1):
-
- """Extract the constant value of 'symbol' from 'code'
-
- If the name 'symbol' is bound to a constant value by the Python code
- object 'code', return that value. If 'symbol' is bound to an expression,
- return 'default'. Otherwise, return 'None'.
-
- Return value is based on the first assignment to 'symbol'. 'symbol' must
- be a global, or at least a non-"fast" local in the code block. That is,
- only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
- must be present in 'code.co_names'.
- """
-
- if symbol not in code.co_names:
-        # name's not there, can't possibly be an assignment
- return None
-
- name_idx = list(code.co_names).index(symbol)
-
- STORE_NAME = 90
- STORE_GLOBAL = 97
- LOAD_CONST = 100
-
- const = default
-
- for op, arg in _iter_code(code):
-
- if op==LOAD_CONST:
- const = code.co_consts[arg]
- elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL):
- return const
- else:
- const = default
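
Require ties these helpers together: it reports the presence and version of a dependency without importing it, by scanning bytecode for the version constant. A hypothetical check (module and version are illustrative; this runs under the Python 2 of this vintage):

    from setuptools.depends import Require, extract_constant

    req = Require('Example', '1.2', 'example', homepage='http://example.org')

    print req.full_name()     # 'Example-1.2'
    print req.is_present()    # True only if an 'example' module is importable
    print req.is_current()    # ...and its __version__ is >= 1.2

    # extract_constant() inspects a code object without executing it:
    code = compile("__version__ = '1.2.3'", '<example>', 'exec')
    print extract_constant(code, '__version__')    # '1.2.3'
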
diff --git a/Lib/setuptools/dist.py b/Lib/setuptools/dist.py
deleted file mode 100644
index f0417c1d1a..0000000000
--- a/Lib/setuptools/dist.py
+++ /dev/null
@@ -1,798 +0,0 @@
-__all__ = ['Distribution']
-
-from distutils.core import Distribution as _Distribution
-from setuptools.depends import Require
-from setuptools.command.install import install
-from setuptools.command.sdist import sdist
-from setuptools.command.install_lib import install_lib
-from distutils.errors import DistutilsOptionError, DistutilsPlatformError
-from distutils.errors import DistutilsSetupError
-import setuptools, pkg_resources, distutils.core, distutils.dist, distutils.cmd
-import os
-
-def _get_unpatched(cls):
- """Protect against re-patching the distutils if reloaded
-
- Also ensures that no other distutils extension monkeypatched the distutils
- first.
- """
- while cls.__module__.startswith('setuptools'):
- cls, = cls.__bases__
- if not cls.__module__.startswith('distutils'):
- raise AssertionError(
- "distutils has already been patched by %r" % cls
- )
- return cls
-
-_Distribution = _get_unpatched(_Distribution)
-
-sequence = tuple, list
-
-def check_importable(dist, attr, value):
- try:
- ep = pkg_resources.EntryPoint.parse('x='+value)
- assert not ep.extras
- except (TypeError,ValueError,AttributeError,AssertionError):
- raise DistutilsSetupError(
- "%r must be importable 'module:attrs' string (got %r)"
- % (attr,value)
- )
-
-
-def assert_string_list(dist, attr, value):
- """Verify that value is a string list or None"""
- try:
- assert ''.join(value)!=value
- except (TypeError,ValueError,AttributeError,AssertionError):
- raise DistutilsSetupError(
- "%r must be a list of strings (got %r)" % (attr,value)
- )
-
-def check_nsp(dist, attr, value):
- """Verify that namespace packages are valid"""
- assert_string_list(dist,attr,value)
-
- for nsp in value:
- if not dist.has_contents_for(nsp):
- raise DistutilsSetupError(
- "Distribution contains no modules or packages for " +
- "namespace package %r" % nsp
- )
-
-def check_extras(dist, attr, value):
- """Verify that extras_require mapping is valid"""
- try:
- for k,v in value.items():
- list(pkg_resources.parse_requirements(v))
- except (TypeError,ValueError,AttributeError):
- raise DistutilsSetupError(
- "'extras_require' must be a dictionary whose values are "
- "strings or lists of strings containing valid project/version "
- "requirement specifiers."
- )
-
-def assert_bool(dist, attr, value):
- """Verify that value is True, False, 0, or 1"""
- if bool(value) != value:
- raise DistutilsSetupError(
- "%r must be a boolean value (got %r)" % (attr,value)
- )
-
-
-
-def check_requirements(dist, attr, value):
- """Verify that install_requires is a valid requirements list"""
- try:
- list(pkg_resources.parse_requirements(value))
- except (TypeError,ValueError):
- raise DistutilsSetupError(
- "%r must be a string or list of strings "
- "containing valid project/version requirement specifiers" % (attr,)
- )
-
-def check_entry_points(dist, attr, value):
- """Verify that entry_points map is parseable"""
- try:
- pkg_resources.EntryPoint.parse_map(value)
- except ValueError, e:
- raise DistutilsSetupError(e)
-
-
-def check_test_suite(dist, attr, value):
- if not isinstance(value,basestring):
- raise DistutilsSetupError("test_suite must be a string")
-
-
-def check_package_data(dist, attr, value):
- """Verify that value is a dictionary of package names to glob lists"""
- if isinstance(value,dict):
- for k,v in value.items():
- if not isinstance(k,str): break
- try: iter(v)
- except TypeError:
- break
- else:
- return
- raise DistutilsSetupError(
- attr+" must be a dictionary mapping package names to lists of "
- "wildcard patterns"
- )
-
-
-
-
-class Distribution(_Distribution):
- """Distribution with support for features, tests, and package data
-
- This is an enhanced version of 'distutils.dist.Distribution' that
- effectively adds the following new optional keyword arguments to 'setup()':
-
- 'install_requires' -- a string or sequence of strings specifying project
- versions that the distribution requires when installed, in the format
- used by 'pkg_resources.require()'. They will be installed
- automatically when the package is installed. If you wish to use
- packages that are not available in PyPI, or want to give your users an
- alternate download location, you can add a 'find_links' option to the
- '[easy_install]' section of your project's 'setup.cfg' file, and then
- setuptools will scan the listed web pages for links that satisfy the
- requirements.
-
- 'extras_require' -- a dictionary mapping names of optional "extras" to the
- additional requirement(s) that using those extras incurs. For example,
- this::
-
- extras_require = dict(reST = ["docutils>=0.3", "reSTedit"])
-
- indicates that the distribution can optionally provide an extra
- capability called "reST", but it can only be used if docutils and
- reSTedit are installed. If the user installs your package using
- EasyInstall and requests one of your extras, the corresponding
- additional requirements will be installed if needed.
-
- 'features' -- a dictionary mapping option names to 'setuptools.Feature'
- objects. Features are a portion of the distribution that can be
- included or excluded based on user options, inter-feature dependencies,
- and availability on the current system. Excluded features are omitted
- from all setup commands, including source and binary distributions, so
- you can create multiple distributions from the same source tree.
- Feature names should be valid Python identifiers, except that they may
- contain the '-' (minus) sign. Features can be included or excluded
- via the command line options '--with-X' and '--without-X', where 'X' is
- the name of the feature. Whether a feature is included by default, and
- whether you are allowed to control this from the command line, is
- determined by the Feature object. See the 'Feature' class for more
- information.
-
- 'test_suite' -- the name of a test suite to run for the 'test' command.
- If the user runs 'python setup.py test', the package will be installed,
- and the named test suite will be run. The format is the same as
- would be used on a 'unittest.py' command line. That is, it is the
- dotted name of an object to import and call to generate a test suite.
-
- 'package_data' -- a dictionary mapping package names to lists of filenames
- or globs to use to find data files contained in the named packages.
- If the dictionary has filenames or globs listed under '""' (the empty
- string), those names will be searched for in every package, in addition
- to any names for the specific package. Data files found using these
- names/globs will be installed along with the package, in the same
- location as the package. Note that globs are allowed to reference
- the contents of non-package subdirectories, as long as you use '/' as
- a path separator. (Globs are automatically converted to
- platform-specific paths at runtime.)
-
- In addition to these new keywords, this class also has several new methods
- for manipulating the distribution's contents. For example, the 'include()'
- and 'exclude()' methods can be thought of as in-place add and subtract
- commands that add or remove packages, modules, extensions, and so on from
- the distribution. They are used by the feature subsystem to configure the
- distribution for the included and excluded features.
- """
-
- _patched_dist = None
-
- def patch_missing_pkg_info(self, attrs):
- # Fake up a replacement for the data that would normally come from
- # PKG-INFO, but which might not yet be built if this is a fresh
- # checkout.
- #
- if not attrs or 'name' not in attrs or 'version' not in attrs:
- return
- key = pkg_resources.safe_name(str(attrs['name'])).lower()
- dist = pkg_resources.working_set.by_key.get(key)
- if dist is not None and not dist.has_metadata('PKG-INFO'):
- dist._version = pkg_resources.safe_version(str(attrs['version']))
- self._patched_dist = dist
-
- def __init__ (self, attrs=None):
- have_package_data = hasattr(self, "package_data")
- if not have_package_data:
- self.package_data = {}
- self.require_features = []
- self.features = {}
- self.dist_files = []
- self.patch_missing_pkg_info(attrs)
- # Make sure we have any eggs needed to interpret 'attrs'
- if attrs and 'dependency_links' in attrs:
- self.dependency_links = attrs.pop('dependency_links')
- assert_string_list(self,'dependency_links',self.dependency_links)
- if attrs and 'setup_requires' in attrs:
- self.fetch_build_eggs(attrs.pop('setup_requires'))
- for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
- if not hasattr(self,ep.name):
- setattr(self,ep.name,None)
- _Distribution.__init__(self,attrs)
- if isinstance(self.metadata.version, (int,long,float)):
- # Some people apparently take "version number" too literally :)
- self.metadata.version = str(self.metadata.version)
-
- def parse_command_line(self):
- """Process features after parsing command line options"""
- result = _Distribution.parse_command_line(self)
- if self.features:
- self._finalize_features()
- return result
-
- def _feature_attrname(self,name):
- """Convert feature name to corresponding option attribute name"""
- return 'with_'+name.replace('-','_')
-
- def fetch_build_eggs(self, requires):
- """Resolve pre-setup requirements"""
- from pkg_resources import working_set, parse_requirements
- for dist in working_set.resolve(
- parse_requirements(requires), installer=self.fetch_build_egg
- ):
- working_set.add(dist)
-
- def finalize_options(self):
- _Distribution.finalize_options(self)
- if self.features:
- self._set_global_opts_from_features()
-
- for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
- value = getattr(self,ep.name,None)
- if value is not None:
- ep.require(installer=self.fetch_build_egg)
- ep.load()(self, ep.name, value)
-
- def fetch_build_egg(self, req):
- """Fetch an egg needed for building"""
- try:
- cmd = self._egg_fetcher
- except AttributeError:
- from setuptools.command.easy_install import easy_install
- dist = self.__class__({'script_args':['easy_install']})
- dist.parse_config_files()
- opts = dist.get_option_dict('easy_install')
- keep = (
- 'find_links', 'site_dirs', 'index_url', 'optimize',
-                'allow_hosts'
- )
- for key in opts.keys():
- if key not in keep:
- del opts[key] # don't use any other settings
- if self.dependency_links:
- links = self.dependency_links[:]
- if 'find_links' in opts:
- links = opts['find_links'][1].split() + links
- opts['find_links'] = ('setup', links)
- cmd = easy_install(
- dist, args=["x"], install_dir=os.curdir, exclude_scripts=True,
- always_copy=False, build_directory=None, editable=False,
- upgrade=False, multi_version=True, no_report = True
- )
- cmd.ensure_finalized()
- self._egg_fetcher = cmd
- return cmd.easy_install(req)
-
- def _set_global_opts_from_features(self):
- """Add --with-X/--without-X options based on optional features"""
-
- go = []
- no = self.negative_opt.copy()
-
- for name,feature in self.features.items():
- self._set_feature(name,None)
- feature.validate(self)
-
- if feature.optional:
- descr = feature.description
- incdef = ' (default)'
- excdef=''
- if not feature.include_by_default():
- excdef, incdef = incdef, excdef
-
- go.append(('with-'+name, None, 'include '+descr+incdef))
- go.append(('without-'+name, None, 'exclude '+descr+excdef))
- no['without-'+name] = 'with-'+name
-
- self.global_options = self.feature_options = go + self.global_options
- self.negative_opt = self.feature_negopt = no
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- def _finalize_features(self):
- """Add/remove features and resolve dependencies between them"""
-
- # First, flag all the enabled items (and thus their dependencies)
- for name,feature in self.features.items():
- enabled = self.feature_is_included(name)
- if enabled or (enabled is None and feature.include_by_default()):
- feature.include_in(self)
- self._set_feature(name,1)
-
- # Then disable the rest, so that off-by-default features don't
- # get flagged as errors when they're required by an enabled feature
- for name,feature in self.features.items():
- if not self.feature_is_included(name):
- feature.exclude_from(self)
- self._set_feature(name,0)
-
-
- def get_command_class(self, command):
- """Pluggable version of get_command_class()"""
- if command in self.cmdclass:
- return self.cmdclass[command]
-
- for ep in pkg_resources.iter_entry_points('distutils.commands',command):
- ep.require(installer=self.fetch_build_egg)
- self.cmdclass[command] = cmdclass = ep.load()
- return cmdclass
- else:
- return _Distribution.get_command_class(self, command)
-
- def print_commands(self):
- for ep in pkg_resources.iter_entry_points('distutils.commands'):
- if ep.name not in self.cmdclass:
- cmdclass = ep.load(False) # don't require extras, we're not running
- self.cmdclass[ep.name] = cmdclass
- return _Distribution.print_commands(self)
-
-
-
-
-
- def _set_feature(self,name,status):
- """Set feature's inclusion status"""
- setattr(self,self._feature_attrname(name),status)
-
- def feature_is_included(self,name):
- """Return 1 if feature is included, 0 if excluded, 'None' if unknown"""
- return getattr(self,self._feature_attrname(name))
-
- def include_feature(self,name):
- """Request inclusion of feature named 'name'"""
-
- if self.feature_is_included(name)==0:
- descr = self.features[name].description
- raise DistutilsOptionError(
- descr + " is required, but was excluded or is not available"
- )
- self.features[name].include_in(self)
- self._set_feature(name,1)
-
- def include(self,**attrs):
- """Add items to distribution that are named in keyword arguments
-
-        For example, 'dist.include(py_modules=["x"])' would add 'x' to
- the distribution's 'py_modules' attribute, if it was not already
- there.
-
- Currently, this method only supports inclusion for attributes that are
- lists or tuples. If you need to add support for adding to other
- attributes in this or a subclass, you can add an '_include_X' method,
- where 'X' is the name of the attribute. The method will be called with
- the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})'
- will try to call 'dist._include_foo({"bar":"baz"})', which can then
- handle whatever special inclusion logic is needed.
- """
- for k,v in attrs.items():
- include = getattr(self, '_include_'+k, None)
- if include:
- include(v)
- else:
- self._include_misc(k,v)
-
- def exclude_package(self,package):
- """Remove packages, modules, and extensions in named package"""
-
- pfx = package+'.'
- if self.packages:
- self.packages = [
- p for p in self.packages
- if p<>package and not p.startswith(pfx)
- ]
-
- if self.py_modules:
- self.py_modules = [
- p for p in self.py_modules
- if p<>package and not p.startswith(pfx)
- ]
-
- if self.ext_modules:
- self.ext_modules = [
- p for p in self.ext_modules
- if p.name<>package and not p.name.startswith(pfx)
- ]
-
-
- def has_contents_for(self,package):
- """Return true if 'exclude_package(package)' would do something"""
-
- pfx = package+'.'
-
- for p in self.iter_distribution_names():
- if p==package or p.startswith(pfx):
- return True
-
-
-
-
-
-
-
-
-
-
- def _exclude_misc(self,name,value):
- """Handle 'exclude()' for list/tuple attrs without a special handler"""
- if not isinstance(value,sequence):
- raise DistutilsSetupError(
- "%s: setting must be a list or tuple (%r)" % (name, value)
- )
- try:
- old = getattr(self,name)
- except AttributeError:
- raise DistutilsSetupError(
- "%s: No such distribution setting" % name
- )
- if old is not None and not isinstance(old,sequence):
- raise DistutilsSetupError(
- name+": this setting cannot be changed via include/exclude"
- )
- elif old:
- setattr(self,name,[item for item in old if item not in value])
-
- def _include_misc(self,name,value):
- """Handle 'include()' for list/tuple attrs without a special handler"""
-
- if not isinstance(value,sequence):
- raise DistutilsSetupError(
- "%s: setting must be a list (%r)" % (name, value)
- )
- try:
- old = getattr(self,name)
- except AttributeError:
- raise DistutilsSetupError(
- "%s: No such distribution setting" % name
- )
- if old is None:
- setattr(self,name,value)
- elif not isinstance(old,sequence):
- raise DistutilsSetupError(
- name+": this setting cannot be changed via include/exclude"
- )
- else:
- setattr(self,name,old+[item for item in value if item not in old])
-
- def exclude(self,**attrs):
- """Remove items from distribution that are named in keyword arguments
-
- For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
- the distribution's 'py_modules' attribute. Excluding packages uses
- the 'exclude_package()' method, so all of the package's contained
- packages, modules, and extensions are also excluded.
-
- Currently, this method only supports exclusion from attributes that are
- lists or tuples. If you need to add support for excluding from other
- attributes in this or a subclass, you can add an '_exclude_X' method,
- where 'X' is the name of the attribute. The method will be called with
- the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})'
- will try to call 'dist._exclude_foo({"bar":"baz"})', which can then
- handle whatever special exclusion logic is needed.
- """
- for k,v in attrs.items():
- exclude = getattr(self, '_exclude_'+k, None)
- if exclude:
- exclude(v)
- else:
- self._exclude_misc(k,v)
-
- def _exclude_packages(self,packages):
- if not isinstance(packages,sequence):
- raise DistutilsSetupError(
- "packages: setting must be a list or tuple (%r)" % (packages,)
- )
- map(self.exclude_package, packages)
-
-
-
-
-
-
-
-
-
-
-
-
- def _parse_command_opts(self, parser, args):
- # Remove --with-X/--without-X options when processing command args
- self.global_options = self.__class__.global_options
- self.negative_opt = self.__class__.negative_opt
-
- # First, expand any aliases
- command = args[0]
- aliases = self.get_option_dict('aliases')
- while command in aliases:
- src,alias = aliases[command]
- del aliases[command] # ensure each alias can expand only once!
- import shlex
- args[:1] = shlex.split(alias,True)
- command = args[0]
-
- nargs = _Distribution._parse_command_opts(self, parser, args)
-
- # Handle commands that want to consume all remaining arguments
- cmd_class = self.get_command_class(command)
- if getattr(cmd_class,'command_consumes_arguments',None):
- self.get_option_dict(command)['args'] = ("command line", nargs)
- if nargs is not None:
- return []
-
- return nargs
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- def get_cmdline_options(self):
- """Return a '{cmd: {opt:val}}' map of all command-line options
-
- Option names are all long, but do not include the leading '--', and
- contain dashes rather than underscores. If the option doesn't take
- an argument (e.g. '--quiet'), the 'val' is 'None'.
-
- Note that options provided by config files are intentionally excluded.
- """
-
- d = {}
-
- for cmd,opts in self.command_options.items():
-
- for opt,(src,val) in opts.items():
-
- if src != "command line":
- continue
-
- opt = opt.replace('_','-')
-
- if val==0:
- cmdobj = self.get_command_obj(cmd)
- neg_opt = self.negative_opt.copy()
- neg_opt.update(getattr(cmdobj,'negative_opt',{}))
- for neg,pos in neg_opt.items():
- if pos==opt:
- opt=neg
- val=None
- break
- else:
- raise AssertionError("Shouldn't be able to get here")
-
- elif val==1:
- val = None
-
- d.setdefault(cmd,{})[opt] = val
-
- return d
-
-
- def iter_distribution_names(self):
- """Yield all packages, modules, and extension names in distribution"""
-
- for pkg in self.packages or ():
- yield pkg
-
- for module in self.py_modules or ():
- yield module
-
- for ext in self.ext_modules or ():
- if isinstance(ext,tuple):
- name,buildinfo = ext
- yield name
- else:
- yield ext.name
-
-# Install it throughout the distutils
-for module in distutils.dist, distutils.core, distutils.cmd:
- module.Distribution = Distribution
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-class Feature:
- """A subset of the distribution that can be excluded if unneeded/wanted
-
- Features are created using these keyword arguments:
-
- 'description' -- a short, human readable description of the feature, to
- be used in error messages, and option help messages.
-
- 'standard' -- if true, the feature is included by default if it is
- available on the current system. Otherwise, the feature is only
- included if requested via a command line '--with-X' option, or if
- another included feature requires it. The default setting is 'False'.
-
- 'available' -- if true, the feature is available for installation on the
- current system. The default setting is 'True'.
-
- 'optional' -- if true, the feature's inclusion can be controlled from the
- command line, using the '--with-X' or '--without-X' options. If
- false, the feature's inclusion status is determined automatically,
-        based on 'available', 'standard', and whether any other feature
- requires it. The default setting is 'True'.
-
- 'require_features' -- a string or sequence of strings naming features
- that should also be included if this feature is included. Defaults to
- empty list. May also contain 'Require' objects that should be
- added/removed from the distribution.
-
- 'remove' -- a string or list of strings naming packages to be removed
- from the distribution if this feature is *not* included. If the
- feature *is* included, this argument is ignored. This argument exists
- to support removing features that "crosscut" a distribution, such as
- defining a 'tests' feature that removes all the 'tests' subpackages
- provided by other features. The default for this argument is an empty
- list. (Note: the named package(s) or modules must exist in the base
- distribution when the 'setup()' function is initially called.)
-
- other keywords -- any other keyword arguments are saved, and passed to
- the distribution's 'include()' and 'exclude()' methods when the
- feature is included or excluded, respectively. So, for example, you
- could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be
- added or removed from the distribution as appropriate.
-
-    A feature must include at least one 'require_features', 'remove', or other
- keyword argument. Otherwise, it can't affect the distribution in any way.
- Note also that you can subclass 'Feature' to create your own specialized
- feature types that modify the distribution in other ways when included or
- excluded. See the docstrings for the various methods here for more detail.
- Aside from the methods, the only feature attributes that distributions look
- at are 'description' and 'optional'.
- """
- def __init__(self, description, standard=False, available=True,
- optional=True, require_features=(), remove=(), **extras
- ):
-
- self.description = description
- self.standard = standard
- self.available = available
- self.optional = optional
- if isinstance(require_features,(str,Require)):
- require_features = require_features,
-
- self.require_features = [
- r for r in require_features if isinstance(r,str)
- ]
- er = [r for r in require_features if not isinstance(r,str)]
- if er: extras['require_features'] = er
-
- if isinstance(remove,str):
- remove = remove,
- self.remove = remove
- self.extras = extras
-
- if not remove and not require_features and not extras:
- raise DistutilsSetupError(
- "Feature %s: must define 'require_features', 'remove', or at least one"
- " of 'packages', 'py_modules', etc."
- )
-
- def include_by_default(self):
- """Should this feature be included by default?"""
- return self.available and self.standard
-
- def include_in(self,dist):
-
- """Ensure feature and its requirements are included in distribution
-
- You may override this in a subclass to perform additional operations on
- the distribution. Note that this method may be called more than once
- per feature, and so should be idempotent.
-
- """
-
- if not self.available:
- raise DistutilsPlatformError(
-                self.description + " is required, "
-                "but is not available on this platform"
- )
-
- dist.include(**self.extras)
-
- for f in self.require_features:
- dist.include_feature(f)
-
-
-
- def exclude_from(self,dist):
-
- """Ensure feature is excluded from distribution
-
- You may override this in a subclass to perform additional operations on
- the distribution. This method will be called at most once per
- feature, and only after all included features have been asked to
- include themselves.
- """
-
- dist.exclude(**self.extras)
-
- if self.remove:
- for item in self.remove:
- dist.exclude_package(item)
-
-
-
- def validate(self,dist):
-
- """Verify that feature makes sense in context of distribution
-
- This method is called by the distribution just before it parses its
- command line. It checks to ensure that the 'remove' attribute, if any,
- contains only valid package/module names that are present in the base
- distribution when 'setup()' is called. You may override it in a
- subclass to perform any other required validation of the feature
- against a target distribution.
- """
-
- for item in self.remove:
- if not dist.has_contents_for(item):
- raise DistutilsSetupError(
- "%s wants to be able to remove %s, but the distribution"
- " doesn't contain any packages or modules under %s"
- % (self.description, item, item)
- )
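
A hypothetical setup() pulling the pieces above together (extras, a feature, a test suite); every project, package, and requirement name here is a placeholder:

    from setuptools import setup, find_packages, Feature

    setup(
        name='Example',
        version='0.1',
        packages=find_packages(),
        install_requires=['SomeLib>=1.0'],
        extras_require={'reST': ['docutils>=0.3']},
        features={
            'tests': Feature(
                "test suite", standard=True,
                remove=['example.tests'],    # dropped by --without-tests
            ),
        },
        test_suite='example.tests',
    )

    # python setup.py --without-tests bdist_egg
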
diff --git a/Lib/setuptools/extension.py b/Lib/setuptools/extension.py
deleted file mode 100644
index cfcf55bf1a..0000000000
--- a/Lib/setuptools/extension.py
+++ /dev/null
@@ -1,35 +0,0 @@
-from distutils.core import Extension as _Extension
-from dist import _get_unpatched
-_Extension = _get_unpatched(_Extension)
-
-try:
- from Pyrex.Distutils.build_ext import build_ext
-except ImportError:
- have_pyrex = False
-else:
- have_pyrex = True
-
-
-class Extension(_Extension):
- """Extension that uses '.c' files in place of '.pyx' files"""
-
- if not have_pyrex:
- # convert .pyx extensions to .c
- def __init__(self,*args,**kw):
- _Extension.__init__(self,*args,**kw)
- sources = []
- for s in self.sources:
- if s.endswith('.pyx'):
- sources.append(s[:-3]+'c')
- else:
- sources.append(s)
- self.sources = sources
-
-class Library(Extension):
- """Just like a regular Extension, but built as a library instead"""
-
-import sys, distutils.core, distutils.extension
-distutils.core.Extension = Extension
-distutils.extension.Extension = Extension
-if 'distutils.command.build_ext' in sys.modules:
- sys.modules['distutils.command.build_ext'].Extension = Extension
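
The practical effect of the Extension class above, when Pyrex is not installed, is that a '.pyx' source is assumed to have a pre-generated '.c' file next to it. Hypothetical usage:

    from setuptools import setup, Extension

    ext = Extension('example.speedups', sources=['example/speedups.pyx'])
    print ext.sources    # ['example/speedups.c'] without Pyrex installed

    setup(name='Example', version='0.1', ext_modules=[ext])
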
diff --git a/Lib/setuptools/gui.exe b/Lib/setuptools/gui.exe
deleted file mode 100755
index 63ff35f4dd..0000000000
--- a/Lib/setuptools/gui.exe
+++ /dev/null
Binary files differ
diff --git a/Lib/setuptools/package_index.py b/Lib/setuptools/package_index.py
deleted file mode 100755
index 107e222b82..0000000000
--- a/Lib/setuptools/package_index.py
+++ /dev/null
@@ -1,674 +0,0 @@
-"""PyPI and direct package downloading"""
-
-import sys, os.path, re, urlparse, urllib2, shutil, random, socket
-from pkg_resources import *
-from distutils import log
-from distutils.errors import DistutilsError
-from md5 import md5
-from fnmatch import translate
-
-EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$')
-HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
-# this is here to fix emacs' cruddy broken syntax highlighting
-PYPI_MD5 = re.compile(
- '<a href="([^"#]+)">([^<]+)</a>\n\s+\\(<a href="[^?]+\?:action=show_md5'
- '&amp;digest=([0-9a-f]{32})">md5</a>\\)'
-)
-
-URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match
-EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()
-
-__all__ = [
- 'PackageIndex', 'distros_for_url', 'parse_bdist_wininst',
- 'interpret_distro_name',
-]
-
-
-def parse_bdist_wininst(name):
- """Return (base,pyversion) or (None,None) for possible .exe name"""
-
- lower = name.lower()
- base, py_ver = None, None
-
- if lower.endswith('.exe'):
- if lower.endswith('.win32.exe'):
- base = name[:-10]
- elif lower.startswith('.win32-py',-16):
- py_ver = name[-7:-4]
- base = name[:-16]
-
- return base,py_ver
-
-def egg_info_for_url(url):
- scheme, server, path, parameters, query, fragment = urlparse.urlparse(url)
- base = urllib2.unquote(path.split('/')[-1])
- if '#' in base: base, fragment = base.split('#',1)
- return base,fragment
-
-def distros_for_url(url, metadata=None):
- """Yield egg or source distribution objects that might be found at a URL"""
- base, fragment = egg_info_for_url(url)
- dists = distros_for_location(url, base, metadata)
- if fragment and not dists:
- match = EGG_FRAGMENT.match(fragment)
- if match:
- return interpret_distro_name(
- url, match.group(1), metadata, precedence = CHECKOUT_DIST
- )
- return dists
-
-def distros_for_location(location, basename, metadata=None):
- """Yield egg or source distribution objects based on basename"""
- if basename.endswith('.egg.zip'):
- basename = basename[:-4] # strip the .zip
- if basename.endswith('.egg'): # only one, unambiguous interpretation
- return [Distribution.from_location(location, basename, metadata)]
-
- if basename.endswith('.exe'):
- win_base, py_ver = parse_bdist_wininst(basename)
- if win_base is not None:
- return interpret_distro_name(
- location, win_base, metadata, py_ver, BINARY_DIST, "win32"
- )
-
- # Try source distro extensions (.zip, .tgz, etc.)
- #
- for ext in EXTENSIONS:
- if basename.endswith(ext):
- basename = basename[:-len(ext)]
- return interpret_distro_name(location, basename, metadata)
- return [] # no extension matched
-
-
-def distros_for_filename(filename, metadata=None):
- """Yield possible egg or source distribution objects based on a filename"""
- return distros_for_location(
- normalize_path(filename), os.path.basename(filename), metadata
- )
-
-
-def interpret_distro_name(location, basename, metadata,
- py_version=None, precedence=SOURCE_DIST, platform=None
-):
- """Generate alternative interpretations of a source distro name
-
- Note: if `location` is a filesystem filename, you should call
- ``pkg_resources.normalize_path()`` on it before passing it to this
- routine!
- """
-
- # Generate alternative interpretations of a source distro name
- # Because some packages are ambiguous as to name/versions split
- # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc.
-    # So, we generate each possible interpretation (e.g. "adns, python-1.1.0"
- # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice,
- # the spurious interpretations should be ignored, because in the event
- # there's also an "adns" package, the spurious "python-1.1.0" version will
- # compare lower than any numeric version number, and is therefore unlikely
- # to match a request for it. It's still a potential problem, though, and
- # in the long run PyPI and the distutils should go for "safe" names and
- # versions in distribution archive names (sdist and bdist).
-
- parts = basename.split('-')
- for p in range(1,len(parts)+1):
- yield Distribution(
- location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]),
- py_version=py_version, precedence = precedence,
- platform = platform
- )
-
-
-
-
-
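
Concretely, the ambiguity discussed in interpret_distro_name() means a basename such as 'adns-python-1.1.0' yields a candidate distribution for every split point. A quick sketch of the splits it generates:

    parts = 'adns-python-1.1.0'.split('-')
    for p in range(1, len(parts) + 1):
        print '-'.join(parts[:p]), '|', '-'.join(parts[p:])
    # adns               | python-1.1.0
    # adns-python        | 1.1.0
    # adns-python-1.1.0  |
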
-class PackageIndex(Environment):
- """A distribution index that scans web pages for download URLs"""
-
- def __init__(self,index_url="http://www.python.org/pypi",hosts=('*',),*args,**kw):
- Environment.__init__(self,*args,**kw)
- self.index_url = index_url + "/"[:not index_url.endswith('/')]
- self.scanned_urls = {}
- self.fetched_urls = {}
- self.package_pages = {}
- self.allows = re.compile('|'.join(map(translate,hosts))).match
- self.to_scan = []
-
- def process_url(self, url, retrieve=False):
- """Evaluate a URL as a possible download, and maybe retrieve it"""
- url = fix_sf_url(url)
- if url in self.scanned_urls and not retrieve:
- return
- self.scanned_urls[url] = True
- if not URL_SCHEME(url):
- self.process_filename(url)
- return
- else:
- dists = list(distros_for_url(url))
- if dists:
- if not self.url_ok(url):
- return
- self.debug("Found link: %s", url)
-
- if dists or not retrieve or url in self.fetched_urls:
- map(self.add, dists)
- return # don't need the actual page
-
- if not self.url_ok(url):
- self.fetched_urls[url] = True
- return
-
- self.info("Reading %s", url)
- f = self.open_url(url)
- self.fetched_urls[url] = self.fetched_urls[f.url] = True
-
-
- if 'html' not in f.headers['content-type'].lower():
- f.close() # not html, we can't process it
- return
-
- base = f.url # handle redirects
- page = f.read()
- f.close()
- if url.startswith(self.index_url):
- page = self.process_index(url, page)
-
- for match in HREF.finditer(page):
- link = urlparse.urljoin(base, match.group(1))
- self.process_url(link)
-
- def process_filename(self, fn, nested=False):
- # process filenames or directories
- if not os.path.exists(fn):
- self.warn("Not found: %s", url)
- return
-
- if os.path.isdir(fn) and not nested:
- path = os.path.realpath(fn)
- for item in os.listdir(path):
- self.process_filename(os.path.join(path,item), True)
-
- dists = distros_for_filename(fn)
- if dists:
- self.debug("Found: %s", fn)
- map(self.add, dists)
-
- def url_ok(self, url, fatal=False):
- if self.allows(urlparse.urlparse(url)[1]):
- return True
- msg = "\nLink to % s ***BLOCKED*** by --allow-hosts\n"
- if fatal:
- raise DistutilsError(msg % url)
- else:
- self.warn(msg, url)
-
-
-
- def process_index(self,url,page):
- """Process the contents of a PyPI page"""
- def scan(link):
- # Process a URL to see if it's for a package page
- if link.startswith(self.index_url):
- parts = map(
- urllib2.unquote, link[len(self.index_url):].split('/')
- )
- if len(parts)==2:
- # it's a package page, sanitize and index it
- pkg = safe_name(parts[0])
- ver = safe_version(parts[1])
- self.package_pages.setdefault(pkg.lower(),{})[link] = True
- return to_filename(pkg), to_filename(ver)
- return None, None
-
- if url==self.index_url or 'Index of Packages</title>' in page:
- # process an index page into the package-page index
- for match in HREF.finditer(page):
- scan( urlparse.urljoin(url, match.group(1)) )
- else:
- pkg,ver = scan(url) # ensure this page is in the page index
- # process individual package page
- for tag in ("<th>Home Page", "<th>Download URL"):
- pos = page.find(tag)
- if pos!=-1:
- match = HREF.search(page,pos)
- if match:
- # Process the found URL
- new_url = urlparse.urljoin(url, match.group(1))
- base, frag = egg_info_for_url(new_url)
- if base.endswith('.py') and not frag:
- if pkg and ver:
- new_url+='#egg=%s-%s' % (pkg,ver)
- else:
- self.need_version_info(url)
- self.scan_url(new_url)
- return PYPI_MD5.sub(
- lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1,3,2), page
- )
-
- def need_version_info(self, url):
- self.scan_all(
- "Page at %s links to .py file(s) without version info; an index "
- "scan is required.", url
- )
-
- def scan_all(self, msg=None, *args):
- if self.index_url not in self.fetched_urls:
- if msg: self.warn(msg,*args)
- self.warn(
- "Scanning index of all packages (this may take a while)"
- )
- self.scan_url(self.index_url)
-
- def find_packages(self, requirement):
- self.scan_url(self.index_url + requirement.unsafe_name+'/')
-
- if not self.package_pages.get(requirement.key):
- # Fall back to safe version of the name
- self.scan_url(self.index_url + requirement.project_name+'/')
-
- if not self.package_pages.get(requirement.key):
- # We couldn't find the target package, so search the index page too
- self.warn(
- "Couldn't find index page for %r (maybe misspelled?)",
- requirement.unsafe_name
- )
- self.scan_all()
-
- for url in self.package_pages.get(requirement.key,()):
- # scan each page that might be related to the desired package
- self.scan_url(url)
-
- def obtain(self, requirement, installer=None):
- self.prescan(); self.find_packages(requirement)
- for dist in self[requirement.key]:
- if dist in requirement:
- return dist
- self.debug("%s does not match %s", requirement, dist)
- return super(PackageIndex, self).obtain(requirement,installer)
-
- def check_md5(self, cs, info, filename, tfp):
- if re.match('md5=[0-9a-f]{32}$', info):
- self.debug("Validating md5 checksum for %s", filename)
-            if cs.hexdigest() != info[4:]:
- tfp.close()
- os.unlink(filename)
- raise DistutilsError(
- "MD5 validation failed for "+os.path.basename(filename)+
- "; possible download problem?"
- )
-
- def add_find_links(self, urls):
- """Add `urls` to the list that will be prescanned for searches"""
- for url in urls:
- if (
- self.to_scan is None # if we have already "gone online"
- or not URL_SCHEME(url) # or it's a local file/directory
- or url.startswith('file:')
- or list(distros_for_url(url)) # or a direct package link
- ):
- # then go ahead and process it now
- self.scan_url(url)
- else:
- # otherwise, defer retrieval till later
- self.to_scan.append(url)
-
- def prescan(self):
- """Scan urls scheduled for prescanning (e.g. --find-links)"""
- if self.to_scan:
- map(self.scan_url, self.to_scan)
- self.to_scan = None # from now on, go ahead and process immediately
-
- def download(self, spec, tmpdir):
- """Locate and/or download `spec` to `tmpdir`, returning a local path
-
- `spec` may be a ``Requirement`` object, or a string containing a URL,
- an existing local filename, or a project/version requirement spec
- (i.e. the string form of a ``Requirement`` object). If it is the URL
- of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one
- that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is
- automatically created alongside the downloaded file.
-
- If `spec` is a ``Requirement`` object or a string containing a
- project/version requirement spec, this method returns the location of
- a matching distribution (possibly after downloading it to `tmpdir`).
- If `spec` is a locally existing file or directory name, it is simply
- returned unchanged. If `spec` is a URL, it is downloaded to a subpath
- of `tmpdir`, and the local filename is returned. Various errors may be
- raised if a problem occurs during downloading.
- """
- if not isinstance(spec,Requirement):
- scheme = URL_SCHEME(spec)
- if scheme:
- # It's a url, download it to tmpdir
- found = self._download_url(scheme.group(1), spec, tmpdir)
- base, fragment = egg_info_for_url(spec)
- if base.endswith('.py'):
- found = self.gen_setup(found,fragment,tmpdir)
- return found
- elif os.path.exists(spec):
- # Existing file or directory, just return it
- return spec
- else:
- try:
- spec = Requirement.parse(spec)
- except ValueError:
- raise DistutilsError(
- "Not a URL, existing file, or requirement spec: %r" %
- (spec,)
- )
- return getattr(self.fetch_distribution(spec, tmpdir),'location',None)
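
A sketch of the three ``spec`` forms (the project name, URL, and local path
below are placeholders; the first two imply network access):

    import tempfile
    from setuptools.package_index import PackageIndex

    tmpdir = tempfile.mkdtemp()
    pi = PackageIndex()
    pi.download('SomeProject>=1.0', tmpdir)   # requirement spec: search, fetch, return path
    pi.download('http://example.com/SomeProject-1.0.tar.gz', tmpdir)   # URL: fetched into tmpdir
    pi.download('/already/on/disk/SomeProject-1.0.tar.gz', tmpdir)     # local path: returned as-is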
-
-
- def fetch_distribution(self,
- requirement, tmpdir, force_scan=False, source=False, develop_ok=False
- ):
- """Obtain a distribution suitable for fulfilling `requirement`
-
- `requirement` must be a ``pkg_resources.Requirement`` instance.
- If necessary, or if the `force_scan` flag is set, the requirement is
- searched for in the (online) package index as well as the locally
- installed packages. If a distribution matching `requirement` is found,
- the returned distribution's ``location`` is the value you would have
- gotten from calling the ``download()`` method with the matching
- distribution's URL or filename. If no matching distribution is found,
- ``None`` is returned.
-
- If the `source` flag is set, only source distributions and source
- checkout links will be considered. Unless the `develop_ok` flag is
- set, development and system eggs (i.e., those using the ``.egg-info``
- format) will be ignored.
- """
-
- # process a Requirement
- self.info("Searching for %s", requirement)
- skipped = {}
-
- def find(req):
- # Find a matching distribution; may be called more than once
-
- for dist in self[req.key]:
-
- if dist.precedence==DEVELOP_DIST and not develop_ok:
- if dist not in skipped:
- self.warn("Skipping development or system egg: %s",dist)
- skipped[dist] = 1
- continue
-
- if dist in req and (dist.precedence<=SOURCE_DIST or not source):
- self.info("Best match: %s", dist)
- return dist.clone(
- location=self.download(dist.location, tmpdir)
- )
-
- if force_scan:
- self.prescan()
- self.find_packages(requirement)
-
- dist = find(requirement)
- if dist is None and self.to_scan is not None:
- self.prescan()
- dist = find(requirement)
-
- if dist is None and not force_scan:
- self.find_packages(requirement)
- dist = find(requirement)
-
- if dist is None:
- self.warn(
- "No local packages or download links found for %s%s",
- (source and "a source distribution of " or ""),
- requirement,
- )
- return dist
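
For example (the project name below is hypothetical), the flags restrict what
the search may return:

    import tempfile
    from pkg_resources import Requirement
    from setuptools.package_index import PackageIndex

    pi, tmpdir = PackageIndex(), tempfile.mkdtemp()
    req = Requirement.parse('SomeProject>=1.0')
    # source=True accepts only sdists/checkouts; develop_ok stays False, so
    # .egg-info (development/system) eggs are skipped with a warning.
    dist = pi.fetch_distribution(req, tmpdir, force_scan=False, source=True)
    if dist is not None:
        print(dist.location)        # same value download() would have returned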
-
- def fetch(self, requirement, tmpdir, force_scan=False, source=False):
- """Obtain a file suitable for fulfilling `requirement`
-
- DEPRECATED; use the ``fetch_distribution()`` method now instead. For
- backward compatibility, this routine is identical but returns the
- ``location`` of the downloaded distribution instead of a distribution
- object.
- """
- dist = self.fetch_distribution(requirement,tmpdir,force_scan,source)
- if dist is not None:
- return dist.location
- return None
-
-
-
-
-
-
-
-
- def gen_setup(self, filename, fragment, tmpdir):
-        match = EGG_FRAGMENT.match(fragment)
- dists = match and [d for d in
- interpret_distro_name(filename, match.group(1), None) if d.version
- ] or []
-
- if len(dists)==1: # unambiguous ``#egg`` fragment
- basename = os.path.basename(filename)
-
- # Make sure the file has been downloaded to the temp dir.
- if os.path.dirname(filename) != tmpdir:
- dst = os.path.join(tmpdir, basename)
- from setuptools.command.easy_install import samefile
- if not samefile(filename, dst):
- shutil.copy2(filename, dst)
- filename=dst
-
- file = open(os.path.join(tmpdir, 'setup.py'), 'w')
- file.write(
- "from setuptools import setup\n"
- "setup(name=%r, version=%r, py_modules=[%r])\n"
- % (
- dists[0].project_name, dists[0].version,
- os.path.splitext(basename)[0]
- )
- )
- file.close()
- return filename
-
- elif match:
- raise DistutilsError(
- "Can't unambiguously interpret project/version identifier %r; "
- "any dashes in the name or version should be escaped using "
- "underscores. %r" % (fragment,dists)
- )
- else:
- raise DistutilsError(
- "Can't process plain .py files without an '#egg=name-version'"
- " suffix to enable automatic setup script generation."
- )
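
Concretely, for a module fetched from a URL such as
http://example.com/hello.py#egg=hello-1.0 (names illustrative), the generated
setup.py placed next to the download would read:

    from setuptools import setup
    setup(name='hello', version='1.0', py_modules=['hello'])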
-
- dl_blocksize = 8192
- def _download_to(self, url, filename):
- self.url_ok(url,True) # raises error if not allowed
- self.info("Downloading %s", url)
- # Download the file
- fp, tfp, info = None, None, None
- try:
- if '#' in url:
- url, info = url.split('#', 1)
- fp = self.open_url(url)
- if isinstance(fp, urllib2.HTTPError):
- raise DistutilsError(
- "Can't download %s: %s %s" % (url, fp.code,fp.msg)
- )
- cs = md5()
- headers = fp.info()
- blocknum = 0
- bs = self.dl_blocksize
- size = -1
- if "content-length" in headers:
- size = int(headers["Content-Length"])
- self.reporthook(url, filename, blocknum, bs, size)
- tfp = open(filename,'wb')
- while True:
- block = fp.read(bs)
- if block:
- cs.update(block)
- tfp.write(block)
- blocknum += 1
- self.reporthook(url, filename, blocknum, bs, size)
- else:
- break
- if info: self.check_md5(cs, info, filename, tfp)
- return headers
- finally:
- if fp: fp.close()
- if tfp: tfp.close()
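
The checksum handling above is plain incremental hashing of each block as it is
written; the same pattern in isolation (file names here are placeholders, and
hashlib.md5 stands in for the md5() used by this module):

    from hashlib import md5

    cs = md5()
    src = open('downloaded.bin', 'rb')      # stands in for the opened URL
    dst = open('local-copy.bin', 'wb')
    while True:
        block = src.read(8192)
        if not block:
            break
        cs.update(block)                    # hash each block as it is written
        dst.write(block)
    src.close(); dst.close()
    print(cs.hexdigest())                   # compared against the '#md5=...' fragment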
-
- def reporthook(self, url, filename, blocknum, blksize, size):
- pass # no-op
-
- def retry_sf_download(self, url, filename):
- try:
- return self._download_to(url, filename)
- except:
- scheme, server, path, param, query, frag = urlparse.urlparse(url)
- if server!='dl.sourceforge.net':
- raise
-
- mirror = get_sf_ip()
-
- while _sf_mirrors:
- self.warn("Download failed: %s", sys.exc_info()[1])
- url = urlparse.urlunparse((scheme, mirror, path, param, '', frag))
- try:
- return self._download_to(url, filename)
- except:
- _sf_mirrors.remove(mirror) # don't retry the same mirror
- mirror = get_sf_ip()
-
- raise # fail if no mirror works
-
- def open_url(self, url):
- try:
- return urllib2.urlopen(url)
- except urllib2.HTTPError, v:
- return v
- except urllib2.URLError, v:
- raise DistutilsError("Download error: %s" % v.reason)
-
-
- def _download_url(self, scheme, url, tmpdir):
-
- # Determine download filename
- #
- name = filter(None,urlparse.urlparse(url)[2].split('/'))
- if name:
- name = name[-1]
- while '..' in name:
- name = name.replace('..','.').replace('\\','_')
- else:
- name = "__downloaded__" # default if URL has no path contents
-
- if name.endswith('.egg.zip'):
- name = name[:-4] # strip the extra .zip before download
-
- filename = os.path.join(tmpdir,name)
-
- # Download the file
- #
- if scheme=='svn' or scheme.startswith('svn+'):
- return self._download_svn(url, filename)
- else:
- headers = self.retry_sf_download(url, filename)
- if 'html' in headers['content-type'].lower():
- return self._download_html(url, headers, filename, tmpdir)
- else:
- return filename
-
- def scan_url(self, url):
- self.process_url(url, True)
-
-
- def _download_html(self, url, headers, filename, tmpdir):
- file = open(filename)
- for line in file:
- if line.strip():
- # Check for a subversion index page
- if re.search(r'<title>Revision \d+:', line):
- # it's a subversion index page:
- file.close()
- os.unlink(filename)
- return self._download_svn(url, filename)
- break # not an index page
- file.close()
- os.unlink(filename)
- raise DistutilsError("Unexpected HTML page found at "+url)
-
- def _download_svn(self, url, filename):
- url = url.split('#',1)[0] # remove any fragment for svn's sake
- self.info("Doing subversion checkout from %s to %s", url, filename)
- os.system("svn checkout -q %s %s" % (url, filename))
- return filename
-
- def debug(self, msg, *args):
- log.debug(msg, *args)
-
- def info(self, msg, *args):
- log.info(msg, *args)
-
- def warn(self, msg, *args):
- log.warn(msg, *args)
-
-
-def fix_sf_url(url):
- scheme, server, path, param, query, frag = urlparse.urlparse(url)
- if server!='prdownloads.sourceforge.net':
- return url
- return urlparse.urlunparse(
- (scheme, 'dl.sourceforge.net', 'sourceforge'+path, param, '', frag)
- )
-
-_sf_mirrors = []
-
-def get_sf_ip():
- if not _sf_mirrors:
- try:
- _sf_mirrors[:] = socket.gethostbyname_ex('dl.sourceforge.net')[-1]
- except socket.error:
-            # DNS lookup failed or was blocked; fall back to the canonical host
- _sf_mirrors[:] = ['dl.sourceforge.net']
- return random.choice(_sf_mirrors)
diff --git a/Lib/setuptools/sandbox.py b/Lib/setuptools/sandbox.py
deleted file mode 100755
index 606944bd29..0000000000
--- a/Lib/setuptools/sandbox.py
+++ /dev/null
@@ -1,203 +0,0 @@
-import os, sys, __builtin__, tempfile
-_os = sys.modules[os.name]
-_open = open
-from distutils.errors import DistutilsError
-__all__ = [
- "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
-]
-
-def run_setup(setup_script, args):
- """Run a distutils setup script, sandboxed in its directory"""
-
- old_dir = os.getcwd()
- save_argv = sys.argv[:]
- save_path = sys.path[:]
- setup_dir = os.path.abspath(os.path.dirname(setup_script))
- temp_dir = os.path.join(setup_dir,'temp')
- if not os.path.isdir(temp_dir): os.makedirs(temp_dir)
- save_tmp = tempfile.tempdir
-
- try:
- tempfile.tempdir = temp_dir
- os.chdir(setup_dir)
- try:
- sys.argv[:] = [setup_script]+list(args)
- sys.path.insert(0, setup_dir)
- DirectorySandbox(setup_dir).run(
- lambda: execfile(
- "setup.py",
- {'__file__':setup_script, '__name__':'__main__'}
- )
- )
- except SystemExit, v:
- if v.args and v.args[0]:
- raise
- # Normal exit, just return
- finally:
- os.chdir(old_dir)
- sys.path[:] = save_path
- sys.argv[:] = save_argv
- tempfile.tempdir = save_tmp
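
A minimal usage sketch (the path and command-line arguments are placeholders):

    from setuptools.sandbox import run_setup, SandboxViolation

    try:
        run_setup('/tmp/SomeProject-1.0/setup.py', ['--quiet', 'bdist_egg'])
    except SandboxViolation, v:
        print("setup.py stepped outside its build area: %r" % (v.args,))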
-
-class AbstractSandbox:
- """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts"""
-
- _active = False
-
- def __init__(self):
- self._attrs = [
- name for name in dir(_os)
- if not name.startswith('_') and hasattr(self,name)
- ]
-
- def _copy(self, source):
- for name in self._attrs:
- setattr(os, name, getattr(source,name))
-
- def run(self, func):
- """Run 'func' under os sandboxing"""
- try:
- self._copy(self)
- __builtin__.open = __builtin__.file = self._open
- self._active = True
- return func()
- finally:
- self._active = False
- __builtin__.open = __builtin__.file = _open
- self._copy(_os)
-
-
- def _mk_dual_path_wrapper(name):
- original = getattr(_os,name)
- def wrap(self,src,dst,*args,**kw):
- if self._active:
- src,dst = self._remap_pair(name,src,dst,*args,**kw)
- return original(src,dst,*args,**kw)
- return wrap
-
-
- for name in ["rename", "link", "symlink"]:
- if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name)
-
-
- def _mk_single_path_wrapper(name, original=None):
- original = original or getattr(_os,name)
- def wrap(self,path,*args,**kw):
- if self._active:
- path = self._remap_input(name,path,*args,**kw)
- return original(path,*args,**kw)
- return wrap
-
- _open = _mk_single_path_wrapper('file', _open)
- for name in [
- "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir",
- "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat",
- "startfile", "mkfifo", "mknod", "pathconf", "access"
- ]:
- if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name)
-
-
- def _mk_single_with_return(name):
- original = getattr(_os,name)
- def wrap(self,path,*args,**kw):
- if self._active:
- path = self._remap_input(name,path,*args,**kw)
- return self._remap_output(name, original(path,*args,**kw))
- return original(path,*args,**kw)
- return wrap
-
- for name in ['readlink', 'tempnam']:
- if hasattr(_os,name): locals()[name] = _mk_single_with_return(name)
-
- def _mk_query(name):
- original = getattr(_os,name)
- def wrap(self,*args,**kw):
- retval = original(*args,**kw)
- if self._active:
- return self._remap_output(name, retval)
- return retval
- return wrap
-
- for name in ['getcwd', 'tmpnam']:
- if hasattr(_os,name): locals()[name] = _mk_query(name)
-
- def _validate_path(self,path):
- """Called to remap or validate any path, whether input or output"""
- return path
-
- def _remap_input(self,operation,path,*args,**kw):
- """Called for path inputs"""
- return self._validate_path(path)
-
- def _remap_output(self,operation,path):
- """Called for path outputs"""
- return self._validate_path(path)
-
- def _remap_pair(self,operation,src,dst,*args,**kw):
- """Called for path pairs like rename, link, and symlink operations"""
- return (
- self._remap_input(operation+'-from',src,*args,**kw),
- self._remap_input(operation+'-to',dst,*args,**kw)
- )
-
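
The wrappers above are generated at class-definition time and poked into the
class namespace via locals(); the same idiom reduced to a toy class
(MiniSandbox and its names are hypothetical):

    import os
    _os = os                                  # the real module, as above

    class MiniSandbox:
        _active = False

        def _remap_input(self, operation, path):
            print("intercepted %s(%r)" % (operation, path))
            return path                       # a real sandbox validates/redirects here

        def _mk_wrapper(name):                # runs while the class body executes
            original = getattr(_os, name)
            def wrap(self, path, *args, **kw):
                if self._active:
                    path = self._remap_input(name, path)
                return original(path, *args, **kw)
            return wrap

        for name in ('stat', 'listdir'):
            locals()[name] = _mk_wrapper(name)

    box = MiniSandbox()
    box._active = True
    box.listdir('.')                          # routed through _remap_input('listdir', '.')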
-
-class DirectorySandbox(AbstractSandbox):
- """Restrict operations to a single subdirectory - pseudo-chroot"""
-
- write_ops = dict.fromkeys([
- "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir",
- "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam",
- ])
-
- def __init__(self,sandbox):
- self._sandbox = os.path.normcase(os.path.realpath(sandbox))
- self._prefix = os.path.join(self._sandbox,'')
- AbstractSandbox.__init__(self)
-
- def _violation(self, operation, *args, **kw):
- raise SandboxViolation(operation, args, kw)
-
- def _open(self, path, mode='r', *args, **kw):
- if mode not in ('r', 'rt', 'rb', 'rU') and not self._ok(path):
- self._violation("open", path, mode, *args, **kw)
- return _open(path,mode,*args,**kw)
-
- def tmpnam(self):
- self._violation("tmpnam")
-
- def _ok(self,path):
- active = self._active
- try:
- self._active = False
- realpath = os.path.normcase(os.path.realpath(path))
- if realpath==self._sandbox or realpath.startswith(self._prefix):
- return True
- finally:
- self._active = active
-
- def _remap_input(self,operation,path,*args,**kw):
- """Called for path inputs"""
- if operation in self.write_ops and not self._ok(path):
- self._violation(operation, os.path.realpath(path), *args, **kw)
- return path
-
- def _remap_pair(self,operation,src,dst,*args,**kw):
- """Called for path pairs like rename, link, and symlink operations"""
- if not self._ok(src) or not self._ok(dst):
- self._violation(operation, src, dst, *args, **kw)
- return (src,dst)
-
-
-class SandboxViolation(DistutilsError):
- """A setup script attempted to modify the filesystem outside the sandbox"""
-
- def __str__(self):
- return """SandboxViolation: %s%r %s
-
-The package setup script has attempted to modify files on your system
-that are not within the EasyInstall build area, and has been aborted.
-
-This package cannot be safely installed by EasyInstall, and may not
-support alternate installation locations even if you run its setup
-script by hand. Please inform the package's author and the EasyInstall
-maintainers to find out if a fix or workaround is available.""" % self.args
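
A small sketch of the pseudo-chroot behaviour (paths are placeholders): writes
inside the sandboxed directory go through, writes anywhere else raise
SandboxViolation.

    import os, tempfile
    from setuptools.sandbox import DirectorySandbox, SandboxViolation

    box_dir = tempfile.mkdtemp()

    def write_inside():
        open(os.path.join(box_dir, 'ok.txt'), 'w').close()

    def write_outside():
        open(os.path.join(tempfile.gettempdir(), 'elsewhere.txt'), 'w').close()

    DirectorySandbox(box_dir).run(write_inside)       # permitted
    try:
        DirectorySandbox(box_dir).run(write_outside)  # blocked before the file is opened
    except SandboxViolation, v:
        print("blocked: %s%r" % (v.args[0], v.args[1]))
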
diff --git a/Lib/setuptools/site-patch.py b/Lib/setuptools/site-patch.py
deleted file mode 100755
index b1b27b9fbd..0000000000
--- a/Lib/setuptools/site-patch.py
+++ /dev/null
@@ -1,74 +0,0 @@
-def __boot():
- import sys, imp, os, os.path
- PYTHONPATH = os.environ.get('PYTHONPATH')
- if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH):
- PYTHONPATH = []
- else:
- PYTHONPATH = PYTHONPATH.split(os.pathsep)
-
- pic = getattr(sys,'path_importer_cache',{})
- stdpath = sys.path[len(PYTHONPATH):]
- mydir = os.path.dirname(__file__)
- #print "searching",stdpath,sys.path
-
- for item in stdpath:
- if item==mydir or not item:
- continue # skip if current dir. on Windows, or my own directory
- importer = pic.get(item)
- if importer is not None:
- loader = importer.find_module('site')
- if loader is not None:
- # This should actually reload the current module
- loader.load_module('site')
- break
- else:
- try:
- stream, path, descr = imp.find_module('site',[item])
- except ImportError:
- continue
- if stream is None:
- continue
- try:
- # This should actually reload the current module
- imp.load_module('site',stream,path,descr)
- finally:
- stream.close()
- break
- else:
- raise ImportError("Couldn't find the real 'site' module")
-
- #print "loaded", __file__
-
- known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp
-
- oldpos = getattr(sys,'__egginsert',0) # save old insertion position
- sys.__egginsert = 0 # and reset the current one
-
- for item in PYTHONPATH:
- addsitedir(item)
-
- sys.__egginsert += oldpos # restore effective old position
-
- d,nd = makepath(stdpath[0])
- insert_at = None
- new_path = []
-
- for item in sys.path:
- p,np = makepath(item)
-
- if np==nd and insert_at is None:
- # We've hit the first 'system' path entry, so added entries go here
- insert_at = len(new_path)
-
- if np in known_paths or insert_at is None:
- new_path.append(item)
- else:
- # new path after the insert point, back-insert it
- new_path.insert(insert_at, item)
- insert_at += 1
-
- sys.path[:] = new_path
-
-if __name__=='site':
- __boot()
- del __boot
diff --git a/Lib/setuptools/tests/__init__.py b/Lib/setuptools/tests/__init__.py
deleted file mode 100644
index 8a767dcf95..0000000000
--- a/Lib/setuptools/tests/__init__.py
+++ /dev/null
@@ -1,364 +0,0 @@
-"""Tests for the 'setuptools' package"""
-
-from unittest import TestSuite, TestCase, makeSuite, defaultTestLoader
-import distutils.core, distutils.cmd
-from distutils.errors import DistutilsOptionError, DistutilsPlatformError
-from distutils.errors import DistutilsSetupError
-import setuptools, setuptools.dist
-from setuptools import Feature
-from distutils.core import Extension
-from setuptools.depends import extract_constant, get_module_constant
-from setuptools.depends import find_module, Require
-from distutils.version import StrictVersion, LooseVersion
-from distutils.util import convert_path
-import sys, os.path
-
-def additional_tests():
- import doctest
- return doctest.DocFileSuite(
- 'api_tests.txt', optionflags=doctest.ELLIPSIS, package=__name__,
- )
-
-
-def makeSetup(**args):
- """Return distribution from 'setup(**args)', without executing commands"""
-
- distutils.core._setup_stop_after = "commandline"
-
- # Don't let system command line leak into tests!
- args.setdefault('script_args',['install'])
-
- try:
- return setuptools.setup(**args)
- finally:
-        distutils.core._setup_stop_after = None
-
-
-
-
-
-
-
-class DependsTests(TestCase):
-
- def testExtractConst(self):
-
- from setuptools.depends import extract_constant
-
- def f1():
- global x,y,z
- x = "test"
- y = z
-
- # unrecognized name
- self.assertEqual(extract_constant(f1.func_code,'q', -1), None)
-
- # constant assigned
- self.assertEqual(extract_constant(f1.func_code,'x', -1), "test")
-
- # expression assigned
- self.assertEqual(extract_constant(f1.func_code,'y', -1), -1)
-
- # recognized name, not assigned
- self.assertEqual(extract_constant(f1.func_code,'z', -1), None)
-
-
- def testFindModule(self):
- self.assertRaises(ImportError, find_module, 'no-such.-thing')
- self.assertRaises(ImportError, find_module, 'setuptools.non-existent')
- f,p,i = find_module('setuptools.tests'); f.close()
-
- def testModuleExtract(self):
- from distutils import __version__
- self.assertEqual(
- get_module_constant('distutils','__version__'), __version__
- )
- self.assertEqual(
- get_module_constant('sys','version'), sys.version
- )
- self.assertEqual(
- get_module_constant('setuptools.tests','__doc__'),__doc__
- )
-
- def testRequire(self):
-
- req = Require('Distutils','1.0.3','distutils')
-
- self.assertEqual(req.name, 'Distutils')
- self.assertEqual(req.module, 'distutils')
- self.assertEqual(req.requested_version, '1.0.3')
- self.assertEqual(req.attribute, '__version__')
- self.assertEqual(req.full_name(), 'Distutils-1.0.3')
-
- from distutils import __version__
- self.assertEqual(req.get_version(), __version__)
- self.failUnless(req.version_ok('1.0.9'))
- self.failIf(req.version_ok('0.9.1'))
- self.failIf(req.version_ok('unknown'))
-
- self.failUnless(req.is_present())
- self.failUnless(req.is_current())
-
- req = Require('Distutils 3000','03000','distutils',format=LooseVersion)
- self.failUnless(req.is_present())
- self.failIf(req.is_current())
- self.failIf(req.version_ok('unknown'))
-
- req = Require('Do-what-I-mean','1.0','d-w-i-m')
- self.failIf(req.is_present())
- self.failIf(req.is_current())
-
- req = Require('Tests', None, 'tests', homepage="http://example.com")
- self.assertEqual(req.format, None)
- self.assertEqual(req.attribute, None)
- self.assertEqual(req.requested_version, None)
- self.assertEqual(req.full_name(), 'Tests')
- self.assertEqual(req.homepage, 'http://example.com')
-
- paths = [os.path.dirname(p) for p in __path__]
- self.failUnless(req.is_present(paths))
- self.failUnless(req.is_current(paths))
-
-
-
-class DistroTests(TestCase):
-
- def setUp(self):
- self.e1 = Extension('bar.ext',['bar.c'])
- self.e2 = Extension('c.y', ['y.c'])
-
- self.dist = makeSetup(
- packages=['a', 'a.b', 'a.b.c', 'b', 'c'],
- py_modules=['b.d','x'],
- ext_modules = (self.e1, self.e2),
- package_dir = {},
- )
-
-
- def testDistroType(self):
- self.failUnless(isinstance(self.dist,setuptools.dist.Distribution))
-
-
- def testExcludePackage(self):
- self.dist.exclude_package('a')
- self.assertEqual(self.dist.packages, ['b','c'])
-
- self.dist.exclude_package('b')
- self.assertEqual(self.dist.packages, ['c'])
- self.assertEqual(self.dist.py_modules, ['x'])
- self.assertEqual(self.dist.ext_modules, [self.e1, self.e2])
-
- self.dist.exclude_package('c')
- self.assertEqual(self.dist.packages, [])
- self.assertEqual(self.dist.py_modules, ['x'])
- self.assertEqual(self.dist.ext_modules, [self.e1])
-
- # test removals from unspecified options
- makeSetup().exclude_package('x')
-
-
-
-
-
-
-
- def testIncludeExclude(self):
- # remove an extension
- self.dist.exclude(ext_modules=[self.e1])
- self.assertEqual(self.dist.ext_modules, [self.e2])
-
- # add it back in
- self.dist.include(ext_modules=[self.e1])
- self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
-
- # should not add duplicate
- self.dist.include(ext_modules=[self.e1])
- self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
-
- def testExcludePackages(self):
- self.dist.exclude(packages=['c','b','a'])
- self.assertEqual(self.dist.packages, [])
- self.assertEqual(self.dist.py_modules, ['x'])
- self.assertEqual(self.dist.ext_modules, [self.e1])
-
- def testEmpty(self):
- dist = makeSetup()
- dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
- dist = makeSetup()
- dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
-
- def testContents(self):
- self.failUnless(self.dist.has_contents_for('a'))
- self.dist.exclude_package('a')
- self.failIf(self.dist.has_contents_for('a'))
-
- self.failUnless(self.dist.has_contents_for('b'))
- self.dist.exclude_package('b')
- self.failIf(self.dist.has_contents_for('b'))
-
- self.failUnless(self.dist.has_contents_for('c'))
- self.dist.exclude_package('c')
- self.failIf(self.dist.has_contents_for('c'))
-
-
-
-
- def testInvalidIncludeExclude(self):
- self.assertRaises(DistutilsSetupError,
- self.dist.include, nonexistent_option='x'
- )
- self.assertRaises(DistutilsSetupError,
- self.dist.exclude, nonexistent_option='x'
- )
- self.assertRaises(DistutilsSetupError,
- self.dist.include, packages={'x':'y'}
- )
- self.assertRaises(DistutilsSetupError,
- self.dist.exclude, packages={'x':'y'}
- )
- self.assertRaises(DistutilsSetupError,
- self.dist.include, ext_modules={'x':'y'}
- )
- self.assertRaises(DistutilsSetupError,
- self.dist.exclude, ext_modules={'x':'y'}
- )
-
- self.assertRaises(DistutilsSetupError,
- self.dist.include, package_dir=['q']
- )
- self.assertRaises(DistutilsSetupError,
- self.dist.exclude, package_dir=['q']
- )
-
-
-class FeatureTests(TestCase):
-
- def setUp(self):
- self.req = Require('Distutils','1.0.3','distutils')
- self.dist = makeSetup(
- features={
- 'foo': Feature("foo",standard=True,require_features=['baz',self.req]),
- 'bar': Feature("bar", standard=True, packages=['pkg.bar'],
- py_modules=['bar_et'], remove=['bar.ext'],
- ),
- 'baz': Feature(
- "baz", optional=False, packages=['pkg.baz'],
- scripts = ['scripts/baz_it'],
- libraries=[('libfoo','foo/foofoo.c')]
- ),
- 'dwim': Feature("DWIM", available=False, remove='bazish'),
- },
- script_args=['--without-bar', 'install'],
- packages = ['pkg.bar', 'pkg.foo'],
- py_modules = ['bar_et', 'bazish'],
- ext_modules = [Extension('bar.ext',['bar.c'])]
- )
-
- def testDefaults(self):
- self.failIf(
- Feature(
- "test",standard=True,remove='x',available=False
- ).include_by_default()
- )
- self.failUnless(
- Feature("test",standard=True,remove='x').include_by_default()
- )
- # Feature must have either kwargs, removes, or require_features
- self.assertRaises(DistutilsSetupError, Feature, "test")
-
- def testAvailability(self):
- self.assertRaises(
- DistutilsPlatformError,
- self.dist.features['dwim'].include_in, self.dist
- )
-
- def testFeatureOptions(self):
- dist = self.dist
- self.failUnless(
- ('with-dwim',None,'include DWIM') in dist.feature_options
- )
- self.failUnless(
- ('without-dwim',None,'exclude DWIM (default)') in dist.feature_options
- )
- self.failUnless(
- ('with-bar',None,'include bar (default)') in dist.feature_options
- )
- self.failUnless(
- ('without-bar',None,'exclude bar') in dist.feature_options
- )
- self.assertEqual(dist.feature_negopt['without-foo'],'with-foo')
- self.assertEqual(dist.feature_negopt['without-bar'],'with-bar')
- self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim')
- self.failIf('without-baz' in dist.feature_negopt)
-
- def testUseFeatures(self):
- dist = self.dist
- self.assertEqual(dist.with_foo,1)
- self.assertEqual(dist.with_bar,0)
- self.assertEqual(dist.with_baz,1)
- self.failIf('bar_et' in dist.py_modules)
- self.failIf('pkg.bar' in dist.packages)
- self.failUnless('pkg.baz' in dist.packages)
- self.failUnless('scripts/baz_it' in dist.scripts)
- self.failUnless(('libfoo','foo/foofoo.c') in dist.libraries)
- self.assertEqual(dist.ext_modules,[])
- self.assertEqual(dist.require_features, [self.req])
-
- # If we ask for bar, it should fail because we explicitly disabled
- # it on the command line
- self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar')
-
- def testFeatureWithInvalidRemove(self):
- self.assertRaises(
- SystemExit, makeSetup, features = {'x':Feature('x', remove='y')}
- )
-
-class TestCommandTests(TestCase):
-
- def testTestIsCommand(self):
- test_cmd = makeSetup().get_command_obj('test')
- self.failUnless(isinstance(test_cmd, distutils.cmd.Command))
-
- def testLongOptSuiteWNoDefault(self):
- ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite'])
- ts1 = ts1.get_command_obj('test')
- ts1.ensure_finalized()
- self.assertEqual(ts1.test_suite, 'foo.tests.suite')
-
- def testDefaultSuite(self):
- ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test')
- ts2.ensure_finalized()
- self.assertEqual(ts2.test_suite, 'bar.tests.suite')
-
- def testDefaultWModuleOnCmdLine(self):
- ts3 = makeSetup(
- test_suite='bar.tests',
- script_args=['test','-m','foo.tests']
- ).get_command_obj('test')
- ts3.ensure_finalized()
- self.assertEqual(ts3.test_module, 'foo.tests')
- self.assertEqual(ts3.test_suite, 'foo.tests.test_suite')
-
- def testConflictingOptions(self):
- ts4 = makeSetup(
- script_args=['test','-m','bar.tests', '-s','foo.tests.suite']
- ).get_command_obj('test')
- self.assertRaises(DistutilsOptionError, ts4.ensure_finalized)
-
- def testNoSuite(self):
- ts5 = makeSetup().get_command_obj('test')
- ts5.ensure_finalized()
- self.assertEqual(ts5.test_suite, None)
diff --git a/Lib/setuptools/tests/api_tests.txt b/Lib/setuptools/tests/api_tests.txt
deleted file mode 100755
index 735ad8dd68..0000000000
--- a/Lib/setuptools/tests/api_tests.txt
+++ /dev/null
@@ -1,330 +0,0 @@
-Pluggable Distributions of Python Software
-==========================================
-
-Distributions
--------------
-
-A "Distribution" is a collection of files that represent a "Release" of a
-"Project" as of a particular point in time, denoted by a
-"Version"::
-
- >>> import sys, pkg_resources
- >>> from pkg_resources import Distribution
- >>> Distribution(project_name="Foo", version="1.2")
- Foo 1.2
-
-Distributions have a location, which can be a filename, URL, or really anything
-else you care to use::
-
- >>> dist = Distribution(
- ... location="http://example.com/something",
- ... project_name="Bar", version="0.9"
- ... )
-
- >>> dist
- Bar 0.9 (http://example.com/something)
-
-
-Distributions have various introspectable attributes::
-
- >>> dist.location
- 'http://example.com/something'
-
- >>> dist.project_name
- 'Bar'
-
- >>> dist.version
- '0.9'
-
- >>> dist.py_version == sys.version[:3]
- True
-
- >>> print dist.platform
- None
-
-Including various computed attributes::
-
- >>> from pkg_resources import parse_version
- >>> dist.parsed_version == parse_version(dist.version)
- True
-
- >>> dist.key # case-insensitive form of the project name
- 'bar'
-
-Distributions are compared (and hashed) by version first::
-
- >>> Distribution(version='1.0') == Distribution(version='1.0')
- True
- >>> Distribution(version='1.0') == Distribution(version='1.1')
- False
- >>> Distribution(version='1.0') < Distribution(version='1.1')
- True
-
-but also by project name (case-insensitive), platform, Python version,
-location, etc.::
-
- >>> Distribution(project_name="Foo",version="1.0") == \
- ... Distribution(project_name="Foo",version="1.0")
- True
-
- >>> Distribution(project_name="Foo",version="1.0") == \
- ... Distribution(project_name="foo",version="1.0")
- True
-
- >>> Distribution(project_name="Foo",version="1.0") == \
- ... Distribution(project_name="Foo",version="1.1")
- False
-
- >>> Distribution(project_name="Foo",py_version="2.3",version="1.0") == \
- ... Distribution(project_name="Foo",py_version="2.4",version="1.0")
- False
-
- >>> Distribution(location="spam",version="1.0") == \
- ... Distribution(location="spam",version="1.0")
- True
-
- >>> Distribution(location="spam",version="1.0") == \
- ... Distribution(location="baz",version="1.0")
- False
-
-
-
-Hash and compare distribution by prio/plat
-
-Get version from metadata
-provider capabilities
-egg_name()
-as_requirement()
-from_location, from_filename (w/path normalization)
-
-Releases may have zero or more "Requirements", which indicate
-what releases of another project the release requires in order to
-function. A Requirement names the other project, expresses some criteria
-as to what releases of that project are acceptable, and lists any "Extras"
-that the requiring release may need from that project. (An Extra is an
-optional feature of a Release, that can only be used if its additional
-Requirements are satisfied.)
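
For illustration (the project and extra names are made up), a Requirement
carries all three pieces of information:

    >>> from pkg_resources import Requirement
    >>> req = Requirement.parse("ReportTool[pdf,charts]>=0.9,<2.0")
    >>> req.key, req.extras
    ('reporttool', ('pdf', 'charts'))
    >>> '1.5' in req, '2.1' in req
    (True, False)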
-
-
-
-The Working Set
----------------
-
-A collection of active distributions is called a Working Set. Note that a
-Working Set can contain any importable distribution, not just pluggable ones.
-For example, the Python standard library is an importable distribution that
-will usually be part of the Working Set, even though it is not pluggable.
-Similarly, when you are doing development work on a project, the files you are
-editing are also a Distribution. (And, with a little attention to the
-directory names used, and including some additional metadata, such a
-"development distribution" can be made pluggable as well.)
-
- >>> from pkg_resources import WorkingSet
-
-A working set's entries are the sys.path entries that correspond to the active
-distributions. By default, the working set's entries are the items on
-``sys.path``::
-
- >>> ws = WorkingSet()
- >>> ws.entries == sys.path
- True
-
-But you can also create an empty working set explicitly, and add distributions
-to it::
-
- >>> ws = WorkingSet([])
- >>> ws.add(dist)
- >>> ws.entries
- ['http://example.com/something']
- >>> dist in ws
- True
- >>> Distribution('foo',version="") in ws
- False
-
-And you can iterate over its distributions::
-
- >>> list(ws)
- [Bar 0.9 (http://example.com/something)]
-
-Adding the same distribution more than once is a no-op::
-
- >>> ws.add(dist)
- >>> list(ws)
- [Bar 0.9 (http://example.com/something)]
-
-For that matter, adding multiple distributions for the same project also does
-nothing, because a working set can only hold one active distribution per
-project -- the first one added to it::
-
- >>> ws.add(
- ... Distribution(
- ... 'http://example.com/something', project_name="Bar",
- ... version="7.2"
- ... )
- ... )
- >>> list(ws)
- [Bar 0.9 (http://example.com/something)]
-
-You can append a path entry to a working set using ``add_entry()``::
-
- >>> ws.entries
- ['http://example.com/something']
- >>> ws.add_entry(pkg_resources.__file__)
- >>> ws.entries
- ['http://example.com/something', '...pkg_resources.py...']
-
-Multiple additions result in multiple entries, even if the entry is already in
-the working set (because ``sys.path`` can contain the same entry more than
-once)::
-
- >>> ws.add_entry(pkg_resources.__file__)
- >>> ws.entries
- ['...example.com...', '...pkg_resources...', '...pkg_resources...']
-
-And you can specify the path entry a distribution was found under, using the
-optional second parameter to ``add()``::
-
- >>> ws = WorkingSet([])
- >>> ws.add(dist,"foo")
- >>> ws.entries
- ['foo']
-
-But even if a distribution is found under multiple path entries, it still only
-shows up once when iterating the working set:
-
- >>> ws.add_entry(ws.entries[0])
- >>> list(ws)
- [Bar 0.9 (http://example.com/something)]
-
-You can ask a WorkingSet to ``find()`` a distribution matching a requirement::
-
- >>> from pkg_resources import Requirement
- >>> print ws.find(Requirement.parse("Foo==1.0")) # no match, return None
- None
-
- >>> ws.find(Requirement.parse("Bar==0.9")) # match, return distribution
- Bar 0.9 (http://example.com/something)
-
-Note that asking for a conflicting version of a distribution already in a
-working set triggers a ``pkg_resources.VersionConflict`` error:
-
- >>> ws.find(Requirement.parse("Bar==1.0")) # doctest: +NORMALIZE_WHITESPACE
- Traceback (most recent call last):
- ...
- VersionConflict: (Bar 0.9 (http://example.com/something),
- Requirement.parse('Bar==1.0'))
-
-You can subscribe a callback function to receive notifications whenever a new
-distribution is added to a working set. The callback is immediately invoked
-once for each existing distribution in the working set, and then is called
-again for new distributions added thereafter::
-
- >>> def added(dist): print "Added", dist
- >>> ws.subscribe(added)
- Added Bar 0.9
- >>> foo12 = Distribution(project_name="Foo", version="1.2", location="f12")
- >>> ws.add(foo12)
- Added Foo 1.2
-
-Note, however, that only the first distribution added for a given project name
-will trigger a callback, even during the initial ``subscribe()`` callback::
-
- >>> foo14 = Distribution(project_name="Foo", version="1.4", location="f14")
- >>> ws.add(foo14) # no callback, because Foo 1.2 is already active
-
- >>> ws = WorkingSet([])
- >>> ws.add(foo12)
- >>> ws.add(foo14)
- >>> ws.subscribe(added)
- Added Foo 1.2
-
-And adding a callback more than once has no effect, either::
-
- >>> ws.subscribe(added) # no callbacks
-
- # and no double-callbacks on subsequent additions, either
- >>> just_a_test = Distribution(project_name="JustATest", version="0.99")
- >>> ws.add(just_a_test)
- Added JustATest 0.99
-
-
-Finding Plugins
----------------
-
-``WorkingSet`` objects can be used to figure out what plugins in an
-``Environment`` can be loaded without any resolution errors::
-
- >>> from pkg_resources import Environment
-
- >>> plugins = Environment([]) # normally, a list of plugin directories
- >>> plugins.add(foo12)
- >>> plugins.add(foo14)
- >>> plugins.add(just_a_test)
-
-In the simplest case, we just get the newest version of each distribution in
-the plugin environment::
-
- >>> ws = WorkingSet([])
- >>> ws.find_plugins(plugins)
- ([JustATest 0.99, Foo 1.4 (f14)], {})
-
-But if there's a problem with a version conflict or missing requirements, the
-method falls back to older versions, and the error info dict will contain an
-exception instance for each unloadable plugin::
-
- >>> ws.add(foo12) # this will conflict with Foo 1.4
- >>> ws.find_plugins(plugins)
- ([JustATest 0.99, Foo 1.2 (f12)], {Foo 1.4 (f14): VersionConflict(...)})
-
-But if you disallow fallbacks, the failed plugin will be skipped instead of
-trying older versions::
-
- >>> ws.find_plugins(plugins, fallback=False)
- ([JustATest 0.99], {Foo 1.4 (f14): VersionConflict(...)})
-
-
-
-Platform Compatibility Rules
-----------------------------
-
-On the Mac, there are potential compatibility issues for modules compiled
-on newer versions of Mac OS X than what the user is running. Additionally,
-Mac OS X will soon have two platforms to contend with: Intel and PowerPC.
-
-Basic equality works as on other platforms::
-
- >>> from pkg_resources import compatible_platforms as cp
- >>> reqd = 'macosx-10.4-ppc'
- >>> cp(reqd, reqd)
- True
- >>> cp("win32", reqd)
- False
-
-Distributions made on other machine types are not compatible::
-
- >>> cp("macosx-10.4-i386", reqd)
- False
-
-Distributions made on earlier versions of the OS are compatible, as
-long as they are from the same top-level version. The patchlevel version
-number does not matter::
-
- >>> cp("macosx-10.4-ppc", reqd)
- True
- >>> cp("macosx-10.3-ppc", reqd)
- True
- >>> cp("macosx-10.5-ppc", reqd)
- False
- >>> cp("macosx-9.5-ppc", reqd)
- False
-
-Backwards compatibility for packages made via earlier versions of
-setuptools is provided as well::
-
- >>> cp("darwin-8.2.0-Power_Macintosh", reqd)
- True
- >>> cp("darwin-7.2.0-Power_Macintosh", reqd)
- True
- >>> cp("darwin-8.2.0-Power_Macintosh", "macosx-10.3-ppc")
- False
-
diff --git a/Lib/setuptools/tests/test_resources.py b/Lib/setuptools/tests/test_resources.py
deleted file mode 100644
index f32c72e3a8..0000000000
--- a/Lib/setuptools/tests/test_resources.py
+++ /dev/null
@@ -1,483 +0,0 @@
-from unittest import TestCase, makeSuite
-from pkg_resources import *
-import pkg_resources, sys
-from sets import ImmutableSet
-
-class Metadata(EmptyProvider):
- """Mock object to return metadata as if from an on-disk distribution"""
-
- def __init__(self,*pairs):
- self.metadata = dict(pairs)
-
- def has_metadata(self,name):
- return name in self.metadata
-
- def get_metadata(self,name):
- return self.metadata[name]
-
- def get_metadata_lines(self,name):
- return yield_lines(self.get_metadata(name))
-
-
-class DistroTests(TestCase):
-
- def testCollection(self):
- # empty path should produce no distributions
- ad = Environment([], platform=None, python=None)
- self.assertEqual(list(ad), [])
- self.assertEqual(ad['FooPkg'],[])
-
- ad.add(Distribution.from_filename("FooPkg-1.3_1.egg"))
- ad.add(Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg"))
- ad.add(Distribution.from_filename("FooPkg-1.2-py2.4.egg"))
-
- # Name is in there now
- self.failUnless(ad['FooPkg'])
-
- # But only 1 package
- self.assertEqual(list(ad), ['foopkg'])
-
-
-
- # Distributions sort by version
- self.assertEqual(
- [dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2']
- )
- # Removing a distribution leaves sequence alone
- ad.remove(ad['FooPkg'][1])
- self.assertEqual(
- [dist.version for dist in ad['FooPkg']], ['1.4','1.2']
- )
- # And inserting adds them in order
- ad.add(Distribution.from_filename("FooPkg-1.9.egg"))
- self.assertEqual(
- [dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2']
- )
-
- ws = WorkingSet([])
- foo12 = Distribution.from_filename("FooPkg-1.2-py2.4.egg")
- foo14 = Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg")
- req, = parse_requirements("FooPkg>=1.3")
-
- # Nominal case: no distros on path, should yield all applicable
- self.assertEqual(ad.best_match(req,ws).version, '1.9')
- # If a matching distro is already installed, should return only that
- ws.add(foo14); self.assertEqual(ad.best_match(req,ws).version, '1.4')
-
- # If the first matching distro is unsuitable, it's a version conflict
- ws = WorkingSet([]); ws.add(foo12); ws.add(foo14)
- self.assertRaises(VersionConflict, ad.best_match, req, ws)
-
- # If more than one match on the path, the first one takes precedence
- ws = WorkingSet([]); ws.add(foo14); ws.add(foo12); ws.add(foo14);
- self.assertEqual(ad.best_match(req,ws).version, '1.4')
-
- def checkFooPkg(self,d):
- self.assertEqual(d.project_name, "FooPkg")
- self.assertEqual(d.key, "foopkg")
- self.assertEqual(d.version, "1.3-1")
- self.assertEqual(d.py_version, "2.4")
- self.assertEqual(d.platform, "win32")
- self.assertEqual(d.parsed_version, parse_version("1.3-1"))
-
- def testDistroBasics(self):
- d = Distribution(
- "/some/path",
- project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32"
- )
- self.checkFooPkg(d)
-
- d = Distribution("/some/path")
- self.assertEqual(d.py_version, sys.version[:3])
- self.assertEqual(d.platform, None)
-
- def testDistroParse(self):
- d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg")
- self.checkFooPkg(d)
- d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg-info")
- self.checkFooPkg(d)
-
- def testDistroMetadata(self):
- d = Distribution(
- "/some/path", project_name="FooPkg", py_version="2.4", platform="win32",
- metadata = Metadata(
- ('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n")
- )
- )
- self.checkFooPkg(d)
-
-
- def distRequires(self, txt):
- return Distribution("/foo", metadata=Metadata(('depends.txt', txt)))
-
- def checkRequires(self, dist, txt, extras=()):
- self.assertEqual(
- list(dist.requires(extras)),
- list(parse_requirements(txt))
- )
-
- def testDistroDependsSimple(self):
- for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0":
- self.checkRequires(self.distRequires(v), v)
-
-
- def testResolve(self):
- ad = Environment([]); ws = WorkingSet([])
- # Resolving no requirements -> nothing to install
- self.assertEqual( list(ws.resolve([],ad)), [] )
- # Request something not in the collection -> DistributionNotFound
- self.assertRaises(
- DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad
- )
- Foo = Distribution.from_filename(
- "/foo_dir/Foo-1.2.egg",
- metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0"))
- )
- ad.add(Foo); ad.add(Distribution.from_filename("Foo-0.9.egg"))
-
- # Request thing(s) that are available -> list to activate
- for i in range(3):
- targets = list(ws.resolve(parse_requirements("Foo"), ad))
- self.assertEqual(targets, [Foo])
- map(ws.add,targets)
- self.assertRaises(VersionConflict, ws.resolve,
- parse_requirements("Foo==0.9"), ad)
- ws = WorkingSet([]) # reset
-
- # Request an extra that causes an unresolved dependency for "Baz"
- self.assertRaises(
- DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad
- )
- Baz = Distribution.from_filename(
- "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo"))
- )
- ad.add(Baz)
-
- # Activation list now includes resolved dependency
- self.assertEqual(
- list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz]
- )
- # Requests for conflicting versions produce VersionConflict
- self.assertRaises( VersionConflict,
- ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad
- )
-
- def testDistroDependsOptions(self):
- d = self.distRequires("""
- Twisted>=1.5
- [docgen]
- ZConfig>=2.0
- docutils>=0.3
- [fastcgi]
- fcgiapp>=0.1""")
- self.checkRequires(d,"Twisted>=1.5")
- self.checkRequires(
- d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"]
- )
- self.checkRequires(
- d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]
- )
- self.checkRequires(
- d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(),
- ["docgen","fastcgi"]
- )
- self.checkRequires(
- d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(),
- ["fastcgi", "docgen"]
- )
- self.assertRaises(UnknownExtra, d.requires, ["foo"])
-
-
-class EntryPointTests(TestCase):
-
- def assertfields(self, ep):
- self.assertEqual(ep.name,"foo")
- self.assertEqual(ep.module_name,"setuptools.tests.test_resources")
- self.assertEqual(ep.attrs, ("EntryPointTests",))
- self.assertEqual(ep.extras, ("x",))
- self.failUnless(ep.load() is EntryPointTests)
- self.assertEqual(
- str(ep),
- "foo = setuptools.tests.test_resources:EntryPointTests [x]"
- )
-
- def setUp(self):
- self.dist = Distribution.from_filename(
- "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]')))
-
- def testBasics(self):
- ep = EntryPoint(
- "foo", "setuptools.tests.test_resources", ["EntryPointTests"],
- ["x"], self.dist
- )
- self.assertfields(ep)
-
- def testParse(self):
- s = "foo = setuptools.tests.test_resources:EntryPointTests [x]"
- ep = EntryPoint.parse(s, self.dist)
- self.assertfields(ep)
-
- ep = EntryPoint.parse("bar baz= spammity[PING]")
- self.assertEqual(ep.name,"bar baz")
- self.assertEqual(ep.module_name,"spammity")
- self.assertEqual(ep.attrs, ())
- self.assertEqual(ep.extras, ("ping",))
-
- ep = EntryPoint.parse(" fizzly = wocka:foo")
- self.assertEqual(ep.name,"fizzly")
- self.assertEqual(ep.module_name,"wocka")
- self.assertEqual(ep.attrs, ("foo",))
- self.assertEqual(ep.extras, ())
-
- def testRejects(self):
- for ep in [
- "foo", "x=1=2", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2",
- ]:
- try: EntryPoint.parse(ep)
- except ValueError: pass
- else: raise AssertionError("Should've been bad", ep)
-
- def checkSubMap(self, m):
- self.assertEqual(str(m),
- "{"
- "'feature2': EntryPoint.parse("
- "'feature2 = another.module:SomeClass [extra1,extra2]'), "
- "'feature1': EntryPoint.parse("
- "'feature1 = somemodule:somefunction')"
- "}"
- )
-
- submap_str = """
- # define features for blah blah
- feature1 = somemodule:somefunction
- feature2 = another.module:SomeClass [extra1,extra2]
- """
-
- def testParseList(self):
- self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str))
- self.assertRaises(ValueError, EntryPoint.parse_group, "x a", "foo=bar")
- self.assertRaises(ValueError, EntryPoint.parse_group, "x",
- ["foo=baz", "foo=bar"])
-
- def testParseMap(self):
- m = EntryPoint.parse_map({'xyz':self.submap_str})
- self.checkSubMap(m['xyz'])
- self.assertEqual(m.keys(),['xyz'])
- m = EntryPoint.parse_map("[xyz]\n"+self.submap_str)
- self.checkSubMap(m['xyz'])
- self.assertEqual(m.keys(),['xyz'])
- self.assertRaises(ValueError, EntryPoint.parse_map, ["[xyz]", "[xyz]"])
- self.assertRaises(ValueError, EntryPoint.parse_map, self.submap_str)
-
-
-class RequirementsTests(TestCase):
-
- def testBasics(self):
- r = Requirement.parse("Twisted>=1.2")
- self.assertEqual(str(r),"Twisted>=1.2")
- self.assertEqual(repr(r),"Requirement.parse('Twisted>=1.2')")
- self.assertEqual(r, Requirement("Twisted", [('>=','1.2')], ()))
- self.assertEqual(r, Requirement("twisTed", [('>=','1.2')], ()))
- self.assertNotEqual(r, Requirement("Twisted", [('>=','2.0')], ()))
- self.assertNotEqual(r, Requirement("Zope", [('>=','1.2')], ()))
- self.assertNotEqual(r, Requirement("Zope", [('>=','3.0')], ()))
- self.assertNotEqual(r, Requirement.parse("Twisted[extras]>=1.2"))
-
- def testOrdering(self):
- r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ())
- r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ())
- self.assertEqual(r1,r2)
- self.assertEqual(str(r1),str(r2))
- self.assertEqual(str(r2),"Twisted==1.2c1,>=1.2")
-
- def testBasicContains(self):
- r = Requirement("Twisted", [('>=','1.2')], ())
- foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg")
- twist11 = Distribution.from_filename("Twisted-1.1.egg")
- twist12 = Distribution.from_filename("Twisted-1.2.egg")
- self.failUnless(parse_version('1.2') in r)
- self.failUnless(parse_version('1.1') not in r)
- self.failUnless('1.2' in r)
- self.failUnless('1.1' not in r)
- self.failUnless(foo_dist not in r)
- self.failUnless(twist11 not in r)
- self.failUnless(twist12 in r)
-
- def testAdvancedContains(self):
- r, = parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5")
- for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'):
- self.failUnless(v in r, (v,r))
- for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'):
- self.failUnless(v not in r, (v,r))
-
-
- def testOptionsAndHashing(self):
- r1 = Requirement.parse("Twisted[foo,bar]>=1.2")
- r2 = Requirement.parse("Twisted[bar,FOO]>=1.2")
- r3 = Requirement.parse("Twisted[BAR,FOO]>=1.2.0")
- self.assertEqual(r1,r2)
- self.assertEqual(r1,r3)
- self.assertEqual(r1.extras, ("foo","bar"))
- self.assertEqual(r2.extras, ("bar","foo")) # extras are normalized
- self.assertEqual(hash(r1), hash(r2))
- self.assertEqual(
- hash(r1), hash(("twisted", ((">=",parse_version("1.2")),),
- ImmutableSet(["foo","bar"])))
- )
-
- def testVersionEquality(self):
- r1 = Requirement.parse("setuptools==0.3a2")
- r2 = Requirement.parse("setuptools!=0.3a4")
- d = Distribution.from_filename
-
- self.failIf(d("setuptools-0.3a4.egg") in r1)
- self.failIf(d("setuptools-0.3a1.egg") in r1)
- self.failIf(d("setuptools-0.3a4.egg") in r2)
-
- self.failUnless(d("setuptools-0.3a2.egg") in r1)
- self.failUnless(d("setuptools-0.3a2.egg") in r2)
- self.failUnless(d("setuptools-0.3a3.egg") in r2)
- self.failUnless(d("setuptools-0.3a5.egg") in r2)
-
-
-class ParseTests(TestCase):
-
- def testEmptyParse(self):
- self.assertEqual(list(parse_requirements('')), [])
-
- def testYielding(self):
- for inp,out in [
- ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']),
- (['x\n\n','y'], ['x','y']),
- ]:
- self.assertEqual(list(pkg_resources.yield_lines(inp)),out)
-
- def testSplitting(self):
- self.assertEqual(
- list(
- pkg_resources.split_sections("""
- x
- [Y]
- z
-
- a
- [b ]
- # foo
- c
- [ d]
- [q]
- v
- """
- )
- ),
- [(None,["x"]), ("Y",["z","a"]), ("b",["c"]), ("d",[]), ("q",["v"])]
- )
- self.assertRaises(ValueError,list,pkg_resources.split_sections("[foo"))
-
- def testSafeName(self):
- self.assertEqual(safe_name("adns-python"), "adns-python")
- self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils")
- self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils")
- self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker")
- self.assertNotEqual(safe_name("peak.web"), "peak-web")
-
- def testSafeVersion(self):
- self.assertEqual(safe_version("1.2-1"), "1.2-1")
- self.assertEqual(safe_version("1.2 alpha"), "1.2.alpha")
- self.assertEqual(safe_version("2.3.4 20050521"), "2.3.4.20050521")
- self.assertEqual(safe_version("Money$$$Maker"), "Money-Maker")
- self.assertEqual(safe_version("peak.web"), "peak.web")
-
- def testSimpleRequirements(self):
- self.assertEqual(
- list(parse_requirements('Twis-Ted>=1.2-1')),
- [Requirement('Twis-Ted',[('>=','1.2-1')], ())]
- )
- self.assertEqual(
- list(parse_requirements('Twisted >=1.2, \ # more\n<2.0')),
- [Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())]
- )
- self.assertEqual(
- Requirement.parse("FooBar==1.99a3"),
- Requirement("FooBar", [('==','1.99a3')], ())
- )
- self.assertRaises(ValueError,Requirement.parse,">=2.3")
- self.assertRaises(ValueError,Requirement.parse,"x\\")
- self.assertRaises(ValueError,Requirement.parse,"x==2 q")
- self.assertRaises(ValueError,Requirement.parse,"X==1\nY==2")
- self.assertRaises(ValueError,Requirement.parse,"#")
-
- def testVersionEquality(self):
- def c(s1,s2):
- p1, p2 = parse_version(s1),parse_version(s2)
- self.assertEqual(p1,p2, (s1,s2,p1,p2))
-
- c('1.2-rc1', '1.2rc1')
- c('0.4', '0.4.0')
- c('0.4.0.0', '0.4.0')
- c('0.4.0-0', '0.4-0')
- c('0pl1', '0.0pl1')
- c('0pre1', '0.0c1')
- c('0.0.0preview1', '0c1')
- c('0.0c1', '0-rc1')
- c('1.2a1', '1.2.a.1'); c('1.2...a', '1.2a')
-
- def testVersionOrdering(self):
- def c(s1,s2):
- p1, p2 = parse_version(s1),parse_version(s2)
- self.failUnless(p1<p2, (s1,s2,p1,p2))
-
- c('2.1','2.1.1')
- c('2a1','2b0')
- c('2a1','2.1')
- c('2.3a1', '2.3')
- c('2.1-1', '2.1-2')
- c('2.1-1', '2.1.1')
- c('2.1', '2.1pl4')
- c('2.1a0-20040501', '2.1')
- c('1.1', '02.1')
- c('A56','B27')
- c('3.2', '3.2.pl0')
- c('3.2-1', '3.2pl1')
- c('3.2pl1', '3.2pl1-1')
- c('0.4', '4.0')
- c('0.0.4', '0.4.0')
- c('0pl1', '0.4pl1')
- c('2.1.0-rc1','2.1.0')
-
- torture ="""
- 0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1
- 0.79.9999+0.80.0pre2-3 0.79.9999+0.80.0pre2-2
- 0.77.2-1 0.77.1-1 0.77.0-1
- """.split()
-
- for p,v1 in enumerate(torture):
- for v2 in torture[p+1:]:
- c(v2,v1)