author     Stefan Behnel <stefan_ml@behnel.de>   2013-08-24 08:09:39 +0200
committer  Stefan Behnel <stefan_ml@behnel.de>   2013-08-24 08:09:39 +0200
commit     0defa4ed2712e7376bca1bcfd7be244cb1cfb5a2 (patch)
tree       7be10e223f4d6e366fd9a4d74ff8d930823d264c /Tools/cystdlib.py
parent     902d7747898750e0f1163492815bcd5cc31cfb0c (diff)
update and clean up cystdlib.py script
Diffstat (limited to 'Tools/cystdlib.py')
-rw-r--r--   Tools/cystdlib.py   78
1 file changed, 50 insertions(+), 28 deletions(-)
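
The user-visible change: the boolean '--parallel' flag is replaced by an explicit '-j N' (or '--jobs N') option, defaulting to 1, and the stdlib directory can now be passed as a positional LIB_DIR argument (default: ./Lib). A hypothetical invocation after this change, compiling the running interpreter's stdlib with eight parallel jobs (the job count is only an example)::

    $ python cystdlib.py --current-python -j 8
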
diff --git a/Tools/cystdlib.py b/Tools/cystdlib.py
index 70452cc81..32c9d7871 100644
--- a/Tools/cystdlib.py
+++ b/Tools/cystdlib.py
@@ -5,13 +5,14 @@ Execute the script either in the CPython 'Lib' directory or pass the
 option '--current-python' to compile the standard library of the running
 Python interpreter.

-Pass '--parallel' to get a parallel build.
+Pass '-j N' to get a parallel build with N processes.

 Usage example::

     $ python cystdlib.py --current-python build_ext -i
 """

+import os
 import sys
 from distutils.core import setup
 from Cython.Build import cythonize
@@ -78,6 +79,7 @@ del special_directives[:] # currently unused
 def build_extensions(includes='**/*.py',
                      excludes=excludes+broken,
                      special_directives=special_directives,
+                     language_level=sys.version_info[0],
                      parallel=None):
     if isinstance(includes, str):
         includes = [includes]
@@ -97,21 +99,25 @@ def build_extensions(includes='**/*.py',
             cythonize(modules,
                       exclude=exclude_now,
                       exclude_failures=True,
-                      language_level=pyver,
+                      language_level=language_level,
                       compiler_directives=d,
                       nthreads=parallel,
             ))
     return extensions

+
 def build(extensions):
     try:
-        setup(name = 'stuff', ext_modules = extensions)
-        return extensions, True
+        setup(ext_modules=extensions)
+        result = True
     except:
         import traceback
-        print('error building extensions %s' % ([ext.name for ext in extensions],))
+        print('error building extensions %s' % (
+            [ext.name for ext in extensions],))
         traceback.print_exc()
-        return extensions, False
+        result = False
+    return extensions, result
+

 def _build(args):
     sys_args, ext = args
@@ -119,31 +125,47 @@ def _build(args):
     return build([ext])


+def parse_args():
+    from optparse import OptionParser
+    parser = OptionParser('%prog [options] [LIB_DIR (default: ./Lib)]')
+    parser.add_option(
+        '--current-python', dest='current_python', action='store_true',
+        help='compile the stdlib of the running Python')
+    parser.add_option(
+        '-j', '--jobs', dest='parallel_jobs', metavar='N',
+        type=int, default=1,
+        help='run builds in N parallel jobs (default: 1)')
+    options, args = parser.parse_args()
+    if not args:
+        args = ['./Lib']
+    elif len(args) > 1:
+        parser.error('only one argument expected, got %d' % len(args))
+    return options, args
+
+
 if __name__ == '__main__':
-    import sys
-    pyver = sys.version_info[0]
-    try:
-        sys.argv.remove('--current-python')
-    except ValueError:
-        pass
-    else:
+    options, args = parse_args()
+    if options.current_python:
         # assume that the stdlib is where the "os" module lives
-        import os
         os.chdir(os.path.dirname(os.__file__))
-
-    try:
-        sys.argv.remove('--parallel')
-        import multiprocessing
-        parallel_compiles = multiprocessing.cpu_count() * 2
-        print("Building in %d parallel processes" % parallel_compiles)
-    except (ValueError, ImportError):
-        parallel_compiles = None
-
-    extensions = build_extensions(parallel=parallel_compiles)
-    if parallel_compiles:
-        pool = multiprocessing.Pool(parallel_compiles)
-        sys_args = sys.argv[1:]
-        results = pool.map(_build, [ (sys_args, ext) for ext in extensions ])
+    else:
+        os.chdir(args[0])
+
+    pool = None
+    parallel_jobs = options.parallel_jobs
+    if options.parallel_jobs:
+        try:
+            import multiprocessing
+            pool = multiprocessing.Pool(parallel_jobs)
+            print("Building in %d parallel processes" % parallel_jobs)
+        except (ImportError, OSError):
+            print("Not building in parallel")
+            parallel_jobs = 0
+
+    extensions = build_extensions(parallel=parallel_jobs)
+    if pool is not None:
+        sys_args = ['-i']
+        results = pool.map(_build, [(sys_args, ext) for ext in extensions])
         pool.close()
         pool.join()
         for ext, result in results:
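
The new '__main__' block fans the per-extension builds out over a multiprocessing.Pool and then iterates over the collected results. The standalone sketch below shows the same Pool.map() fan-out in isolation; the names square_job and jobs are hypothetical placeholders, and the whole thing is only a simplified model of what _build()/build() do in cystdlib.py::

    import multiprocessing


    def square_job(n):
        # stand-in for _build((sys_args, ext)): do some work, report (item, success)
        try:
            _ = n * n  # placeholder for the real per-extension build step
            return n, True
        except Exception:
            return n, False


    if __name__ == '__main__':
        jobs = list(range(8))
        pool = multiprocessing.Pool(2)        # cf. multiprocessing.Pool(parallel_jobs)
        results = pool.map(square_job, jobs)  # cf. pool.map(_build, [(sys_args, ext) ...])
        pool.close()
        pool.join()
        for job, ok in results:
            if not ok:
                print("job %r failed" % (job,))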