summaryrefslogtreecommitdiff
path: root/Cython/Build
diff options
context:
space:
mode:
Diffstat (limited to 'Cython/Build')
-rw-r--r--Cython/Build/BuildExecutable.py73
-rw-r--r--Cython/Build/Cythonize.py154
-rw-r--r--Cython/Build/Dependencies.py194
-rw-r--r--Cython/Build/Inline.py67
-rw-r--r--Cython/Build/IpythonMagic.py44
-rw-r--r--Cython/Build/Tests/TestCyCache.py30
-rw-r--r--Cython/Build/Tests/TestCythonizeArgsParser.py482
-rw-r--r--Cython/Build/Tests/TestDependencies.py142
-rw-r--r--Cython/Build/Tests/TestInline.py34
-rw-r--r--Cython/Build/Tests/TestIpythonMagic.py102
-rw-r--r--Cython/Build/Tests/TestRecythonize.py212
-rw-r--r--Cython/Build/Tests/TestStripLiterals.py1
12 files changed, 1294 insertions, 241 deletions
diff --git a/Cython/Build/BuildExecutable.py b/Cython/Build/BuildExecutable.py
index 2db9e5d74..334fbf069 100644
--- a/Cython/Build/BuildExecutable.py
+++ b/Cython/Build/BuildExecutable.py
@@ -1,10 +1,10 @@
"""
-Compile a Python script into an executable that embeds CPython and run it.
+Compile a Python script into an executable that embeds CPython.
Requires CPython to be built as a shared library ('libpythonX.Y').
Basic usage:
- python cythonrun somefile.py [ARGS]
+ python -m Cython.Build.BuildExecutable [ARGS] somefile.py
"""
from __future__ import absolute_import
@@ -28,7 +28,7 @@ if PYLIB_DYN == PYLIB:
# no shared library
PYLIB_DYN = ''
else:
- PYLIB_DYN = os.path.splitext(PYLIB_DYN[3:])[0] # 'lib(XYZ).so' -> XYZ
+ PYLIB_DYN = os.path.splitext(PYLIB_DYN[3:])[0] # 'lib(XYZ).so' -> XYZ
CC = get_config_var('CC', os.environ.get('CC', ''))
CFLAGS = get_config_var('CFLAGS') + ' ' + os.environ.get('CFLAGS', '')
@@ -38,12 +38,14 @@ LIBS = get_config_var('LIBS')
SYSLIBS = get_config_var('SYSLIBS')
EXE_EXT = sysconfig.get_config_var('EXE')
+
def _debug(msg, *args):
if DEBUG:
if args:
msg = msg % args
sys.stderr.write(msg + '\n')
+
def dump_config():
_debug('INCDIR: %s', INCDIR)
_debug('LIBDIR1: %s', LIBDIR1)
@@ -58,6 +60,26 @@ def dump_config():
_debug('SYSLIBS: %s', SYSLIBS)
_debug('EXE_EXT: %s', EXE_EXT)
+
+def _parse_args(args):
+ cy_args = []
+ last_arg = None
+ for i, arg in enumerate(args):
+ if arg.startswith('-'):
+ cy_args.append(arg)
+ elif last_arg in ('-X', '--directive'):
+ cy_args.append(arg)
+ else:
+ input_file = arg
+ args = args[i+1:]
+ break
+ last_arg = arg
+ else:
+ raise ValueError('no input file provided')
+
+ return input_file, cy_args, args
+
+
def runcmd(cmd, shell=True):
if shell:
cmd = ' '.join(cmd)
@@ -65,24 +87,23 @@ def runcmd(cmd, shell=True):
else:
_debug(' '.join(cmd))
- try:
- import subprocess
- except ImportError: # Python 2.3 ...
- returncode = os.system(cmd)
- else:
- returncode = subprocess.call(cmd, shell=shell)
+ import subprocess
+ returncode = subprocess.call(cmd, shell=shell)
if returncode:
sys.exit(returncode)
+
def clink(basename):
runcmd([LINKCC, '-o', basename + EXE_EXT, basename+'.o', '-L'+LIBDIR1, '-L'+LIBDIR2]
+ [PYLIB_DYN and ('-l'+PYLIB_DYN) or os.path.join(LIBDIR1, PYLIB)]
+ LIBS.split() + SYSLIBS.split() + LINKFORSHARED.split())
+
def ccompile(basename):
runcmd([CC, '-c', '-o', basename+'.o', basename+'.c', '-I' + INCDIR] + CFLAGS.split())
+
def cycompile(input_file, options=()):
from ..Compiler import Version, CmdLine, Main
options, sources = CmdLine.parse_command_line(list(options or ()) + ['--embed', input_file])
@@ -91,9 +112,11 @@ def cycompile(input_file, options=()):
if result.num_errors > 0:
sys.exit(1)
+
def exec_file(program_name, args=()):
runcmd([os.path.abspath(program_name)] + list(args), shell=False)
+
def build(input_file, compiler_args=(), force=False):
"""
Build an executable program from a Cython module.
@@ -105,7 +128,7 @@ def build(input_file, compiler_args=(), force=False):
if not force and os.path.abspath(exe_file) == os.path.abspath(input_file):
raise ValueError("Input and output file names are the same, refusing to overwrite")
if (not force and os.path.exists(exe_file) and os.path.exists(input_file)
- and os.path.getmtime(input_file) <= os.path.getmtime(exe_file)):
+ and os.path.getmtime(input_file) <= os.path.getmtime(exe_file)):
_debug("File is up to date, not regenerating %s", exe_file)
return exe_file
cycompile(input_file, compiler_args)
@@ -113,30 +136,22 @@ def build(input_file, compiler_args=(), force=False):
clink(basename)
return exe_file
+
def build_and_run(args):
"""
- Build an executable program from a Cython module and runs it.
+ Build an executable program from a Cython module and run it.
- Arguments after the module name will be passed verbatimely to the
- program.
+ Arguments after the module name will be passed verbatimly to the program.
"""
- cy_args = []
- last_arg = None
- for i, arg in enumerate(args):
- if arg.startswith('-'):
- cy_args.append(arg)
- elif last_arg in ('-X', '--directive'):
- cy_args.append(arg)
- else:
- input_file = arg
- args = args[i+1:]
- break
- last_arg = arg
- else:
- raise ValueError('no input file provided')
+ program_name, args = _build(args)
+ exec_file(program_name, args)
+
+def _build(args):
+ input_file, cy_args, args = _parse_args(args)
program_name = build(input_file, cy_args)
- exec_file(program_name, args)
+ return program_name, args
+
if __name__ == '__main__':
- build_and_run(sys.argv[1:])
+ _build(sys.argv[1:])
diff --git a/Cython/Build/Cythonize.py b/Cython/Build/Cythonize.py
index c85b6eaba..179c04060 100644
--- a/Cython/Build/Cythonize.py
+++ b/Cython/Build/Cythonize.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
import os
import shutil
@@ -38,46 +38,19 @@ class _FakePool(object):
pass
-def parse_directives(option, name, value, parser):
- dest = option.dest
- old_directives = dict(getattr(parser.values, dest,
- Options.get_directive_defaults()))
- directives = Options.parse_directive_list(
- value, relaxed_bool=True, current_settings=old_directives)
- setattr(parser.values, dest, directives)
-
-
-def parse_options(option, name, value, parser):
- dest = option.dest
- options = dict(getattr(parser.values, dest, {}))
- for opt in value.split(','):
- if '=' in opt:
- n, v = opt.split('=', 1)
- v = v.lower() not in ('false', 'f', '0', 'no')
- else:
- n, v = opt, True
- options[n] = v
- setattr(parser.values, dest, options)
-
-
-def parse_compile_time_env(option, name, value, parser):
- dest = option.dest
- old_env = dict(getattr(parser.values, dest, {}))
- new_env = Options.parse_compile_time_env(value, current_settings=old_env)
- setattr(parser.values, dest, new_env)
-
-
def find_package_base(path):
base_dir, package_path = os.path.split(path)
- while os.path.isfile(os.path.join(base_dir, '__init__.py')):
+ while is_package_dir(base_dir):
base_dir, parent = os.path.split(base_dir)
package_path = '%s/%s' % (parent, package_path)
return base_dir, package_path
-
def cython_compile(path_pattern, options):
- pool = None
all_paths = map(os.path.abspath, extended_iglob(path_pattern))
+ _cython_compile_files(all_paths, options)
+
+def _cython_compile_files(all_paths, options):
+ pool = None
try:
for path in all_paths:
if options.build_inplace:
@@ -149,55 +122,89 @@ def run_distutils(args):
shutil.rmtree(temp_dir)
-def parse_args(args):
- from optparse import OptionParser
- parser = OptionParser(usage='%prog [options] [sources and packages]+')
+def create_args_parser():
+ from argparse import ArgumentParser, RawDescriptionHelpFormatter
+ from ..Compiler.CmdLine import ParseDirectivesAction, ParseOptionsAction, ParseCompileTimeEnvAction
- parser.add_option('-X', '--directive', metavar='NAME=VALUE,...',
- dest='directives', default={}, type="str",
- action='callback', callback=parse_directives,
+ parser = ArgumentParser(
+ formatter_class=RawDescriptionHelpFormatter,
+ epilog="""\
+Environment variables:
+ CYTHON_FORCE_REGEN: if set to 1, forces cythonize to regenerate the output files regardless
+ of modification times and changes.
+ Environment variables accepted by setuptools are supported to configure the C compiler and build:
+ https://setuptools.pypa.io/en/latest/userguide/ext_modules.html#compiler-and-linker-options"""
+ )
+
+ parser.add_argument('-X', '--directive', metavar='NAME=VALUE,...',
+ dest='directives', default={}, type=str,
+ action=ParseDirectivesAction,
help='set a compiler directive')
- parser.add_option('-E', '--compile-time-env', metavar='NAME=VALUE,...',
- dest='compile_time_env', default={}, type="str",
- action='callback', callback=parse_compile_time_env,
+ parser.add_argument('-E', '--compile-time-env', metavar='NAME=VALUE,...',
+ dest='compile_time_env', default={}, type=str,
+ action=ParseCompileTimeEnvAction,
help='set a compile time environment variable')
- parser.add_option('-s', '--option', metavar='NAME=VALUE',
- dest='options', default={}, type="str",
- action='callback', callback=parse_options,
+ parser.add_argument('-s', '--option', metavar='NAME=VALUE',
+ dest='options', default={}, type=str,
+ action=ParseOptionsAction,
help='set a cythonize option')
- parser.add_option('-2', dest='language_level', action='store_const', const=2, default=None,
+ parser.add_argument('-2', dest='language_level', action='store_const', const=2, default=None,
help='use Python 2 syntax mode by default')
- parser.add_option('-3', dest='language_level', action='store_const', const=3,
+ parser.add_argument('-3', dest='language_level', action='store_const', const=3,
help='use Python 3 syntax mode by default')
- parser.add_option('--3str', dest='language_level', action='store_const', const='3str',
+ parser.add_argument('--3str', dest='language_level', action='store_const', const='3str',
help='use Python 3 syntax mode by default')
- parser.add_option('-a', '--annotate', dest='annotate', action='store_true',
- help='generate annotated HTML page for source files')
-
- parser.add_option('-x', '--exclude', metavar='PATTERN', dest='excludes',
+ parser.add_argument('-a', '--annotate', action='store_const', const='default', dest='annotate',
+ help='Produce a colorized HTML version of the source.')
+ parser.add_argument('--annotate-fullc', action='store_const', const='fullc', dest='annotate',
+ help='Produce a colorized HTML version of the source '
+ 'which includes entire generated C/C++-code.')
+ parser.add_argument('-x', '--exclude', metavar='PATTERN', dest='excludes',
action='append', default=[],
help='exclude certain file patterns from the compilation')
- parser.add_option('-b', '--build', dest='build', action='store_true',
+ parser.add_argument('-b', '--build', dest='build', action='store_true', default=None,
help='build extension modules using distutils')
- parser.add_option('-i', '--inplace', dest='build_inplace', action='store_true',
+ parser.add_argument('-i', '--inplace', dest='build_inplace', action='store_true', default=None,
help='build extension modules in place using distutils (implies -b)')
- parser.add_option('-j', '--parallel', dest='parallel', metavar='N',
+ parser.add_argument('-j', '--parallel', dest='parallel', metavar='N',
type=int, default=parallel_compiles,
help=('run builds in N parallel jobs (default: %d)' %
parallel_compiles or 1))
- parser.add_option('-f', '--force', dest='force', action='store_true',
+ parser.add_argument('-f', '--force', dest='force', action='store_true', default=None,
help='force recompilation')
- parser.add_option('-q', '--quiet', dest='quiet', action='store_true',
+ parser.add_argument('-q', '--quiet', dest='quiet', action='store_true', default=None,
help='be less verbose during compilation')
- parser.add_option('--lenient', dest='lenient', action='store_true',
+ parser.add_argument('--lenient', dest='lenient', action='store_true', default=None,
help='increase Python compatibility by ignoring some compile time errors')
- parser.add_option('-k', '--keep-going', dest='keep_going', action='store_true',
+ parser.add_argument('-k', '--keep-going', dest='keep_going', action='store_true', default=None,
help='compile as much as possible, ignore compilation failures')
- parser.add_option('-M', '--depfile', action='store_true', help='produce depfiles for the sources')
+ parser.add_argument('--no-docstrings', dest='no_docstrings', action='store_true', default=None,
+ help='strip docstrings')
+ parser.add_argument('-M', '--depfile', action='store_true', help='produce depfiles for the sources')
+ parser.add_argument('sources', nargs='*')
+ return parser
+
+
+def parse_args_raw(parser, args):
+ options, unknown = parser.parse_known_args(args)
+ sources = options.sources
+ # if positional arguments were interspersed
+ # some of them are in unknown
+ for option in unknown:
+ if option.startswith('-'):
+ parser.error("unknown option "+option)
+ else:
+ sources.append(option)
+ del options.sources
+ return (options, sources)
+
+
+def parse_args(args):
+ parser = create_args_parser()
+ options, args = parse_args_raw(parser, args)
- options, args = parser.parse_args(args)
if not args:
parser.error("no source files provided")
if options.build_inplace:
@@ -207,11 +214,6 @@ def parse_args(args):
if options.language_level:
assert options.language_level in (2, 3, '3str')
options.options['language_level'] = options.language_level
- return options, args
-
-
-def main(args=None):
- options, paths = parse_args(args)
if options.lenient:
# increase Python compatibility by ignoring compile time errors
@@ -219,10 +221,26 @@ def main(args=None):
Options.error_on_uninitialized = False
if options.annotate:
- Options.annotate = True
+ Options.annotate = options.annotate
+
+ if options.no_docstrings:
+ Options.docstrings = False
+
+ return options, args
+
+
+def main(args=None):
+ options, paths = parse_args(args)
+ all_paths = []
for path in paths:
- cython_compile(path, options)
+ expanded_path = [os.path.abspath(p) for p in extended_iglob(path)]
+ if not expanded_path:
+ import sys
+ print("{}: No such file or directory: '{}'".format(sys.argv[0], path), file=sys.stderr)
+ sys.exit(1)
+ all_paths.extend(expanded_path)
+ _cython_compile_files(all_paths, options)
if __name__ == '__main__':
diff --git a/Cython/Build/Dependencies.py b/Cython/Build/Dependencies.py
index 28c48ed8c..c60cbf34a 100644
--- a/Cython/Build/Dependencies.py
+++ b/Cython/Build/Dependencies.py
@@ -10,7 +10,6 @@ import os
import shutil
import subprocess
import re, sys, time
-import warnings
from glob import iglob
from io import open as io_open
from os.path import relpath as _relpath
@@ -43,9 +42,12 @@ except:
pythran = None
from .. import Utils
-from ..Utils import (cached_function, cached_method, path_exists, write_depfile,
- safe_makedirs, copy_file_to_dir_if_newer, is_package_dir, replace_suffix)
-from ..Compiler.Main import Context, CompilationOptions, default_options
+from ..Utils import (cached_function, cached_method, path_exists,
+ safe_makedirs, copy_file_to_dir_if_newer, is_package_dir, write_depfile)
+from ..Compiler import Errors
+from ..Compiler.Main import Context
+from ..Compiler.Options import (CompilationOptions, default_options,
+ get_directive_defaults)
join_path = cached_function(os.path.join)
copy_once_if_newer = cached_function(copy_file_to_dir_if_newer)
@@ -84,11 +86,14 @@ def extended_iglob(pattern):
for path in extended_iglob(before + case + after):
yield path
return
- if '**/' in pattern:
+
+ # We always accept '/' and also '\' on Windows,
+ # because '/' is generally common for relative paths.
+ if '**/' in pattern or os.sep == '\\' and '**\\' in pattern:
seen = set()
- first, rest = pattern.split('**/', 1)
+ first, rest = re.split(r'\*\*[%s]' % ('/\\\\' if os.sep == '\\' else '/'), pattern, 1)
if first:
- first = iglob(first+'/')
+ first = iglob(first + os.sep)
else:
first = ['']
for root in first:
@@ -96,7 +101,7 @@ def extended_iglob(pattern):
if path not in seen:
seen.add(path)
yield path
- for path in extended_iglob(join_path(root, '*', '**/' + rest)):
+ for path in extended_iglob(join_path(root, '*', '**', rest)):
if path not in seen:
seen.add(path)
yield path
@@ -118,7 +123,7 @@ def nonempty(it, error_msg="expected non-empty iterator"):
def file_hash(filename):
path = os.path.normpath(filename)
prefix = ('%d:%s' % (len(path), path)).encode("UTF-8")
- m = hashlib.md5(prefix)
+ m = hashlib.sha1(prefix)
with open(path, 'rb') as f:
data = f.read(65000)
while data:
@@ -532,14 +537,14 @@ class DependencyTree(object):
for include in self.parse_dependencies(filename)[1]:
include_path = join_path(os.path.dirname(filename), include)
if not path_exists(include_path):
- include_path = self.context.find_include_file(include, None)
+ include_path = self.context.find_include_file(include, source_file_path=filename)
if include_path:
if '.' + os.path.sep in include_path:
include_path = os.path.normpath(include_path)
all.add(include_path)
all.update(self.included_files(include_path))
elif not self.quiet:
- print("Unable to locate '%s' referenced from '%s'" % (filename, include))
+ print(u"Unable to locate '%s' referenced from '%s'" % (filename, include))
return all
@cached_method
@@ -586,17 +591,18 @@ class DependencyTree(object):
return None # FIXME: error?
module_path.pop(0)
relative = '.'.join(package_path + module_path)
- pxd = self.context.find_pxd_file(relative, None)
+ pxd = self.context.find_pxd_file(relative, source_file_path=filename)
if pxd:
return pxd
if is_relative:
return None # FIXME: error?
- return self.context.find_pxd_file(module, None)
+ return self.context.find_pxd_file(module, source_file_path=filename)
@cached_method
def cimported_files(self, filename):
- if filename[-4:] == '.pyx' and path_exists(filename[:-4] + '.pxd'):
- pxd_list = [filename[:-4] + '.pxd']
+ filename_root, filename_ext = os.path.splitext(filename)
+ if filename_ext in ('.pyx', '.py') and path_exists(filename_root + '.pxd'):
+ pxd_list = [filename_root + '.pxd']
else:
pxd_list = []
# Cimports generates all possible combinations package.module
@@ -611,10 +617,10 @@ class DependencyTree(object):
@cached_method
def immediate_dependencies(self, filename):
- all = set([filename])
- all.update(self.cimported_files(filename))
- all.update(self.included_files(filename))
- return all
+ all_deps = {filename}
+ all_deps.update(self.cimported_files(filename))
+ all_deps.update(self.included_files(filename))
+ return all_deps
def all_dependencies(self, filename):
return self.transitive_merge(filename, self.immediate_dependencies, set.union)
@@ -638,7 +644,7 @@ class DependencyTree(object):
incorporate everything that has an influence on the generated code.
"""
try:
- m = hashlib.md5(__version__.encode('UTF-8'))
+ m = hashlib.sha1(__version__.encode('UTF-8'))
m.update(file_hash(filename).encode('UTF-8'))
for x in sorted(self.all_dependencies(filename)):
if os.path.splitext(x)[1] not in ('.c', '.cpp', '.h'):
@@ -726,7 +732,8 @@ def create_dependency_tree(ctx=None, quiet=False):
global _dep_tree
if _dep_tree is None:
if ctx is None:
- ctx = Context(["."], CompilationOptions(default_options))
+ ctx = Context(["."], get_directive_defaults(),
+ options=CompilationOptions(default_options))
_dep_tree = DependencyTree(ctx, quiet=quiet)
return _dep_tree
@@ -757,7 +764,7 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=
return [], {}
elif isinstance(patterns, basestring) or not isinstance(patterns, Iterable):
patterns = [patterns]
- explicit_modules = set([m.name for m in patterns if isinstance(m, Extension)])
+ explicit_modules = {m.name for m in patterns if isinstance(m, Extension)}
seen = set()
deps = create_dependency_tree(ctx, quiet=quiet)
to_exclude = set()
@@ -783,6 +790,8 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=
create_extension = ctx.options.create_extension or default_create_extension
for pattern in patterns:
+ if not isinstance(pattern, (Extension_distutils, Extension_setuptools)):
+ pattern = encode_filename_in_py2(pattern)
if isinstance(pattern, str):
filepattern = pattern
template = Extension(pattern, []) # Fake Extension without sources
@@ -795,9 +804,9 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=
if cython_sources:
filepattern = cython_sources[0]
if len(cython_sources) > 1:
- print("Warning: Multiple cython sources found for extension '%s': %s\n"
- "See http://cython.readthedocs.io/en/latest/src/userguide/sharing_declarations.html "
- "for sharing declarations among Cython files." % (pattern.name, cython_sources))
+ print(u"Warning: Multiple cython sources found for extension '%s': %s\n"
+ u"See https://cython.readthedocs.io/en/latest/src/userguide/sharing_declarations.html "
+ u"for sharing declarations among Cython files." % (pattern.name, cython_sources))
else:
# ignore non-cython modules
module_list.append(pattern)
@@ -871,15 +880,15 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=
m.sources.remove(target_file)
except ValueError:
# never seen this in the wild, but probably better to warn about this unexpected case
- print("Warning: Cython source file not found in sources list, adding %s" % file)
+ print(u"Warning: Cython source file not found in sources list, adding %s" % file)
m.sources.insert(0, file)
seen.add(name)
return module_list, module_metadata
# This is the user-exposed entry point.
-def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, force=False, language=None,
- exclude_failures=False, **options):
+def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, force=None, language=None,
+ exclude_failures=False, show_all_warnings=False, **options):
"""
Compile a set of source modules into C/C++ files and return a list of distutils
Extension objects for them.
@@ -929,6 +938,9 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
really makes sense for compiling ``.py`` files which can also
be used without compilation.
+ :param show_all_warnings: By default, not all Cython warnings are printed.
+ Set to true to show all warnings.
+
:param annotate: If ``True``, will produce a HTML file for each of the ``.pyx`` or ``.py``
files compiled. The HTML file gives an indication
of how much Python interaction there is in
@@ -941,6 +953,11 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
See examples in :ref:`determining_where_to_add_types` or
:ref:`primes`.
+
+ :param annotate-fullc: If ``True`` will produce a colorized HTML version of
+ the source which includes entire generated C/C++-code.
+
+
:param compiler_directives: Allow to set compiler directives in the ``setup.py`` like this:
``compiler_directives={'embedsignature': True}``.
See :ref:`compiler-directives`.
@@ -968,7 +985,7 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
c_options = CompilationOptions(**options)
cpp_options = CompilationOptions(**options); cpp_options.cplus = True
- ctx = c_options.create_context()
+ ctx = Context.from_options(c_options)
options = c_options
module_list, module_metadata = create_extension_list(
module_list,
@@ -978,6 +995,9 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
exclude_failures=exclude_failures,
language=language,
aliases=aliases)
+
+ fix_windows_unicode_modules(module_list)
+
deps = create_dependency_tree(ctx, quiet=quiet)
build_dir = getattr(options, 'build_dir', None)
@@ -1025,7 +1045,8 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
# setup for out of place build directory if enabled
if build_dir:
if os.path.isabs(c_file):
- warnings.warn("build_dir has no effect for absolute source paths")
+ c_file = os.path.splitdrive(c_file)[1]
+ c_file = c_file.split(os.sep, 1)[1]
c_file = os.path.join(build_dir, c_file)
dir = os.path.dirname(c_file)
safe_makedirs_once(dir)
@@ -1035,7 +1056,8 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
dependencies = deps.all_dependencies(source)
write_depfile(c_file, source, dependencies)
- if os.path.exists(c_file):
+ # Missing files and those generated by other Cython versions should always be recreated.
+ if Utils.file_generated_by_this_cython(c_file):
c_timestamp = os.path.getmtime(c_file)
else:
c_timestamp = -1
@@ -1051,9 +1073,12 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
if force or c_timestamp < dep_timestamp:
if not quiet and not force:
if source == dep:
- print("Compiling %s because it changed." % source)
+ print(u"Compiling %s because it changed." % Utils.decode_filename(source))
else:
- print("Compiling %s because it depends on %s." % (source, dep))
+ print(u"Compiling %s because it depends on %s." % (
+ Utils.decode_filename(source),
+ Utils.decode_filename(dep),
+ ))
if not force and options.cache:
fingerprint = deps.transitive_fingerprint(source, m, options)
else:
@@ -1061,7 +1086,7 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
to_compile.append((
priority, source, c_file, fingerprint, quiet,
options, not exclude_failures, module_metadata.get(m.name),
- full_module_name))
+ full_module_name, show_all_warnings))
new_sources.append(c_file)
modules_by_cfile[c_file].append(m)
else:
@@ -1085,32 +1110,26 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
if N <= 1:
nthreads = 0
if nthreads:
- # Requires multiprocessing (or Python >= 2.6)
+ import multiprocessing
+ pool = multiprocessing.Pool(
+ nthreads, initializer=_init_multiprocessing_helper)
+ # This is a bit more involved than it should be, because KeyboardInterrupts
+ # break the multiprocessing workers when using a normal pool.map().
+ # See, for example:
+ # https://noswap.com/blog/python-multiprocessing-keyboardinterrupt
try:
- import multiprocessing
- pool = multiprocessing.Pool(
- nthreads, initializer=_init_multiprocessing_helper)
- except (ImportError, OSError):
- print("multiprocessing required for parallel cythonization")
- nthreads = 0
- else:
- # This is a bit more involved than it should be, because KeyboardInterrupts
- # break the multiprocessing workers when using a normal pool.map().
- # See, for example:
- # http://noswap.com/blog/python-multiprocessing-keyboardinterrupt
- try:
- result = pool.map_async(cythonize_one_helper, to_compile, chunksize=1)
- pool.close()
- while not result.ready():
- try:
- result.get(99999) # seconds
- except multiprocessing.TimeoutError:
- pass
- except KeyboardInterrupt:
- pool.terminate()
- raise
- pool.join()
- if not nthreads:
+ result = pool.map_async(cythonize_one_helper, to_compile, chunksize=1)
+ pool.close()
+ while not result.ready():
+ try:
+ result.get(99999) # seconds
+ except multiprocessing.TimeoutError:
+ pass
+ except KeyboardInterrupt:
+ pool.terminate()
+ raise
+ pool.join()
+ else:
for args in to_compile:
cythonize_one(*args)
@@ -1130,7 +1149,7 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
if failed_modules:
for module in failed_modules:
module_list.remove(module)
- print("Failed compilations: %s" % ', '.join(sorted([
+ print(u"Failed compilations: %s" % ', '.join(sorted([
module.name for module in failed_modules])))
if options.cache:
@@ -1141,6 +1160,41 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
return module_list
+def fix_windows_unicode_modules(module_list):
+ # Hack around a distutils 3.[5678] bug on Windows for unicode module names.
+ # https://bugs.python.org/issue39432
+ if sys.platform != "win32":
+ return
+ if sys.version_info < (3, 5) or sys.version_info >= (3, 8, 2):
+ return
+
+ def make_filtered_list(ignored_symbol, old_entries):
+ class FilteredExportSymbols(list):
+ # export_symbols for unicode filename cause link errors on Windows
+ # Cython doesn't need them (it already defines PyInit with the correct linkage)
+ # so use this class as a temporary fix to stop them from being generated
+ def __contains__(self, val):
+ # so distutils doesn't "helpfully" add PyInit_<name>
+ return val == ignored_symbol or list.__contains__(self, val)
+
+ filtered_list = FilteredExportSymbols(old_entries)
+ if old_entries:
+ filtered_list.extend(name for name in old_entries if name != ignored_symbol)
+ return filtered_list
+
+ for m in module_list:
+ # TODO: use m.name.isascii() in Py3.7+
+ try:
+ m.name.encode("ascii")
+ continue
+ except UnicodeEncodeError:
+ pass
+ m.export_symbols = make_filtered_list(
+ "PyInit_" + m.name.rsplit(".", 1)[-1],
+ m.export_symbols,
+ )
+
+
if os.environ.get('XML_RESULTS'):
compile_result_dir = os.environ['XML_RESULTS']
def record_results(func):
@@ -1180,7 +1234,8 @@ else:
# TODO: Share context? Issue: pyx processing leaks into pxd module
@record_results
def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None,
- raise_on_failure=True, embedded_metadata=None, full_module_name=None,
+ raise_on_failure=True, embedded_metadata=None,
+ full_module_name=None, show_all_warnings=False,
progress=""):
from ..Compiler.Main import compile_single, default_options
from ..Compiler.Errors import CompileError, PyrexError
@@ -1196,7 +1251,7 @@ def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None,
zip_fingerprint_file = fingerprint_file_base + '.zip'
if os.path.exists(gz_fingerprint_file) or os.path.exists(zip_fingerprint_file):
if not quiet:
- print("%sFound compiled %s in cache" % (progress, pyx_file))
+ print(u"%sFound compiled %s in cache" % (progress, pyx_file))
if os.path.exists(gz_fingerprint_file):
os.utime(gz_fingerprint_file, None)
with contextlib.closing(gzip_open(gz_fingerprint_file, 'rb')) as g:
@@ -1210,12 +1265,16 @@ def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None,
z.extract(artifact, os.path.join(dirname, artifact))
return
if not quiet:
- print("%sCythonizing %s" % (progress, pyx_file))
+ print(u"%sCythonizing %s" % (progress, Utils.decode_filename(pyx_file)))
if options is None:
options = CompilationOptions(default_options)
options.output_file = c_file
options.embedded_metadata = embedded_metadata
+ old_warning_level = Errors.LEVEL
+ if show_all_warnings:
+ Errors.LEVEL = 0
+
any_failures = 0
try:
result = compile_single(pyx_file, options, full_module_name=full_module_name)
@@ -1233,6 +1292,10 @@ def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None,
import traceback
traceback.print_exc()
any_failures = 1
+ finally:
+ if show_all_warnings:
+ Errors.LEVEL = old_warning_level
+
if any_failures:
if raise_on_failure:
raise CompileError(None, pyx_file)
@@ -1250,7 +1313,7 @@ def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None,
else:
fingerprint_file = zip_fingerprint_file
with contextlib.closing(zipfile.ZipFile(
- fingerprint_file + '.tmp', 'w', zipfile_compression_mode)) as zip:
+ fingerprint_file + '.tmp', 'w', zipfile_compression_mode)) as zip:
for artifact in artifacts:
zip.write(artifact, os.path.basename(artifact))
os.rename(fingerprint_file + '.tmp', fingerprint_file)
@@ -1274,9 +1337,10 @@ def _init_multiprocessing_helper():
def cleanup_cache(cache, target_size, ratio=.85):
try:
p = subprocess.Popen(['du', '-s', '-k', os.path.abspath(cache)], stdout=subprocess.PIPE)
+ stdout, _ = p.communicate()
res = p.wait()
if res == 0:
- total_size = 1024 * int(p.stdout.read().strip().split()[0])
+ total_size = 1024 * int(stdout.strip().split()[0])
if total_size < target_size:
return
except (OSError, ValueError):
diff --git a/Cython/Build/Inline.py b/Cython/Build/Inline.py
index 69684e03f..abb891265 100644
--- a/Cython/Build/Inline.py
+++ b/Cython/Build/Inline.py
@@ -10,7 +10,9 @@ from distutils.core import Distribution, Extension
from distutils.command.build_ext import build_ext
import Cython
-from ..Compiler.Main import Context, default_options
+from ..Compiler.Main import Context
+from ..Compiler.Options import (default_options, CompilationOptions,
+ get_directive_defaults)
from ..Compiler.Visitor import CythonTransform, EnvTransform
from ..Compiler.ParseTreeTransforms import SkipDeclarations
@@ -34,6 +36,7 @@ if not IS_PY3:
else:
to_unicode = lambda x: x
+
if sys.version_info < (3, 5):
import imp
def load_dynamic(name, module_path):
@@ -48,9 +51,10 @@ else:
spec.loader.exec_module(module)
return module
+
class UnboundSymbols(EnvTransform, SkipDeclarations):
def __init__(self):
- CythonTransform.__init__(self, None)
+ super(EnvTransform, self).__init__(context=None)
self.unbound = set()
def visit_NameNode(self, node):
if not self.current_env().lookup(node.name):
@@ -65,7 +69,8 @@ class UnboundSymbols(EnvTransform, SkipDeclarations):
def unbound_symbols(code, context=None):
code = to_unicode(code)
if context is None:
- context = Context([], default_options)
+ context = Context([], get_directive_defaults(),
+ options=CompilationOptions(default_options))
from ..Compiler.ParseTreeTransforms import AnalyseDeclarationsTransform
tree = parse_from_strings('(tree fragment)', code)
for phase in Pipeline.create_pipeline(context, 'pyx'):
@@ -126,7 +131,11 @@ def _get_build_extension():
@cached_function
def _create_context(cython_include_dirs):
- return Context(list(cython_include_dirs), default_options)
+ return Context(
+ list(cython_include_dirs),
+ get_directive_defaults(),
+ options=CompilationOptions(default_options)
+ )
_cython_inline_cache = {}
@@ -170,6 +179,8 @@ def cython_inline(code, get_type=unsafe_type,
if language_level is not None:
cython_compiler_directives['language_level'] = language_level
+ key_hash = None
+
# Fast path if this has been called in this session.
_unbound_symbols = _cython_inline_cache.get(code)
if _unbound_symbols is not None:
@@ -205,7 +216,8 @@ def cython_inline(code, get_type=unsafe_type,
del kwds[name]
arg_names = sorted(kwds)
arg_sigs = tuple([(get_type(kwds[arg], ctx), arg) for arg in arg_names])
- key_hash = _inline_key(orig_code, arg_sigs, language_level)
+ if key_hash is None:
+ key_hash = _inline_key(orig_code, arg_sigs, language_level)
module_name = "_cython_inline_" + key_hash
if module_name in sys.modules:
@@ -224,6 +236,7 @@ def cython_inline(code, get_type=unsafe_type,
os.makedirs(lib_dir)
if force or not os.path.isfile(module_path):
cflags = []
+ define_macros = []
c_include_dirs = []
qualified = re.compile(r'([.\w]+)[.]')
for type, _ in arg_sigs:
@@ -234,6 +247,7 @@ def cython_inline(code, get_type=unsafe_type,
if m.groups()[0] == 'numpy':
import numpy
c_include_dirs.append(numpy.get_include())
+ define_macros.append(("NPY_NO_DEPRECATED_API", "NPY_1_7_API_VERSION"))
# cflags.append('-Wno-unused')
module_body, func_body = extract_func_code(code)
params = ', '.join(['%s %s' % a for a in arg_sigs])
@@ -256,10 +270,12 @@ def __invoke(%(params)s):
finally:
fh.close()
extension = Extension(
- name = module_name,
- sources = [pyx_file],
- include_dirs = c_include_dirs,
- extra_compile_args = cflags)
+ name=module_name,
+ sources=[pyx_file],
+ include_dirs=c_include_dirs or None,
+ extra_compile_args=cflags or None,
+ define_macros=define_macros or None,
+ )
if build_extension is None:
build_extension = _get_build_extension()
build_extension.extensions = cythonize(
@@ -322,37 +338,6 @@ def extract_func_code(code):
return '\n'.join(module), ' ' + '\n '.join(function)
-try:
- from inspect import getcallargs
-except ImportError:
- def getcallargs(func, *arg_values, **kwd_values):
- all = {}
- args, varargs, kwds, defaults = inspect.getargspec(func)
- if varargs is not None:
- all[varargs] = arg_values[len(args):]
- for name, value in zip(args, arg_values):
- all[name] = value
- for name, value in list(kwd_values.items()):
- if name in args:
- if name in all:
- raise TypeError("Duplicate argument %s" % name)
- all[name] = kwd_values.pop(name)
- if kwds is not None:
- all[kwds] = kwd_values
- elif kwd_values:
- raise TypeError("Unexpected keyword arguments: %s" % list(kwd_values))
- if defaults is None:
- defaults = ()
- first_default = len(args) - len(defaults)
- for ix, name in enumerate(args):
- if name not in all:
- if ix >= first_default:
- all[name] = defaults[ix - first_default]
- else:
- raise TypeError("Missing argument: %s" % name)
- return all
-
-
def get_body(source):
ix = source.index(':')
if source[:5] == 'lambda':
@@ -370,7 +355,7 @@ class RuntimeCompiledFunction(object):
self._body = get_body(inspect.getsource(f))
def __call__(self, *args, **kwds):
- all = getcallargs(self._f, *args, **kwds)
+ all = inspect.getcallargs(self._f, *args, **kwds)
if IS_PY3:
return cython_inline(self._body, locals=self._f.__globals__, globals=self._f.__globals__, **all)
else:
diff --git a/Cython/Build/IpythonMagic.py b/Cython/Build/IpythonMagic.py
index b3a9d7c58..3fa43c96d 100644
--- a/Cython/Build/IpythonMagic.py
+++ b/Cython/Build/IpythonMagic.py
@@ -58,16 +58,7 @@ import textwrap
IO_ENCODING = sys.getfilesystemencoding()
IS_PY2 = sys.version_info[0] < 3
-try:
- reload
-except NameError: # Python 3
- from imp import reload
-
-try:
- import hashlib
-except ImportError:
- import md5 as hashlib
-
+import hashlib
from distutils.core import Distribution, Extension
from distutils.command.build_ext import build_ext
@@ -85,6 +76,7 @@ from ..Shadow import __version__ as cython_version
from ..Compiler.Errors import CompileError
from .Inline import cython_inline, load_dynamic
from .Dependencies import cythonize
+from ..Utils import captured_fd, print_captured
PGO_CONFIG = {
@@ -191,10 +183,15 @@ class CythonMagics(Magics):
@magic_arguments.magic_arguments()
@magic_arguments.argument(
- '-a', '--annotate', action='store_true', default=False,
+ '-a', '--annotate', action='store_const', const='default', dest='annotate',
help="Produce a colorized HTML version of the source."
)
@magic_arguments.argument(
+ '--annotate-fullc', action='store_const', const='fullc', dest='annotate',
+ help="Produce a colorized HTML version of the source "
+ "which includes entire generated C/C++-code."
+ )
+ @magic_arguments.argument(
'-+', '--cplus', action='store_true', default=False,
help="Output a C++ rather than C file."
)
@@ -316,7 +313,7 @@ class CythonMagics(Magics):
if args.name:
module_name = str(args.name) # no-op in Py3
else:
- module_name = "_cython_magic_" + hashlib.md5(str(key).encode('utf-8')).hexdigest()
+ module_name = "_cython_magic_" + hashlib.sha1(str(key).encode('utf-8')).hexdigest()
html_file = os.path.join(lib_dir, module_name + '.html')
module_path = os.path.join(lib_dir, module_name + self.so_ext)
@@ -340,13 +337,25 @@ class CythonMagics(Magics):
if args.pgo:
self._profile_pgo_wrapper(extension, lib_dir)
+ def print_compiler_output(stdout, stderr, where):
+        # On Windows, compiler errors are printed to stdout, so forward both streams to 'where'.
+ print_captured(stdout, where, u"Content of stdout:\n")
+ print_captured(stderr, where, u"Content of stderr:\n")
+
+ get_stderr = get_stdout = None
try:
- self._build_extension(extension, lib_dir, pgo_step_name='use' if args.pgo else None,
- quiet=args.quiet)
- except distutils.errors.CompileError:
- # Build failed and printed error message
+ with captured_fd(1) as get_stdout:
+ with captured_fd(2) as get_stderr:
+ self._build_extension(
+ extension, lib_dir, pgo_step_name='use' if args.pgo else None, quiet=args.quiet)
+ except (distutils.errors.CompileError, distutils.errors.LinkError):
+ # Build failed, print error message from compiler/linker
+ print_compiler_output(get_stdout(), get_stderr(), sys.stderr)
return None
+ # Build seems ok, but we might still want to show any warnings that occurred
+ print_compiler_output(get_stdout(), get_stderr(), sys.stdout)
+
module = load_dynamic(module_name, module_path)
self._import_all(module)
@@ -438,12 +447,11 @@ class CythonMagics(Magics):
quiet=quiet,
annotate=args.annotate,
force=True,
+ language_level=min(3, sys.version_info[0]),
)
if args.language_level is not None:
assert args.language_level in (2, 3)
opts['language_level'] = args.language_level
- elif sys.version_info[0] >= 3:
- opts['language_level'] = 3
return cythonize([extension], **opts)
except CompileError:
return None
diff --git a/Cython/Build/Tests/TestCyCache.py b/Cython/Build/Tests/TestCyCache.py
index a3224b417..7a44d89e9 100644
--- a/Cython/Build/Tests/TestCyCache.py
+++ b/Cython/Build/Tests/TestCyCache.py
@@ -33,25 +33,31 @@ class TestCyCache(CythonTest):
a_pyx = os.path.join(self.src_dir, 'a.pyx')
a_c = a_pyx[:-4] + '.c'
- open(a_pyx, 'w').write(content1)
+ with open(a_pyx, 'w') as f:
+ f.write(content1)
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
self.assertEqual(1, len(self.cache_files('a.c*')))
- a_contents1 = open(a_c).read()
+ with open(a_c) as f:
+ a_contents1 = f.read()
os.unlink(a_c)
- open(a_pyx, 'w').write(content2)
+ with open(a_pyx, 'w') as f:
+ f.write(content2)
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
- a_contents2 = open(a_c).read()
+ with open(a_c) as f:
+ a_contents2 = f.read()
os.unlink(a_c)
self.assertNotEqual(a_contents1, a_contents2, 'C file not changed!')
self.assertEqual(2, len(self.cache_files('a.c*')))
- open(a_pyx, 'w').write(content1)
+ with open(a_pyx, 'w') as f:
+ f.write(content1)
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
self.assertEqual(2, len(self.cache_files('a.c*')))
- a_contents = open(a_c).read()
+ with open(a_c) as f:
+ a_contents = f.read()
self.assertEqual(
a_contents, a_contents1,
msg='\n'.join(list(difflib.unified_diff(
@@ -60,13 +66,15 @@ class TestCyCache(CythonTest):
def test_cycache_uses_cache(self):
a_pyx = os.path.join(self.src_dir, 'a.pyx')
a_c = a_pyx[:-4] + '.c'
- open(a_pyx, 'w').write('pass')
+ with open(a_pyx, 'w') as f:
+ f.write('pass')
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
a_cache = os.path.join(self.cache_dir, os.listdir(self.cache_dir)[0])
gzip.GzipFile(a_cache, 'wb').write('fake stuff'.encode('ascii'))
os.unlink(a_c)
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
- a_contents = open(a_c).read()
+ with open(a_c) as f:
+ a_contents = f.read()
self.assertEqual(a_contents, 'fake stuff',
'Unexpected contents: %s...' % a_contents[:100])
@@ -75,7 +83,8 @@ class TestCyCache(CythonTest):
a_c = a_pyx[:-4] + '.c'
a_h = a_pyx[:-4] + '.h'
a_api_h = a_pyx[:-4] + '_api.h'
- open(a_pyx, 'w').write('cdef public api int foo(int x): return x\n')
+ with open(a_pyx, 'w') as f:
+ f.write('cdef public api int foo(int x): return x\n')
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
expected = [a_c, a_h, a_api_h]
for output in expected:
@@ -89,7 +98,8 @@ class TestCyCache(CythonTest):
hash_pyx = os.path.join(self.src_dir, 'options.pyx')
hash_c = hash_pyx[:-len('.pyx')] + '.c'
- open(hash_pyx, 'w').write('pass')
+ with open(hash_pyx, 'w') as f:
+ f.write('pass')
self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False)
self.assertEqual(1, len(self.cache_files('options.c*')))
diff --git a/Cython/Build/Tests/TestCythonizeArgsParser.py b/Cython/Build/Tests/TestCythonizeArgsParser.py
new file mode 100644
index 000000000..c5a682dd6
--- /dev/null
+++ b/Cython/Build/Tests/TestCythonizeArgsParser.py
@@ -0,0 +1,482 @@
+from Cython.Build.Cythonize import (
+ create_args_parser, parse_args_raw, parse_args,
+ parallel_compiles
+)
+
+from Cython.Compiler import Options
+from Cython.Compiler.Tests.Utils import backup_Options, restore_Options, check_global_options
+
+from unittest import TestCase
+
+import sys
+try:
+ from StringIO import StringIO
+except ImportError:
+ from io import StringIO # doesn't accept 'str' in Py2
+
+
+class TestCythonizeArgsParser(TestCase):
+
+ def setUp(self):
+ TestCase.setUp(self)
+ self.parse_args = lambda x, parser=create_args_parser() : parse_args_raw(parser, x)
+
+
+ def are_default(self, options, skip):
+ # empty containers
+ empty_containers = ['directives', 'compile_time_env', 'options', 'excludes']
+ are_none = ['language_level', 'annotate', 'build', 'build_inplace', 'force', 'quiet', 'lenient', 'keep_going', 'no_docstrings']
+ for opt_name in empty_containers:
+ if len(getattr(options, opt_name))!=0 and (opt_name not in skip):
+ self.assertEqual(opt_name,"", msg="For option "+opt_name)
+ return False
+ for opt_name in are_none:
+ if (getattr(options, opt_name) is not None) and (opt_name not in skip):
+ self.assertEqual(opt_name,"", msg="For option "+opt_name)
+ return False
+ if options.parallel!=parallel_compiles and ('parallel' not in skip):
+ return False
+ return True
+
+ # testing directives:
+ def test_directive_short(self):
+ options, args = self.parse_args(['-X', 'cdivision=True'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['directives']))
+ self.assertEqual(options.directives['cdivision'], True)
+
+ def test_directive_long(self):
+ options, args = self.parse_args(['--directive', 'cdivision=True'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['directives']))
+ self.assertEqual(options.directives['cdivision'], True)
+
+ def test_directive_multiple(self):
+ options, args = self.parse_args(['-X', 'cdivision=True', '-X', 'c_string_type=bytes'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['directives']))
+ self.assertEqual(options.directives['cdivision'], True)
+ self.assertEqual(options.directives['c_string_type'], 'bytes')
+
+ def test_directive_multiple_v2(self):
+ options, args = self.parse_args(['-X', 'cdivision=True,c_string_type=bytes'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['directives']))
+ self.assertEqual(options.directives['cdivision'], True)
+ self.assertEqual(options.directives['c_string_type'], 'bytes')
+
+ def test_directive_value_yes(self):
+ options, args = self.parse_args(['-X', 'cdivision=YeS'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['directives']))
+ self.assertEqual(options.directives['cdivision'], True)
+
+ def test_directive_value_no(self):
+ options, args = self.parse_args(['-X', 'cdivision=no'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['directives']))
+ self.assertEqual(options.directives['cdivision'], False)
+
+ def test_directive_value_invalid(self):
+ with self.assertRaises(ValueError) as context:
+ options, args = self.parse_args(['-X', 'cdivision=sadfasd'])
+
+ def test_directive_key_invalid(self):
+ with self.assertRaises(ValueError) as context:
+ options, args = self.parse_args(['-X', 'abracadabra'])
+
+ def test_directive_no_value(self):
+ with self.assertRaises(ValueError) as context:
+ options, args = self.parse_args(['-X', 'cdivision'])
+
+ def test_directives_types(self):
+ directives = {
+ 'auto_pickle': True,
+ 'c_string_type': 'bytearray',
+ 'c_string_type': 'bytes',
+ 'c_string_type': 'str',
+ 'c_string_type': 'bytearray',
+ 'c_string_type': 'unicode',
+ 'c_string_encoding' : 'ascii',
+ 'language_level' : 2,
+ 'language_level' : 3,
+ 'language_level' : '3str',
+ 'set_initial_path' : 'my_initial_path',
+ }
+ for key, value in directives.items():
+ cmd = '{key}={value}'.format(key=key, value=str(value))
+ options, args = self.parse_args(['-X', cmd])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['directives']), msg = "Error for option: "+cmd)
+ self.assertEqual(options.directives[key], value, msg = "Error for option: "+cmd)
+
+ def test_directives_wrong(self):
+ directives = {
+ 'auto_pickle': 42, # for bool type
+ 'auto_pickle': 'NONONO', # for bool type
+ 'c_string_type': 'bites',
+ #'c_string_encoding' : 'a',
+ #'language_level' : 4,
+ }
+ for key, value in directives.items():
+ cmd = '{key}={value}'.format(key=key, value=str(value))
+ with self.assertRaises(ValueError, msg = "Error for option: "+cmd) as context:
+ options, args = self.parse_args(['-X', cmd])
+
+ def test_compile_time_env_short(self):
+ options, args = self.parse_args(['-E', 'MYSIZE=10'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['compile_time_env']))
+ self.assertEqual(options.compile_time_env['MYSIZE'], 10)
+
+ def test_compile_time_env_long(self):
+ options, args = self.parse_args(['--compile-time-env', 'MYSIZE=10'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['compile_time_env']))
+ self.assertEqual(options.compile_time_env['MYSIZE'], 10)
+
+ def test_compile_time_env_multiple(self):
+ options, args = self.parse_args(['-E', 'MYSIZE=10', '-E', 'ARRSIZE=11'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['compile_time_env']))
+ self.assertEqual(options.compile_time_env['MYSIZE'], 10)
+ self.assertEqual(options.compile_time_env['ARRSIZE'], 11)
+
+ def test_compile_time_env_multiple_v2(self):
+ options, args = self.parse_args(['-E', 'MYSIZE=10,ARRSIZE=11'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['compile_time_env']))
+ self.assertEqual(options.compile_time_env['MYSIZE'], 10)
+ self.assertEqual(options.compile_time_env['ARRSIZE'], 11)
+
+    # testing options:
+ def test_option_short(self):
+ options, args = self.parse_args(['-s', 'docstrings=True'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['options']))
+ self.assertEqual(options.options['docstrings'], True)
+
+ def test_option_long(self):
+ options, args = self.parse_args(['--option', 'docstrings=True'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['options']))
+ self.assertEqual(options.options['docstrings'], True)
+
+ def test_option_multiple(self):
+ options, args = self.parse_args(['-s', 'docstrings=True', '-s', 'buffer_max_dims=8'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['options']))
+ self.assertEqual(options.options['docstrings'], True)
+        self.assertEqual(options.options['buffer_max_dims'], True)  # -s values are coerced to bool, so 8 parses as True
+
+ def test_option_multiple_v2(self):
+ options, args = self.parse_args(['-s', 'docstrings=True,buffer_max_dims=8'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['options']))
+ self.assertEqual(options.options['docstrings'], True)
+        self.assertEqual(options.options['buffer_max_dims'], True)  # -s values are coerced to bool, so 8 parses as True
+
+ def test_option_value_yes(self):
+ options, args = self.parse_args(['-s', 'docstrings=YeS'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['options']))
+ self.assertEqual(options.options['docstrings'], True)
+
+ def test_option_value_4242(self):
+ options, args = self.parse_args(['-s', 'docstrings=4242'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['options']))
+ self.assertEqual(options.options['docstrings'], True)
+
+ def test_option_value_0(self):
+ options, args = self.parse_args(['-s', 'docstrings=0'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['options']))
+ self.assertEqual(options.options['docstrings'], False)
+
+ def test_option_value_emptystr(self):
+ options, args = self.parse_args(['-s', 'docstrings='])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['options']))
+ self.assertEqual(options.options['docstrings'], True)
+
+ def test_option_value_a_str(self):
+ options, args = self.parse_args(['-s', 'docstrings=BB'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['options']))
+ self.assertEqual(options.options['docstrings'], True)
+
+ def test_option_value_no(self):
+ options, args = self.parse_args(['-s', 'docstrings=nO'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['options']))
+ self.assertEqual(options.options['docstrings'], False)
+
+ def test_option_no_value(self):
+ options, args = self.parse_args(['-s', 'docstrings'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['options']))
+ self.assertEqual(options.options['docstrings'], True)
+
+ def test_option_any_key(self):
+ options, args = self.parse_args(['-s', 'abracadabra'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['options']))
+ self.assertEqual(options.options['abracadabra'], True)
+
+ def test_language_level_2(self):
+ options, args = self.parse_args(['-2'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['language_level']))
+ self.assertEqual(options.language_level, 2)
+
+ def test_language_level_3(self):
+ options, args = self.parse_args(['-3'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['language_level']))
+ self.assertEqual(options.language_level, 3)
+
+ def test_language_level_3str(self):
+ options, args = self.parse_args(['--3str'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['language_level']))
+ self.assertEqual(options.language_level, '3str')
+
+ def test_annotate_short(self):
+ options, args = self.parse_args(['-a'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['annotate']))
+ self.assertEqual(options.annotate, 'default')
+
+ def test_annotate_long(self):
+ options, args = self.parse_args(['--annotate'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['annotate']))
+ self.assertEqual(options.annotate, 'default')
+
+ def test_annotate_fullc(self):
+ options, args = self.parse_args(['--annotate-fullc'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['annotate']))
+ self.assertEqual(options.annotate, 'fullc')
+
+ def test_annotate_and_positional(self):
+ options, args = self.parse_args(['-a', 'foo.pyx'])
+ self.assertEqual(args, ['foo.pyx'])
+ self.assertTrue(self.are_default(options, ['annotate']))
+ self.assertEqual(options.annotate, 'default')
+
+ def test_annotate_and_optional(self):
+ options, args = self.parse_args(['-a', '--3str'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['annotate', 'language_level']))
+ self.assertEqual(options.annotate, 'default')
+ self.assertEqual(options.language_level, '3str')
+
+ def test_exclude_short(self):
+ options, args = self.parse_args(['-x', '*.pyx'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['excludes']))
+ self.assertTrue('*.pyx' in options.excludes)
+
+ def test_exclude_long(self):
+ options, args = self.parse_args(['--exclude', '*.pyx'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['excludes']))
+ self.assertTrue('*.pyx' in options.excludes)
+
+ def test_exclude_multiple(self):
+ options, args = self.parse_args(['--exclude', '*.pyx', '--exclude', '*.py', ])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['excludes']))
+ self.assertEqual(options.excludes, ['*.pyx', '*.py'])
+
+ def test_build_short(self):
+ options, args = self.parse_args(['-b'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['build']))
+ self.assertEqual(options.build, True)
+
+ def test_build_long(self):
+ options, args = self.parse_args(['--build'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['build']))
+ self.assertEqual(options.build, True)
+
+ def test_inplace_short(self):
+ options, args = self.parse_args(['-i'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['build_inplace']))
+ self.assertEqual(options.build_inplace, True)
+
+ def test_inplace_long(self):
+ options, args = self.parse_args(['--inplace'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['build_inplace']))
+ self.assertEqual(options.build_inplace, True)
+
+ def test_parallel_short(self):
+ options, args = self.parse_args(['-j', '42'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['parallel']))
+ self.assertEqual(options.parallel, 42)
+
+ def test_parallel_long(self):
+ options, args = self.parse_args(['--parallel', '42'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['parallel']))
+ self.assertEqual(options.parallel, 42)
+
+ def test_force_short(self):
+ options, args = self.parse_args(['-f'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['force']))
+ self.assertEqual(options.force, True)
+
+ def test_force_long(self):
+ options, args = self.parse_args(['--force'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['force']))
+ self.assertEqual(options.force, True)
+
+ def test_quite_short(self):
+ options, args = self.parse_args(['-q'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['quiet']))
+ self.assertEqual(options.quiet, True)
+
+ def test_quite_long(self):
+ options, args = self.parse_args(['--quiet'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['quiet']))
+ self.assertEqual(options.quiet, True)
+
+ def test_lenient_long(self):
+ options, args = self.parse_args(['--lenient'])
+ self.assertTrue(self.are_default(options, ['lenient']))
+ self.assertFalse(args)
+ self.assertEqual(options.lenient, True)
+
+ def test_keep_going_short(self):
+ options, args = self.parse_args(['-k'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['keep_going']))
+ self.assertEqual(options.keep_going, True)
+
+ def test_keep_going_long(self):
+ options, args = self.parse_args(['--keep-going'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['keep_going']))
+ self.assertEqual(options.keep_going, True)
+
+ def test_no_docstrings_long(self):
+ options, args = self.parse_args(['--no-docstrings'])
+ self.assertFalse(args)
+ self.assertTrue(self.are_default(options, ['no_docstrings']))
+ self.assertEqual(options.no_docstrings, True)
+
+ def test_file_name(self):
+ options, args = self.parse_args(['file1.pyx', 'file2.pyx'])
+ self.assertEqual(len(args), 2)
+ self.assertEqual(args[0], 'file1.pyx')
+ self.assertEqual(args[1], 'file2.pyx')
+ self.assertTrue(self.are_default(options, []))
+
+ def test_option_first(self):
+ options, args = self.parse_args(['-i', 'file.pyx'])
+ self.assertEqual(args, ['file.pyx'])
+ self.assertEqual(options.build_inplace, True)
+ self.assertTrue(self.are_default(options, ['build_inplace']))
+
+ def test_file_inbetween(self):
+ options, args = self.parse_args(['-i', 'file.pyx', '-a'])
+ self.assertEqual(args, ['file.pyx'])
+ self.assertEqual(options.build_inplace, True)
+ self.assertEqual(options.annotate, 'default')
+ self.assertTrue(self.are_default(options, ['build_inplace', 'annotate']))
+
+ def test_option_trailing(self):
+ options, args = self.parse_args(['file.pyx', '-i'])
+ self.assertEqual(args, ['file.pyx'])
+ self.assertEqual(options.build_inplace, True)
+ self.assertTrue(self.are_default(options, ['build_inplace']))
+
+ def test_interspersed_positional(self):
+ options, sources = self.parse_args([
+ 'file1.pyx', '-a',
+ 'file2.pyx'
+ ])
+ self.assertEqual(sources, ['file1.pyx', 'file2.pyx'])
+ self.assertEqual(options.annotate, 'default')
+ self.assertTrue(self.are_default(options, ['annotate']))
+
+ def test_interspersed_positional2(self):
+ options, sources = self.parse_args([
+ 'file1.pyx', '-a',
+ 'file2.pyx', '-a', 'file3.pyx'
+ ])
+ self.assertEqual(sources, ['file1.pyx', 'file2.pyx', 'file3.pyx'])
+ self.assertEqual(options.annotate, 'default')
+ self.assertTrue(self.are_default(options, ['annotate']))
+
+ def test_interspersed_positional3(self):
+ options, sources = self.parse_args([
+ '-f', 'f1', 'f2', '-a',
+ 'f3', 'f4', '-a', 'f5'
+ ])
+ self.assertEqual(sources, ['f1', 'f2', 'f3', 'f4', 'f5'])
+ self.assertEqual(options.annotate, 'default')
+ self.assertEqual(options.force, True)
+ self.assertTrue(self.are_default(options, ['annotate', 'force']))
+
+ def test_wrong_option(self):
+ old_stderr = sys.stderr
+ stderr = sys.stderr = StringIO()
+ try:
+ self.assertRaises(SystemExit, self.parse_args,
+ ['--unknown-option']
+ )
+ finally:
+ sys.stderr = old_stderr
+ self.assertTrue(stderr.getvalue())
+
+
+class TestParseArgs(TestCase):
+ def setUp(self):
+ self._options_backup = backup_Options()
+
+ def tearDown(self):
+ restore_Options(self._options_backup)
+
+ def check_default_global_options(self, white_list=[]):
+ self.assertEqual(check_global_options(self._options_backup, white_list), "")
+
+ def test_build_set_for_inplace(self):
+ options, args = parse_args(['foo.pyx', '-i'])
+ self.assertEqual(options.build, True)
+ self.check_default_global_options()
+
+ def test_lenient(self):
+ options, sources = parse_args(['foo.pyx', '--lenient'])
+ self.assertEqual(sources, ['foo.pyx'])
+ self.assertEqual(Options.error_on_unknown_names, False)
+ self.assertEqual(Options.error_on_uninitialized, False)
+ self.check_default_global_options(['error_on_unknown_names', 'error_on_uninitialized'])
+
+ def test_annotate(self):
+ options, sources = parse_args(['foo.pyx', '--annotate'])
+ self.assertEqual(sources, ['foo.pyx'])
+ self.assertEqual(Options.annotate, 'default')
+ self.check_default_global_options(['annotate'])
+
+ def test_annotate_fullc(self):
+ options, sources = parse_args(['foo.pyx', '--annotate-fullc'])
+ self.assertEqual(sources, ['foo.pyx'])
+ self.assertEqual(Options.annotate, 'fullc')
+ self.check_default_global_options(['annotate'])
+
+ def test_no_docstrings(self):
+ options, sources = parse_args(['foo.pyx', '--no-docstrings'])
+ self.assertEqual(sources, ['foo.pyx'])
+ self.assertEqual(Options.docstrings, False)
+ self.check_default_global_options(['docstrings'])
diff --git a/Cython/Build/Tests/TestDependencies.py b/Cython/Build/Tests/TestDependencies.py
new file mode 100644
index 000000000..d3888117d
--- /dev/null
+++ b/Cython/Build/Tests/TestDependencies.py
@@ -0,0 +1,142 @@
+import contextlib
+import os.path
+import sys
+import tempfile
+import unittest
+from io import open
+from os.path import join as pjoin
+
+from ..Dependencies import extended_iglob
+
+
+@contextlib.contextmanager
+def writable_file(dir_path, filename):
+ with open(pjoin(dir_path, filename), "w", encoding="utf8") as f:
+ yield f
+
+
+class TestGlobbing(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ cls._orig_dir = os.getcwd()
+ if sys.version_info[0] < 3:
+ temp_path = cls._tmpdir = tempfile.mkdtemp()
+ else:
+ cls._tmpdir = tempfile.TemporaryDirectory()
+ temp_path = cls._tmpdir.name
+ os.chdir(temp_path)
+
+ for dir1 in "abcd":
+ for dir1x in [dir1, dir1 + 'x']:
+ for dir2 in "xyz":
+ dir_path = pjoin(dir1x, dir2)
+ os.makedirs(dir_path)
+ with writable_file(dir_path, "file2_pyx.pyx") as f:
+ f.write(u'""" PYX """')
+ with writable_file(dir_path, "file2_py.py") as f:
+ f.write(u'""" PY """')
+
+ with writable_file(dir1x, "file1_pyx.pyx") as f:
+ f.write(u'""" PYX """')
+ with writable_file(dir1x, "file1_py.py") as f:
+ f.write(u'""" PY """')
+
+ @classmethod
+ def tearDownClass(cls):
+ os.chdir(cls._orig_dir)
+ if sys.version_info[0] < 3:
+ import shutil
+ shutil.rmtree(cls._tmpdir)
+ else:
+ cls._tmpdir.cleanup()
+
+ def files_equal(self, pattern, expected_files):
+ expected_files = sorted(expected_files)
+        # It's the user's choice whether '/' will appear on Windows.
+ matched_files = sorted(path.replace('/', os.sep) for path in extended_iglob(pattern))
+ self.assertListEqual(matched_files, expected_files) # /
+
+ # Special case for Windows: also support '\' in patterns.
+ if os.sep == '\\' and '/' in pattern:
+ matched_files = sorted(extended_iglob(pattern.replace('/', '\\')))
+ self.assertListEqual(matched_files, expected_files) # \
+
+ def test_extended_iglob_simple(self):
+ ax_files = [pjoin("a", "x", "file2_pyx.pyx"), pjoin("a", "x", "file2_py.py")]
+ self.files_equal("a/x/*", ax_files)
+ self.files_equal("a/x/*.c12", [])
+ self.files_equal("a/x/*.{py,pyx,c12}", ax_files)
+ self.files_equal("a/x/*.{py,pyx}", ax_files)
+ self.files_equal("a/x/*.{pyx}", ax_files[:1])
+ self.files_equal("a/x/*.pyx", ax_files[:1])
+ self.files_equal("a/x/*.{py}", ax_files[1:])
+ self.files_equal("a/x/*.py", ax_files[1:])
+
+ def test_extended_iglob_simple_star(self):
+ for basedir in "ad":
+ files = [
+ pjoin(basedir, dirname, filename)
+ for dirname in "xyz"
+ for filename in ["file2_pyx.pyx", "file2_py.py"]
+ ]
+ self.files_equal(basedir + "/*/*", files)
+ self.files_equal(basedir + "/*/*.c12", [])
+ self.files_equal(basedir + "/*/*.{py,pyx,c12}", files)
+ self.files_equal(basedir + "/*/*.{py,pyx}", files)
+ self.files_equal(basedir + "/*/*.{pyx}", files[::2])
+ self.files_equal(basedir + "/*/*.pyx", files[::2])
+ self.files_equal(basedir + "/*/*.{py}", files[1::2])
+ self.files_equal(basedir + "/*/*.py", files[1::2])
+
+ for subdir in "xy*":
+ files = [
+ pjoin(basedir, dirname, filename)
+ for dirname in "xyz"
+ if subdir in ('*', dirname)
+ for filename in ["file2_pyx.pyx", "file2_py.py"]
+ ]
+ path = basedir + '/' + subdir + '/'
+ self.files_equal(path + "*", files)
+ self.files_equal(path + "*.{py,pyx}", files)
+ self.files_equal(path + "*.{pyx}", files[::2])
+ self.files_equal(path + "*.pyx", files[::2])
+ self.files_equal(path + "*.{py}", files[1::2])
+ self.files_equal(path + "*.py", files[1::2])
+
+ def test_extended_iglob_double_star(self):
+ basedirs = os.listdir(".")
+ files = [
+ pjoin(basedir, dirname, filename)
+ for basedir in basedirs
+ for dirname in "xyz"
+ for filename in ["file2_pyx.pyx", "file2_py.py"]
+ ]
+ all_files = [
+ pjoin(basedir, filename)
+ for basedir in basedirs
+ for filename in ["file1_pyx.pyx", "file1_py.py"]
+ ] + files
+ self.files_equal("*/*/*", files)
+ self.files_equal("*/*/**/*", files)
+ self.files_equal("*/**/*.*", all_files)
+ self.files_equal("**/*.*", all_files)
+ self.files_equal("*/**/*.c12", [])
+ self.files_equal("**/*.c12", [])
+ self.files_equal("*/*/*.{py,pyx,c12}", files)
+ self.files_equal("*/*/**/*.{py,pyx,c12}", files)
+ self.files_equal("*/**/*/*.{py,pyx,c12}", files)
+ self.files_equal("**/*/*/*.{py,pyx,c12}", files)
+ self.files_equal("**/*.{py,pyx,c12}", all_files)
+ self.files_equal("*/*/*.{py,pyx}", files)
+ self.files_equal("**/*/*/*.{py,pyx}", files)
+ self.files_equal("*/**/*/*.{py,pyx}", files)
+ self.files_equal("**/*.{py,pyx}", all_files)
+ self.files_equal("*/*/*.{pyx}", files[::2])
+ self.files_equal("**/*.{pyx}", all_files[::2])
+ self.files_equal("*/**/*/*.pyx", files[::2])
+ self.files_equal("*/*/*.pyx", files[::2])
+ self.files_equal("**/*.pyx", all_files[::2])
+ self.files_equal("*/*/*.{py}", files[1::2])
+ self.files_equal("**/*.{py}", all_files[1::2])
+ self.files_equal("*/*/*.py", files[1::2])
+ self.files_equal("**/*.py", all_files[1::2])
diff --git a/Cython/Build/Tests/TestInline.py b/Cython/Build/Tests/TestInline.py
index d20948808..35d9a29cd 100644
--- a/Cython/Build/Tests/TestInline.py
+++ b/Cython/Build/Tests/TestInline.py
@@ -1,4 +1,6 @@
-import os, tempfile
+import os
+import tempfile
+import unittest
from Cython.Shadow import inline
from Cython.Build.Inline import safe_type
from Cython.TestUtils import CythonTest
@@ -85,12 +87,26 @@ class TestInline(CythonTest):
inline(inline_divcode, language_level=3)['f'](5,2),
2.5
)
+ self.assertEqual(
+ inline(inline_divcode, language_level=2)['f'](5,2),
+ 2
+ )
- if has_numpy:
-
- def test_numpy(self):
- import numpy
- a = numpy.ndarray((10, 20))
- a[0,0] = 10
- self.assertEqual(safe_type(a), 'numpy.ndarray[numpy.float64_t, ndim=2]')
- self.assertEqual(inline("return a[0,0]", a=a, **self.test_kwds), 10.0)
+ def test_repeated_use(self):
+ inline_mulcode = "def f(int a, int b): return a * b"
+ self.assertEqual(inline(inline_mulcode)['f'](5, 2), 10)
+ self.assertEqual(inline(inline_mulcode)['f'](5, 3), 15)
+ self.assertEqual(inline(inline_mulcode)['f'](6, 2), 12)
+ self.assertEqual(inline(inline_mulcode)['f'](5, 2), 10)
+
+ f = inline(inline_mulcode)['f']
+ self.assertEqual(f(5, 2), 10)
+ self.assertEqual(f(5, 3), 15)
+
+ @unittest.skipIf(not has_numpy, "NumPy is not available")
+ def test_numpy(self):
+ import numpy
+ a = numpy.ndarray((10, 20))
+ a[0,0] = 10
+ self.assertEqual(safe_type(a), 'numpy.ndarray[numpy.float64_t, ndim=2]')
+ self.assertEqual(inline("return a[0,0]", a=a, **self.test_kwds), 10.0)
diff --git a/Cython/Build/Tests/TestIpythonMagic.py b/Cython/Build/Tests/TestIpythonMagic.py
index 24213091b..febb480ac 100644
--- a/Cython/Build/Tests/TestIpythonMagic.py
+++ b/Cython/Build/Tests/TestIpythonMagic.py
@@ -6,10 +6,12 @@
from __future__ import absolute_import
import os
+import io
import sys
from contextlib import contextmanager
from Cython.Build import IpythonMagic
from Cython.TestUtils import CythonTest
+from Cython.Compiler.Annotate import AnnotationCCodeWriter
try:
import IPython.testing.globalipapp
@@ -28,6 +30,26 @@ try:
except ImportError:
pass
+
+@contextmanager
+def capture_output():
+ backup = sys.stdout, sys.stderr
+ try:
+ replacement = [
+ io.TextIOWrapper(io.BytesIO(), encoding=sys.stdout.encoding),
+ io.TextIOWrapper(io.BytesIO(), encoding=sys.stderr.encoding),
+ ]
+ sys.stdout, sys.stderr = replacement
+ output = []
+ yield output
+ finally:
+ sys.stdout, sys.stderr = backup
+ for wrapper in replacement:
+ wrapper.seek(0) # rewind
+ output.append(wrapper.read())
+ wrapper.close()
+
+
code = u"""\
def f(x):
return 2*x
@@ -47,6 +69,27 @@ def main():
main()
"""
+compile_error_code = u'''\
+cdef extern from *:
+ """
+ xxx a=1;
+ """
+ int a;
+def doit():
+ return a
+'''
+
+compile_warning_code = u'''\
+cdef extern from *:
+ """
+ #pragma message ( "CWarning" )
+ int a = 42;
+ """
+ int a;
+def doit():
+ return a
+'''
+
if sys.platform == 'win32':
# not using IPython's decorators here because they depend on "nose"
@@ -142,6 +185,39 @@ class TestIPythonMagic(CythonTest):
self.assertEqual(ip.user_ns['g'], 2 // 10)
self.assertEqual(ip.user_ns['h'], 2 // 10)
+ def test_cython_compile_error_shown(self):
+ ip = self._ip
+ with capture_output() as out:
+ ip.run_cell_magic('cython', '-3', compile_error_code)
+ captured_out, captured_err = out
+
+        # it is possible that the C-level output is captured by the distutils
+        # extension machinery (and not by us) and printed to stdout instead:
+ captured_all = captured_out + "\n" + captured_err
+ self.assertTrue("error" in captured_all, msg="error in " + captured_all)
+
+ def test_cython_link_error_shown(self):
+ ip = self._ip
+ with capture_output() as out:
+ ip.run_cell_magic('cython', '-3 -l=xxxxxxxx', code)
+ captured_out, captured_err = out
+
+        # it is possible that the C-level output is captured by the distutils
+        # extension machinery (and not by us) and printed to stdout instead:
+ captured_all = captured_out + "\n!" + captured_err
+ self.assertTrue("error" in captured_all, msg="error in " + captured_all)
+
+ def test_cython_warning_shown(self):
+ ip = self._ip
+ with capture_output() as out:
+            # force a rebuild: after the first successful build no further
+            # build step is performed, so no warning would be emitted
+ ip.run_cell_magic('cython', '-3 -f', compile_warning_code)
+ captured_out, captured_err = out
+
+ # check that warning was printed to stdout even if build hasn't failed
+ self.assertTrue("CWarning" in captured_out)
+
@skip_win32('Skip on Windows')
def test_cython3_pgo(self):
# The Cython cell defines the functions f() and call().
@@ -203,3 +279,29 @@ x = sin(0.0)
ip.ex('g = f(10)')
self.assertEqual(ip.user_ns['g'], 20.0)
self.assertEqual([normal_log.INFO], normal_log.thresholds)
+
+ def test_cython_no_annotate(self):
+ ip = self._ip
+ html = ip.run_cell_magic('cython', '', code)
+ self.assertTrue(html is None)
+
+ def test_cython_annotate(self):
+ ip = self._ip
+ html = ip.run_cell_magic('cython', '--annotate', code)
+ # somewhat brittle way to differentiate between annotated htmls
+ # with/without complete source code:
+ self.assertTrue(AnnotationCCodeWriter.COMPLETE_CODE_TITLE not in html.data)
+
+ def test_cython_annotate_default(self):
+ ip = self._ip
+ html = ip.run_cell_magic('cython', '-a', code)
+ # somewhat brittle way to differentiate between annotated htmls
+ # with/without complete source code:
+ self.assertTrue(AnnotationCCodeWriter.COMPLETE_CODE_TITLE not in html.data)
+
+ def test_cython_annotate_complete_c_code(self):
+ ip = self._ip
+ html = ip.run_cell_magic('cython', '--annotate-fullc', code)
+ # somewhat brittle way to differentiate between annotated htmls
+ # with/without complete source code:
+ self.assertTrue(AnnotationCCodeWriter.COMPLETE_CODE_TITLE in html.data)
diff --git a/Cython/Build/Tests/TestRecythonize.py b/Cython/Build/Tests/TestRecythonize.py
new file mode 100644
index 000000000..eb87018cb
--- /dev/null
+++ b/Cython/Build/Tests/TestRecythonize.py
@@ -0,0 +1,212 @@
+import shutil
+import os
+import tempfile
+import time
+
+import Cython.Build.Dependencies
+import Cython.Utils
+from Cython.TestUtils import CythonTest
+
+
+def fresh_cythonize(*args, **kwargs):
+ Cython.Utils.clear_function_caches()
+ Cython.Build.Dependencies._dep_tree = None # discard method caches
+ Cython.Build.Dependencies.cythonize(*args, **kwargs)
+
+class TestRecythonize(CythonTest):
+
+ def setUp(self):
+ CythonTest.setUp(self)
+ self.temp_dir = (
+ tempfile.mkdtemp(
+ prefix='recythonize-test',
+ dir='TEST_TMP' if os.path.isdir('TEST_TMP') else None
+ )
+ )
+
+ def tearDown(self):
+ CythonTest.tearDown(self)
+ shutil.rmtree(self.temp_dir)
+
+ def test_recythonize_pyx_on_pxd_change(self):
+
+ src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)
+
+ a_pxd = os.path.join(src_dir, 'a.pxd')
+ a_pyx = os.path.join(src_dir, 'a.pyx')
+ a_c = os.path.join(src_dir, 'a.c')
+ dep_tree = Cython.Build.Dependencies.create_dependency_tree()
+
+ with open(a_pxd, 'w') as f:
+ f.write('cdef int value\n')
+
+ with open(a_pyx, 'w') as f:
+ f.write('value = 1\n')
+
+
+ # The dependencies for "a.pyx" are "a.pxd" and "a.pyx".
+ self.assertEqual({a_pxd, a_pyx}, dep_tree.all_dependencies(a_pyx))
+
+ # Cythonize to create a.c
+ fresh_cythonize(a_pyx)
+
+ # Sleep to address coarse time-stamp precision.
+ time.sleep(1)
+
+ with open(a_c) as f:
+ a_c_contents1 = f.read()
+
+ with open(a_pxd, 'w') as f:
+ f.write('cdef double value\n')
+
+ fresh_cythonize(a_pyx)
+
+ with open(a_c) as f:
+ a_c_contents2 = f.read()
+
+ self.assertTrue("__pyx_v_1a_value = 1;" in a_c_contents1)
+ self.assertFalse("__pyx_v_1a_value = 1;" in a_c_contents2)
+ self.assertTrue("__pyx_v_1a_value = 1.0;" in a_c_contents2)
+ self.assertFalse("__pyx_v_1a_value = 1.0;" in a_c_contents1)
+
+
+ def test_recythonize_py_on_pxd_change(self):
+
+ src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)
+
+ a_pxd = os.path.join(src_dir, 'a.pxd')
+ a_py = os.path.join(src_dir, 'a.py')
+ a_c = os.path.join(src_dir, 'a.c')
+ dep_tree = Cython.Build.Dependencies.create_dependency_tree()
+
+ with open(a_pxd, 'w') as f:
+ f.write('cdef int value\n')
+
+ with open(a_py, 'w') as f:
+ f.write('value = 1\n')
+
+
+ # The dependencies for "a.py" are "a.pxd" and "a.py".
+ self.assertEqual({a_pxd, a_py}, dep_tree.all_dependencies(a_py))
+
+ # Cythonize to create a.c
+ fresh_cythonize(a_py)
+
+ # Sleep to address coarse time-stamp precision.
+ time.sleep(1)
+
+ with open(a_c) as f:
+ a_c_contents1 = f.read()
+
+ with open(a_pxd, 'w') as f:
+ f.write('cdef double value\n')
+
+ fresh_cythonize(a_py)
+
+ with open(a_c) as f:
+ a_c_contents2 = f.read()
+
+
+ self.assertTrue("__pyx_v_1a_value = 1;" in a_c_contents1)
+ self.assertFalse("__pyx_v_1a_value = 1;" in a_c_contents2)
+ self.assertTrue("__pyx_v_1a_value = 1.0;" in a_c_contents2)
+ self.assertFalse("__pyx_v_1a_value = 1.0;" in a_c_contents1)
+
+ def test_recythonize_pyx_on_dep_pxd_change(self):
+ src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)
+
+ a_pxd = os.path.join(src_dir, 'a.pxd')
+ a_pyx = os.path.join(src_dir, 'a.pyx')
+ b_pyx = os.path.join(src_dir, 'b.pyx')
+ b_c = os.path.join(src_dir, 'b.c')
+ dep_tree = Cython.Build.Dependencies.create_dependency_tree()
+
+ with open(a_pxd, 'w') as f:
+ f.write('cdef int value\n')
+
+ with open(a_pyx, 'w') as f:
+ f.write('value = 1\n')
+
+ with open(b_pyx, 'w') as f:
+ f.write('cimport a\n' + 'a.value = 2\n')
+
+
+ # The dependencies for "b.pyx" are "a.pxd" and "b.pyx".
+ self.assertEqual({a_pxd, b_pyx}, dep_tree.all_dependencies(b_pyx))
+
+
+ # Cythonize to create b.c
+ fresh_cythonize([a_pyx, b_pyx])
+
+ # Sleep to address coarse time-stamp precision.
+ time.sleep(1)
+
+ with open(b_c) as f:
+ b_c_contents1 = f.read()
+
+ with open(a_pxd, 'w') as f:
+ f.write('cdef double value\n')
+
+ fresh_cythonize([a_pyx, b_pyx])
+
+ with open(b_c) as f:
+ b_c_contents2 = f.read()
+
+
+
+ self.assertTrue("__pyx_v_1a_value = 2;" in b_c_contents1)
+ self.assertFalse("__pyx_v_1a_value = 2;" in b_c_contents2)
+ self.assertTrue("__pyx_v_1a_value = 2.0;" in b_c_contents2)
+ self.assertFalse("__pyx_v_1a_value = 2.0;" in b_c_contents1)
+
+
+
+ def test_recythonize_py_on_dep_pxd_change(self):
+
+ src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)
+
+ a_pxd = os.path.join(src_dir, 'a.pxd')
+ a_pyx = os.path.join(src_dir, 'a.pyx')
+ b_pxd = os.path.join(src_dir, 'b.pxd')
+ b_py = os.path.join(src_dir, 'b.py')
+ b_c = os.path.join(src_dir, 'b.c')
+ dep_tree = Cython.Build.Dependencies.create_dependency_tree()
+
+ with open(a_pxd, 'w') as f:
+ f.write('cdef int value\n')
+
+ with open(a_pyx, 'w') as f:
+ f.write('value = 1\n')
+
+ with open(b_pxd, 'w') as f:
+ f.write('cimport a\n')
+
+ with open(b_py, 'w') as f:
+ f.write('a.value = 2\n')
+
+
+ # The dependencies for b.py are "a.pxd", "b.pxd" and "b.py".
+ self.assertEqual({a_pxd, b_pxd, b_py}, dep_tree.all_dependencies(b_py))
+
+
+ # Cythonize to create b.c
+ fresh_cythonize([a_pyx, b_py])
+
+ # Sleep to address coarse time-stamp precision.
+ time.sleep(1)
+
+ with open(b_c) as f:
+ b_c_contents1 = f.read()
+
+ with open(a_pxd, 'w') as f:
+ f.write('cdef double value\n')
+
+ fresh_cythonize([a_pyx, b_py])
+
+ with open(b_c) as f:
+ b_c_contents2 = f.read()
+
+ self.assertTrue("__pyx_v_1a_value = 2;" in b_c_contents1)
+ self.assertFalse("__pyx_v_1a_value = 2;" in b_c_contents2)
+ self.assertTrue("__pyx_v_1a_value = 2.0;" in b_c_contents2)
+ self.assertFalse("__pyx_v_1a_value = 2.0;" in b_c_contents1)
diff --git a/Cython/Build/Tests/TestStripLiterals.py b/Cython/Build/Tests/TestStripLiterals.py
index a7572a508..cbe5c65a9 100644
--- a/Cython/Build/Tests/TestStripLiterals.py
+++ b/Cython/Build/Tests/TestStripLiterals.py
@@ -54,4 +54,3 @@ class TestStripLiterals(CythonTest):
def test_extern(self):
self.t("cdef extern from 'a.h': # comment",
"cdef extern from '_L1_': #_L2_")
-