| author | Eric Wieser <wieser.eric@gmail.com> | 2019-08-19 19:16:44 -0500 |
|---|---|---|
| committer | Eric Wieser <wieser.eric@gmail.com> | 2019-08-19 19:16:44 -0500 |
| commit | 0f5e376d3eb6118b783cdd3ecd27722c2d1934ba (patch) | |
| tree | c44850b579cbd27993c45dda1a7922e2d109b24f /numpy/distutils | |
| parent | 483f565d85dadc899f94710531fba8355d554d59 (diff) | |
| parent | 98bdde643af6443d68a8c6233807b75bd3f0ed80 (diff) | |
| download | numpy-0f5e376d3eb6118b783cdd3ecd27722c2d1934ba.tar.gz | |
Merge remote-tracking branch 'upstream/master' into fix-if-fields
Diffstat (limited to 'numpy/distutils')
30 files changed, 1176 insertions, 693 deletions
diff --git a/numpy/distutils/_shell_utils.py b/numpy/distutils/_shell_utils.py new file mode 100644 index 000000000..82abd5f4e --- /dev/null +++ b/numpy/distutils/_shell_utils.py @@ -0,0 +1,91 @@ +""" +Helper functions for interacting with the shell, and consuming shell-style +parameters provided in config files. +""" +import os +import shlex +import subprocess +try: + from shlex import quote +except ImportError: + from pipes import quote + +__all__ = ['WindowsParser', 'PosixParser', 'NativeParser'] + + +class CommandLineParser: + """ + An object that knows how to split and join command-line arguments. + + It must be true that ``argv == split(join(argv))`` for all ``argv``. + The reverse neednt be true - `join(split(cmd))` may result in the addition + or removal of unnecessary escaping. + """ + @staticmethod + def join(argv): + """ Join a list of arguments into a command line string """ + raise NotImplementedError + + @staticmethod + def split(cmd): + """ Split a command line string into a list of arguments """ + raise NotImplementedError + + +class WindowsParser: + """ + The parsing behavior used by `subprocess.call("string")` on Windows, which + matches the Microsoft C/C++ runtime. + + Note that this is _not_ the behavior of cmd. + """ + @staticmethod + def join(argv): + # note that list2cmdline is specific to the windows syntax + return subprocess.list2cmdline(argv) + + @staticmethod + def split(cmd): + import ctypes # guarded import for systems without ctypes + try: + ctypes.windll + except AttributeError: + raise NotImplementedError + + # Windows has special parsing rules for the executable (no quotes), + # that we do not care about - insert a dummy element + if not cmd: + return [] + cmd = 'dummy ' + cmd + + CommandLineToArgvW = ctypes.windll.shell32.CommandLineToArgvW + CommandLineToArgvW.restype = ctypes.POINTER(ctypes.c_wchar_p) + CommandLineToArgvW.argtypes = (ctypes.c_wchar_p, ctypes.POINTER(ctypes.c_int)) + + nargs = ctypes.c_int() + lpargs = CommandLineToArgvW(cmd, ctypes.byref(nargs)) + args = [lpargs[i] for i in range(nargs.value)] + assert not ctypes.windll.kernel32.LocalFree(lpargs) + + # strip the element we inserted + assert args[0] == "dummy" + return args[1:] + + +class PosixParser: + """ + The parsing behavior used by `subprocess.call("string", shell=True)` on Posix. 
+ """ + @staticmethod + def join(argv): + return ' '.join(quote(arg) for arg in argv) + + @staticmethod + def split(cmd): + return shlex.split(cmd, posix=True) + + +if os.name == 'nt': + NativeParser = WindowsParser +elif os.name == 'posix': + NativeParser = PosixParser diff --git a/numpy/distutils/ccompiler.py b/numpy/distutils/ccompiler.py index 5b7cb3fcf..14451fa66 100644 --- a/numpy/distutils/ccompiler.py +++ b/numpy/distutils/ccompiler.py @@ -17,7 +17,9 @@ from distutils.version import LooseVersion from numpy.distutils import log from numpy.distutils.compat import get_exception -from numpy.distutils.exec_command import filepath_from_subprocess_output +from numpy.distutils.exec_command import ( + filepath_from_subprocess_output, forward_bytes_to_stdout +) from numpy.distutils.misc_util import cyg2win32, is_sequence, mingw32, \ get_num_build_jobs, \ _commandline_dep_string @@ -159,11 +161,9 @@ def CCompiler_spawn(self, cmd, display=None): if is_sequence(cmd): cmd = ' '.join(list(cmd)) - try: - print(o) - except UnicodeError: - # When installing through pip, `o` can contain non-ascii chars - pass + + forward_bytes_to_stdout(o) + if re.search(b'Too many open files', o): msg = '\nTry rerunning setup command until build succeeds.' else: @@ -639,7 +639,7 @@ def CCompiler_get_version(self, force=False, ok_status=[0]): return version try: - output = subprocess.check_output(version_cmd) + output = subprocess.check_output(version_cmd, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as exc: output = exc.output status = exc.returncode @@ -796,63 +796,3 @@ for _cc in ['msvc9', 'msvc', '_msvc', 'bcpp', 'cygwinc', 'emxc', 'unixc']: if _m is not None: setattr(_m, 'gen_lib_options', gen_lib_options) - -##Fix distutils.util.split_quoted: -# NOTE: I removed this fix in revision 4481 (see ticket #619), but it appears -# that removing this fix causes f2py problems on Windows XP (see ticket #723). -# Specifically, on WinXP when gfortran is installed in a directory path, which -# contains spaces, then f2py is unable to find it. -import string -_wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace) -_squote_re = re.compile(r"'(?:[^'\\]|\\.)*'") -_dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"') -_has_white_re = re.compile(r'\s') -def split_quoted(s): - s = s.strip() - words = [] - pos = 0 - - while s: - m = _wordchars_re.match(s, pos) - end = m.end() - if end == len(s): - words.append(s[:end]) - break - - if s[end] in string.whitespace: # unescaped, unquoted whitespace: now - words.append(s[:end]) # we definitely have a word delimiter - s = s[end:].lstrip() - pos = 0 - - elif s[end] == '\\': # preserve whatever is being escaped; - # will become part of the current word - s = s[:end] + s[end+1:] - pos = end+1 - - else: - if s[end] == "'": # slurp singly-quoted string - m = _squote_re.match(s, end) - elif s[end] == '"': # slurp doubly-quoted string - m = _dquote_re.match(s, end) - else: - raise RuntimeError("this can't happen (bad char '%c')" % s[end]) - - if m is None: - raise ValueError("bad string (mismatched %s quotes?)" % s[end]) - - (beg, end) = m.span() - if _has_white_re.search(s[beg+1:end-1]): - s = s[:beg] + s[beg+1:end-1] + s[end:] - pos = m.end() - 2 - else: - # Keeping quotes when a quoted word does not contain - # white-space. 
XXX: send a patch to distutils - pos = m.end() - - if pos >= len(s): - words.append(s) - break - - return words -ccompiler.split_quoted = split_quoted -##Fix distutils.util.split_quoted: diff --git a/numpy/distutils/command/autodist.py b/numpy/distutils/command/autodist.py index d5e78963c..9c98b84d8 100644 --- a/numpy/distutils/command/autodist.py +++ b/numpy/distutils/command/autodist.py @@ -3,23 +3,24 @@ """ from __future__ import division, absolute_import, print_function +import textwrap # We put them here since they could be easily reused outside numpy.distutils def check_inline(cmd): """Return the inline identifier (may be empty).""" cmd._check_compiler() - body = """ -#ifndef __cplusplus -static %(inline)s int static_func (void) -{ - return 0; -} -%(inline)s int nostatic_func (void) -{ - return 0; -} -#endif""" + body = textwrap.dedent(""" + #ifndef __cplusplus + static %(inline)s int static_func (void) + { + return 0; + } + %(inline)s int nostatic_func (void) + { + return 0; + } + #endif""") for kw in ['inline', '__inline__', '__inline']: st = cmd.try_compile(body % {'inline': kw}, None, None) @@ -28,15 +29,16 @@ static %(inline)s int static_func (void) return '' + def check_restrict(cmd): """Return the restrict identifier (may be empty).""" cmd._check_compiler() - body = """ -static int static_func (char * %(restrict)s a) -{ - return 0; -} -""" + body = textwrap.dedent(""" + static int static_func (char * %(restrict)s a) + { + return 0; + } + """) for kw in ['restrict', '__restrict__', '__restrict']: st = cmd.try_compile(body % {'restrict': kw}, None, None) @@ -45,52 +47,76 @@ static int static_func (char * %(restrict)s a) return '' + def check_compiler_gcc4(cmd): """Return True if the C compiler is GCC 4.x.""" cmd._check_compiler() - body = """ -int -main() -{ -#if (! defined __GNUC__) || (__GNUC__ < 4) -#error gcc >= 4 required -#endif - return 0; -} -""" + body = textwrap.dedent(""" + int + main() + { + #if (! 
defined __GNUC__) || (__GNUC__ < 4) + #error gcc >= 4 required + #endif + return 0; + } + """) return cmd.try_compile(body, None, None) def check_gcc_function_attribute(cmd, attribute, name): """Return True if the given function attribute is supported.""" cmd._check_compiler() - body = """ -#pragma GCC diagnostic error "-Wattributes" -#pragma clang diagnostic error "-Wattributes" - -int %s %s(void*); - -int -main() -{ - return 0; -} -""" % (attribute, name) + body = textwrap.dedent(""" + #pragma GCC diagnostic error "-Wattributes" + #pragma clang diagnostic error "-Wattributes" + + int %s %s(void*); + + int + main() + { + return 0; + } + """) % (attribute, name) return cmd.try_compile(body, None, None) != 0 + +def check_gcc_function_attribute_with_intrinsics(cmd, attribute, name, code, + include): + """Return True if the given function attribute is supported with + intrinsics.""" + cmd._check_compiler() + body = textwrap.dedent(""" + #include<%s> + int %s %s(void) + { + %s; + return 0; + } + + int + main() + { + return 0; + } + """) % (include, attribute, name, code) + return cmd.try_compile(body, None, None) != 0 + + def check_gcc_variable_attribute(cmd, attribute): """Return True if the given variable attribute is supported.""" cmd._check_compiler() - body = """ -#pragma GCC diagnostic error "-Wattributes" -#pragma clang diagnostic error "-Wattributes" - -int %s foo; - -int -main() -{ - return 0; -} -""" % (attribute, ) + body = textwrap.dedent(""" + #pragma GCC diagnostic error "-Wattributes" + #pragma clang diagnostic error "-Wattributes" + + int %s foo; + + int + main() + { + return 0; + } + """) % (attribute, ) return cmd.try_compile(body, None, None) != 0 diff --git a/numpy/distutils/command/build_ext.py b/numpy/distutils/command/build_ext.py index ab9d585a5..ef54fb25e 100644 --- a/numpy/distutils/command/build_ext.py +++ b/numpy/distutils/command/build_ext.py @@ -281,8 +281,8 @@ class build_ext (old_build_ext): runtime_lib = os.path.join(self.extra_dll_dir, fn) copy_file(runtime_lib, shared_lib_dir) - def swig_sources(self, sources): - # Do nothing. Swig sources have beed handled in build_src command. + def swig_sources(self, sources, extensions=None): + # Do nothing. Swig sources have been handled in build_src command. 
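The `autodist.py` checks above all wrap their embedded C snippets in `textwrap.dedent()` so the C source can be indented along with the surrounding Python. A standalone sketch of the pattern:

```python
import textwrap

# dedent() removes the common leading whitespace, so the C body can be
# indented to match the Python code that builds it.
body = textwrap.dedent("""
    static %(inline)s int static_func (void)
    {
        return 0;
    }
    """)

print(body % {'inline': '__inline__'})
```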
return sources def build_extension(self, ext): diff --git a/numpy/distutils/command/build_src.py b/numpy/distutils/command/build_src.py index 668bc23fe..41bb01da5 100644 --- a/numpy/distutils/command/build_src.py +++ b/numpy/distutils/command/build_src.py @@ -28,20 +28,14 @@ def subst_vars(target, source, d): """Substitute any occurrence of @foo@ by d['foo'] from source file into target.""" var = re.compile('@([a-zA-Z_]+)@') - fs = open(source, 'r') - try: - ft = open(target, 'w') - try: + with open(source, 'r') as fs: + with open(target, 'w') as ft: for l in fs: m = var.search(l) if m: ft.write(l.replace('@%s@' % m.group(1), d[m.group(1)])) else: ft.write(l) - finally: - ft.close() - finally: - fs.close() class build_src(build_ext.build_ext): @@ -368,7 +362,7 @@ class build_src(build_ext.build_ext): # incl_dirs = extension.include_dirs #if self.build_src not in incl_dirs: # incl_dirs.append(self.build_src) - build_dir = os.path.join(*([self.build_src]\ + build_dir = os.path.join(*([self.build_src] +name.split('.')[:-1])) self.mkpath(build_dir) for func in func_sources: @@ -425,9 +419,8 @@ class build_src(build_ext.build_ext): else: log.info("conv_template:> %s" % (target_file)) outstr = process_c_file(source) - fid = open(target_file, 'w') - fid.write(outstr) - fid.close() + with open(target_file, 'w') as fid: + fid.write(outstr) if _header_ext_match(target_file): d = os.path.dirname(target_file) if d not in include_dirs: @@ -547,7 +540,7 @@ class build_src(build_ext.build_ext): if is_sequence(extension): name = extension[0] else: name = extension.name - target_dir = os.path.join(*([self.build_src]\ + target_dir = os.path.join(*([self.build_src] +name.split('.')[:-1])) target_file = os.path.join(target_dir, ext_name + 'module.c') new_sources.append(target_file) @@ -723,25 +716,23 @@ _has_c_header = re.compile(r'-[*]-\s*c\s*-[*]-', re.I).search _has_cpp_header = re.compile(r'-[*]-\s*c[+][+]\s*-[*]-', re.I).search def get_swig_target(source): - f = open(source, 'r') - result = None - line = f.readline() - if _has_cpp_header(line): - result = 'c++' - if _has_c_header(line): - result = 'c' - f.close() + with open(source, 'r') as f: + result = None + line = f.readline() + if _has_cpp_header(line): + result = 'c++' + if _has_c_header(line): + result = 'c' return result def get_swig_modulename(source): - f = open(source, 'r') - name = None - for line in f: - m = _swig_module_name_match(line) - if m: - name = m.group('name') - break - f.close() + with open(source, 'r') as f: + name = None + for line in f: + m = _swig_module_name_match(line) + if m: + name = m.group('name') + break return name def _find_swig_target(target_dir, name): @@ -760,15 +751,14 @@ _f2py_user_module_name_match = re.compile(r'\s*python\s*module\s*(?P<name>[\w_]* def get_f2py_modulename(source): name = None - f = open(source) - for line in f: - m = _f2py_module_name_match(line) - if m: - if _f2py_user_module_name_match(line): # skip *__user__* names - continue - name = m.group('name') - break - f.close() + with open(source) as f: + for line in f: + m = _f2py_module_name_match(line) + if m: + if _f2py_user_module_name_match(line): # skip *__user__* names + continue + name = m.group('name') + break return name ########################################## diff --git a/numpy/distutils/command/config.py b/numpy/distutils/command/config.py index d9b1e8488..b9f2fa76e 100644 --- a/numpy/distutils/command/config.py +++ b/numpy/distutils/command/config.py @@ -8,6 +8,7 @@ import os, signal import warnings import sys import subprocess 
+import textwrap from distutils.command.config import config as old_config from distutils.command.config import LANG_EXT @@ -18,6 +19,7 @@ import distutils from numpy.distutils.exec_command import filepath_from_subprocess_output from numpy.distutils.mingw32ccompiler import generate_manifest from numpy.distutils.command.autodist import (check_gcc_function_attribute, + check_gcc_function_attribute_with_intrinsics, check_gcc_variable_attribute, check_inline, check_restrict, @@ -52,18 +54,18 @@ class config(old_config): self.compiler.initialize() except IOError: e = get_exception() - msg = """\ -Could not initialize compiler instance: do you have Visual Studio -installed? If you are trying to build with MinGW, please use "python setup.py -build -c mingw32" instead. If you have Visual Studio installed, check it is -correctly installed, and the right version (VS 2008 for python 2.6, 2.7 and 3.2, -VS 2010 for >= 3.3). - -Original exception was: %s, and the Compiler class was %s -============================================================================""" \ + msg = textwrap.dedent("""\ + Could not initialize compiler instance: do you have Visual Studio + installed? If you are trying to build with MinGW, please use "python setup.py + build -c mingw32" instead. If you have Visual Studio installed, check it is + correctly installed, and the right version (VS 2008 for python 2.6, 2.7 and 3.2, + VS 2010 for >= 3.3). + + Original exception was: %s, and the Compiler class was %s + ============================================================================""") \ % (e, self.compiler.__class__.__name__) - print ("""\ -============================================================================""") + print(textwrap.dedent("""\ + ============================================================================""")) raise distutils.errors.DistutilsPlatformError(msg) # After MSVC is initialized, add an explicit /MANIFEST to linker @@ -172,31 +174,31 @@ Original exception was: %s, and the Compiler class was %s def check_decl(self, symbol, headers=None, include_dirs=None): self._check_compiler() - body = """ -int main(void) -{ -#ifndef %s - (void) %s; -#endif - ; - return 0; -}""" % (symbol, symbol) + body = textwrap.dedent(""" + int main(void) + { + #ifndef %s + (void) %s; + #endif + ; + return 0; + }""") % (symbol, symbol) return self.try_compile(body, headers, include_dirs) def check_macro_true(self, symbol, headers=None, include_dirs=None): self._check_compiler() - body = """ -int main(void) -{ -#if %s -#else -#error false or undefined macro -#endif - ; - return 0; -}""" % (symbol,) + body = textwrap.dedent(""" + int main(void) + { + #if %s + #else + #error false or undefined macro + #endif + ; + return 0; + }""") % (symbol,) return self.try_compile(body, headers, include_dirs) @@ -207,14 +209,14 @@ int main(void) self._check_compiler() # First check the type can be compiled - body = r""" -int main(void) { - if ((%(name)s *) 0) - return 0; - if (sizeof (%(name)s)) - return 0; -} -""" % {'name': type_name} + body = textwrap.dedent(r""" + int main(void) { + if ((%(name)s *) 0) + return 0; + if (sizeof (%(name)s)) + return 0; + } + """) % {'name': type_name} st = False try: @@ -234,33 +236,33 @@ int main(void) { self._check_compiler() # First check the type can be compiled - body = r""" -typedef %(type)s npy_check_sizeof_type; -int main (void) -{ - static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) >= 0)]; - test_array [0] = 0 - - ; - return 0; -} -""" + body = textwrap.dedent(r""" + 
typedef %(type)s npy_check_sizeof_type; + int main (void) + { + static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) >= 0)]; + test_array [0] = 0 + + ; + return 0; + } + """) self._compile(body % {'type': type_name}, headers, include_dirs, 'c') self._clean() if expected: - body = r""" -typedef %(type)s npy_check_sizeof_type; -int main (void) -{ - static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) == %(size)s)]; - test_array [0] = 0 - - ; - return 0; -} -""" + body = textwrap.dedent(r""" + typedef %(type)s npy_check_sizeof_type; + int main (void) + { + static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) == %(size)s)]; + test_array [0] = 0 + + ; + return 0; + } + """) for size in expected: try: self._compile(body % {'type': type_name, 'size': size}, @@ -271,17 +273,17 @@ int main (void) pass # this fails to *compile* if size > sizeof(type) - body = r""" -typedef %(type)s npy_check_sizeof_type; -int main (void) -{ - static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) <= %(size)s)]; - test_array [0] = 0 - - ; - return 0; -} -""" + body = textwrap.dedent(r""" + typedef %(type)s npy_check_sizeof_type; + int main (void) + { + static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) <= %(size)s)]; + test_array [0] = 0 + + ; + return 0; + } + """) # The principle is simple: we first find low and high bounds of size # for the type, where low/high are looked up on a log scale. Then, we @@ -424,6 +426,11 @@ int main (void) def check_gcc_function_attribute(self, attribute, name): return check_gcc_function_attribute(self, attribute, name) + def check_gcc_function_attribute_with_intrinsics(self, attribute, name, + code, include): + return check_gcc_function_attribute_with_intrinsics(self, attribute, + name, code, include) + def check_gcc_variable_attribute(self, attribute): return check_gcc_variable_attribute(self, attribute) @@ -435,10 +442,10 @@ int main (void) of the program and its output. """ # 2008-11-16, RemoveMe - warnings.warn("\n+++++++++++++++++++++++++++++++++++++++++++++++++\n" \ - "Usage of get_output is deprecated: please do not \n" \ - "use it anymore, and avoid configuration checks \n" \ - "involving running executable on the target machine.\n" \ + warnings.warn("\n+++++++++++++++++++++++++++++++++++++++++++++++++\n" + "Usage of get_output is deprecated: please do not \n" + "use it anymore, and avoid configuration checks \n" + "involving running executable on the target machine.\n" "+++++++++++++++++++++++++++++++++++++++++++++++++\n", DeprecationWarning, stacklevel=2) self._check_compiler() diff --git a/numpy/distutils/command/install.py b/numpy/distutils/command/install.py index a1dd47755..c74ae9446 100644 --- a/numpy/distutils/command/install.py +++ b/numpy/distutils/command/install.py @@ -64,16 +64,15 @@ class install(old_install): # bdist_rpm fails when INSTALLED_FILES contains # paths with spaces. Such paths must be enclosed # with double-quotes. 
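A sketch of how the new `check_gcc_function_attribute_with_intrinsics` hook wired into the config command above might be called; the attribute, symbol name, intrinsic and header below are illustrative assumptions, not values taken from this diff.

```python
def probe_avx2(config_cmd):
    # config_cmd is assumed to be a numpy.distutils config command
    # instance; the check compiles a function carrying the attribute
    # and containing the given intrinsic call.
    return config_cmd.check_gcc_function_attribute_with_intrinsics(
        attribute='__attribute__((target("avx2")))',
        name='attribute_target_avx2',
        code='__m256i a = _mm256_setzero_si256(); (void)a',
        include='immintrin.h',
    )
```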
- f = open(self.record, 'r') - lines = [] - need_rewrite = False - for l in f: - l = l.rstrip() - if ' ' in l: - need_rewrite = True - l = '"%s"' % (l) - lines.append(l) - f.close() + with open(self.record, 'r') as f: + lines = [] + need_rewrite = False + for l in f: + l = l.rstrip() + if ' ' in l: + need_rewrite = True + l = '"%s"' % (l) + lines.append(l) if need_rewrite: self.execute(write_file, (self.record, lines), diff --git a/numpy/distutils/command/install_clib.py b/numpy/distutils/command/install_clib.py index 662aa00bd..6a73f7e33 100644 --- a/numpy/distutils/command/install_clib.py +++ b/numpy/distutils/command/install_clib.py @@ -19,6 +19,9 @@ class install_clib(Command): def run (self): build_clib_cmd = get_cmd("build_clib") + if not build_clib_cmd.build_clib: + # can happen if the user specified `--skip-build` + build_clib_cmd.finalize_options() build_dir = build_clib_cmd.build_clib # We need the compiler to get the library name -> filename association diff --git a/numpy/distutils/conv_template.py b/numpy/distutils/conv_template.py index b33e315b4..3bcb7b884 100644 --- a/numpy/distutils/conv_template.py +++ b/numpy/distutils/conv_template.py @@ -267,22 +267,21 @@ include_src_re = re.compile(r"(\n|\A)#include\s*['\"]" def resolve_includes(source): d = os.path.dirname(source) - fid = open(source) - lines = [] - for line in fid: - m = include_src_re.match(line) - if m: - fn = m.group('name') - if not os.path.isabs(fn): - fn = os.path.join(d, fn) - if os.path.isfile(fn): - print('Including file', fn) - lines.extend(resolve_includes(fn)) + with open(source) as fid: + lines = [] + for line in fid: + m = include_src_re.match(line) + if m: + fn = m.group('name') + if not os.path.isabs(fn): + fn = os.path.join(d, fn) + if os.path.isfile(fn): + print('Including file', fn) + lines.extend(resolve_includes(fn)) + else: + lines.append(line) else: lines.append(line) - else: - lines.append(line) - fid.close() return lines def process_file(source): @@ -331,6 +330,7 @@ def main(): except ValueError: e = get_exception() raise ValueError("In %s loop at %s" % (file, e)) + outfile.write(writestr) if __name__ == "__main__": diff --git a/numpy/distutils/cpuinfo.py b/numpy/distutils/cpuinfo.py index 580299347..bc9728335 100644 --- a/numpy/distutils/cpuinfo.py +++ b/numpy/distutils/cpuinfo.py @@ -242,16 +242,16 @@ class LinuxCPUInfo(CPUInfoBase): return self.is_PentiumIV() and self.has_sse3() def _is_Nocona(self): - return self.is_Intel() \ - and (self.info[0]['cpu family'] == '6' \ - or self.info[0]['cpu family'] == '15' ) \ - and (self.has_sse3() and not self.has_ssse3())\ - and re.match(r'.*?\blm\b', self.info[0]['flags']) is not None + return (self.is_Intel() + and (self.info[0]['cpu family'] == '6' + or self.info[0]['cpu family'] == '15') + and (self.has_sse3() and not self.has_ssse3()) + and re.match(r'.*?\blm\b', self.info[0]['flags']) is not None) def _is_Core2(self): - return self.is_64bit() and self.is_Intel() and \ - re.match(r'.*?Core\(TM\)2\b', \ - self.info[0]['model name']) is not None + return (self.is_64bit() and self.is_Intel() and + re.match(r'.*?Core\(TM\)2\b', + self.info[0]['model name']) is not None) def _is_Itanium(self): return re.match(r'.*?Itanium\b', @@ -632,13 +632,13 @@ class Win32CPUInfo(CPUInfoBase): def _has_sse(self): if self.is_Intel(): - return (self.info[0]['Family']==6 and \ - self.info[0]['Model'] in [7, 8, 9, 10, 11]) \ - or self.info[0]['Family']==15 + return ((self.info[0]['Family']==6 and + self.info[0]['Model'] in [7, 8, 9, 10, 11]) + or 
self.info[0]['Family']==15) elif self.is_AMD(): - return (self.info[0]['Family']==6 and \ - self.info[0]['Model'] in [6, 7, 8, 10]) \ - or self.info[0]['Family']==15 + return ((self.info[0]['Family']==6 and + self.info[0]['Model'] in [6, 7, 8, 10]) + or self.info[0]['Family']==15) else: return False diff --git a/numpy/distutils/exec_command.py b/numpy/distutils/exec_command.py index aaeca99ee..712f22666 100644 --- a/numpy/distutils/exec_command.py +++ b/numpy/distutils/exec_command.py @@ -57,6 +57,7 @@ import os import sys import subprocess import locale +import warnings from numpy.distutils.misc_util import is_sequence, make_temp_file from numpy.distutils import log @@ -81,7 +82,33 @@ def filepath_from_subprocess_output(output): output = output.encode('ascii', errors='replace') return output + +def forward_bytes_to_stdout(val): + """ + Forward bytes from a subprocess call to the console, without attempting to + decode them. + + The assumption is that the subprocess call already returned bytes in + a suitable encoding. + """ + if sys.version_info.major < 3: + # python 2 has binary output anyway + sys.stdout.write(val) + elif hasattr(sys.stdout, 'buffer'): + # use the underlying binary output if there is one + sys.stdout.buffer.write(val) + elif hasattr(sys.stdout, 'encoding'): + # round-trip the encoding if necessary + sys.stdout.write(val.decode(sys.stdout.encoding)) + else: + # make a best-guess at the encoding + sys.stdout.write(val.decode('utf8', errors='replace')) + + def temp_file_name(): + # 2019-01-30, 1.17 + warnings.warn('temp_file_name is deprecated since NumPy v1.17, use ' + 'tempfile.mkstemp instead', DeprecationWarning, stacklevel=1) fo, name = make_temp_file() fo.close() return name @@ -156,24 +183,14 @@ def _update_environment( **env ): for name, value in env.items(): os.environ[name] = value or '' -def _supports_fileno(stream): - """ - Returns True if 'stream' supports the file descriptor and allows fileno(). - """ - if hasattr(stream, 'fileno'): - try: - stream.fileno() - return True - except IOError: - return False - else: - return False - def exec_command(command, execute_in='', use_shell=None, use_tee=None, _with_python = 1, **env ): """ Return (status,output) of executed command. + .. deprecated:: 1.17 + Use subprocess.Popen instead + Parameters ---------- command : str @@ -197,7 +214,10 @@ def exec_command(command, execute_in='', use_shell=None, use_tee=None, Wild cards will not work for non-posix systems or when use_shell=0. 
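The new `forward_bytes_to_stdout` above writes raw subprocess output without decoding it, which avoids the `UnicodeError` that printing undecodable compiler output used to cause. A minimal usage sketch (assumes `gcc` is on `PATH`):

```python
import subprocess
from numpy.distutils.exec_command import forward_bytes_to_stdout

# check_output returns bytes; forward them as-is instead of decoding.
out = subprocess.check_output(['gcc', '--version'], stderr=subprocess.STDOUT)
forward_bytes_to_stdout(out)
```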
""" - log.debug('exec_command(%r,%s)' % (command,\ + # 2019-01-30, 1.17 + warnings.warn('exec_command is deprecated since NumPy v1.17, use ' + 'subprocess.Popen instead', DeprecationWarning, stacklevel=1) + log.debug('exec_command(%r,%s)' % (command, ','.join(['%s=%r'%kv for kv in env.items()]))) if use_tee is None: diff --git a/numpy/distutils/fcompiler/__init__.py b/numpy/distutils/fcompiler/__init__.py index 12b32832e..3723470f3 100644 --- a/numpy/distutils/fcompiler/__init__.py +++ b/numpy/distutils/fcompiler/__init__.py @@ -22,7 +22,6 @@ import os import sys import re import types -import shlex from numpy.compat import open_latin1 @@ -38,6 +37,7 @@ from numpy.distutils.misc_util import is_string, all_strings, is_sequence, \ make_temp_file, get_shared_lib_extension from numpy.distutils.exec_command import find_executable from numpy.distutils.compat import get_exception +from numpy.distutils import _shell_utils from .environment import EnvironmentConfig @@ -362,7 +362,7 @@ class FCompiler(CCompiler): set_exe('archiver') set_exe('ranlib') - def update_executables(elf): + def update_executables(self): """Called at the beginning of customisation. Subclasses should override this if they need to set up the executables dictionary. @@ -466,10 +466,8 @@ class FCompiler(CCompiler): noarch = self.distutils_vars.get('noarch', noopt) debug = self.distutils_vars.get('debug', False) - f77 = shlex.split(self.command_vars.compiler_f77, - posix=(os.name == 'posix')) - f90 = shlex.split(self.command_vars.compiler_f90, - posix=(os.name == 'posix')) + f77 = self.command_vars.compiler_f77 + f90 = self.command_vars.compiler_f90 f77flags = [] f90flags = [] @@ -477,21 +475,23 @@ class FCompiler(CCompiler): fixflags = [] if f77: + f77 = _shell_utils.NativeParser.split(f77) f77flags = self.flag_vars.f77 if f90: + f90 = _shell_utils.NativeParser.split(f90) f90flags = self.flag_vars.f90 freeflags = self.flag_vars.free # XXX Assuming that free format is default for f90 compiler. 
fix = self.command_vars.compiler_fix # NOTE: this and similar examples are probably just - # exluding --coverage flag when F90 = gfortran --coverage + # excluding --coverage flag when F90 = gfortran --coverage # instead of putting that flag somewhere more appropriate # this and similar examples where a Fortran compiler # environment variable has been customized by CI or a user - # should perhaps eventually be more throughly tested and more + # should perhaps eventually be more thoroughly tested and more # robustly handled - fix = shlex.split(fix, posix=(os.name == 'posix')) if fix: + fix = _shell_utils.NativeParser.split(fix) fixflags = self.flag_vars.fix + f90flags oflags, aflags, dflags = [], [], [] diff --git a/numpy/distutils/fcompiler/absoft.py b/numpy/distutils/fcompiler/absoft.py index 2c3edfe02..d14fee0e1 100644 --- a/numpy/distutils/fcompiler/absoft.py +++ b/numpy/distutils/fcompiler/absoft.py @@ -66,7 +66,7 @@ class AbsoftFCompiler(FCompiler): def library_dir_option(self, dir): if os.name=='nt': - return ['-link', '/PATH:"%s"' % (dir)] + return ['-link', '/PATH:%s' % (dir)] return "-L" + dir def library_option(self, lib): diff --git a/numpy/distutils/fcompiler/compaq.py b/numpy/distutils/fcompiler/compaq.py index 07d502706..671b3a55f 100644 --- a/numpy/distutils/fcompiler/compaq.py +++ b/numpy/distutils/fcompiler/compaq.py @@ -95,7 +95,7 @@ class CompaqVisualFCompiler(FCompiler): raise e except ValueError: e = get_exception() - if not "path']" in str(e): + if not "'path'" in str(e): print("Unexpected ValueError in", __file__) raise e diff --git a/numpy/distutils/fcompiler/environment.py b/numpy/distutils/fcompiler/environment.py index 489784580..73a5e98e1 100644 --- a/numpy/distutils/fcompiler/environment.py +++ b/numpy/distutils/fcompiler/environment.py @@ -1,6 +1,7 @@ from __future__ import division, absolute_import, print_function import os +import warnings from distutils.dist import Distribution __metaclass__ = type @@ -50,20 +51,34 @@ class EnvironmentConfig(object): def _get_var(self, name, conf_desc): hook, envvar, confvar, convert, append = conf_desc + if convert is None: + convert = lambda x: x var = self._hook_handler(name, hook) if envvar is not None: envvar_contents = os.environ.get(envvar) if envvar_contents is not None: - if var and append and os.environ.get('NPY_DISTUTILS_APPEND_FLAGS', '0') == '1': - var = var + [envvar_contents] + envvar_contents = convert(envvar_contents) + if var and append: + if os.environ.get('NPY_DISTUTILS_APPEND_FLAGS', '0') == '1': + var.extend(envvar_contents) + else: + var = envvar_contents + if 'NPY_DISTUTILS_APPEND_FLAGS' not in os.environ.keys(): + msg = "{} is used as is, not appended ".format(envvar) + \ + "to flags already defined " + \ + "by numpy.distutils! Use NPY_DISTUTILS_APPEND_FLAGS=1 " + \ + "to obtain appending behavior instead (this " + \ + "behavior will become default in a future release)." 
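The warning added in `environment.py` above fires when an environment variable such as `F90FLAGS` replaces, rather than extends, the flags numpy.distutils would use on its own. A sketch of opting in to the appending behaviour before a build (the flag value is an arbitrary example):

```python
import os

# Append user flags to numpy.distutils' own flags instead of replacing
# them; replacement is still the default behaviour at this point.
os.environ['NPY_DISTUTILS_APPEND_FLAGS'] = '1'
os.environ['F90FLAGS'] = '--coverage'  # example user flag

# ...then run the build as usual, e.g. `python setup.py build_ext`.
```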
+ warnings.warn(msg, UserWarning, stacklevel=3) else: var = envvar_contents if confvar is not None and self._conf: - var = self._conf.get(confvar, (None, var))[1] - if convert is not None: - var = convert(var) + if confvar in self._conf: + source, confvar_contents = self._conf[confvar] + var = convert(confvar_contents) return var + def clone(self, hook_handler): ec = self.__class__(distutils_section=self._distutils_section, **self._conf_keys) diff --git a/numpy/distutils/fcompiler/gnu.py b/numpy/distutils/fcompiler/gnu.py index 81769e562..965c67041 100644 --- a/numpy/distutils/fcompiler/gnu.py +++ b/numpy/distutils/fcompiler/gnu.py @@ -269,8 +269,11 @@ class GnuFCompiler(FCompiler): # Linux/Solaris/Unix support RPATH, Windows and AIX do not raise NotImplementedError + # TODO: could use -Xlinker here, if it's supported + assert "," not in dir + sep = ',' if sys.platform == 'darwin' else '=' - return '-Wl,-rpath%s"%s"' % (sep, dir) + return '-Wl,-rpath%s%s' % (sep, dir) class Gnu95FCompiler(GnuFCompiler): diff --git a/numpy/distutils/fcompiler/ibm.py b/numpy/distutils/fcompiler/ibm.py index c4cb2fca7..70d2132e1 100644 --- a/numpy/distutils/fcompiler/ibm.py +++ b/numpy/distutils/fcompiler/ibm.py @@ -78,15 +78,14 @@ class IBMFCompiler(FCompiler): xlf_cfg = '/etc/opt/ibmcmp/xlf/%s/xlf.cfg' % version fo, new_cfg = make_temp_file(suffix='_xlf.cfg') log.info('Creating '+new_cfg) - fi = open(xlf_cfg, 'r') - crt1_match = re.compile(r'\s*crt\s*[=]\s*(?P<path>.*)/crt1.o').match - for line in fi: - m = crt1_match(line) - if m: - fo.write('crt = %s/bundle1.o\n' % (m.group('path'))) - else: - fo.write(line) - fi.close() + with open(xlf_cfg, 'r') as fi: + crt1_match = re.compile(r'\s*crt\s*[=]\s*(?P<path>.*)/crt1.o').match + for line in fi: + m = crt1_match(line) + if m: + fo.write('crt = %s/bundle1.o\n' % (m.group('path'))) + else: + fo.write(line) fo.close() opt.append('-F'+new_cfg) return opt diff --git a/numpy/distutils/fcompiler/intel.py b/numpy/distutils/fcompiler/intel.py index 217eac8fb..51f681274 100644 --- a/numpy/distutils/fcompiler/intel.py +++ b/numpy/distutils/fcompiler/intel.py @@ -23,7 +23,10 @@ class BaseIntelFCompiler(FCompiler): f + '.f', '-o', f + '.o'] def runtime_library_dir_option(self, dir): - return '-Wl,-rpath="%s"' % dir + # TODO: could use -Xlinker here, if it's supported + assert "," not in dir + + return '-Wl,-rpath=%s' % dir class IntelFCompiler(BaseIntelFCompiler): diff --git a/numpy/distutils/fcompiler/pg.py b/numpy/distutils/fcompiler/pg.py index 99071800a..9c51947fd 100644 --- a/numpy/distutils/fcompiler/pg.py +++ b/numpy/distutils/fcompiler/pg.py @@ -33,7 +33,7 @@ class PGroupFCompiler(FCompiler): 'compiler_f77': ["pgfortran"], 'compiler_fix': ["pgfortran", "-Mfixed"], 'compiler_f90': ["pgfortran"], - 'linker_so': ["pgfortran", "-shared", "-fpic"], + 'linker_so': ["pgfortran"], 'archiver': ["ar", "-cr"], 'ranlib': ["ranlib"] } @@ -56,8 +56,12 @@ class PGroupFCompiler(FCompiler): def get_flags_linker_so(self): return ["-dynamic", '-undefined', 'dynamic_lookup'] + else: + def get_flags_linker_so(self): + return ["-shared", '-fpic'] + def runtime_library_dir_option(self, dir): - return '-R"%s"' % dir + return '-R%s' % dir if sys.version_info >= (3, 5): diff --git a/numpy/distutils/fcompiler/sun.py b/numpy/distutils/fcompiler/sun.py index d477d3308..561ea854f 100644 --- a/numpy/distutils/fcompiler/sun.py +++ b/numpy/distutils/fcompiler/sun.py @@ -44,7 +44,7 @@ class SunFCompiler(FCompiler): return opt def runtime_library_dir_option(self, dir): - return '-R"%s"' % dir + 
return '-R%s' % dir if __name__ == '__main__': from distutils import log diff --git a/numpy/distutils/from_template.py b/numpy/distutils/from_template.py index 65c60c498..c5c1163c6 100644 --- a/numpy/distutils/from_template.py +++ b/numpy/distutils/from_template.py @@ -212,22 +212,21 @@ include_src_re = re.compile(r"(\n|\A)\s*include\s*['\"](?P<name>[\w\d./\\]+[.]sr def resolve_includes(source): d = os.path.dirname(source) - fid = open(source) - lines = [] - for line in fid: - m = include_src_re.match(line) - if m: - fn = m.group('name') - if not os.path.isabs(fn): - fn = os.path.join(d, fn) - if os.path.isfile(fn): - print('Including file', fn) - lines.extend(resolve_includes(fn)) + with open(source) as fid: + lines = [] + for line in fid: + m = include_src_re.match(line) + if m: + fn = m.group('name') + if not os.path.isabs(fn): + fn = os.path.join(d, fn) + if os.path.isfile(fn): + print('Including file', fn) + lines.extend(resolve_includes(fn)) + else: + lines.append(line) else: lines.append(line) - else: - lines.append(line) - fid.close() return lines def process_file(source): @@ -260,5 +259,6 @@ def main(): writestr = process_str(allstr) outfile.write(writestr) + if __name__ == "__main__": main() diff --git a/numpy/distutils/line_endings.py b/numpy/distutils/line_endings.py index 5ecb104ff..fe8fd1b0f 100644 --- a/numpy/distutils/line_endings.py +++ b/numpy/distutils/line_endings.py @@ -11,7 +11,8 @@ def dos2unix(file): print(file, "Directory!") return - data = open(file, "rb").read() + with open(file, "rb") as fp: + data = fp.read() if '\0' in data: print(file, "Binary!") return @@ -19,9 +20,8 @@ def dos2unix(file): newdata = re.sub("\r\n", "\n", data) if newdata != data: print('dos2unix:', file) - f = open(file, "wb") - f.write(newdata) - f.close() + with open(file, "wb") as f: + f.write(newdata) return file else: print(file, 'ok') @@ -45,7 +45,8 @@ def unix2dos(file): print(file, "Directory!") return - data = open(file, "rb").read() + with open(file, "rb") as fp: + data = fp.read() if '\0' in data: print(file, "Binary!") return @@ -53,9 +54,8 @@ def unix2dos(file): newdata = re.sub("\n", "\r\n", newdata) if newdata != data: print('unix2dos:', file) - f = open(file, "wb") - f.write(newdata) - f.close() + with open(file, "wb") as f: + f.write(newdata) return file else: print(file, 'ok') diff --git a/numpy/distutils/mingw32ccompiler.py b/numpy/distutils/mingw32ccompiler.py index e6bbe1996..075858cfe 100644 --- a/numpy/distutils/mingw32ccompiler.py +++ b/numpy/distutils/mingw32ccompiler.py @@ -13,6 +13,7 @@ import os import sys import subprocess import re +import textwrap # Overwrite certain distutils.ccompiler functions: import numpy.distutils.ccompiler @@ -29,7 +30,6 @@ else: import distutils.cygwinccompiler from distutils.version import StrictVersion -from numpy.distutils.ccompiler import gen_preprocess_options, gen_lib_options from distutils.unixccompiler import UnixCCompiler from distutils.msvccompiler import get_build_version as get_build_msvc_version from distutils.errors import (DistutilsExecError, CompileError, @@ -572,21 +572,21 @@ def msvc_manifest_xml(maj, min): # embedded in the binary... # This template was copied directly from the python 2.6 binary (using # strings.exe from mingw on python.exe). 
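The rpath changes above (`-Wl,-rpath=...`, `-R...`) drop the embedded double quotes: quoting is shell syntax, and these options are passed to the compiler as individual argv elements, so literal quote characters would end up inside the runtime search path. A small sketch of the distinction (`gfortran` and `hello.f90` are assumed to exist):

```python
import subprocess

rpath_dir = '/opt/mylibs/lib'        # hypothetical library directory
flag = '-Wl,-rpath=%s' % rpath_dir   # no embedded '"' characters

# The argument list bypasses the shell, so nothing would strip quotes;
# '-Wl,-rpath="%s"' would embed the quote characters verbatim.
subprocess.check_call(['gfortran', 'hello.f90', '-o', 'hello', flag])
```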
- template = """\ -<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0"> - <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> - <security> - <requestedPrivileges> - <requestedExecutionLevel level="asInvoker" uiAccess="false"></requestedExecutionLevel> - </requestedPrivileges> - </security> - </trustInfo> - <dependency> - <dependentAssembly> - <assemblyIdentity type="win32" name="Microsoft.VC%(maj)d%(min)d.CRT" version="%(fullver)s" processorArchitecture="*" publicKeyToken="1fc8b3b9a1e18e3b"></assemblyIdentity> - </dependentAssembly> - </dependency> -</assembly>""" + template = textwrap.dedent("""\ + <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0"> + <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> + <security> + <requestedPrivileges> + <requestedExecutionLevel level="asInvoker" uiAccess="false"></requestedExecutionLevel> + </requestedPrivileges> + </security> + </trustInfo> + <dependency> + <dependentAssembly> + <assemblyIdentity type="win32" name="Microsoft.VC%(maj)d%(min)d.CRT" version="%(fullver)s" processorArchitecture="*" publicKeyToken="1fc8b3b9a1e18e3b"></assemblyIdentity> + </dependentAssembly> + </dependency> + </assembly>""") return template % {'fullver': fullver, 'maj': maj, 'min': min} diff --git a/numpy/distutils/misc_util.py b/numpy/distutils/misc_util.py index 67a5f7234..89171eede 100644 --- a/numpy/distutils/misc_util.py +++ b/numpy/distutils/misc_util.py @@ -10,6 +10,7 @@ import tempfile import subprocess import shutil import multiprocessing +import textwrap import distutils from distutils.errors import DistutilsError @@ -218,15 +219,14 @@ def get_mathlibs(path=None): raise DistutilsError('_numpyconfig.h not found in numpy include ' 'dirs %r' % (dirs,)) - fid = open(config_file) - mathlibs = [] - s = '#define MATHLIB' - for line in fid: - if line.startswith(s): - value = line[len(s):].strip() - if value: - mathlibs.extend(value.split(',')) - fid.close() + with open(config_file) as fid: + mathlibs = [] + s = '#define MATHLIB' + for line in fid: + if line.startswith(s): + value = line[len(s):].strip() + if value: + mathlibs.extend(value.split(',')) return mathlibs def minrelpath(path): @@ -443,14 +443,13 @@ def _get_f90_modules(source): if not f90_ext_match(source): return [] modules = [] - f = open(source, 'r') - for line in f: - m = f90_module_name_match(line) - if m: - name = m.group('name') - modules.append(name) - # break # XXX can we assume that there is one module per file? - f.close() + with open(source, 'r') as f: + for line in f: + m = f90_module_name_match(line) + if m: + name = m.group('name') + modules.append(name) + # break # XXX can we assume that there is one module per file? return modules def is_string(s): @@ -473,7 +472,7 @@ def is_sequence(seq): return True def is_glob_pattern(s): - return is_string(s) and ('*' in s or '?' is s) + return is_string(s) and ('*' in s or '?' in s) def as_list(seq): if is_sequence(seq): @@ -1833,67 +1832,53 @@ class Configuration(object): def _get_svn_revision(self, path): """Return path's SVN revision number. 
""" - revision = None - m = None - cwd = os.getcwd() try: - os.chdir(path or '.') - p = subprocess.Popen(['svnversion'], shell=True, - stdout=subprocess.PIPE, stderr=None, - close_fds=True) - sout = p.stdout - m = re.match(r'(?P<revision>\d+)', sout.read()) - except Exception: + output = subprocess.check_output( + ['svnversion'], shell=True, cwd=path) + except (subprocess.CalledProcessError, OSError): pass - os.chdir(cwd) - if m: - revision = int(m.group('revision')) - return revision + else: + m = re.match(rb'(?P<revision>\d+)', output) + if m: + return int(m.group('revision')) + if sys.platform=='win32' and os.environ.get('SVN_ASP_DOT_NET_HACK', None): entries = njoin(path, '_svn', 'entries') else: entries = njoin(path, '.svn', 'entries') if os.path.isfile(entries): - f = open(entries) - fstr = f.read() - f.close() + with open(entries) as f: + fstr = f.read() if fstr[:5] == '<?xml': # pre 1.4 m = re.search(r'revision="(?P<revision>\d+)"', fstr) if m: - revision = int(m.group('revision')) + return int(m.group('revision')) else: # non-xml entries file --- check to be sure that m = re.search(r'dir[\n\r]+(?P<revision>\d+)', fstr) if m: - revision = int(m.group('revision')) - return revision + return int(m.group('revision')) + return None def _get_hg_revision(self, path): """Return path's Mercurial revision number. """ - revision = None - m = None - cwd = os.getcwd() try: - os.chdir(path or '.') - p = subprocess.Popen(['hg identify --num'], shell=True, - stdout=subprocess.PIPE, stderr=None, - close_fds=True) - sout = p.stdout - m = re.match(r'(?P<revision>\d+)', sout.read()) - except Exception: + output = subprocess.check_output( + ['hg identify --num'], shell=True, cwd=path) + except (subprocess.CalledProcessError, OSError): pass - os.chdir(cwd) - if m: - revision = int(m.group('revision')) - return revision + else: + m = re.match(rb'(?P<revision>\d+)', output) + if m: + return int(m.group('revision')) + branch_fn = njoin(path, '.hg', 'branch') branch_cache_fn = njoin(path, '.hg', 'branch.cache') if os.path.isfile(branch_fn): branch0 = None - f = open(branch_fn) - revision0 = f.read().strip() - f.close() + with open(branch_fn) as f: + revision0 = f.read().strip() branch_map = {} for line in file(branch_cache_fn, 'r'): @@ -1906,8 +1891,9 @@ class Configuration(object): continue branch_map[branch1] = revision1 - revision = branch_map.get(branch0) - return revision + return branch_map.get(branch0) + + return None def get_version(self, version_file=None, version_variable=None): @@ -2005,9 +1991,8 @@ class Configuration(object): if not os.path.isfile(target): version = str(revision) self.info('Creating %s (version=%r)' % (target, version)) - f = open(target, 'w') - f.write('version = %r\n' % (version)) - f.close() + with open(target, 'w') as f: + f.write('version = %r\n' % (version)) def rm_file(f=target,p=self.info): if delete: @@ -2046,9 +2031,8 @@ class Configuration(object): if not os.path.isfile(target): version = str(revision) self.info('Creating %s (version=%r)' % (target, version)) - f = open(target, 'w') - f.write('version = %r\n' % (version)) - f.close() + with open(target, 'w') as f: + f.write('version = %r\n' % (version)) def rm_file(f=target,p=self.info): if delete: @@ -2284,46 +2268,44 @@ def generate_config_py(target): from numpy.distutils.system_info import system_info from distutils.dir_util import mkpath mkpath(os.path.dirname(target)) - f = open(target, 'w') - f.write('# This file is generated by numpy\'s %s\n' % (os.path.basename(sys.argv[0]))) - f.write('# It contains 
system_info results at the time of building this package.\n') - f.write('__all__ = ["get_info","show"]\n\n') - - # For gfortran+msvc combination, extra shared libraries may exist - f.write(""" - -import os -import sys + with open(target, 'w') as f: + f.write('# This file is generated by numpy\'s %s\n' % (os.path.basename(sys.argv[0]))) + f.write('# It contains system_info results at the time of building this package.\n') + f.write('__all__ = ["get_info","show"]\n\n') + + # For gfortran+msvc combination, extra shared libraries may exist + f.write(textwrap.dedent(""" + import os + import sys + + extra_dll_dir = os.path.join(os.path.dirname(__file__), '.libs') + + if sys.platform == 'win32' and os.path.isdir(extra_dll_dir): + os.environ.setdefault('PATH', '') + os.environ['PATH'] += os.pathsep + extra_dll_dir + + """)) + + for k, i in system_info.saved_results.items(): + f.write('%s=%r\n' % (k, i)) + f.write(textwrap.dedent(r''' + def get_info(name): + g = globals() + return g.get(name, g.get(name + "_info", {})) + + def show(): + for name,info_dict in globals().items(): + if name[0] == "_" or type(info_dict) is not type({}): continue + print(name + ":") + if not info_dict: + print(" NOT AVAILABLE") + for k,v in info_dict.items(): + v = str(v) + if k == "sources" and len(v) > 200: + v = v[:60] + " ...\n... " + v[-60:] + print(" %s = %s" % (k,v)) + ''')) -extra_dll_dir = os.path.join(os.path.dirname(__file__), '.libs') - -if sys.platform == 'win32' and os.path.isdir(extra_dll_dir): - os.environ.setdefault('PATH', '') - os.environ['PATH'] += os.pathsep + extra_dll_dir - -""") - - for k, i in system_info.saved_results.items(): - f.write('%s=%r\n' % (k, i)) - f.write(r''' -def get_info(name): - g = globals() - return g.get(name, g.get(name + "_info", {})) - -def show(): - for name,info_dict in globals().items(): - if name[0] == "_" or type(info_dict) is not type({}): continue - print(name + ":") - if not info_dict: - print(" NOT AVAILABLE") - for k,v in info_dict.items(): - v = str(v) - if k == "sources" and len(v) > 200: - v = v[:60] + " ...\n... 
" + v[-60:] - print(" %s = %s" % (k,v)) - ''') - - f.close() return target def msvc_version(compiler): diff --git a/numpy/distutils/npy_pkg_config.py b/numpy/distutils/npy_pkg_config.py index bfe8b9f77..48584b4c4 100644 --- a/numpy/distutils/npy_pkg_config.py +++ b/numpy/distutils/npy_pkg_config.py @@ -426,7 +426,7 @@ if __name__ == '__main__': if options.define_variable: m = re.search(r'([\S]+)=([\S]+)', options.define_variable) if not m: - raise ValueError("--define-variable option should be of " \ + raise ValueError("--define-variable option should be of " "the form --define-variable=foo=bar") else: name = m.group(1) diff --git a/numpy/distutils/system_info.py b/numpy/distutils/system_info.py index cd63cc849..6cfce3b1c 100644 --- a/numpy/distutils/system_info.py +++ b/numpy/distutils/system_info.py @@ -17,6 +17,7 @@ classes are available: atlas_3_10_blas_threads_info, lapack_atlas_3_10_info lapack_atlas_3_10_threads_info + flame_info blas_info lapack_info openblas_info @@ -92,20 +93,20 @@ src_dirs = /usr/local/src:/opt/src search_static_first = 0 [fftw] -fftw_libs = rfftw, fftw -fftw_opt_libs = rfftw_threaded, fftw_threaded -# if the above aren't found, look for {s,d}fftw_libs and {s,d}fftw_opt_libs +libraries = rfftw, fftw [atlas] library_dirs = /usr/lib/3dnow:/usr/lib/3dnow/atlas # for overriding the names of the atlas libraries -atlas_libs = lapack, f77blas, cblas, atlas +libraries = lapack, f77blas, cblas, atlas [x11] library_dirs = /usr/X11R6/lib include_dirs = /usr/X11R6/include ---------- +Note that the ``libraries`` key is the default setting for libraries. + Authors: Pearu Peterson <pearu@cens.ioc.ee>, February 2002 David M. Cooke <cookedm@physics.mcmaster.ca>, April 2002 @@ -126,6 +127,9 @@ import os import re import copy import warnings +import subprocess +import textwrap + from glob import glob from functools import reduce if sys.version_info[0] < 3: @@ -153,6 +157,7 @@ from numpy.distutils.misc_util import (is_sequence, is_string, from numpy.distutils.command.config import config as cmd_config from numpy.distutils.compat import get_exception from numpy.distutils import customized_ccompiler +from numpy.distutils import _shell_utils import distutils.ccompiler import tempfile import shutil @@ -164,6 +169,17 @@ _bits = {'32bit': 32, '64bit': 64} platform_bits = _bits[platform.architecture()[0]] +def _c_string_literal(s): + """ + Convert a python string into a literal suitable for inclusion into C code + """ + # only these three characters are forbidden in C strings + s = s.replace('\\', r'\\') + s = s.replace('"', r'\"') + s = s.replace('\n', r'\n') + return '"{}"'.format(s) + + def libpaths(paths, bits): """Return a list of library paths valid on 32 or 64 bit systems. @@ -286,27 +302,21 @@ else: default_x11_include_dirs.extend(['/usr/lib/X11/include', '/usr/include/X11']) - import subprocess as sp - tmp = None - try: - # Explicitly open/close file to avoid ResourceWarning when - # tests are run in debug mode Python 3. - tmp = open(os.devnull, 'w') - p = sp.Popen(["gcc", "-print-multiarch"], stdout=sp.PIPE, - stderr=tmp) - except (OSError, DistutilsError): - # OSError if gcc is not installed, or SandboxViolation (DistutilsError - # subclass) if an old setuptools bug is triggered (see gh-3160). 
- pass - else: - triplet = str(p.communicate()[0].decode().strip()) - if p.returncode == 0: - # gcc supports the "-print-multiarch" option - default_x11_lib_dirs += [os.path.join("/usr/lib/", triplet)] - default_lib_dirs += [os.path.join("/usr/lib/", triplet)] - finally: - if tmp is not None: - tmp.close() + with open(os.devnull, 'w') as tmp: + try: + p = subprocess.Popen(["gcc", "-print-multiarch"], stdout=subprocess.PIPE, + stderr=tmp) + except (OSError, DistutilsError): + # OSError if gcc is not installed, or SandboxViolation (DistutilsError + # subclass) if an old setuptools bug is triggered (see gh-3160). + pass + else: + triplet = str(p.communicate()[0].decode().strip()) + if p.returncode == 0: + # gcc supports the "-print-multiarch" option + default_x11_lib_dirs += [os.path.join("/usr/lib/", triplet)] + default_lib_dirs += [os.path.join("/usr/lib/", triplet)] + if os.path.join(sys.prefix, 'lib') not in default_lib_dirs: default_lib_dirs.insert(0, os.path.join(sys.prefix, 'lib')) @@ -376,6 +386,7 @@ def get_info(name, notfound_action=0): 'atlas_3_10_blas_threads': atlas_3_10_blas_threads_info, 'lapack_atlas_3_10': lapack_atlas_3_10_info, # use lapack_opt instead 'lapack_atlas_3_10_threads': lapack_atlas_3_10_threads_info, # ditto + 'flame': flame_info, # use lapack_opt instead 'mkl': mkl_info, # openblas which may or may not have embedded lapack 'openblas': openblas_info, # use blas_opt instead @@ -437,14 +448,27 @@ class NotFoundError(DistutilsError): """Some third-party program or library is not found.""" +class AliasedOptionError(DistutilsError): + """ + Aliases entries in config files should not be existing. + In section '{section}' we found multiple appearances of options {options}.""" + + class AtlasNotFoundError(NotFoundError): """ - Atlas (http://math-atlas.sourceforge.net/) libraries not found. + Atlas (http://github.com/math-atlas/math-atlas) libraries not found. Directories to search for the libraries can be specified in the numpy/distutils/site.cfg file (section [atlas]) or by setting the ATLAS environment variable.""" +class FlameNotFoundError(NotFoundError): + """ + FLAME (http://www.cs.utexas.edu/~flame/web/) libraries not found. + Directories to search for the libraries can be specified in the + numpy/distutils/site.cfg file (section [flame]).""" + + class LapackNotFoundError(NotFoundError): """ Lapack (http://www.netlib.org/lapack/) libraries not found. @@ -461,6 +485,13 @@ class LapackSrcNotFoundError(LapackNotFoundError): the LAPACK_SRC environment variable.""" +class BlasOptNotFoundError(NotFoundError): + """ + Optimized (vendor) Blas libraries are not found. + Falls back to netlib Blas library which has worse performance. + A better performance should be easily gained by switching + Blas library.""" + class BlasNotFoundError(NotFoundError): """ Blas (http://www.netlib.org/blas/) libraries not found. 
@@ -595,6 +626,39 @@ class system_info(object): dict_append(info, **extra_info) self.saved_results[self.__class__.__name__] = info + def get_option_single(self, *options): + """ Ensure that only one of `options` are found in the section + + Parameters + ---------- + *options : list of str + a list of options to be found in the section (``self.section``) + + Returns + ------- + str : + the option that is uniquely found in the section + + Raises + ------ + AliasedOptionError : + in case more than one of the options are found + """ + found = map(lambda opt: self.cp.has_option(self.section, opt), options) + found = list(found) + if sum(found) == 1: + return options[found.index(True)] + elif sum(found) == 0: + # nothing is found anyways + return options[0] + + # Else we have more than 1 key found + if AliasedOptionError.__doc__ is None: + raise AliasedOptionError() + raise AliasedOptionError(AliasedOptionError.__doc__.format( + section=self.section, options='[{}]'.format(', '.join(options)))) + + def has_info(self): return self.__class__.__name__ in self.saved_results @@ -608,8 +672,9 @@ class system_info(object): for key in ['extra_compile_args', 'extra_link_args']: # Get values opt = self.cp.get(self.section, key) + opt = _shell_utils.NativeParser.split(opt) if opt: - tmp = {key : [opt]} + tmp = {key: opt} dict_append(info, **tmp) return info @@ -884,7 +949,9 @@ class fftw_info(system_info): """Returns True on successful version detection, else False""" lib_dirs = self.get_lib_dirs() incl_dirs = self.get_include_dirs() - libs = self.get_libs(self.section + '_libs', ver_param['libs']) + + opt = self.get_option_single(self.section + '_libs', 'libraries') + libs = self.get_libs(opt, ver_param['libs']) info = self.check_libs(lib_dirs, libs) if info is not None: flag = 0 @@ -893,7 +960,6 @@ class fftw_info(system_info): == len(ver_param['includes']): dict_append(info, include_dirs=[d]) flag = 1 - incl_dirs = [d] break if flag: dict_append(info, define_macros=ver_param['macros']) @@ -1045,9 +1111,9 @@ class mkl_info(system_info): for d in paths: dirs = glob(os.path.join(d, 'mkl', '*')) dirs += glob(os.path.join(d, 'mkl*')) - for d in dirs: - if os.path.isdir(os.path.join(d, 'lib')): - return d + for sub_dir in dirs: + if os.path.isdir(os.path.join(sub_dir, 'lib')): + return sub_dir return None def __init__(self): @@ -1070,7 +1136,8 @@ class mkl_info(system_info): def calc_info(self): lib_dirs = self.get_lib_dirs() incl_dirs = self.get_include_dirs() - mkl_libs = self.get_libs('mkl_libs', self._lib_mkl) + opt = self.get_option_single('mkl_libs', 'libraries') + mkl_libs = self.get_libs(opt, self._lib_mkl) info = self.check_libs2(lib_dirs, mkl_libs) if info is None: return @@ -1117,8 +1184,8 @@ class atlas_info(system_info): def calc_info(self): lib_dirs = self.get_lib_dirs() info = {} - atlas_libs = self.get_libs('atlas_libs', - self._lib_names + self._lib_atlas) + opt = self.get_option_single('atlas_libs', 'libraries') + atlas_libs = self.get_libs(opt, self._lib_names + self._lib_atlas) lapack_libs = self.get_libs('lapack_libs', self._lib_lapack) atlas = None lapack = None @@ -1158,11 +1225,11 @@ class atlas_info(system_info): else: dict_append(info, **atlas) dict_append(info, define_macros=[('ATLAS_WITHOUT_LAPACK', None)]) - message = """ -********************************************************************* - Could not find lapack library within the ATLAS installation. 
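`get_option_single`, added above, enforces that only one spelling of a library option appears in a given `site.cfg` section (for example `atlas_libs` versus the generic `libraries` key). A sketch of the behaviour; whether the exception is raised depends on the local `site.cfg`:

```python
from numpy.distutils.system_info import atlas_info, AliasedOptionError

info = atlas_info()
try:
    # Returns whichever of the two option names the [atlas] section
    # defines (or the first name if neither is present); raises if both
    # aliases are configured at once.
    opt = info.get_option_single('atlas_libs', 'libraries')
    libs = info.get_libs(opt, info._lib_names + info._lib_atlas)
except AliasedOptionError as exc:
    print(exc)
```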
-********************************************************************* -""" + message = textwrap.dedent(""" + ********************************************************************* + Could not find lapack library within the ATLAS installation. + ********************************************************************* + """) warnings.warn(message, stacklevel=2) self.set_info(**info) return @@ -1185,15 +1252,15 @@ class atlas_info(system_info): if lapack_lib is not None: sz = os.stat(lapack_lib)[6] if sz <= 4000 * 1024: - message = """ -********************************************************************* - Lapack library (from ATLAS) is probably incomplete: - size of %s is %sk (expected >4000k) - - Follow the instructions in the KNOWN PROBLEMS section of the file - numpy/INSTALL.txt. -********************************************************************* -""" % (lapack_lib, sz / 1024) + message = textwrap.dedent(""" + ********************************************************************* + Lapack library (from ATLAS) is probably incomplete: + size of %s is %sk (expected >4000k) + + Follow the instructions in the KNOWN PROBLEMS section of the file + numpy/INSTALL.txt. + ********************************************************************* + """) % (lapack_lib, sz / 1024) warnings.warn(message, stacklevel=2) else: info['language'] = 'f77' @@ -1210,8 +1277,8 @@ class atlas_blas_info(atlas_info): def calc_info(self): lib_dirs = self.get_lib_dirs() info = {} - atlas_libs = self.get_libs('atlas_libs', - self._lib_names + self._lib_atlas) + opt = self.get_option_single('atlas_libs', 'libraries') + atlas_libs = self.get_libs(opt, self._lib_names + self._lib_atlas) atlas = self.check_libs2(lib_dirs, atlas_libs, []) if atlas is None: return @@ -1263,8 +1330,8 @@ class atlas_3_10_blas_info(atlas_3_10_info): def calc_info(self): lib_dirs = self.get_lib_dirs() info = {} - atlas_libs = self.get_libs('atlas_libs', - self._lib_names) + opt = self.get_option_single('atlas_lib', 'libraries') + atlas_libs = self.get_libs(opt, self._lib_names) atlas = self.check_libs2(lib_dirs, atlas_libs, []) if atlas is None: return @@ -1315,7 +1382,8 @@ class lapack_info(system_info): def calc_info(self): lib_dirs = self.get_lib_dirs() - lapack_libs = self.get_libs('lapack_libs', self._lib_names) + opt = self.get_option_single('lapack_libs', 'libraries') + lapack_libs = self.get_libs(opt, self._lib_names) info = self.check_libs(lib_dirs, lapack_libs, []) if info is None: return @@ -1470,16 +1538,16 @@ def get_atlas_version(**config): library_dirs=library_dirs, use_tee=(system_info.verbosity > 0)) if not s: - warnings.warn(""" -***************************************************** -Linkage with ATLAS requires gfortran. Use + warnings.warn(textwrap.dedent(""" + ***************************************************** + Linkage with ATLAS requires gfortran. Use - python setup.py config_fc --fcompiler=gnu95 ... + python setup.py config_fc --fcompiler=gnu95 ... -when building extension libraries that use ATLAS. -Make sure that -lgfortran is used for C++ extensions. -***************************************************** -""", stacklevel=2) + when building extension libraries that use ATLAS. + Make sure that -lgfortran is used for C++ extensions. 
+ ***************************************************** + """), stacklevel=2) dict_append(info, language='f90', define_macros=[('ATLAS_REQUIRES_GFORTRAN', None)]) except Exception: # failed to get version from file -- maybe on Windows @@ -1497,7 +1565,7 @@ Make sure that -lgfortran is used for C++ extensions. atlas_version = os.environ.get('ATLAS_VERSION', None) if atlas_version: dict_append(info, define_macros=[( - 'ATLAS_INFO', '"\\"%s\\""' % atlas_version) + 'ATLAS_INFO', _c_string_literal(atlas_version)) ]) else: dict_append(info, define_macros=[('NO_ATLAS_INFO', -1)]) @@ -1518,7 +1586,7 @@ Make sure that -lgfortran is used for C++ extensions. dict_append(info, define_macros=[('NO_ATLAS_INFO', -2)]) else: dict_append(info, define_macros=[( - 'ATLAS_INFO', '"\\"%s\\""' % atlas_version) + 'ATLAS_INFO', _c_string_literal(atlas_version)) ]) result = _cached_atlas_version[key] = atlas_version, info return result @@ -1527,139 +1595,226 @@ Make sure that -lgfortran is used for C++ extensions. class lapack_opt_info(system_info): notfounderror = LapackNotFoundError + # Default order of LAPACK checks + lapack_order = ['mkl', 'openblas', 'flame', 'atlas', 'accelerate', 'lapack'] - def calc_info(self): + def _calc_info_mkl(self): + info = get_info('lapack_mkl') + if info: + self.set_info(**info) + return True + return False - lapack_mkl_info = get_info('lapack_mkl') - if lapack_mkl_info: - self.set_info(**lapack_mkl_info) - return + def _calc_info_openblas(self): + info = get_info('openblas_lapack') + if info: + self.set_info(**info) + return True + info = get_info('openblas_clapack') + if info: + self.set_info(**info) + return True + return False - openblas_info = get_info('openblas_lapack') - if openblas_info: - self.set_info(**openblas_info) - return + def _calc_info_flame(self): + info = get_info('flame') + if info: + self.set_info(**info) + return True + return False - openblas_info = get_info('openblas_clapack') - if openblas_info: - self.set_info(**openblas_info) - return + def _calc_info_atlas(self): + info = get_info('atlas_3_10_threads') + if not info: + info = get_info('atlas_3_10') + if not info: + info = get_info('atlas_threads') + if not info: + info = get_info('atlas') + if info: + # Figure out if ATLAS has lapack... + # If not we need the lapack library, but not BLAS! + l = info.get('define_macros', []) + if ('ATLAS_WITH_LAPACK_ATLAS', None) in l \ + or ('ATLAS_WITHOUT_LAPACK', None) in l: + # Get LAPACK (with possible warnings) + # If not found we don't accept anything + # since we can't use ATLAS with LAPACK! 
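Each `_calc_info_*` helper added here is a thin wrapper around `get_info()` for a single backend and reports success by returning True. Run standalone, the same probe looks roughly like this (the result depends entirely on what is installed locally):

    from numpy.distutils.system_info import get_info

    # get_info('openblas_lapack') returns {} when an OpenBLAS built with LAPACK
    # symbols cannot be located, which is why the helpers can simply test the
    # dictionary for truthiness.
    info = get_info('openblas_lapack')
    if info:
        print(info.get('libraries'), info.get('library_dirs'))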
+ lapack_info = self._get_info_lapack() + if not lapack_info: + return False + dict_append(info, **lapack_info) + self.set_info(**info) + return True + return False - atlas_info = get_info('atlas_3_10_threads') - if not atlas_info: - atlas_info = get_info('atlas_3_10') - if not atlas_info: - atlas_info = get_info('atlas_threads') - if not atlas_info: - atlas_info = get_info('atlas') - - accelerate_info = get_info('accelerate') - if accelerate_info and not atlas_info: - self.set_info(**accelerate_info) - return + def _calc_info_accelerate(self): + info = get_info('accelerate') + if info: + self.set_info(**info) + return True + return False - need_lapack = 0 - need_blas = 0 - info = {} - if atlas_info: - l = atlas_info.get('define_macros', []) - if ('ATLAS_WITH_LAPACK_ATLAS', None) in l \ - or ('ATLAS_WITHOUT_LAPACK', None) in l: - need_lapack = 1 - info = atlas_info + def _get_info_blas(self): + # Default to get the optimized BLAS implementation + info = get_info('blas_opt') + if not info: + warnings.warn(BlasNotFoundError.__doc__ or '', stacklevel=3) + info_src = get_info('blas_src') + if not info_src: + warnings.warn(BlasSrcNotFoundError.__doc__ or '', stacklevel=3) + return {} + dict_append(info, libraries=[('fblas_src', info_src)]) + return info - else: - warnings.warn(AtlasNotFoundError.__doc__, stacklevel=2) - need_blas = 1 - need_lapack = 1 + def _get_info_lapack(self): + info = get_info('lapack') + if not info: + warnings.warn(LapackNotFoundError.__doc__ or '', stacklevel=3) + info_src = get_info('lapack_src') + if not info_src: + warnings.warn(LapackSrcNotFoundError.__doc__ or '', stacklevel=3) + return {} + dict_append(info, libraries=[('flapack_src', info_src)]) + return info + + def _calc_info_lapack(self): + info = self._get_info_lapack() + if info: + info_blas = self._get_info_blas() + dict_append(info, **info_blas) dict_append(info, define_macros=[('NO_ATLAS_INFO', 1)]) + self.set_info(**info) + return True + return False - if need_lapack: - lapack_info = get_info('lapack') - #lapack_info = {} ## uncomment for testing - if lapack_info: - dict_append(info, **lapack_info) - else: - warnings.warn(LapackNotFoundError.__doc__, stacklevel=2) - lapack_src_info = get_info('lapack_src') - if not lapack_src_info: - warnings.warn(LapackSrcNotFoundError.__doc__, stacklevel=2) - return - dict_append(info, libraries=[('flapack_src', lapack_src_info)]) - - if need_blas: - blas_info = get_info('blas') - if blas_info: - dict_append(info, **blas_info) - else: - warnings.warn(BlasNotFoundError.__doc__, stacklevel=2) - blas_src_info = get_info('blas_src') - if not blas_src_info: - warnings.warn(BlasSrcNotFoundError.__doc__, stacklevel=2) - return - dict_append(info, libraries=[('fblas_src', blas_src_info)]) + def calc_info(self): + user_order = os.environ.get('NPY_LAPACK_ORDER', None) + if user_order is None: + lapack_order = self.lapack_order + else: + # the user has requested the order of the + # check they are all in the available list, a COMMA SEPARATED list + user_order = user_order.lower().split(',') + non_existing = [] + lapack_order = [] + for order in user_order: + if order in self.lapack_order: + lapack_order.append(order) + elif len(order) > 0: + non_existing.append(order) + if len(non_existing) > 0: + raise ValueError("lapack_opt_info user defined " + "LAPACK order has unacceptable " + "values: {}".format(non_existing)) + + for lapack in lapack_order: + if getattr(self, '_calc_info_{}'.format(lapack))(): + return - self.set_info(**info) - return + if 'lapack' not in lapack_order: + # 
Since the user may request *not* to use any library, we still need + # to raise warnings to signal missing packages! + warnings.warn(LapackNotFoundError.__doc__ or '', stacklevel=2) + warnings.warn(LapackSrcNotFoundError.__doc__ or '', stacklevel=2) class blas_opt_info(system_info): notfounderror = BlasNotFoundError + # Default order of BLAS checks + blas_order = ['mkl', 'blis', 'openblas', 'atlas', 'accelerate', 'blas'] - def calc_info(self): + def _calc_info_mkl(self): + info = get_info('blas_mkl') + if info: + self.set_info(**info) + return True + return False - blas_mkl_info = get_info('blas_mkl') - if blas_mkl_info: - self.set_info(**blas_mkl_info) - return + def _calc_info_blis(self): + info = get_info('blis') + if info: + self.set_info(**info) + return True + return False - blis_info = get_info('blis') - if blis_info: - self.set_info(**blis_info) - return + def _calc_info_openblas(self): + info = get_info('openblas') + if info: + self.set_info(**info) + return True + return False - openblas_info = get_info('openblas') - if openblas_info: - self.set_info(**openblas_info) - return + def _calc_info_atlas(self): + info = get_info('atlas_3_10_blas_threads') + if not info: + info = get_info('atlas_3_10_blas') + if not info: + info = get_info('atlas_blas_threads') + if not info: + info = get_info('atlas_blas') + if info: + self.set_info(**info) + return True + return False - atlas_info = get_info('atlas_3_10_blas_threads') - if not atlas_info: - atlas_info = get_info('atlas_3_10_blas') - if not atlas_info: - atlas_info = get_info('atlas_blas_threads') - if not atlas_info: - atlas_info = get_info('atlas_blas') - - accelerate_info = get_info('accelerate') - if accelerate_info and not atlas_info: - self.set_info(**accelerate_info) - return + def _calc_info_accelerate(self): + info = get_info('accelerate') + if info: + self.set_info(**info) + return True + return False - need_blas = 0 + def _calc_info_blas(self): + # Warn about a non-optimized BLAS library + warnings.warn(BlasOptNotFoundError.__doc__ or '', stacklevel=3) info = {} - if atlas_info: - info = atlas_info + dict_append(info, define_macros=[('NO_ATLAS_INFO', 1)]) + + blas = get_info('blas') + if blas: + dict_append(info, **blas) else: - warnings.warn(AtlasNotFoundError.__doc__, stacklevel=2) - need_blas = 1 - dict_append(info, define_macros=[('NO_ATLAS_INFO', 1)]) + # Not even BLAS was found! 
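With calc_info now driven by an ordered list, the LAPACK search can be constrained through the environment (blas_opt_info below gains the analogous NPY_BLAS_ORDER). A hedged sketch of steering a build towards OpenBLAS only; names outside lapack_order trigger the ValueError above:

    import os
    # Comma-separated and lower-cased before matching against lapack_order.
    os.environ['NPY_LAPACK_ORDER'] = 'openblas'

    from numpy.distutils.system_info import get_info
    info = get_info('lapack_opt')   # {} unless one of the OpenBLAS checks succeeds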
+ warnings.warn(BlasNotFoundError.__doc__ or '', stacklevel=3) - if need_blas: - blas_info = get_info('blas') - if blas_info: - dict_append(info, **blas_info) - else: - warnings.warn(BlasNotFoundError.__doc__, stacklevel=2) - blas_src_info = get_info('blas_src') - if not blas_src_info: - warnings.warn(BlasSrcNotFoundError.__doc__, stacklevel=2) - return - dict_append(info, libraries=[('fblas_src', blas_src_info)]) + blas_src = get_info('blas_src') + if not blas_src: + warnings.warn(BlasSrcNotFoundError.__doc__ or '', stacklevel=3) + return False + dict_append(info, libraries=[('fblas_src', blas_src)]) self.set_info(**info) - return + return True + + def calc_info(self): + user_order = os.environ.get('NPY_BLAS_ORDER', None) + if user_order is None: + blas_order = self.blas_order + else: + # the user has requested the order of the + # check they are all in the available list + user_order = user_order.lower().split(',') + non_existing = [] + blas_order = [] + for order in user_order: + if order in self.blas_order: + blas_order.append(order) + elif len(order) > 0: + non_existing.append(order) + if len(non_existing) > 0: + raise ValueError("blas_opt_info user defined BLAS order has unacceptable values: {}".format(non_existing)) + + for blas in blas_order: + if getattr(self, '_calc_info_{}'.format(blas))(): + return + + if 'blas' not in blas_order: + # Since the user may request *not* to use any library, we still need + # to raise warnings to signal missing packages! + warnings.warn(BlasNotFoundError.__doc__ or '', stacklevel=2) + warnings.warn(BlasSrcNotFoundError.__doc__ or '', stacklevel=2) class blas_info(system_info): @@ -1670,39 +1825,64 @@ class blas_info(system_info): def calc_info(self): lib_dirs = self.get_lib_dirs() - blas_libs = self.get_libs('blas_libs', self._lib_names) + opt = self.get_option_single('blas_libs', 'libraries') + blas_libs = self.get_libs(opt, self._lib_names) info = self.check_libs(lib_dirs, blas_libs, []) if info is None: return else: info['include_dirs'] = self.get_include_dirs() if platform.system() == 'Windows': - # The check for windows is needed because has_cblas uses the + # The check for windows is needed because get_cblas_libs uses the # same compiler that was used to compile Python and msvc is # often not installed when mingw is being used. This rough # treatment is not desirable, but windows is tricky. info['language'] = 'f77' # XXX: is it generally true? else: - lib = self.has_cblas(info) + lib = self.get_cblas_libs(info) if lib is not None: info['language'] = 'c' - info['libraries'] = [lib] + info['libraries'] = lib info['define_macros'] = [('HAVE_CBLAS', None)] self.set_info(**info) - def has_cblas(self, info): + def get_cblas_libs(self, info): + """ Check whether we can link with CBLAS interface + + This method will search through several combinations of libraries + to check whether CBLAS is present: + + 1. Libraries in ``info['libraries']``, as is + 2. As 1. but also explicitly adding ``'cblas'`` as a library + 3. As 1. but also explicitly adding ``'blas'`` as a library + 4. Check only library ``'cblas'`` + 5. Check only library ``'blas'`` + + Parameters + ---------- + info : dict + system information dictionary for compilation and linking + + Returns + ------- + libraries : list of str or None + a list of libraries that enables the use of CBLAS interface. + Returns None if not found or a compilation error occurs. + + Since 1.17 returns a list. 
+ """ # primitive cblas check by looking for the header and trying to link # cblas or blas - res = False c = customized_ccompiler() tmpdir = tempfile.mkdtemp() - s = """#include <cblas.h> - int main(int argc, const char *argv[]) - { - double a[4] = {1,2,3,4}; - double b[4] = {5,6,7,8}; - return cblas_ddot(4, a, 1, b, 1) > 10; - }""" + s = textwrap.dedent("""\ + #include <cblas.h> + int main(int argc, const char *argv[]) + { + double a[4] = {1,2,3,4}; + double b[4] = {5,6,7,8}; + return cblas_ddot(4, a, 1, b, 1) > 10; + }""") src = os.path.join(tmpdir, 'source.c') try: with open(src, 'wt') as f: @@ -1712,27 +1892,24 @@ class blas_info(system_info): # check we can compile (find headers) obj = c.compile([src], output_dir=tmpdir, include_dirs=self.get_include_dirs()) + except (distutils.ccompiler.CompileError, distutils.ccompiler.LinkError): + return None - # check we can link (find library) - # some systems have separate cblas and blas libs. First - # check for cblas lib, and if not present check for blas lib. + # check we can link (find library) + # some systems have separate cblas and blas libs. + for libs in [info['libraries'], ['cblas'] + info['libraries'], + ['blas'] + info['libraries'], ['cblas'], ['blas']]: try: c.link_executable(obj, os.path.join(tmpdir, "a.out"), - libraries=["cblas"], + libraries=libs, library_dirs=info['library_dirs'], extra_postargs=info.get('extra_link_args', [])) - res = "cblas" + return libs except distutils.ccompiler.LinkError: - c.link_executable(obj, os.path.join(tmpdir, "a.out"), - libraries=["blas"], - library_dirs=info['library_dirs'], - extra_postargs=info.get('extra_link_args', [])) - res = "blas" - except distutils.ccompiler.CompileError: - res = None + pass finally: shutil.rmtree(tmpdir) - return res + return None class openblas_info(blas_info): @@ -1749,9 +1926,9 @@ class openblas_info(blas_info): lib_dirs = self.get_lib_dirs() - openblas_libs = self.get_libs('libraries', self._lib_names) - if openblas_libs == self._lib_names: # backward compat with 1.8.0 - openblas_libs = self.get_libs('openblas_libs', self._lib_names) + # Prefer to use libraries over openblas_libs + opt = self.get_option_single('openblas_libs', 'libraries') + openblas_libs = self.get_libs(opt, self._lib_names) info = self.check_libs(lib_dirs, openblas_libs, []) @@ -1822,12 +1999,13 @@ class openblas_lapack_info(openblas_info): c = customized_ccompiler() tmpdir = tempfile.mkdtemp() - s = """void zungqr_(); - int main(int argc, const char *argv[]) - { - zungqr_(); - return 0; - }""" + s = textwrap.dedent("""\ + void zungqr_(); + int main(int argc, const char *argv[]) + { + zungqr_(); + return 0; + }""") src = os.path.join(tmpdir, 'source.c') out = os.path.join(tmpdir, 'a.out') # Add the additional "extra" arguments @@ -1863,10 +2041,8 @@ class blis_info(blas_info): def calc_info(self): lib_dirs = self.get_lib_dirs() - blis_libs = self.get_libs('libraries', self._lib_names) - if blis_libs == self._lib_names: - blis_libs = self.get_libs('blis_libs', self._lib_names) - + opt = self.get_option_single('blis_libs', 'libraries') + blis_libs = self.get_libs(opt, self._lib_names) info = self.check_libs2(lib_dirs, blis_libs, []) if info is None: return @@ -1879,8 +2055,86 @@ class blis_info(blas_info): include_dirs=incl_dirs) self.set_info(**info) + +class flame_info(system_info): + """ Usage of libflame for LAPACK operations + + This requires libflame to be compiled with lapack wrappers: + + ./configure --enable-lapack2flame ... 
+ + Be aware that libflame 5.1.0 has some missing names in the shared library, so + if you have problems, try the static flame library. + """ + section = 'flame' + _lib_names = ['flame'] + notfounderror = FlameNotFoundError + + def check_embedded_lapack(self, info): + """ libflame does not necessarily have a wrapper for fortran LAPACK, we need to check """ + c = customized_ccompiler() + + tmpdir = tempfile.mkdtemp() + s = textwrap.dedent("""\ + void zungqr_(); + int main(int argc, const char *argv[]) + { + zungqr_(); + return 0; + }""") + src = os.path.join(tmpdir, 'source.c') + out = os.path.join(tmpdir, 'a.out') + # Add the additional "extra" arguments + extra_args = info.get('extra_link_args', []) + try: + with open(src, 'wt') as f: + f.write(s) + obj = c.compile([src], output_dir=tmpdir) + try: + c.link_executable(obj, out, libraries=info['libraries'], + library_dirs=info['library_dirs'], + extra_postargs=extra_args) + return True + except distutils.ccompiler.LinkError: + return False + finally: + shutil.rmtree(tmpdir) + + def calc_info(self): + lib_dirs = self.get_lib_dirs() + flame_libs = self.get_libs('libraries', self._lib_names) + + info = self.check_libs2(lib_dirs, flame_libs, []) + if info is None: + return + + if self.check_embedded_lapack(info): + # check if the user has supplied all information required + self.set_info(**info) + else: + # Try and get the BLAS lib to see if we can get it to work + blas_info = get_info('blas_opt') + if not blas_info: + # since we already failed once, this ain't going to work either + return + + # Now we need to merge the two dictionaries + for key in blas_info: + if isinstance(blas_info[key], list): + info[key] = info.get(key, []) + blas_info[key] + elif isinstance(blas_info[key], tuple): + info[key] = info.get(key, ()) + blas_info[key] + else: + info[key] = info.get(key, '') + blas_info[key] + + # Now check again + if self.check_embedded_lapack(info): + self.set_info(**info) + + class accelerate_info(system_info): section = 'accelerate' + _lib_names = ['accelerate', 'veclib'] notfounderror = BlasNotFoundError def calc_info(self): @@ -1889,7 +2143,7 @@ class accelerate_info(system_info): if libraries: libraries = [libraries] else: - libraries = self.get_libs('libraries', ['accelerate', 'veclib']) + libraries = self.get_libs('libraries', self._lib_names) libraries = [lib.strip().lower() for lib in libraries] if (sys.platform == 'darwin' and @@ -1987,6 +2241,7 @@ class blas_src_info(system_info): class x11_info(system_info): section = 'x11' notfounderror = X11NotFoundError + _lib_names = ['X11'] def __init__(self): system_info.__init__(self, @@ -1998,7 +2253,8 @@ class x11_info(system_info): return lib_dirs = self.get_lib_dirs() include_dirs = self.get_include_dirs() - x11_libs = self.get_libs('x11_libs', ['X11']) + opt = self.get_option_single('x11_libs', 'libraries') + x11_libs = self.get_libs(opt, self._lib_names) info = self.check_libs(lib_dirs, x11_libs, []) if info is None: return @@ -2063,7 +2319,7 @@ class _numpy_info(system_info): if vrs is None: continue macros = [(self.modulename.upper() + '_VERSION', - '"\\"%s\\""' % (vrs)), + _c_string_literal(vrs)), (self.modulename.upper(), None)] break dict_append(info, define_macros=macros) @@ -2268,7 +2524,7 @@ class _pkg_config_info(system_info): version = self.get_config_output(config_exe, self.version_flag) if version: macros.append((self.__class__.__name__.split('.')[-1].upper(), - '"\\"%s\\""' % (version))) + _c_string_literal(version))) if self.version_macro_name: 
macros.append((self.version_macro_name + '_%s' % (version.replace('.', '_')), None)) @@ -2389,7 +2645,8 @@ class amd_info(system_info): def calc_info(self): lib_dirs = self.get_lib_dirs() - amd_libs = self.get_libs('amd_libs', self._lib_names) + opt = self.get_option_single('amd_libs', 'libraries') + amd_libs = self.get_libs(opt, self._lib_names) info = self.check_libs(lib_dirs, amd_libs, []) if info is None: return @@ -2420,7 +2677,8 @@ class umfpack_info(system_info): def calc_info(self): lib_dirs = self.get_lib_dirs() - umfpack_libs = self.get_libs('umfpack_libs', self._lib_names) + opt = self.get_option_single('umfpack_libs', 'libraries') + umfpack_libs = self.get_libs(opt, self._lib_names) info = self.check_libs(lib_dirs, umfpack_libs, []) if info is None: return diff --git a/numpy/distutils/tests/test_exec_command.py b/numpy/distutils/tests/test_exec_command.py index 8bd265007..37912f5ba 100644 --- a/numpy/distutils/tests/test_exec_command.py +++ b/numpy/distutils/tests/test_exec_command.py @@ -6,7 +6,7 @@ from tempfile import TemporaryFile from numpy.distutils import exec_command from numpy.distutils.exec_command import get_pythonexe -from numpy.testing import tempdir, assert_ +from numpy.testing import tempdir, assert_, assert_warns # In python 3 stdout, stderr are text (unicode compliant) devices, so to # emulate them import StringIO from the io module. @@ -71,27 +71,31 @@ def test_exec_command_stdout(): # Test posix version: with redirect_stdout(StringIO()): with redirect_stderr(TemporaryFile()): - exec_command.exec_command("cd '.'") + with assert_warns(DeprecationWarning): + exec_command.exec_command("cd '.'") if os.name == 'posix': # Test general (non-posix) version: with emulate_nonposix(): with redirect_stdout(StringIO()): with redirect_stderr(TemporaryFile()): - exec_command.exec_command("cd '.'") + with assert_warns(DeprecationWarning): + exec_command.exec_command("cd '.'") def test_exec_command_stderr(): # Test posix version: with redirect_stdout(TemporaryFile(mode='w+')): with redirect_stderr(StringIO()): - exec_command.exec_command("cd '.'") + with assert_warns(DeprecationWarning): + exec_command.exec_command("cd '.'") if os.name == 'posix': # Test general (non-posix) version: with emulate_nonposix(): with redirect_stdout(TemporaryFile()): with redirect_stderr(StringIO()): - exec_command.exec_command("cd '.'") + with assert_warns(DeprecationWarning): + exec_command.exec_command("cd '.'") class TestExecCommand(object): @@ -205,11 +209,12 @@ class TestExecCommand(object): def test_basic(self): with redirect_stdout(StringIO()): with redirect_stderr(StringIO()): - if os.name == "posix": - self.check_posix(use_tee=0) - self.check_posix(use_tee=1) - elif os.name == "nt": - self.check_nt(use_tee=0) - self.check_nt(use_tee=1) - self.check_execute_in(use_tee=0) - self.check_execute_in(use_tee=1) + with assert_warns(DeprecationWarning): + if os.name == "posix": + self.check_posix(use_tee=0) + self.check_posix(use_tee=1) + elif os.name == "nt": + self.check_nt(use_tee=0) + self.check_nt(use_tee=1) + self.check_execute_in(use_tee=0) + self.check_execute_in(use_tee=1) diff --git a/numpy/distutils/tests/test_fcompiler.py b/numpy/distutils/tests/test_fcompiler.py index 95e44b051..ba19a97ea 100644 --- a/numpy/distutils/tests/test_fcompiler.py +++ b/numpy/distutils/tests/test_fcompiler.py @@ -1,6 +1,8 @@ from __future__ import division, absolute_import, print_function -from numpy.testing import assert_ +import pytest + +from numpy.testing import assert_, suppress_warnings import 
numpy.distutils.fcompiler customizable_flags = [ @@ -25,6 +27,7 @@ def test_fcompiler_flags(monkeypatch): monkeypatch.setenv(envvar, new_flag) new_flags = getattr(flag_vars, opt) + monkeypatch.delenv(envvar) assert_(new_flags == [new_flag]) @@ -33,12 +36,46 @@ def test_fcompiler_flags(monkeypatch): for opt, envvar in customizable_flags: new_flag = '-dummy-{}-flag'.format(opt) prev_flags = getattr(flag_vars, opt) - monkeypatch.setenv(envvar, new_flag) new_flags = getattr(flag_vars, opt) + monkeypatch.delenv(envvar) if prev_flags is None: assert_(new_flags == [new_flag]) else: assert_(new_flags == prev_flags + [new_flag]) + +def test_fcompiler_flags_append_warning(monkeypatch): + # Test to check that the warning for append behavior changing in future + # is triggered. Need to use a real compiler instance so that we have + # non-empty flags to start with (otherwise the "if var and append" check + # will always be false). + try: + with suppress_warnings() as sup: + sup.record() + fc = numpy.distutils.fcompiler.new_fcompiler(compiler='gnu95') + fc.customize() + except numpy.distutils.fcompiler.CompilerNotFound: + pytest.skip("gfortran not found, so can't execute this test") + + # Ensure NPY_DISTUTILS_APPEND_FLAGS not defined + monkeypatch.delenv('NPY_DISTUTILS_APPEND_FLAGS', raising=False) + + for opt, envvar in customizable_flags: + new_flag = '-dummy-{}-flag'.format(opt) + with suppress_warnings() as sup: + sup.record() + prev_flags = getattr(fc.flag_vars, opt) + + monkeypatch.setenv(envvar, new_flag) + with suppress_warnings() as sup: + sup.record() + new_flags = getattr(fc.flag_vars, opt) + if prev_flags: + # Check that warning was issued + assert len(sup.log) == 1 + + monkeypatch.delenv(envvar) + assert_(new_flags == [new_flag]) + diff --git a/numpy/distutils/tests/test_shell_utils.py b/numpy/distutils/tests/test_shell_utils.py new file mode 100644 index 000000000..a0344244f --- /dev/null +++ b/numpy/distutils/tests/test_shell_utils.py @@ -0,0 +1,79 @@ +from __future__ import division, absolute_import, print_function + +import pytest +import subprocess +import os +import json +import sys + +from numpy.distutils import _shell_utils + +argv_cases = [ + [r'exe'], + [r'path/exe'], + [r'path\exe'], + [r'\\server\path\exe'], + [r'path to/exe'], + [r'path to\exe'], + + [r'exe', '--flag'], + [r'path/exe', '--flag'], + [r'path\exe', '--flag'], + [r'path to/exe', '--flag'], + [r'path to\exe', '--flag'], + + # flags containing literal quotes in their name + [r'path to/exe', '--flag-"quoted"'], + [r'path to\exe', '--flag-"quoted"'], + [r'path to/exe', '"--flag-quoted"'], + [r'path to\exe', '"--flag-quoted"'], +] + + +@pytest.fixture(params=[ + _shell_utils.WindowsParser, + _shell_utils.PosixParser +]) +def Parser(request): + return request.param + + +@pytest.fixture +def runner(Parser): + if Parser != _shell_utils.NativeParser: + pytest.skip('Unable to run with non-native parser') + + if Parser == _shell_utils.WindowsParser: + return lambda cmd: subprocess.check_output(cmd) + elif Parser == _shell_utils.PosixParser: + # posix has no non-shell string parsing + return lambda cmd: subprocess.check_output(cmd, shell=True) + else: + raise NotImplementedError + + +@pytest.mark.parametrize('argv', argv_cases) +def test_join_matches_subprocess(Parser, runner, argv): + """ + Test that join produces strings understood by subprocess + """ + # invoke python to return its arguments as json + cmd = [ + sys.executable, '-c', + 'import json, sys; print(json.dumps(sys.argv[1:]))' + ] + joined = Parser.join(cmd + argv) 
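The argv cases above are exercised against both parsers; for intuition, the two join implementations quote the same argv quite differently. A small sketch (expected outputs shown as comments, exact quoting may vary):

    from numpy.distutils._shell_utils import PosixParser, WindowsParser

    argv = ['path to/exe', '--flag']
    PosixParser.join(argv)     # "'path to/exe' --flag"    (shlex-style single quotes)
    WindowsParser.join(argv)   # '"path to/exe" --flag'    (subprocess.list2cmdline)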
+ json_out = runner(joined).decode() + assert json.loads(json_out) == argv + + +@pytest.mark.parametrize('argv', argv_cases) +def test_roundtrip(Parser, argv): + """ + Test that split is the inverse operation of join + """ + try: + joined = Parser.join(argv) + assert argv == Parser.split(joined) + except NotImplementedError: + pytest.skip("Not implemented") diff --git a/numpy/distutils/tests/test_system_info.py b/numpy/distutils/tests/test_system_info.py index 4aec13c82..3c7638960 100644 --- a/numpy/distutils/tests/test_system_info.py +++ b/numpy/distutils/tests/test_system_info.py @@ -7,10 +7,12 @@ from tempfile import mkstemp, mkdtemp from subprocess import Popen, PIPE from distutils.errors import DistutilsError +from numpy.testing import assert_, assert_equal, assert_raises from numpy.distutils import ccompiler, customized_ccompiler -from numpy.testing import assert_, assert_equal from numpy.distutils.system_info import system_info, ConfigParser +from numpy.distutils.system_info import AliasedOptionError from numpy.distutils.system_info import default_lib_dirs, default_include_dirs +from numpy.distutils import _shell_utils def get_class(name, notfound_action=1): @@ -21,7 +23,8 @@ def get_class(name, notfound_action=1): 2 - raise error """ cl = {'temp1': Temp1Info, - 'temp2': Temp2Info + 'temp2': Temp2Info, + 'duplicate_options': DuplicateOptionInfo, }.get(name.lower(), _system_info) return cl() @@ -29,7 +32,7 @@ simple_site = """ [ALL] library_dirs = {dir1:s}{pathsep:s}{dir2:s} libraries = {lib1:s},{lib2:s} -extra_compile_args = -I/fake/directory +extra_compile_args = -I/fake/directory -I"/path with/spaces" -Os runtime_library_dirs = {dir1:s} [temp1] @@ -40,8 +43,12 @@ runtime_library_dirs = {dir1:s} [temp2] library_dirs = {dir2:s} libraries = {lib2:s} -extra_link_args = -Wl,-rpath={lib2:s} +extra_link_args = -Wl,-rpath={lib2_escaped:s} rpath = {dir2:s} + +[duplicate_options] +mylib_libs = {lib1:s} +libraries = {lib2:s} """ site_cfg = simple_site @@ -118,6 +125,10 @@ class Temp2Info(_system_info): """For testing purposes""" section = 'temp2' +class DuplicateOptionInfo(_system_info): + """For testing purposes""" + section = 'duplicate_options' + class TestSystemInfoReading(object): @@ -137,7 +148,8 @@ class TestSystemInfoReading(object): 'lib1': self._lib1, 'dir2': self._dir2, 'lib2': self._lib2, - 'pathsep': os.pathsep + 'pathsep': os.pathsep, + 'lib2_escaped': _shell_utils.NativeParser.join([self._lib2]) }) # Write site.cfg fd, self._sitecfg = mkstemp() @@ -158,6 +170,9 @@ class TestSystemInfoReading(object): self.c_default = site_and_parse(get_class('default'), self._sitecfg) self.c_temp1 = site_and_parse(get_class('temp1'), self._sitecfg) self.c_temp2 = site_and_parse(get_class('temp2'), self._sitecfg) + self.c_dup_options = site_and_parse(get_class('duplicate_options'), + self._sitecfg) + def teardown(self): # Do each removal separately @@ -181,7 +196,7 @@ class TestSystemInfoReading(object): assert_equal(tsi.get_libraries(), [self._lib1, self._lib2]) assert_equal(tsi.get_runtime_lib_dirs(), [self._dir1]) extra = tsi.calc_extra_info() - assert_equal(extra['extra_compile_args'], ['-I/fake/directory']) + assert_equal(extra['extra_compile_args'], ['-I/fake/directory', '-I/path with/spaces', '-Os']) def test_temp1(self): # Read in all information in the temp1 block @@ -200,6 +215,13 @@ class TestSystemInfoReading(object): extra = tsi.calc_extra_info() assert_equal(extra['extra_link_args'], ['-Wl,-rpath=' + self._lib2]) + def test_duplicate_options(self): + # Ensure that duplicates are 
raising an AliasedOptionError + tsi = self.c_dup_options + assert_raises(AliasedOptionError, tsi.get_option_single, "mylib_libs", "libraries") + assert_equal(tsi.get_libs("mylib_libs", [self._lib1]), [self._lib1]) + assert_equal(tsi.get_libs("libraries", [self._lib2]), [self._lib2]) + @pytest.mark.skipif(not HAVE_COMPILER, reason="Missing compiler") def test_compile1(self): # Compile source and link the first source |
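To make the duplicate-option behaviour exercised by test_duplicate_options concrete: a site.cfg section that sets both the backend-specific alias and the generic `libraries` key now fails fast instead of silently preferring one of them. A hypothetical reproduction (the section contents are illustrative):

    from numpy.distutils.system_info import get_info, AliasedOptionError

    # Assuming a site.cfg containing:
    #   [openblas]
    #   openblas_libs = openblas
    #   libraries = openblas
    # openblas_info.calc_info() reaches get_option_single('openblas_libs',
    # 'libraries'), which sees both keys and raises.
    try:
        get_info('openblas')
    except AliasedOptionError as exc:
        print(exc)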
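The test_exec_command changes reflect that calling exec_command now emits a DeprecationWarning. Downstream code that still relies on it can assert on or silence the warning in the usual way; a sketch, assuming the warning is raised at call time as the tests above expect:

    import warnings
    from numpy.distutils import exec_command

    with warnings.catch_warnings():
        warnings.simplefilter('ignore', DeprecationWarning)
        status, output = exec_command.exec_command('echo ok')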