-rw-r--r--  doc/release/1.14.0-notes.rst           |  15
-rw-r--r--  numpy/distutils/command/build_clib.py  | 102
-rw-r--r--  numpy/distutils/command/build_ext.py   | 168
-rw-r--r--  numpy/distutils/fcompiler/__init__.py  |  33
-rw-r--r--  numpy/distutils/fcompiler/gnu.py       | 193
-rw-r--r--  numpy/distutils/misc_util.py           |  11
-rw-r--r--  numpy/distutils/system_info.py         |  63
-rw-r--r--  numpy/lib/arraypad.py                  |  11
-rw-r--r--  numpy/lib/tests/test_arraypad.py       |   7
9 files changed, 470 insertions(+), 133 deletions(-)
diff --git a/doc/release/1.14.0-notes.rst b/doc/release/1.14.0-notes.rst
index 2730643f1..e38c8fc83 100644
--- a/doc/release/1.14.0-notes.rst
+++ b/doc/release/1.14.0-notes.rst
@@ -180,6 +180,21 @@ f2py now handles arrays of dimension 0
f2py now allows for the allocation of arrays of dimension 0. This allows for
more consistent handling of corner cases downstream.
+``numpy.distutils`` supports using MSVC and mingw64-gfortran together
+---------------------------------------------------------------------
+
+Numpy distutils now supports using MSVC and Mingw64-gfortran compilers
+together. This enables producing Python extension modules on Windows
+containing Fortran code, while retaining compatibility with the
+binaries distributed by Python.org. Not all use cases are supported,
+but most common ways to wrap Fortran for Python are functional.
+
+Compilation in this mode is usually enabled automatically, and can be
+selected via the ``--fcompiler`` and ``--compiler`` options to
+``setup.py``. Moreover, linking Fortran codes to static OpenBLAS is
+supported; by default a gfortran-compatible static archive
+``openblas.a`` is looked for.
+
Changes
=======
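
A minimal sketch of the workflow the release note above describes, assuming a hypothetical package ``fortmod`` wrapping a single Fortran source ``fib.f90``; only the ``--compiler``/``--fcompiler`` selection and the ``msvc``/``gnu95`` compiler types come from this changeset, the rest is illustrative:

    # setup.py -- hypothetical numpy.distutils project wrapping one Fortran file
    def configuration(parent_package='', top_path=None):
        from numpy.distutils.misc_util import Configuration
        config = Configuration('fortmod', parent_package, top_path)
        # Fortran sources go through the selected fcompiler; the resulting
        # objects are wrapped for the MSVC linker by the machinery below.
        config.add_extension('_fib', sources=['fib.f90'])
        return config

    if __name__ == '__main__':
        from numpy.distutils.core import setup
        setup(configuration=configuration)

    # On Windows, the MSVC + mingw64-gfortran combination is normally picked
    # up automatically, but it can be requested explicitly:
    #
    #   python setup.py build --compiler=msvc --fcompiler=gnu95
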
diff --git a/numpy/distutils/command/build_clib.py b/numpy/distutils/command/build_clib.py
index 1c868cf6c..910493a77 100644
--- a/numpy/distutils/command/build_clib.py
+++ b/numpy/distutils/command/build_clib.py
@@ -7,7 +7,7 @@ from glob import glob
import shutil
from distutils.command.build_clib import build_clib as old_build_clib
from distutils.errors import DistutilsSetupError, DistutilsError, \
- DistutilsFileError
+ DistutilsFileError
from numpy.distutils import log
from distutils.dep_util import newer_group
@@ -19,9 +19,10 @@ from numpy.distutils.misc_util import filter_sources, has_f_sources,\
_l = old_build_clib.user_options
for _i in range(len(_l)):
if _l[_i][0] in ['build-clib', 'build-temp']:
- _l[_i] = (_l[_i][0]+'=',)+_l[_i][1:]
+ _l[_i] = (_l[_i][0] + '=',) + _l[_i][1:]
#
+
class build_clib(old_build_clib):
description = "build C/C++/F libraries used by Python extensions"
@@ -32,7 +33,7 @@ class build_clib(old_build_clib):
('inplace', 'i', 'Build in-place'),
('parallel=', 'j',
"number of parallel jobs"),
- ]
+ ]
boolean_options = old_build_clib.boolean_options + ['inplace']
@@ -75,7 +76,8 @@ class build_clib(old_build_clib):
for (lib_name, build_info) in self.libraries:
l = build_info.get('language', None)
- if l and l not in languages: languages.append(l)
+ if l and l not in languages:
+ languages.append(l)
from distutils.ccompiler import new_compiler
self.compiler = new_compiler(compiler=self.compiler,
@@ -94,11 +96,11 @@ class build_clib(old_build_clib):
if self.have_f_sources():
from numpy.distutils.fcompiler import new_fcompiler
self._f_compiler = new_fcompiler(compiler=self.fcompiler,
- verbose=self.verbose,
- dry_run=self.dry_run,
- force=self.force,
- requiref90='f90' in languages,
- c_compiler=self.compiler)
+ verbose=self.verbose,
+ dry_run=self.dry_run,
+ force=self.force,
+ requiref90='f90' in languages,
+ c_compiler=self.compiler)
if self._f_compiler is not None:
self._f_compiler.customize(self.distribution)
@@ -114,10 +116,10 @@ class build_clib(old_build_clib):
self.build_libraries(self.libraries)
if self.inplace:
- for l in self.distribution.installed_libraries:
+ for l in self.distribution.installed_libraries:
libname = self.compiler.library_filename(l.name)
source = os.path.join(self.build_clib, libname)
- target = os.path.join(l.target_dir, libname)
+ target = os.path.join(l.target_dir, libname)
self.mkpath(l.target_dir)
shutil.copy(source, target)
@@ -140,21 +142,25 @@ class build_clib(old_build_clib):
sources = build_info.get('sources')
if sources is None or not is_sequence(sources):
raise DistutilsSetupError(("in 'libraries' option (library '%s'), " +
- "'sources' must be present and must be " +
- "a list of source filenames") % lib_name)
+ "'sources' must be present and must be " +
+ "a list of source filenames") % lib_name)
sources = list(sources)
c_sources, cxx_sources, f_sources, fmodule_sources \
- = filter_sources(sources)
+ = filter_sources(sources)
requiref90 = not not fmodule_sources or \
- build_info.get('language', 'c')=='f90'
+ build_info.get('language', 'c') == 'f90'
# save source type information so that build_ext can use it.
source_languages = []
- if c_sources: source_languages.append('c')
- if cxx_sources: source_languages.append('c++')
- if requiref90: source_languages.append('f90')
- elif f_sources: source_languages.append('f77')
+ if c_sources:
+ source_languages.append('c')
+ if cxx_sources:
+ source_languages.append('c++')
+ if requiref90:
+ source_languages.append('f90')
+ elif f_sources:
+ source_languages.append('f77')
build_info['source_languages'] = source_languages
lib_file = compiler.library_filename(lib_name,
@@ -168,8 +174,8 @@ class build_clib(old_build_clib):
config_fc = build_info.get('config_fc', {})
if fcompiler is not None and config_fc:
- log.info('using additional config_fc from setup script '\
- 'for fortran compiler: %s' \
+ log.info('using additional config_fc from setup script '
+ 'for fortran compiler: %s'
% (config_fc,))
from numpy.distutils.fcompiler import new_fcompiler
fcompiler = new_fcompiler(compiler=fcompiler.compiler_type,
@@ -186,12 +192,14 @@ class build_clib(old_build_clib):
# check availability of Fortran compilers
if (f_sources or fmodule_sources) and fcompiler is None:
- raise DistutilsError("library %s has Fortran sources"\
- " but no Fortran compiler found" % (lib_name))
+ raise DistutilsError("library %s has Fortran sources"
+ " but no Fortran compiler found" % (lib_name))
if fcompiler is not None:
- fcompiler.extra_f77_compile_args = build_info.get('extra_f77_compile_args') or []
- fcompiler.extra_f90_compile_args = build_info.get('extra_f90_compile_args') or []
+ fcompiler.extra_f77_compile_args = build_info.get(
+ 'extra_f77_compile_args') or []
+ fcompiler.extra_f90_compile_args = build_info.get(
+ 'extra_f90_compile_args') or []
macros = build_info.get('macros')
include_dirs = build_info.get('include_dirs')
@@ -203,9 +211,10 @@ class build_clib(old_build_clib):
# where compiled F90 module files are:
module_dirs = build_info.get('module_dirs') or []
module_build_dir = os.path.dirname(lib_file)
- if requiref90: self.mkpath(module_build_dir)
+ if requiref90:
+ self.mkpath(module_build_dir)
- if compiler.compiler_type=='msvc':
+ if compiler.compiler_type == 'msvc':
# this hack works around the msvc compiler attributes
# problem, msvc uses its own convention :(
c_sources += cxx_sources
@@ -239,7 +248,7 @@ class build_clib(old_build_clib):
if requiref90:
if fcompiler.module_dir_switch is None:
existing_modules = glob('*.mod')
- extra_postargs += fcompiler.module_options(\
+ extra_postargs += fcompiler.module_options(
module_dirs, module_build_dir)
if fmodule_sources:
@@ -257,14 +266,14 @@ class build_clib(old_build_clib):
if f in existing_modules:
continue
t = os.path.join(module_build_dir, f)
- if os.path.abspath(f)==os.path.abspath(t):
+ if os.path.abspath(f) == os.path.abspath(t):
continue
if os.path.isfile(t):
os.remove(t)
try:
self.move_file(f, module_build_dir)
except DistutilsFileError:
- log.warn('failed to move %r to %r' \
+ log.warn('failed to move %r to %r'
% (f, module_build_dir))
if f_sources:
@@ -278,13 +287,32 @@ class build_clib(old_build_clib):
else:
f_objects = []
- objects.extend(f_objects)
-
- # assume that default linker is suitable for
- # linking Fortran object files
- compiler.create_static_lib(objects, lib_name,
- output_dir=self.build_clib,
- debug=self.debug)
+ if f_objects and not fcompiler.can_ccompiler_link(compiler):
+ # Default linker cannot link Fortran object files, and results
+ # need to be wrapped later. Instead of creating a real static
+ # library, just keep track of the object files.
+ listfn = os.path.join(self.build_clib,
+ lib_name + '.fobjects')
+ with open(listfn, 'w') as f:
+ f.write("\n".join(os.path.abspath(obj) for obj in f_objects))
+
+ listfn = os.path.join(self.build_clib,
+ lib_name + '.cobjects')
+ with open(listfn, 'w') as f:
+ f.write("\n".join(os.path.abspath(obj) for obj in objects))
+
+ # create empty "library" file for dependency tracking
+ lib_fname = os.path.join(self.build_clib,
+ lib_name + compiler.static_lib_extension)
+ with open(lib_fname, 'wb') as f:
+ pass
+ else:
+ # assume that default linker is suitable for
+ # linking Fortran object files
+ objects.extend(f_objects)
+ compiler.create_static_lib(objects, lib_name,
+ output_dir=self.build_clib,
+ debug=self.debug)
# fix library dependencies
clib_libraries = build_info.get('libraries', [])
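
For reference, the list-file convention the hunk above introduces: instead of a real archive, build_clib leaves an empty static-library file for dependency tracking plus two plain-text sidecars, ``<lib>.fobjects`` and ``<lib>.cobjects``, each holding one absolute object path per line. A small reader sketch under those assumptions (the helper name is illustrative, not part of numpy.distutils):

    import os

    def read_fake_static_lib(build_clib_dir, lib_name):
        """Return (fortran_objects, c_objects) recorded for a fake static lib."""
        def _read_list(ext):
            path = os.path.join(build_clib_dir, lib_name + ext)
            if not os.path.isfile(path):
                return []
            with open(path, 'r') as f:
                # one absolute object path per line, skip blanks
                return [line for line in f.read().splitlines() if line]

        return _read_list('.fobjects'), _read_list('.cobjects')
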
diff --git a/numpy/distutils/command/build_ext.py b/numpy/distutils/command/build_ext.py
index cf5bfa04f..d935a3303 100644
--- a/numpy/distutils/command/build_ext.py
+++ b/numpy/distutils/command/build_ext.py
@@ -5,24 +5,26 @@ from __future__ import division, absolute_import, print_function
import os
import sys
+import shutil
from glob import glob
from distutils.dep_util import newer_group
from distutils.command.build_ext import build_ext as old_build_ext
from distutils.errors import DistutilsFileError, DistutilsSetupError,\
- DistutilsError
+ DistutilsError
from distutils.file_util import copy_file
from numpy.distutils import log
from numpy.distutils.exec_command import exec_command
-from numpy.distutils.system_info import combine_paths
+from numpy.distutils.system_info import combine_paths, system_info
from numpy.distutils.misc_util import filter_sources, has_f_sources, \
- has_cxx_sources, get_ext_source_files, \
- get_numpy_include_dirs, is_sequence, get_build_architecture, \
- msvc_version
+ has_cxx_sources, get_ext_source_files, \
+ get_numpy_include_dirs, is_sequence, get_build_architecture, \
+ msvc_version
from numpy.distutils.command.config_compiler import show_fortran_compilers
+
class build_ext (old_build_ext):
description = "build C/C++/F extensions (compile/link to build directory)"
@@ -32,12 +34,12 @@ class build_ext (old_build_ext):
"specify the Fortran compiler type"),
('parallel=', 'j',
"number of parallel jobs"),
- ]
+ ]
help_options = old_build_ext.help_options + [
('help-fcompiler', None, "list available Fortran compilers",
show_fortran_compilers),
- ]
+ ]
def initialize_options(self):
old_build_ext.initialize_options(self)
@@ -80,11 +82,13 @@ class build_ext (old_build_ext):
if self.distribution.has_c_libraries():
if self.inplace:
if self.distribution.have_run.get('build_clib'):
- log.warn('build_clib already run, it is too late to ' \
- 'ensure in-place build of build_clib')
- build_clib = self.distribution.get_command_obj('build_clib')
+ log.warn('build_clib already run, it is too late to '
+ 'ensure in-place build of build_clib')
+ build_clib = self.distribution.get_command_obj(
+ 'build_clib')
else:
- build_clib = self.distribution.get_command_obj('build_clib')
+ build_clib = self.distribution.get_command_obj(
+ 'build_clib')
build_clib.inplace = 1
build_clib.ensure_finalized()
build_clib.run()
@@ -115,13 +119,18 @@ class build_ext (old_build_ext):
self.compiler.customize_cmd(self)
self.compiler.show_customization()
+ # Setup directory for storing generated extra DLL files on Windows
+ self.extra_dll_dir = os.path.join(self.build_temp, 'extra-dll')
+ if not os.path.isdir(self.extra_dll_dir):
+ os.makedirs(self.extra_dll_dir)
+
# Create mapping of libraries built by build_clib:
clibs = {}
if build_clib is not None:
for libname, build_info in build_clib.libraries or []:
if libname in clibs and clibs[libname] != build_info:
- log.warn('library %r defined more than once,'\
- ' overwriting build_info\n%s... \nwith\n%s...' \
+ log.warn('library %r defined more than once,'
+ ' overwriting build_info\n%s... \nwith\n%s...'
% (libname, repr(clibs[libname])[:300], repr(build_info)[:300]))
clibs[libname] = build_info
# .. and distribution libraries:
@@ -177,7 +186,7 @@ class build_ext (old_build_ext):
elif 'f77' in ext_languages:
ext_language = 'f77'
else:
- ext_language = 'c' # default
+ ext_language = 'c' # default
if l and l != ext_language and ext.language:
log.warn('resetting extension %r language from %r to %r.' %
(ext.name, l, ext_language))
@@ -192,9 +201,9 @@ class build_ext (old_build_ext):
# Initialize C++ compiler:
if need_cxx_compiler:
self._cxx_compiler = new_compiler(compiler=compiler_type,
- verbose=self.verbose,
- dry_run=self.dry_run,
- force=self.force)
+ verbose=self.verbose,
+ dry_run=self.dry_run,
+ force=self.force)
compiler = self._cxx_compiler
compiler.customize(self.distribution, need_cxx=need_cxx_compiler)
compiler.customize_cmd(self)
@@ -234,7 +243,7 @@ class build_ext (old_build_ext):
dry_run=self.dry_run,
force=self.force,
requiref90=True,
- c_compiler = self.compiler)
+ c_compiler=self.compiler)
fcompiler = self._f90_compiler
if fcompiler:
ctype = fcompiler.compiler_type
@@ -252,6 +261,16 @@ class build_ext (old_build_ext):
# Build extensions
self.build_extensions()
+ # Copy over any extra DLL files
+ runtime_lib_dir = os.path.join(
+ self.build_lib, self.distribution.get_name(), 'extra-dll')
+ for fn in os.listdir(self.extra_dll_dir):
+ if not fn.lower().endswith('.dll'):
+ continue
+ if not os.path.isdir(runtime_lib_dir):
+ os.makedirs(runtime_lib_dir)
+ runtime_lib = os.path.join(self.extra_dll_dir, fn)
+ copy_file(runtime_lib, runtime_lib_dir)
def swig_sources(self, sources):
# Do nothing. Swig sources have beed handled in build_src command.
@@ -295,11 +314,9 @@ class build_ext (old_build_ext):
macros.append((undef,))
c_sources, cxx_sources, f_sources, fmodule_sources = \
- filter_sources(ext.sources)
+ filter_sources(ext.sources)
-
-
- if self.compiler.compiler_type=='msvc':
+ if self.compiler.compiler_type == 'msvc':
if cxx_sources:
# Needed to compile kiva.agg._agg extension.
extra_args.append('/Zm1000')
@@ -309,32 +326,34 @@ class build_ext (old_build_ext):
cxx_sources = []
# Set Fortran/C++ compilers for compilation and linking.
- if ext.language=='f90':
+ if ext.language == 'f90':
fcompiler = self._f90_compiler
- elif ext.language=='f77':
+ elif ext.language == 'f77':
fcompiler = self._f77_compiler
- else: # in case ext.language is c++, for instance
+ else: # in case ext.language is c++, for instance
fcompiler = self._f90_compiler or self._f77_compiler
if fcompiler is not None:
- fcompiler.extra_f77_compile_args = (ext.extra_f77_compile_args or []) if hasattr(ext, 'extra_f77_compile_args') else []
- fcompiler.extra_f90_compile_args = (ext.extra_f90_compile_args or []) if hasattr(ext, 'extra_f90_compile_args') else []
+ fcompiler.extra_f77_compile_args = (ext.extra_f77_compile_args or []) if hasattr(
+ ext, 'extra_f77_compile_args') else []
+ fcompiler.extra_f90_compile_args = (ext.extra_f90_compile_args or []) if hasattr(
+ ext, 'extra_f90_compile_args') else []
cxx_compiler = self._cxx_compiler
# check for the availability of required compilers
if cxx_sources and cxx_compiler is None:
- raise DistutilsError("extension %r has C++ sources" \
- "but no C++ compiler found" % (ext.name))
+ raise DistutilsError("extension %r has C++ sources"
+ "but no C++ compiler found" % (ext.name))
if (f_sources or fmodule_sources) and fcompiler is None:
- raise DistutilsError("extension %r has Fortran sources " \
- "but no Fortran compiler found" % (ext.name))
+ raise DistutilsError("extension %r has Fortran sources "
+ "but no Fortran compiler found" % (ext.name))
if ext.language in ['f77', 'f90'] and fcompiler is None:
- self.warn("extension %r has Fortran libraries " \
- "but no Fortran linker found, using default linker" % (ext.name))
- if ext.language=='c++' and cxx_compiler is None:
- self.warn("extension %r has C++ libraries " \
- "but no C++ linker found, using default linker" % (ext.name))
+ self.warn("extension %r has Fortran libraries "
+ "but no Fortran linker found, using default linker" % (ext.name))
+ if ext.language == 'c++' and cxx_compiler is None:
+ self.warn("extension %r has C++ libraries "
+ "but no C++ linker found, using default linker" % (ext.name))
- kws = {'depends':ext.depends}
+ kws = {'depends': ext.depends}
output_dir = self.build_temp
include_dirs = ext.include_dirs + get_numpy_include_dirs()
@@ -387,7 +406,7 @@ class build_ext (old_build_ext):
if f in existing_modules:
continue
t = os.path.join(module_build_dir, f)
- if os.path.abspath(f)==os.path.abspath(t):
+ if os.path.abspath(f) == os.path.abspath(t):
continue
if os.path.isfile(t):
os.remove(t)
@@ -406,7 +425,12 @@ class build_ext (old_build_ext):
extra_postargs=extra_postargs,
depends=ext.depends)
- objects = c_objects + f_objects
+ if f_objects and not fcompiler.can_ccompiler_link(self.compiler):
+ unlinkable_fobjects = f_objects
+ objects = c_objects
+ else:
+ unlinkable_fobjects = []
+ objects = c_objects + f_objects
if ext.extra_objects:
objects.extend(ext.extra_objects)
@@ -419,13 +443,20 @@ class build_ext (old_build_ext):
if self.compiler.compiler_type in ('msvc', 'intelw', 'intelemw'):
# expand libraries with fcompiler libraries as we are
# not using fcompiler linker
- self._libs_with_msvc_and_fortran(fcompiler, libraries, library_dirs)
+ self._libs_with_msvc_and_fortran(
+ fcompiler, libraries, library_dirs)
elif ext.language in ['f77', 'f90'] and fcompiler is not None:
linker = fcompiler.link_shared_object
- if ext.language=='c++' and cxx_compiler is not None:
+ if ext.language == 'c++' and cxx_compiler is not None:
linker = cxx_compiler.link_shared_object
+ if fcompiler is not None:
+ objects, libraries = self._process_unlinkable_fobjects(
+ objects, libraries,
+ fcompiler, library_dirs,
+ unlinkable_fobjects)
+
linker(objects, ext_filename,
libraries=libraries,
library_dirs=library_dirs,
@@ -440,23 +471,59 @@ class build_ext (old_build_ext):
build_src = self.get_finalized_command("build_src").build_src
build_clib = self.get_finalized_command("build_clib").build_clib
objects = self.compiler.compile([os.path.join(build_src,
- "gfortran_vs2003_hack.c")],
- output_dir=self.build_temp)
- self.compiler.create_static_lib(objects, "_gfortran_workaround", output_dir=build_clib, debug=self.debug)
+ "gfortran_vs2003_hack.c")],
+ output_dir=self.build_temp)
+ self.compiler.create_static_lib(
+ objects, "_gfortran_workaround", output_dir=build_clib, debug=self.debug)
+
+ def _process_unlinkable_fobjects(self, objects, libraries,
+ fcompiler, library_dirs,
+ unlinkable_fobjects):
+ libraries = list(libraries)
+ objects = list(objects)
+ unlinkable_fobjects = list(unlinkable_fobjects)
+
+ # Expand possible fake static libraries to objects
+ for lib in list(libraries):
+ for libdir in library_dirs:
+ fake_lib = os.path.join(libdir, lib + '.fobjects')
+ if os.path.isfile(fake_lib):
+ # Replace fake static library
+ libraries.remove(lib)
+ with open(fake_lib, 'r') as f:
+ unlinkable_fobjects.extend(f.read().splitlines())
+
+ # Expand C objects
+ c_lib = os.path.join(libdir, lib + '.cobjects')
+ with open(c_lib, 'r') as f:
+ objects.extend(f.read().splitlines())
+
+ # Wrap unlinkable objects to a linkable one
+ if unlinkable_fobjects:
+ fobjects = [os.path.relpath(obj) for obj in unlinkable_fobjects]
+ wrapped = fcompiler.wrap_unlinkable_objects(
+ fobjects, output_dir=self.build_temp,
+ extra_dll_dir=self.extra_dll_dir)
+ objects.extend(wrapped)
+
+ return objects, libraries
def _libs_with_msvc_and_fortran(self, fcompiler, c_libraries,
c_library_dirs):
- if fcompiler is None: return
+ if fcompiler is None:
+ return
for libname in c_libraries:
- if libname.startswith('msvc'): continue
+ if libname.startswith('msvc'):
+ continue
fileexists = False
for libdir in c_library_dirs or []:
libfile = os.path.join(libdir, '%s.lib' % (libname))
if os.path.isfile(libfile):
fileexists = True
break
- if fileexists: continue
+ if fileexists:
+ continue
# make g77-compiled static libs available to MSVC
fileexists = False
for libdir in c_library_dirs:
@@ -470,7 +537,8 @@ class build_ext (old_build_ext):
c_library_dirs.append(self.build_temp)
fileexists = True
break
- if fileexists: continue
+ if fileexists:
+ continue
log.warn('could not find library %r in directories %s'
% (libname, c_library_dirs))
@@ -498,14 +566,14 @@ class build_ext (old_build_ext):
if self.build_temp not in c_library_dirs:
c_library_dirs.append(self.build_temp)
- def get_source_files (self):
+ def get_source_files(self):
self.check_extensions_list(self.extensions)
filenames = []
for ext in self.extensions:
filenames.extend(get_ext_source_files(ext))
return filenames
- def get_outputs (self):
+ def get_outputs(self):
self.check_extensions_list(self.extensions)
outputs = []
diff --git a/numpy/distutils/fcompiler/__init__.py b/numpy/distutils/fcompiler/__init__.py
index 86b080e93..1d558319d 100644
--- a/numpy/distutils/fcompiler/__init__.py
+++ b/numpy/distutils/fcompiler/__init__.py
@@ -430,6 +430,7 @@ class FCompiler(CCompiler):
raise CompilerNotFound()
return version
+
############################################################
## Public methods:
@@ -697,6 +698,38 @@ class FCompiler(CCompiler):
else:
return hook_name()
+ def can_ccompiler_link(self, ccompiler):
+ """
+ Check if the given C compiler can link objects produced by
+ this compiler.
+ """
+ return True
+
+ def wrap_unlinkable_objects(self, objects, output_dir, extra_dll_dir):
+ """
+ Convert a set of object files that are not compatible with the default
+ linker, to a file that is compatible.
+
+ Parameters
+ ----------
+ objects : list
+ List of object files to include.
+ output_dir : str
+ Output directory to place generated object files.
+ extra_dll_dir : str
+ Output directory to place extra DLL files that need to be
+ included on Windows.
+
+ Returns
+ -------
+ converted_objects : list of str
+ List of converted object files.
+ Note that the number of output files is not necessarily
+ the same as inputs.
+
+ """
+ raise NotImplementedError()
+
## class FCompiler
_default_compilers = (
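
A sketch of how a compiler class would opt in to these two new hooks; the subclass and its wrapping strategy are hypothetical, only the method names and signatures come from the interface added above (the gnu95 implementation follows in the next file):

    from numpy.distutils.fcompiler import FCompiler

    class HypotheticalFCompiler(FCompiler):
        # ... the usual executables/flags configuration is omitted ...

        def can_ccompiler_link(self, ccompiler):
            # Declare that MSVC cannot link this compiler's object files, so
            # build_clib/build_ext defer to wrap_unlinkable_objects instead.
            return ccompiler.compiler_type not in ('msvc',)

        def wrap_unlinkable_objects(self, objects, output_dir, extra_dll_dir):
            # Repackage `objects` into files the default linker accepts and
            # return their paths; runtime DLLs produced along the way go to
            # `extra_dll_dir`. `_repack_objects` is a made-up helper here.
            return self._repack_objects(objects, output_dir, extra_dll_dir)
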
diff --git a/numpy/distutils/fcompiler/gnu.py b/numpy/distutils/fcompiler/gnu.py
index 4649fd743..2260fd50d 100644
--- a/numpy/distutils/fcompiler/gnu.py
+++ b/numpy/distutils/fcompiler/gnu.py
@@ -6,37 +6,43 @@ import sys
import warnings
import platform
import tempfile
+import hashlib
+import base64
from subprocess import Popen, PIPE, STDOUT
-
+from copy import copy
from numpy.distutils.fcompiler import FCompiler
from numpy.distutils.exec_command import exec_command
-from numpy.distutils.misc_util import msvc_runtime_library
from numpy.distutils.compat import get_exception
+from numpy.distutils.system_info import system_info
compilers = ['GnuFCompiler', 'Gnu95FCompiler']
TARGET_R = re.compile(r"Target: ([a-zA-Z0-9_\-]*)")
# XXX: handle cross compilation
+
+
def is_win64():
return sys.platform == "win32" and platform.architecture()[0] == "64bit"
+
if is_win64():
#_EXTRAFLAGS = ["-fno-leading-underscore"]
_EXTRAFLAGS = []
else:
_EXTRAFLAGS = []
+
class GnuFCompiler(FCompiler):
compiler_type = 'gnu'
- compiler_aliases = ('g77',)
+ compiler_aliases = ('g77', )
description = 'GNU Fortran 77 compiler'
def gnu_version_match(self, version_string):
"""Handle the different versions of GNU fortran compilers"""
# Strip warning(s) that may be emitted by gfortran
while version_string.startswith('gfortran: warning'):
- version_string = version_string[version_string.find('\n')+1:]
+ version_string = version_string[version_string.find('\n') + 1:]
# Gfortran versions from after 2010 will output a simple string
# (usually "x.y", "x.y.z" or "x.y.z-q") for ``-dumpversion``; older
@@ -91,7 +97,7 @@ class GnuFCompiler(FCompiler):
'archiver' : ["ar", "-cr"],
'ranlib' : ["ranlib"],
'linker_exe' : [None, "-g", "-Wall"]
- }
+ }
module_dir_switch = None
module_include_switch = None
@@ -129,8 +135,8 @@ class GnuFCompiler(FCompiler):
try:
get_makefile_filename = sc.get_makefile_filename
except AttributeError:
- pass # i.e. PyPy
- else:
+ pass # i.e. PyPy
+ else:
filename = get_makefile_filename()
sc.parse_makefile(filename, g)
target = g.get('MACOSX_DEPLOYMENT_TARGET', '10.3')
@@ -153,9 +159,8 @@ class GnuFCompiler(FCompiler):
return opt
def get_libgcc_dir(self):
- status, output = exec_command(self.compiler_f77 +
- ['-print-libgcc-file-name'],
- use_tee=0)
+ status, output = exec_command(
+ self.compiler_f77 + ['-print-libgcc-file-name'], use_tee=0)
if not status:
return os.path.dirname(output)
return None
@@ -170,7 +175,7 @@ class GnuFCompiler(FCompiler):
d = os.path.normpath(d)
path = os.path.join(d, "lib%s.a" % self.g2c)
if not os.path.exists(path):
- root = os.path.join(d, *((os.pardir,)*4))
+ root = os.path.join(d, *((os.pardir, ) * 4))
d2 = os.path.abspath(os.path.join(root, 'lib'))
path = os.path.join(d2, "lib%s.a" % self.g2c)
if os.path.exists(path):
@@ -193,13 +198,8 @@ class GnuFCompiler(FCompiler):
opt.append(g2c)
c_compiler = self.c_compiler
if sys.platform == 'win32' and c_compiler and \
- c_compiler.compiler_type == 'msvc':
- # the following code is not needed (read: breaks) when using MinGW
- # in case want to link F77 compiled code with MSVC
+ c_compiler.compiler_type == 'msvc':
opt.append('gcc')
- runtime_lib = msvc_runtime_library()
- if runtime_lib:
- opt.append(runtime_lib)
if sys.platform == 'darwin':
opt.append('cc_dynamic')
return opt
@@ -241,7 +241,7 @@ class GnuFCompiler(FCompiler):
class Gnu95FCompiler(GnuFCompiler):
compiler_type = 'gnu95'
- compiler_aliases = ('gfortran',)
+ compiler_aliases = ('gfortran', )
description = 'GNU Fortran 95 compiler'
def version_match(self, version_string):
@@ -256,8 +256,10 @@ class Gnu95FCompiler(GnuFCompiler):
# use -mno-cygwin flag for gfortran when Python is not
# Cygwin-Python
if sys.platform == 'win32':
- for key in ['version_cmd', 'compiler_f77', 'compiler_f90',
- 'compiler_fix', 'linker_so', 'linker_exe']:
+ for key in [
+ 'version_cmd', 'compiler_f77', 'compiler_f90',
+ 'compiler_fix', 'linker_so', 'linker_exe'
+ ]:
self.executables[key].append('-mno-cygwin')
return v
@@ -274,7 +276,7 @@ class Gnu95FCompiler(GnuFCompiler):
'archiver' : ["ar", "-cr"],
'ranlib' : ["ranlib"],
'linker_exe' : [None, "-Wall"]
- }
+ }
module_dir_switch = '-J'
module_include_switch = '-I'
@@ -319,7 +321,7 @@ class Gnu95FCompiler(GnuFCompiler):
target = self.get_target()
if target:
d = os.path.normpath(self.get_libgcc_dir())
- root = os.path.join(d, *((os.pardir,)*4))
+ root = os.path.join(d, *((os.pardir, ) * 4))
path = os.path.join(root, "lib")
mingwdir = os.path.normpath(path)
if os.path.exists(os.path.join(mingwdir, "libmingwex.a")):
@@ -335,32 +337,148 @@ class Gnu95FCompiler(GnuFCompiler):
if c_compiler and c_compiler.compiler_type == "msvc":
if "gcc" in opt:
i = opt.index("gcc")
- opt.insert(i+1, "mingwex")
- opt.insert(i+1, "mingw32")
- # XXX: fix this mess, does not work for mingw
- if is_win64():
- c_compiler = self.c_compiler
- if c_compiler and c_compiler.compiler_type == "msvc":
- return []
- else:
- pass
+ opt.insert(i + 1, "mingwex")
+ opt.insert(i + 1, "mingw32")
+ c_compiler = self.c_compiler
+ if c_compiler and c_compiler.compiler_type == "msvc":
+ return []
+ else:
+ pass
return opt
def get_target(self):
- status, output = exec_command(self.compiler_f77 +
- ['-v'],
- use_tee=0)
+ status, output = exec_command(self.compiler_f77 + ['-v'], use_tee=0)
if not status:
m = TARGET_R.search(output)
if m:
return m.group(1)
return ""
- def get_flags_opt(self):
+ def _hash_files(self, filenames):
+ h = hashlib.sha1()
+ for fn in filenames:
+ with open(fn, 'rb') as f:
+ while True:
+ block = f.read(131072)
+ if not block:
+ break
+ h.update(block)
+ text = base64.b32encode(h.digest())
+ if sys.version_info[0] >= 3:
+ text = text.decode('ascii')
+ return text.rstrip('=')
+
+ def _link_wrapper_lib(self, objects, output_dir, extra_dll_dir,
+ chained_dlls, is_archive):
+ """Create a wrapper shared library for the given objects
+
+ Return an MSVC-compatible lib
+ """
+
+ c_compiler = self.c_compiler
+ if c_compiler.compiler_type != "msvc":
+ raise ValueError("This method only supports MSVC")
+
+ object_hash = self._hash_files(list(objects) + list(chained_dlls))
+
+ if is_win64():
+ tag = 'win_amd64'
+ else:
+ tag = 'win32'
+
+ basename = 'lib' + os.path.splitext(
+ os.path.basename(objects[0]))[0][:8]
+ root_name = basename + '.' + object_hash + '.gfortran-' + tag
+ dll_name = root_name + '.dll'
+ def_name = root_name + '.def'
+ lib_name = root_name + '.lib'
+ dll_path = os.path.join(extra_dll_dir, dll_name)
+ def_path = os.path.join(output_dir, def_name)
+ lib_path = os.path.join(output_dir, lib_name)
+
+ if os.path.isfile(lib_path):
+ # Nothing to do
+ return lib_path, dll_path
+
+ if is_archive:
+ objects = (["-Wl,--whole-archive"] + list(objects) +
+ ["-Wl,--no-whole-archive"])
+ self.link_shared_object(
+ objects,
+ dll_name,
+ output_dir=extra_dll_dir,
+ extra_postargs=list(chained_dlls) + [
+ '-Wl,--allow-multiple-definition',
+ '-Wl,--output-def,' + def_path,
+ '-Wl,--export-all-symbols',
+ '-Wl,--enable-auto-import',
+ '-static',
+ '-mlong-double-64',
+ ])
+
+ # No PowerPC!
if is_win64():
- return ['-O0']
+ specifier = '/MACHINE:X64'
+ else:
+ specifier = '/MACHINE:X86'
+
+ # MSVC specific code
+ lib_args = ['/def:' + def_path, '/OUT:' + lib_path, specifier]
+ if not c_compiler.initialized:
+ c_compiler.initialize()
+ c_compiler.spawn([c_compiler.lib] + lib_args)
+
+ return lib_path, dll_path
+
+ def can_ccompiler_link(self, compiler):
+ # MSVC cannot link objects compiled by GNU fortran
+ return compiler.compiler_type not in ("msvc", )
+
+ def wrap_unlinkable_objects(self, objects, output_dir, extra_dll_dir):
+ """
+ Convert a set of object files that are not compatible with the default
+ linker, to a file that is compatible.
+ """
+ if self.c_compiler.compiler_type == "msvc":
+ # Compile a DLL and return the lib for the DLL as
+ # the object. Also keep track of previous DLLs that
+ # we have compiled so that we can link against them.
+
+ # If there are .a archives, assume they are self-contained
+ # static libraries, and build separate DLLs for each
+ archives = []
+ plain_objects = []
+ for obj in objects:
+ if obj.lower().endswith('.a'):
+ archives.append(obj)
+ else:
+ plain_objects.append(obj)
+
+ chained_libs = []
+ chained_dlls = []
+ for archive in archives[::-1]:
+ lib, dll = self._link_wrapper_lib(
+ [archive],
+ output_dir,
+ extra_dll_dir,
+ chained_dlls=chained_dlls,
+ is_archive=True)
+ chained_libs.insert(0, lib)
+ chained_dlls.insert(0, dll)
+
+ if not plain_objects:
+ return chained_libs
+
+ lib, dll = self._link_wrapper_lib(
+ plain_objects,
+ output_dir,
+ extra_dll_dir,
+ chained_dlls=chained_dlls,
+ is_archive=False)
+ return [lib] + chained_libs
else:
- return GnuFCompiler.get_flags_opt(self)
+ raise ValueError("Unsupported C compiler")
+
def _can_target(cmd, arch):
"""Return true if the architecture supports the -arch flag"""
@@ -382,6 +500,7 @@ def _can_target(cmd, arch):
os.remove(filename)
return False
+
if __name__ == '__main__':
from distutils import log
log.set_verbosity(2)
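
The wrapper DLL built by ``_link_wrapper_lib`` above is named after a hash of its inputs, so a rebuild with unchanged objects can reuse the existing ``.lib``/``.dll`` pair. A standalone sketch of that naming scheme (the function name is illustrative, and the 64-bit check is simplified here):

    import base64
    import hashlib
    import os
    import sys

    def wrapper_dll_root_name(objects, chained_dlls=()):
        """Mimic the content-hashed root name used for the gfortran wrapper DLL."""
        h = hashlib.sha1()
        for fn in list(objects) + list(chained_dlls):
            with open(fn, 'rb') as f:
                for block in iter(lambda: f.read(131072), b''):
                    h.update(block)
        digest = base64.b32encode(h.digest()).decode('ascii').rstrip('=')
        tag = 'win_amd64' if sys.maxsize > 2**32 else 'win32'
        stem = os.path.splitext(os.path.basename(objects[0]))[0][:8]
        # e.g. "libfib.<hash>.gfortran-win_amd64" gets .dll/.def/.lib suffixes
        return 'lib' + stem + '.' + digest + '.gfortran-' + tag
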
diff --git a/numpy/distutils/misc_util.py b/numpy/distutils/misc_util.py
index 30d72f50e..01376a7ff 100644
--- a/numpy/distutils/misc_util.py
+++ b/numpy/distutils/misc_util.py
@@ -33,7 +33,6 @@ def clean_up_temporary_directory():
atexit.register(clean_up_temporary_directory)
-
from numpy.distutils.compat import get_exception
from numpy.compat import basestring
from numpy.compat import npy_load_module
@@ -2066,7 +2065,6 @@ class Configuration(object):
"""
self.py_modules.append((self.name, name, generate_config_py))
-
def get_info(self,*names):
"""Get resources information.
@@ -2283,6 +2281,15 @@ def generate_config_py(target):
f.write('# This file is generated by %s\n' % (os.path.abspath(sys.argv[0])))
f.write('# It contains system_info results at the time of building this package.\n')
f.write('__all__ = ["get_info","show"]\n\n')
+
+ # For gfortran+msvc combination, extra shared libraries may exist
+ f.write("""
+import os
+extra_dll_dir = os.path.join(os.path.dirname(__file__), 'extra-dll')
+if os.path.isdir(extra_dll_dir):
+ os.environ["PATH"] += os.pathsep + extra_dll_dir
+""")
+
for k, i in system_info.saved_results.items():
f.write('%s=%r\n' % (k, i))
f.write(r'''
diff --git a/numpy/distutils/system_info.py b/numpy/distutils/system_info.py
index 2c1103d27..683b15daa 100644
--- a/numpy/distutils/system_info.py
+++ b/numpy/distutils/system_info.py
@@ -126,6 +126,7 @@ import os
import re
import copy
import warnings
+import atexit
from glob import glob
from functools import reduce
if sys.version_info[0] < 3:
@@ -684,9 +685,14 @@ class system_info(object):
return self.get_libs(key, '')
def library_extensions(self):
- static_exts = ['.a']
+ c = distutils.ccompiler.new_compiler()
+ c.customize('')
+ static_exts = []
+ if c.compiler_type != 'msvc':
+ # MSVC doesn't understand binutils
+ static_exts.append('.a')
if sys.platform == 'win32':
- static_exts.append('.lib') # .lib is used by MSVC
+ static_exts.append('.lib') # .lib is used by MSVC and others
if self.search_static_first:
exts = static_exts + [so_ext]
else:
@@ -1739,12 +1745,29 @@ class openblas_info(blas_info):
return True
def calc_info(self):
+ c = distutils.ccompiler.new_compiler()
+ c.customize('')
+
lib_dirs = self.get_lib_dirs()
openblas_libs = self.get_libs('libraries', self._lib_names)
if openblas_libs == self._lib_names: # backward compat with 1.8.0
openblas_libs = self.get_libs('openblas_libs', self._lib_names)
+
info = self.check_libs(lib_dirs, openblas_libs, [])
+
+ if c.compiler_type == "msvc" and info is None:
+ from numpy.distutils.fcompiler import new_fcompiler
+ f = new_fcompiler(c_compiler=c)
+ if f and f.compiler_type == 'gnu95':
+ # Try gfortran-compatible library files
+ info = self.check_msvc_gfortran_libs(lib_dirs, openblas_libs)
+ # Skip lapack check, we'd need build_ext to do it
+ assume_lapack = True
+ elif info:
+ assume_lapack = False
+ info['language'] = 'c'
+
if info is None:
return
@@ -1752,13 +1775,42 @@ class openblas_info(blas_info):
extra_info = self.calc_extra_info()
dict_append(info, **extra_info)
- if not self.check_embedded_lapack(info):
+ if not (assume_lapack or self.check_embedded_lapack(info)):
return
- info['language'] = 'c'
info['define_macros'] = [('HAVE_CBLAS', None)]
self.set_info(**info)
+ def check_msvc_gfortran_libs(self, library_dirs, libraries):
+ # First, find the full path to each library directory
+ library_paths = []
+ for library in libraries:
+ for library_dir in library_dirs:
+ # MinGW static ext will be .a
+ fullpath = os.path.join(library_dir, library + '.a')
+ if os.path.isfile(fullpath):
+ library_paths.append(fullpath)
+ break
+ else:
+ return None
+
+ # Generate numpy.distutils virtual static library file
+ tmpdir = os.path.join(os.getcwd(), 'build', 'openblas')
+ if not os.path.isdir(tmpdir):
+ os.makedirs(tmpdir)
+
+ info = {'library_dirs': [tmpdir],
+ 'libraries': ['openblas'],
+ 'language': 'f77'}
+
+ fake_lib_file = os.path.join(tmpdir, 'openblas.fobjects')
+ fake_clib_file = os.path.join(tmpdir, 'openblas.cobjects')
+ with open(fake_lib_file, 'w') as f:
+ f.write("\n".join(library_paths))
+ with open(fake_clib_file, 'w') as f:
+ pass
+
+ return info
class openblas_lapack_info(openblas_info):
section = 'openblas'
@@ -1770,6 +1822,7 @@ class openblas_lapack_info(openblas_info):
res = False
c = distutils.ccompiler.new_compiler()
c.customize('')
+
tmpdir = tempfile.mkdtemp()
s = """void zungqr();
int main(int argc, const char *argv[])
@@ -1784,6 +1837,8 @@ class openblas_lapack_info(openblas_info):
extra_args = info['extra_link_args']
except Exception:
extra_args = []
+ if sys.version_info < (3, 5) and sys.version_info > (3, 0) and c.compiler_type == "msvc":
+ extra_args.append("/MANIFEST")
try:
with open(src, 'wt') as f:
f.write(s)
diff --git a/numpy/lib/arraypad.py b/numpy/lib/arraypad.py
index 294a68950..842f3a9fe 100644
--- a/numpy/lib/arraypad.py
+++ b/numpy/lib/arraypad.py
@@ -1406,10 +1406,15 @@ def pad(array, pad_width, mode, **kwargs):
newmat = _append_min(newmat, pad_after, chunk_after, axis)
elif mode == 'reflect':
- if narray.size == 0:
- raise ValueError("There aren't any elements to reflect in `array`")
-
for axis, (pad_before, pad_after) in enumerate(pad_width):
+ if narray.shape[axis] == 0:
+ # Axes with non-zero padding cannot be empty.
+ if pad_before > 0 or pad_after > 0:
+ raise ValueError("There aren't any elements to reflect"
+ " in axis {} of `array`".format(axis))
+ # Skip zero padding on empty axes.
+ continue
+
# Recursive padding along any axis where `pad_amt` is too large
# for indexing tricks. We can only safely pad the original axis
# length, to keep the period of the reflections consistent.
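
The behaviour change above as a usage example (it mirrors the new test added below): reflect padding of an empty array is now allowed as long as every empty axis itself receives zero padding:

    import numpy as np

    a = np.zeros((0, 3))

    # Zero padding on the empty axis, reflect padding on the non-empty one.
    padded = np.pad(a, ((0,), (1,)), mode='reflect')
    print(padded.shape)   # (0, 5)

    # Reflecting along the empty axis still raises, as before.
    try:
        np.pad(a, ((1,), (0,)), mode='reflect')
    except ValueError as exc:
        print(exc)
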
diff --git a/numpy/lib/tests/test_arraypad.py b/numpy/lib/tests/test_arraypad.py
index a44722b56..fce4c451d 100644
--- a/numpy/lib/tests/test_arraypad.py
+++ b/numpy/lib/tests/test_arraypad.py
@@ -639,6 +639,11 @@ class TestReflect(object):
b = np.array([1, 2, 3, 2, 1, 2, 3, 2, 1, 2, 3])
assert_array_equal(a, b)
+ def test_check_padding_an_empty_array(self):
+ a = pad(np.zeros((0, 3)), ((0,), (1,)), mode='reflect')
+ b = np.zeros((0, 5))
+ assert_array_equal(a, b)
+
class TestSymmetric(object):
def test_check_simple(self):
@@ -1016,6 +1021,8 @@ class TestValueError1(object):
def test_check_empty_array(self):
assert_raises(ValueError, pad, [], 4, mode='reflect')
assert_raises(ValueError, pad, np.ndarray(0), 4, mode='reflect')
+ assert_raises(ValueError, pad, np.zeros((0, 3)), ((1,), (0,)),
+ mode='reflect')
class TestValueError2(object):