# Last Change: Sun Apr 26 05:00 PM 2009 J
# vim:syntax=python
import os
import sys
from os.path import join as pjoin, basename as pbasename, dirname as pdirname
from copy import deepcopy

from numscons import get_pythonlib_dir
from numscons import GetNumpyEnvironment
from numscons import CheckCBLAS
from numscons import write_info

from code_generators.numpy_api import \
        multiarray_api as multiarray_api_dict, \
        ufunc_api as ufunc_api_dict

from setup_common import *
from scons_support import CheckBrokenMathlib, define_no_smp, \
    check_mlib, check_mlibs, is_npy_no_signal, CheckInline
from scons_support import array_api_gen_bld, ufunc_api_gen_bld, template_bld, \
    umath_bld, CheckGCC4, check_api_version, \
    CheckLongDoubleRepresentation

import SCons

# Set to True to enable multiple-file compilation (experimental)
try:
    os.environ['NPY_SEPARATE_COMPILATION']
    ENABLE_SEPARATE_COMPILATION = True
except KeyError:
    ENABLE_SEPARATE_COMPILATION = False

try:
    os.environ['NPY_BYPASS_SINGLE_EXTENDED']
    BYPASS_SINGLE_EXTENDED = True
except KeyError:
    BYPASS_SINGLE_EXTENDED = False

env = GetNumpyEnvironment(ARGUMENTS)
env.Append(CPPPATH = env["PYEXTCPPPATH"])
if os.name == 'nt':
    # NT needs the pythonlib to run any code importing Python.h, including
    # simple code using only typedefs and so on, so we need it for the
    # configuration checks
    env.AppendUnique(LIBPATH = [get_pythonlib_dir()])

# Check whether we have a mismatch between the set C API VERSION and the
# actual C API VERSION
check_api_version(C_API_VERSION)

#=======================
# Starting Configuration
#=======================
config = env.NumpyConfigure(custom_tests =
        {'CheckBrokenMathlib' : CheckBrokenMathlib,
         'CheckCBLAS' : CheckCBLAS,
         'CheckInline' : CheckInline,
         'CheckGCC4' : CheckGCC4,
         'CheckLongDoubleRepresentation': CheckLongDoubleRepresentation},
        config_h = pjoin('config.h'))

# numpyconfig_sym keeps the values of the configuration variables needed for
# the public numpy API.
# Convention: list of tuples (definition, value). value:
#       - 0: #undef definition
#       - 1: #define definition
#       - string: #define definition value
numpyconfig_sym = []
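# For example (illustrative only), an entry such as
#       ('DEFINE_NPY_HAVE_ENDIAN_H', '#define NPY_HAVE_ENDIAN_H 1')
# ends up replacing the @DEFINE_NPY_HAVE_ENDIAN_H@ token in
# include/numpy/_numpyconfig.h.in when the public header is generated with
# env.SubstInFile further down in this script.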
#---------------
# Checking Types
#---------------
if not config.CheckHeader("Python.h"):
    errmsg = []
    for line in config.GetLastError():
        errmsg.append("%s " % line)
    print """\
Error: Python.h header cannot be compiled (or cannot be found).
On linux, check that you have the python-dev/python-devel packages installed.
On windows, check that you have the platform SDK. You may also be using
unsupported cflags.

Configuration error log says: \n\n%s""" % ''.join(errmsg)
    Exit(-1)

st = config.CheckHeader("endian.h")
if st:
    numpyconfig_sym.append(('DEFINE_NPY_HAVE_ENDIAN_H',
                            '#define NPY_HAVE_ENDIAN_H 1'))
else:
    numpyconfig_sym.append(('DEFINE_NPY_HAVE_ENDIAN_H', ''))

def check_type(type, include = None):
    st = config.CheckTypeSize(type, includes = include)
    type = type.replace(' ', '_')
    if st:
        numpyconfig_sym.append(('SIZEOF_%s' % type.upper(), '%d' % st))
    else:
        numpyconfig_sym.append(('SIZEOF_%s' % type.upper(), 0))

for type in ('short', 'int', 'long'):
    # SIZEOF_LONG defined on darwin
    if type == "long":
        if not config.CheckDeclaration("SIZEOF_LONG",
                                       includes="#include <Python.h>"):
            check_type(type)
        else:
            numpyconfig_sym.append(('SIZEOF_LONG', 'SIZEOF_LONG'))
    else:
        check_type(type)

for type in ('float', 'double', 'long double'):
    sz = config.CheckTypeSize(type)
    numpyconfig_sym.append(('SIZEOF_%s' % type2def(type), str(sz)))

    # Compute size of the corresponding complex type: used to check that our
    # definition is binary compatible with the C99 complex type (check done
    # at build time in npy_common.h)
    complex_def = "struct {%s __x; %s __y;}" % (type, type)
    sz = config.CheckTypeSize(complex_def)
    numpyconfig_sym.append(('SIZEOF_COMPLEX_%s' % type2def(type), str(sz)))

if sys.platform != 'darwin':
    tp = config.CheckLongDoubleRepresentation()
    config.Define("HAVE_LDOUBLE_%s" % tp, 1,
                  "Define for arch-specific long double representation")

for type in ('Py_intptr_t',):
    check_type(type, include = "#include <Python.h>\n")

# We check declaration AND type because that's how distutils does it.
if config.CheckDeclaration('PY_LONG_LONG', includes = '#include <Python.h>\n'):
    st = config.CheckTypeSize('PY_LONG_LONG',
                              includes = '#include <Python.h>\n')
    assert not st == 0
    numpyconfig_sym.append(('DEFINE_NPY_SIZEOF_LONGLONG',
                            '#define NPY_SIZEOF_LONGLONG %d' % st))
    numpyconfig_sym.append(('DEFINE_NPY_SIZEOF_PY_LONG_LONG',
                            '#define NPY_SIZEOF_PY_LONG_LONG %d' % st))
else:
    numpyconfig_sym.append(('DEFINE_NPY_SIZEOF_LONGLONG', ''))
    numpyconfig_sym.append(('DEFINE_NPY_SIZEOF_PY_LONG_LONG', ''))

if not config.CheckDeclaration('CHAR_BIT', includes = '#include <Python.h>\n'):
    raise RuntimeError(
        "Config without CHAR_BIT is not supported with scons: please contact "
        "the maintainer (cdavid)")

#----------------------
# Checking signal stuff
#----------------------
if is_npy_no_signal():
    numpyconfig_sym.append(('DEFINE_NPY_NO_SIGNAL', '#define NPY_NO_SIGNAL\n'))
    config.Define('__NPY_PRIVATE_NO_SIGNAL',
                  comment = "define to 1 to disable the signal handling support")
else:
    numpyconfig_sym.append(('DEFINE_NPY_NO_SIGNAL', ''))

#---------------------
# Checking SMP option
#---------------------
if define_no_smp():
    nosmp = 1
else:
    nosmp = 0
numpyconfig_sym.append(('NPY_NO_SMP', nosmp))

#----------------------------------------------
# Check whether we can use C99 printing formats
#----------------------------------------------
if config.CheckDeclaration('PRIdPTR', includes = '#include <inttypes.h>'):
    numpyconfig_sym.append(('DEFINE_NPY_USE_C99_FORMATS',
                            '#define NPY_USE_C99_FORMATS 1'))
else:
    numpyconfig_sym.append(('DEFINE_NPY_USE_C99_FORMATS', ''))

#----------------------
# Checking the mathlib
#----------------------
mlibs = [[], ['m'], ['cpml']]
mathlib = os.environ.get('MATHLIB')
if mathlib:
    mlibs.insert(0, mathlib)

mlib = check_mlibs(config, mlibs)

# XXX: this is ugly: mathlib has nothing to do in a public header file
numpyconfig_sym.append(('MATHLIB', ','.join(mlib)))
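# Usage note (illustrative): setting the MATHLIB environment variable, e.g.
#       MATHLIB=m
# in the build environment puts that library name at the front of the
# candidate list tried by check_mlibs, overriding the automatic detection.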
#----------------------------------
# Checking the math funcs available
#----------------------------------
# Functions to check:
mfuncs = ('expl', 'expf', 'log1p', 'expm1', 'asinh', 'atanhf', 'atanhl',
          'rint', 'trunc')

# Set value to 1 for each defined function (in math lib)
mfuncs_defined = dict([(f, 0) for f in mfuncs])

# Check for mandatory funcs: we barf if a single one of those is not there
if not config.CheckFuncsAtOnce(MANDATORY_FUNCS):
    raise SystemError("One of the required functions to build numpy is not"
                      " available (the list is %s)." % str(MANDATORY_FUNCS))

# Standard functions which may not be available and for which we have a
# replacement implementation
def check_funcs(funcs):
    # Use CheckFuncsAtOnce first; if the global check fails, fall back to
    # checking the functions one by one so that each available function is
    # still registered
    st = config.CheckFuncsAtOnce(funcs)
    if not st:
        # Global check failed, check func per func
        for f in funcs:
            st = config.CheckFunc(f, language = 'C')

for f in OPTIONAL_STDFUNCS_MAYBE:
    if config.CheckDeclaration(fname2def(f),
                               includes="#include <Python.h>\n#include <math.h>"):
        OPTIONAL_STDFUNCS.remove(f)

check_funcs(OPTIONAL_STDFUNCS)

# C99 functions: float and long double versions
if not BYPASS_SINGLE_EXTENDED:
    check_funcs(C99_FUNCS_SINGLE)
    check_funcs(C99_FUNCS_EXTENDED)

# Normally, isnan and isinf are macros (C99), but some platforms only have
# the function, or both the function and the macro version. Check for the
# macro only, and define replacement ones if not found.
# Note: including Python.h is necessary because it modifies some math.h
# definitions
for f in ["isnan", "isinf", "signbit", "isfinite"]:
    includes = """\
#include <Python.h>
#include <math.h>
"""
    st = config.CheckDeclaration(f, includes=includes)
    if st:
        numpyconfig_sym.append(('DEFINE_NPY_HAVE_DECL_%s' % f.upper(),
                                '#define NPY_HAVE_DECL_%s' % f.upper()))
    else:
        numpyconfig_sym.append(('DEFINE_NPY_HAVE_DECL_%s' % f.upper(), ''))

inline = config.CheckInline()
config.Define('inline', inline)

if ENABLE_SEPARATE_COMPILATION:
    config.Define("ENABLE_SEPARATE_COMPILATION", 1)
    numpyconfig_sym.append(('DEFINE_NPY_ENABLE_SEPARATE_COMPILATION',
                            '#define NPY_ENABLE_SEPARATE_COMPILATION 1'))
else:
    numpyconfig_sym.append(('DEFINE_NPY_ENABLE_SEPARATE_COMPILATION', ''))

#-----------------------------
# Checking for complex support
#-----------------------------
if config.CheckHeader('complex.h'):
    numpyconfig_sym.append(('DEFINE_NPY_USE_C99_COMPLEX',
                            '#define NPY_USE_C99_COMPLEX 1'))

    for t in C99_COMPLEX_TYPES:
        st = config.CheckType(t, includes='#include <complex.h>')
        if st:
            numpyconfig_sym.append(('DEFINE_NPY_HAVE_%s' % type2def(t),
                                    '#define NPY_HAVE_%s' % type2def(t)))
        else:
            numpyconfig_sym.append(('DEFINE_NPY_HAVE_%s' % type2def(t), ''))

    def check_prec(prec):
        flist = [f + prec for f in C99_COMPLEX_FUNCS]
        st = config.CheckFuncsAtOnce(flist)
        if not st:
            # Global check failed, check func per func
            for f in flist:
                config.CheckFunc(f, language='C')

    check_prec('')
    check_prec('f')
    check_prec('l')
else:
    numpyconfig_sym.append(('DEFINE_NPY_USE_C99_COMPLEX', ''))
    for t in C99_COMPLEX_TYPES:
        numpyconfig_sym.append(('DEFINE_NPY_HAVE_%s' % type2def(t), ''))

def visibility_define():
    if config.CheckGCC4():
        return '__attribute__((visibility("hidden")))'
    else:
        return ''

numpyconfig_sym.append(('VISIBILITY_HIDDEN', visibility_define()))

# Add the C API/ABI versions
numpyconfig_sym.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
numpyconfig_sym.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))

# Check whether we need our own wide character support
if not config.CheckDeclaration('Py_UNICODE_WIDE',
                               includes='#include <Python.h>'):
    PYTHON_HAS_UNICODE_WIDE = True
else:
    PYTHON_HAS_UNICODE_WIDE = False
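# Note: PYTHON_HAS_UNICODE_WIDE is consumed further down, when assembling
# multiarray_src, to decide whether src/multiarray/ucsnarrow.c needs to be
# compiled in.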
#-------------------------------------------------------
# Define the function PyOS_ascii_strtod if not available
#-------------------------------------------------------
if not config.CheckDeclaration('PyOS_ascii_strtod',
                               includes = "#include <Python.h>"):
    if config.CheckFunc('strtod'):
        config.Define('PyOS_ascii_strtod', 'strtod',
                      "Define to a function to use as a replacement for "
                      "PyOS_ascii_strtod if not available in python header")

#------------------------------------
# DISTUTILS Hack on AMD64 on windows
#------------------------------------
# XXX: this is ugly
if sys.platform == 'win32' or os.name == 'nt':
    from distutils.msvccompiler import get_build_architecture
    a = get_build_architecture()
    print 'BUILD_ARCHITECTURE: %r, os.name=%r, sys.platform=%r' % \
            (a, os.name, sys.platform)
    if a == 'AMD64':
        distutils_use_sdk = 1
        config.Define('DISTUTILS_USE_SDK', distutils_use_sdk,
                      "define to 1 to build with the Platform SDK on AMD64 windows")
    if a == "Intel":
        config.Define('FORCE_NO_LONG_DOUBLE_FORMATTING', 1,
                      "define to 1 to force long double format string to the"
                      " same as double (Lg -> g)")

#--------------
# Checking Blas
#--------------
if config.CheckCBLAS():
    build_blasdot = 1
else:
    build_blasdot = 0

config.config_h_text += """
#ifndef _NPY_NPY_CONFIG_H_
#error config.h should never be included directly, include npy_config.h instead
#endif
"""
config.Finish()
write_info(env)

#==========
#  Build
#==========

# List of headers which need to be "installed" into the build directory for
# proper in-place build support
generated_headers = []

#---------------------------------------
# Generate the public configuration file
#---------------------------------------
config_dict = {}
# XXX: this is ugly, make the API for config.h and numpyconfig.h similar
for key, value in numpyconfig_sym:
    config_dict['@%s@' % key] = str(value)
env['SUBST_DICT'] = config_dict

include_dir = 'include/numpy'
target = env.SubstInFile(pjoin(include_dir, '_numpyconfig.h'),
                         pjoin(include_dir, '_numpyconfig.h.in'))
generated_headers.append(target[0])

env['CONFIG_H_GEN'] = numpyconfig_sym

#---------------------------
# Builder for generated code
#---------------------------
env.Append(BUILDERS = {'GenerateMultiarrayApi' : array_api_gen_bld,
                       'GenerateUfuncApi' : ufunc_api_gen_bld,
                       'GenerateFromTemplate' : template_bld,
                       'GenerateUmath' : umath_bld})

#------------------------
# Generate generated code
#------------------------
scalartypes_src = env.GenerateFromTemplate(
                        pjoin('src', 'multiarray', 'scalartypes.c.src'))
umath_funcs_src = env.GenerateFromTemplate(
                        pjoin('src', 'umath', 'funcs.inc.src'))
umath_loops_src = env.GenerateFromTemplate(
                        pjoin('src', 'umath', 'loops.c.src'))
arraytypes_src = env.GenerateFromTemplate(
                        pjoin('src', 'multiarray', 'arraytypes.c.src'))
sortmodule_src = env.GenerateFromTemplate(pjoin('src', '_sortmodule.c.src'))
umathmodule_src = env.GenerateFromTemplate(
                        pjoin('src', 'umath', 'umathmodule.c.src'))
umath_tests_src = env.GenerateFromTemplate(
                        pjoin('src', 'umath', 'umath_tests.c.src'))
multiarray_tests_src = env.GenerateFromTemplate(
                        pjoin('src', 'multiarray', 'multiarray_tests.c.src'))
scalarmathmodule_src = env.GenerateFromTemplate(
                        pjoin('src', 'scalarmathmodule.c.src'))

umath = env.GenerateUmath('__umath_generated',
                          pjoin('code_generators', 'generate_umath.py'))

multiarray_api = env.GenerateMultiarrayApi('include/numpy/multiarray_api',
        [SCons.Node.Python.Value(d) for d in multiarray_api_dict])
generated_headers.append(multiarray_api[0])

ufunc_api = env.GenerateUfuncApi('include/numpy/ufunc_api',
        [SCons.Node.Python.Value(d) for d in ufunc_api_dict])
generated_headers.append(ufunc_api[0])
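# Note (assumption, not spelled out in this script): each API builder emits
# several nodes, a generated header plus the __multiarray_api.c/__ufunc_api.c
# sources mentioned below; only the first node, the header, needs to be
# tracked in generated_headers for the in-place build.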
# include/numpy is added for compatibility reasons with distutils: this is
# needed for __multiarray_api.c and __ufunc_api.c included from multiarray and
# ufunc.
env.Prepend(CPPPATH = ['src/private', 'include', '.', 'include/numpy'])

# npymath core lib
npymath_src = [env.GenerateFromTemplate(pjoin('src', 'npymath', 'npy_math.c.src')),
               env.GenerateFromTemplate(pjoin('src', 'npymath', 'npy_math_complex.c.src')),
               env.GenerateFromTemplate(pjoin('src', 'npymath', 'ieee754.c.src'))]
env.DistutilsInstalledStaticExtLibrary("npymath", npymath_src, install_dir='lib')
env.Prepend(LIBS=["npymath"])
env.Prepend(LIBPATH=["."])

subst_dict = {'@prefix@': '$distutils_install_prefix',
              '@pkgname@': 'numpy.core',
              '@sep@': os.path.sep}
npymath_ini = env.SubstInFile(pjoin('lib', 'npy-pkg-config', 'npymath.ini'),
                              'npymath.ini.in', SUBST_DICT=subst_dict)

subst_dict = {'@posix_mathlib@': " ".join(['-l%s' % l for l in mlib]),
              '@msvc_mathlib@': " ".join(['%s.lib' % l for l in mlib])}
mlib_ini = env.SubstInFile(pjoin('lib', 'npy-pkg-config', 'mlib.ini'),
                           'mlib.ini.in', SUBST_DICT=subst_dict)

env.Install('$distutils_installdir/lib/npy-pkg-config', mlib_ini)
env.Install('$distutils_installdir/lib/npy-pkg-config', npymath_ini)
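# For instance (illustrative only): with mlib == ['m'], the substitutions
# above resolve @posix_mathlib@ to "-lm" and @msvc_mathlib@ to "m.lib" in the
# installed mlib.ini, so downstream packages can query the right link flags.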
#-----------------
# Build multiarray
#-----------------
if ENABLE_SEPARATE_COMPILATION:
    multiarray_src = [pjoin('src', 'multiarray', 'multiarraymodule.c'),
                      pjoin('src', 'multiarray', 'hashdescr.c'),
                      pjoin('src', 'multiarray', 'arrayobject.c'),
                      pjoin('src', 'multiarray', 'datetime.c'),
                      pjoin('src', 'multiarray', 'numpyos.c'),
                      pjoin('src', 'multiarray', 'flagsobject.c'),
                      pjoin('src', 'multiarray', 'descriptor.c'),
                      pjoin('src', 'multiarray', 'iterators.c'),
                      pjoin('src', 'multiarray', 'mapping.c'),
                      pjoin('src', 'multiarray', 'number.c'),
                      pjoin('src', 'multiarray', 'getset.c'),
                      pjoin('src', 'multiarray', 'sequence.c'),
                      pjoin('src', 'multiarray', 'methods.c'),
                      pjoin('src', 'multiarray', 'ctors.c'),
                      pjoin('src', 'multiarray', 'convert_datatype.c'),
                      pjoin('src', 'multiarray', 'convert.c'),
                      pjoin('src', 'multiarray', 'shape.c'),
                      pjoin('src', 'multiarray', 'item_selection.c'),
                      pjoin('src', 'multiarray', 'calculation.c'),
                      pjoin('src', 'multiarray', 'common.c'),
                      pjoin('src', 'multiarray', 'refcount.c'),
                      pjoin('src', 'multiarray', 'conversion_utils.c'),
                      pjoin('src', 'multiarray', 'usertypes.c'),
                      pjoin('src', 'multiarray', 'buffer.c'),
                      pjoin('src', 'multiarray', 'numpymemoryview.c'),
                      pjoin('src', 'multiarray', 'scalarapi.c')]
    multiarray_src.extend(arraytypes_src)
    multiarray_src.extend(scalartypes_src)
    if PYTHON_HAS_UNICODE_WIDE:
        multiarray_src.extend([pjoin("src", "multiarray", "ucsnarrow.c")])
else:
    multiarray_src = [pjoin('src', 'multiarray', 'multiarraymodule_onefile.c')]

multiarray = env.DistutilsPythonExtension('multiarray', source = multiarray_src)

env.DistutilsPythonExtension('multiarray_tests', source = multiarray_tests_src)

#------------------
# Build sort module
#------------------
sort = env.DistutilsPythonExtension('_sort', source = sortmodule_src)

#-------------------
# Build umath module
#-------------------
if ENABLE_SEPARATE_COMPILATION:
    umathmodule_src.extend([pjoin('src', 'umath', 'ufunc_object.c')])
    umathmodule_src.extend(umath_loops_src)
else:
    umathmodule_src = [pjoin('src', 'umath', 'umathmodule_onefile.c')]

umathmodule = env.DistutilsPythonExtension('umath', source = umathmodule_src)

#------------------------
# Build scalarmath module
#------------------------
scalarmathmodule = env.DistutilsPythonExtension('scalarmath',
                                                source = scalarmathmodule_src)

#-------------------------
# Build umath_tests module
#-------------------------
umath_tests = env.DistutilsPythonExtension('umath_tests',
                                           source = umath_tests_src)

#----------------------
# Build _dotblas module
#----------------------
if build_blasdot:
    dotblas_src = [pjoin('blasdot', i) for i in ['_dotblas.c']]
    # because _dotblas does #include CBLAS_HEADER instead of #include
    # "cblas.h", scons does not detect the dependency
    # XXX: the PythonExtension builder does not take the Depends on the
    # extension into account for some reason, so we first build the object
    # with a forced dependency, and then build the extension. This is most
    # likely a bug in our PythonExtension builder, but I cannot see how to
    # solve it.
    dotblas_o = env.PythonObject('_dotblas', source = dotblas_src)
    env.Depends(dotblas_o, pjoin("blasdot", "cblas.h"))
    dotblas = env.DistutilsPythonExtension('_dotblas', dotblas_o)

# "Install" the generated headers in the build directory, so that in-place
# builds work
for h in generated_headers:
    env.Install(pjoin('$distutils_installdir', include_dir), h)
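# Usage note (illustrative; assumes the usual numscons entry point, e.g.
# "python setupscons.py build"): the experimental split-source build of
# multiarray and umath above is enabled purely by exporting
# NPY_SEPARATE_COMPILATION in the environment before the build; only the
# presence of the variable is tested, its value is ignored.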