import os
import sys

from bento.commands.hooks import pre_configure, pre_build, post_build

import waflib
import waflib.Errors
import waflib.Logs
import waflib.TaskGen
from waflib.Task import Task

from os.path import join as pjoin

waflib.Logs.verbose = 1

# Importing this adds new checkers to the waf configure context - I don't like
# this way of working, should find a more explicit way to attach new functions
# to the context.
import numpy.build_utils

from numpy.distutils.conv_template import process_str as process_c_str

from code_generators.numpy_api import multiarray_api, ufunc_api
from code_generators import generate_numpy_api, generate_ufunc_api, \
        generate_umath

from setup_common import OPTIONAL_STDFUNCS_MAYBE, OPTIONAL_STDFUNCS, \
        C99_FUNCS_EXTENDED, C99_FUNCS_SINGLE, C99_COMPLEX_TYPES, \
        C99_COMPLEX_FUNCS, MANDATORY_FUNCS, C_ABI_VERSION, C_API_VERSION

NUMPYCONFIG_SYM = []
# FIXME
NUMPYCONFIG_SYM.append(('DEFINE_NPY_ENABLE_SEPARATE_COMPILATION', ''))
NUMPYCONFIG_SYM.append(('VISIBILITY_HIDDEN', ''))

NUMPYCONFIG_SYM.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
NUMPYCONFIG_SYM.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))

def is_npy_no_signal():
    """Return True if the NPY_NO_SIGNAL symbol must be defined in the
    configuration header."""
    return sys.platform == 'win32'

def define_no_smp():
    """Return True if we should define NPY_NOSMP, False otherwise."""
    #--------------------------------
    # Checking SMP and thread options
    #--------------------------------
    # Python 2.3 causes a segfault when trying to re-acquire the thread-state,
    # which is done in error-handling ufunc code.  NPY_ALLOW_C_API and friends
    # cause the segfault, so we disable threading for now.
    if sys.version[:5] < '2.4.2':
        nosmp = 1
    else:
        # Perhaps a fancier check is in order here, so that threads are only
        # enabled if there are actually multiple CPUs -- but threaded code can
        # be nice even on a single CPU, so that long-running calculations
        # don't block.
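        # NOTE: the mere presence of NPY_NOSMP in the environment is enough
        # to disable threading; its value is ignored.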
        try:
            nosmp = os.environ['NPY_NOSMP']
            nosmp = 1
        except KeyError:
            nosmp = 0
    return nosmp == 1

def write_numpy_config(conf):
    subst_dict = {}
    for key, value in NUMPYCONFIG_SYM:
        subst_dict["@%s@" % key] = str(value)

    node = conf.path.find_node("include/numpy/_numpyconfig.h.in")
    assert node is not None
    cnt = node.read()
    for k, v in subst_dict.items():
        cnt = cnt.replace(k, v)

    onode = conf.bldnode.find_or_declare(node.path_from(conf.srcnode)).change_ext("")
    onode.write(cnt)

def type_checks(conf):
    header_name = "Python.h"
    features = "c pyext"

    for c_type in ("int", "long", "short"):
        macro_name = "SIZEOF_%s" % numpy.build_utils.sanitize_string(c_type)
        conf.check_declaration(macro_name, header_name=header_name,
                               features=features)
        NUMPYCONFIG_SYM.append((macro_name, macro_name))

    for c_type, e_size in (("float", 4), ("double", 8), ("long double", [12, 16])):
        macro_name = "SIZEOF_%s" % numpy.build_utils.sanitize_string(c_type)
        size = conf.check_type_size(c_type, header_name=header_name,
                                    features=features, expected_sizes=e_size)
        NUMPYCONFIG_SYM.append((macro_name, str(size)))

        macro_name = "SIZEOF_COMPLEX_%s" % numpy.build_utils.sanitize_string(c_type)
        complex_def = "struct {%s __x; %s __y;}" % (c_type, c_type)
        size = conf.check_type_size(complex_def, header_name=header_name,
                                    features=features, expected_sizes=2 * size)
        NUMPYCONFIG_SYM.append((macro_name, str(size)))

    if sys.platform != 'darwin':
        conf.check_ldouble_representation()

    size = conf.check_type_size("Py_intptr_t", header_name=header_name,
                                expected_sizes=[4, 8], features=features)
    NUMPYCONFIG_SYM.append(('SIZEOF_%s' % numpy.build_utils.sanitize_string("Py_intptr_t"),
                            '%d' % size))

    # We check declaration AND type because that's how distutils does it.
    try:
        conf.check_declaration("PY_LONG_LONG", header_name=header_name,
                               features=features)
        size = conf.check_type_size("PY_LONG_LONG", header_name=header_name,
                                    features=features, expected_sizes=[4, 8])
        NUMPYCONFIG_SYM.append(("DEFINE_NPY_SIZEOF_LONGLONG",
                                "#define NPY_SIZEOF_LONGLONG %d" % size))
        NUMPYCONFIG_SYM.append(("DEFINE_NPY_SIZEOF_PY_LONG_LONG",
                                "#define NPY_SIZEOF_PY_LONG_LONG %d" % size))
    except waflib.Errors.ConfigurationError:
        NUMPYCONFIG_SYM.append(("DEFINE_NPY_SIZEOF_LONGLONG", ""))
        NUMPYCONFIG_SYM.append(("DEFINE_NPY_SIZEOF_PY_LONG_LONG", ""))

    conf.check_declaration("CHAR_BIT", header_name=header_name, features=features)

def signal_smp_checks(conf):
    if is_npy_no_signal():
        NUMPYCONFIG_SYM.append(("DEFINE_NPY_NO_SIGNAL", "#define NPY_NO_SIGNAL\n"))
        conf.define("__NPY_PRIVATE_NO_SIGNAL", 1)
    else:
        NUMPYCONFIG_SYM.append(("DEFINE_NPY_NO_SIGNAL", ""))

    if define_no_smp():
        NUMPYCONFIG_SYM.append(("NPY_NO_SMP", 1))
    else:
        NUMPYCONFIG_SYM.append(("NPY_NO_SMP", 0))

def check_math_runtime(conf):
    header_name = "Python.h math.h"
    features = "c cprogram pyext"

    mlibs = [None, "m", "cpml"]
    mathlib = os.environ.get('MATHLIB')
    if mathlib:
        mlibs.insert(0, mathlib)

    mlib = None
    for lib in mlibs:
        try:
            if lib is None:
                kw = {}
            else:
                kw = {"lib": lib}
            conf.check_functions_at_once(["exp"], uselib_store="M", **kw)
            mlib = lib or []
            break
        except waflib.Errors.ConfigurationError:
            pass
    if mlib is None:
        raise waflib.Errors.ConfigurationError("No math lib found!")

    # XXX: this is ugly: mathlib has nothing to do in a public header file
    NUMPYCONFIG_SYM.append(('MATHLIB', ','.join(mlib)))

    conf.check_functions_at_once(MANDATORY_FUNCS, use="M")

    mfuncs = ('expl', 'expf', 'log1p', 'expm1', 'asinh', 'atanhf', 'atanhl',
              'rint', 'trunc')
    conf.check_functions_at_once(mfuncs, use="M")

    header_name = "Python.h math.h"
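    # The remaining, optional checks below are done with the plain "c pyext"
    # features (no "cprogram").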
    features = "c pyext"

    for f in OPTIONAL_STDFUNCS_MAYBE:
        try:
            conf.check_declaration("HAVE_%s" % numpy.build_utils.sanitize_string(f),
                                   header_name=header_name, features=features)
            OPTIONAL_STDFUNCS.remove(f)
        except waflib.Errors.ConfigurationError:
            pass

    conf.check_functions_at_once(OPTIONAL_STDFUNCS, features=features,
                                 mandatory=False, use="M")
    conf.check_functions_at_once(C99_FUNCS_SINGLE, features=features,
                                 mandatory=False, use="M")
    conf.check_functions_at_once(C99_FUNCS_EXTENDED, features=features,
                                 mandatory=False, use="M")

    for f in ["isnan", "isinf", "signbit", "isfinite"]:
        try:
            conf.check_declaration("HAVE_DECL_%s" % f.upper(),
                                   header_name=header_name, features=features)
            NUMPYCONFIG_SYM.append(('DEFINE_NPY_HAVE_DECL_%s' % f.upper(),
                                    '#define NPY_HAVE_DECL_%s' % f.upper()))
        except waflib.Errors.ConfigurationError:
            try:
                conf.check_declaration(f, header_name=header_name,
                                       features=features)
                NUMPYCONFIG_SYM.append(('DEFINE_NPY_HAVE_DECL_%s' % f.upper(),
                                        '#define NPY_HAVE_DECL_%s' % f.upper()))
            except waflib.Errors.ConfigurationError:
                NUMPYCONFIG_SYM.append(('DEFINE_NPY_HAVE_DECL_%s' % f.upper(), ''))

def check_complex(conf):
    if conf.check_header("complex.h"):
        NUMPYCONFIG_SYM.append(('DEFINE_NPY_USE_C99_COMPLEX',
                                '#define NPY_USE_C99_COMPLEX 1'))
        for t in C99_COMPLEX_TYPES:
            try:
                conf.check_type(t, header_name='complex.h')
                NUMPYCONFIG_SYM.append(('DEFINE_NPY_HAVE_%s' % numpy.build_utils.sanitize_string(t),
                                        '#define NPY_HAVE_%s' % numpy.build_utils.sanitize_string(t)))
            except waflib.Errors.ConfigurationError:
                NUMPYCONFIG_SYM.append(('DEFINE_NPY_HAVE_%s' % numpy.build_utils.sanitize_string(t), ''))

        for prec in ["", "f", "l"]:
            flist = [f + prec for f in C99_COMPLEX_FUNCS]
            conf.check_functions_at_once(flist, use="M")
    else:
        NUMPYCONFIG_SYM.append(('DEFINE_NPY_USE_C99_COMPLEX', ''))
        for t in C99_COMPLEX_TYPES:
            NUMPYCONFIG_SYM.append(('DEFINE_NPY_HAVE_%s' % numpy.build_utils.sanitize_string(t), ''))

@pre_configure()
def configure(context):
    conf = context.waf_context

    # FIXME: bento command's context should have an API so that subclasses can
    # customize the recurse behavior
    conf.path = context.local_node

    try:
        conf.check_header("endian.h")
        NUMPYCONFIG_SYM.append(('DEFINE_NPY_HAVE_ENDIAN_H',
                                '#define NPY_HAVE_ENDIAN_H 1'))
    except waflib.Errors.ConfigurationError:
        NUMPYCONFIG_SYM.append(('DEFINE_NPY_HAVE_ENDIAN_H', ''))

    try:
        conf.check_declaration('PRIdPTR', header_name='inttypes.h')
        NUMPYCONFIG_SYM.append(('DEFINE_NPY_USE_C99_FORMATS',
                                '#define NPY_USE_C99_FORMATS 1'))
    except waflib.Errors.ConfigurationError:
        NUMPYCONFIG_SYM.append(('DEFINE_NPY_USE_C99_FORMATS', ''))

    type_checks(conf)
    signal_smp_checks(conf)
    check_math_runtime(conf)
    numpy.build_utils.check_inline(conf)
    check_complex(conf)

    conf.env["CONFIG_HEADER_TEMPLATE"] = """\
%(content)s
#ifndef _NPY_NPY_CONFIG_H_
#error config.h should never be included directly, include npy_config.h instead
#endif"""
    conf.write_config_header("config.h")

    write_numpy_config(conf)

    conf.env.INCLUDES = [".", "include", "include/numpy"]

    # FIXME: should be handled in the bento context
    conf.store()

class CTemplateTask(waflib.Task.Task):
    """Task processing .src files through numpy's conv_template."""
    color = 'BLUE'
    before = ['c']
    def run(self):
        s = self.inputs[0]
        cnt = s.read()
        writestr = process_c_str(cnt)
        o = self.outputs[0]
        o.write(writestr)

@waflib.TaskGen.extension(".src")
def c_template(self, node):
    outs = []
    outs.append(node.change_ext(""))

    tsk = self.create_task('CTemplateTask', node, outs)
    if "c" in self.features:
        self.source.append(outs[0])

class numpy_api_generator(Task):
    vars = ["API_TUPLE"]
    color = "BLUE"
    before = ["c"]
    def run(self):
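        # Targets are handed to the generator as paths relative to the source
        # root rather than as waf Node objects.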
        targets = [o.path_from(self.generator.bld.srcnode) for o in self.outputs]
        generate_numpy_api.do_generate_api(targets, self.env.API_TUPLE)
        return 0

class ufunc_api_generator(Task):
    vars = ["API_TUPLE"]
    color = "BLUE"
    before = ["c"]
    def run(self):
        targets = [o.path_from(self.generator.bld.srcnode) for o in self.outputs]
        generate_ufunc_api.do_generate_api(targets, self.env.API_TUPLE)
        return 0

@waflib.TaskGen.feature("numpy_api_gen")
def process_multiarray_api_generator(self):
    tsk = self.create_task("numpy_api_generator")
    if hasattr(self, "api_tuple"):
        tsk.env.API_TUPLE = self.api_tuple
    elif "API_TUPLE" not in tsk.env:
        tsk.env.API_TUPLE = ()
    header = "__%s.h" % self.pattern
    source = "__%s.c" % self.pattern
    txt = self.pattern + ".txt"
    files = [header, source, txt]
    tsk.set_outputs([self.path.find_or_declare(f) for f in files])
    return tsk

@waflib.TaskGen.feature("ufunc_api_gen")
def process_api_ufunc_generator(self):
    tsk = self.create_task("ufunc_api_generator")
    if hasattr(self, "api_tuple"):
        tsk.env.API_TUPLE = self.api_tuple
    elif "API_TUPLE" not in tsk.env:
        tsk.env.API_TUPLE = ()
    header = "__%s.h" % self.pattern
    source = "__%s.c" % self.pattern
    txt = self.pattern + ".txt"
    files = [header, source, txt]
    tsk.set_outputs([self.path.find_or_declare(f) for f in files])
    return tsk

class umath_generator(Task):
    vars = ["API_TUPLE"]
    color = "BLUE"
    before = ["c"]
    ext_in = ".in"
    def run(self):
        if len(self.outputs) > 1:
            raise ValueError("Only one target (the .c file) is expected in "
                             "the umath generator task")
        code = generate_umath.make_code(generate_umath.defdict, generate_umath.__file__)
        self.outputs[0].write(code)
        return 0

@waflib.TaskGen.feature("umath_gen")
def process_umath_generator(self):
    tsk = self.create_task("umath_generator")
    source = "__%s.c" % self.pattern
    tsk.set_outputs(self.path.find_or_declare(source))
    return tsk

@pre_build()
def pbuild(context):
    bld = context.waf_context

    # FIXME: bento command's context should have an API so that subclasses can
    # customize the recurse behavior
    # XXX: there is a risk of confusion between the waf Node class and our
    # own: mixing them is a big no-no, and may cause very hard to debug
    # issues.
    # Find a solution
    old_path = bld.path
    bld.path = old_path.find_dir(context.local_node.path_from(context.top_node))
    assert bld.path.__class__ == old_path.__class__

    def builder(bld, library):
        # FIXME: hack to build a static library that can be linked into a
        # dlopen-able library
        bld(features="c pyext cstlib",
            source=library.sources,
            includes=["src/private", "src/npymath", "include"],
            use="cshlib",
            target=library.name)
    context.register_clib_builder("npymath", builder)

    def builder_multiarray(bld, extension):
        bld(name="multiarray_api",
            features="numpy_api_gen",
            api_tuple=multiarray_api,
            pattern="multiarray_api")

        multiarray_templates = ["src/multiarray/scalartypes.c.src",
                                "src/multiarray/arraytypes.c.src",
                                "src/multiarray/new_iterator.c.src",
                                "src/multiarray/lowlevel_strided_loops.c.src",
                                "src/multiarray/einsum.c.src"]
        bld(target="multiarray_templates", source=multiarray_templates)

        includes = ["src/multiarray", "src/private"]
        bld(features="c cshlib pyext",
            includes=includes,
            target="multiarray",
            source=extension.sources,
            use="npymath")
    context.register_builder("multiarray", builder_multiarray)

    def builder_sort(bld, extension):
        includes = [".", "src/private"]
        bld(features="c cshlib pyext",
            target="_sort",
            source=extension.sources,
            includes=includes,
            use="npymath")
    context.register_builder("_sort", builder_sort)

    def build_ufunc(bld, extension):
        bld(features="ufunc_api_gen",
            api_tuple=ufunc_api,
            pattern="ufunc_api",
            name="ufunc_api")

        ufunc_templates = ["src/umath/loops.c.src",
                           "src/umath/umathmodule.c.src",
                           "src/umath/funcs.inc.src"]
        bld(target="ufunc_templates", source=ufunc_templates)

        bld(features="umath_gen",
            pattern="umath_generated",
            name="umath_gen")

        includes = ["src/umath", "src/private"]
        bld(features="c cshlib pyext",
            includes=includes,
            target="umath",
            source=extension.sources,
            use="npymath")
    context.register_builder("umath", build_ufunc)

    def build_scalarmath(bld, extension):
        bld(features="c cshlib pyext",
            target=extension.name,
            source=extension.sources,
            use="npymath")
    context.register_builder("scalarmath", build_scalarmath)

    def build_multiarray_tests(bld, extension):
        bld(features="c cshlib pyext",
            target=extension.name,
            source=extension.sources,
            use="npymath")
    context.register_builder("multiarray_tests", build_multiarray_tests)

    if bld.env.HAS_CBLAS:
        bld(features="c cshlib pyext",
            target="_dotblas",
            source="blasdot/_dotblas.c",
            includes="src/private",
            use="CBLAS")

    bld.path = old_path
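# NOTE (assumption about the bento side, not verified here): the builder
# callbacks registered above are invoked by bento's build command with the
# waf build context and the corresponding library/extension description from
# bento.info; they only declare waf task generators, and the actual
# compilation is driven by waf.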