-rw-r--r--  distutils/__init__.py | 6
-rw-r--r--  distutils/__version__.py | 4
-rw-r--r--  distutils/ccompiler.py | 352
-rw-r--r--  distutils/command/__init__.py | 31
-rw-r--r--  distutils/command/bdist_rpm.py | 17
-rw-r--r--  distutils/command/build.py | 8
-rw-r--r--  distutils/command/build_clib.py | 185
-rw-r--r--  distutils/command/build_ext.py | 342
-rw-r--r--  distutils/command/build_py.py | 13
-rw-r--r--  distutils/command/build_src.py | 550
-rw-r--r--  distutils/command/config.py | 63
-rw-r--r--  distutils/command/config_compiler.py | 56
-rw-r--r--  distutils/command/install.py | 9
-rw-r--r--  distutils/command/install_data.py | 14
-rw-r--r--  distutils/command/install_headers.py | 21
-rw-r--r--  distutils/command/sdist.py | 22
-rw-r--r--  distutils/conv_template.py | 200
-rw-r--r--  distutils/core.py | 123
-rw-r--r--  distutils/cpuinfo.py | 675
-rw-r--r--  distutils/exec_command.py | 645
-rw-r--r--  distutils/extension.py | 74
-rw-r--r--  distutils/fcompiler/__init__.py | 755
-rw-r--r--  distutils/fcompiler/absoft.py | 128
-rw-r--r--  distutils/fcompiler/compaq.py | 94
-rw-r--r--  distutils/fcompiler/g95.py | 41
-rw-r--r--  distutils/fcompiler/gnu.py | 210
-rw-r--r--  distutils/fcompiler/hpux.py | 41
-rw-r--r--  distutils/fcompiler/ibm.py | 80
-rw-r--r--  distutils/fcompiler/intel.py | 174
-rw-r--r--  distutils/fcompiler/lahey.py | 46
-rw-r--r--  distutils/fcompiler/mips.py | 56
-rw-r--r--  distutils/fcompiler/nag.py | 39
-rw-r--r--  distutils/fcompiler/pg.py | 42
-rw-r--r--  distutils/fcompiler/sun.py | 47
-rw-r--r--  distutils/fcompiler/vast.py | 50
-rw-r--r--  distutils/from_template.py | 262
-rw-r--r--  distutils/line_endings.py | 75
-rw-r--r--  distutils/log.py | 47
-rw-r--r--  distutils/misc_util.py | 614
-rw-r--r--  distutils/setup.py | 13
-rw-r--r--  distutils/system_info.py | 1494
-rw-r--r--  distutils/tests/f2py_ext/__init__.py | 0
-rw-r--r--  distutils/tests/f2py_ext/setup.py | 12
-rw-r--r--  distutils/tests/f2py_ext/src/fib1.f | 18
-rw-r--r--  distutils/tests/f2py_ext/src/fib2.pyf | 9
-rw-r--r--  distutils/tests/f2py_ext/tests/test_fib2.py | 13
-rw-r--r--  distutils/tests/f2py_f90_ext/__init__.py | 0
-rw-r--r--  distutils/tests/f2py_f90_ext/include/body.f90 | 5
-rw-r--r--  distutils/tests/f2py_f90_ext/setup.py | 16
-rw-r--r--  distutils/tests/f2py_f90_ext/src/foo_free.f90 | 6
-rw-r--r--  distutils/tests/f2py_f90_ext/tests/test_foo.py | 13
-rw-r--r--  distutils/tests/gen_ext/__init__.py | 0
-rw-r--r--  distutils/tests/gen_ext/setup.py | 47
-rw-r--r--  distutils/tests/gen_ext/tests/test_fib3.py | 13
-rw-r--r--  distutils/tests/swig_ext/__init__.py | 0
-rw-r--r--  distutils/tests/swig_ext/setup.py | 14
-rw-r--r--  distutils/tests/swig_ext/src/example.c | 14
-rw-r--r--  distutils/tests/swig_ext/src/example.i | 11
-rw-r--r--  distutils/tests/swig_ext/src/zoo.cc | 23
-rw-r--r--  distutils/tests/swig_ext/src/zoo.h | 9
-rw-r--r--  distutils/tests/swig_ext/src/zoo.i | 10
-rw-r--r--  distutils/tests/swig_ext/tests/test_example.py | 18
-rw-r--r--  distutils/tests/swig_ext/tests/test_example2.py | 17
-rw-r--r--  distutils/tests/test_build_src.py | 31
-rw-r--r--  distutils/unixccompiler.py | 65
65 files changed, 8082 insertions, 0 deletions
diff --git a/distutils/__init__.py b/distutils/__init__.py
new file mode 100644
index 000000000..f2c08ff92
--- /dev/null
+++ b/distutils/__init__.py
@@ -0,0 +1,6 @@
+
+from __version__ import version as __version__
+# Must import the local ccompiler as early as possible so that the
+# customized CCompiler.spawn takes effect.
+import ccompiler
+import unixccompiler
diff --git a/distutils/__version__.py b/distutils/__version__.py
new file mode 100644
index 000000000..06077f79c
--- /dev/null
+++ b/distutils/__version__.py
@@ -0,0 +1,4 @@
+major = 0
+minor = 4
+micro = 0
+version = '%(major)d.%(minor)d.%(micro)d' % (locals())
diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
new file mode 100644
index 000000000..3a4d80591
--- /dev/null
+++ b/distutils/ccompiler.py
@@ -0,0 +1,352 @@
+
+import re
+import os
+import sys
+import new
+
+from distutils.ccompiler import *
+from distutils import ccompiler
+from distutils.sysconfig import customize_compiler
+from distutils.version import LooseVersion
+
+import log
+from exec_command import exec_command
+from misc_util import cyg2win32
+from distutils.spawn import _nt_quote_args
+
+# Using customized CCompiler.spawn.
+def CCompiler_spawn(self, cmd, display=None):
+ if display is None:
+ display = cmd
+ if type(display) is type([]): display = ' '.join(display)
+ log.info(display)
+ if type(cmd) is type([]) and os.name == 'nt':
+ cmd = _nt_quote_args(cmd)
+ s,o = exec_command(cmd)
+ if s:
+ if type(cmd) is type([]):
+ cmd = ' '.join(cmd)
+ print o
+ raise DistutilsExecError,\
+ 'Command "%s" failed with exit status %d' % (cmd, s)
+CCompiler.spawn = new.instancemethod(CCompiler_spawn,None,CCompiler)
+
+def CCompiler_object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
+ if output_dir is None:
+ output_dir = ''
+ obj_names = []
+ for src_name in source_filenames:
+ base, ext = os.path.splitext(os.path.normpath(src_name))
+ base = os.path.splitdrive(base)[1] # Chop off the drive
+ base = base[os.path.isabs(base):] # If abs, chop off leading /
+ if base.startswith('..'):
+ # Resolve starting relative path components, middle ones
+ # (if any) have been handled by os.path.normpath above.
+ i = base.rfind('..')+2
+ d = base[:i]
+ d = os.path.basename(os.path.abspath(d))
+ base = d + base[i:]
+ if ext not in self.src_extensions:
+ raise UnknownFileError, \
+ "unknown file type '%s' (from '%s')" % (ext, src_name)
+ if strip_dir:
+ base = os.path.basename(base)
+ obj_name = os.path.join(output_dir,base + self.obj_extension)
+ obj_names.append(obj_name)
+ return obj_names
+
+CCompiler.object_filenames = new.instancemethod(CCompiler_object_filenames,
+ None,CCompiler)
+
+def CCompiler_compile(self, sources, output_dir=None, macros=None,
+ include_dirs=None, debug=0, extra_preargs=None,
+ extra_postargs=None, depends=None):
+ # This method is effective only with Python >=2.3 distutils.
+ # Any changes here should be applied also to fcompiler.compile
+ # method to support pre Python 2.3 distutils.
+ if not sources:
+ return []
+ from fcompiler import FCompiler
+ if isinstance(self, FCompiler):
+ display = []
+ for fc in ['f77','f90','fix']:
+ fcomp = getattr(self,'compiler_'+fc)
+ if fcomp is None:
+ continue
+ display.append("%s(%s) options: '%s'" % (os.path.basename(fcomp[0]),
+ fc,
+ ' '.join(fcomp[1:])))
+ display = '\n'.join(display)
+ else:
+ ccomp = self.compiler_so
+ display = "%s options: '%s'" % (os.path.basename(ccomp[0]),
+ ' '.join(ccomp[1:]))
+ log.info(display)
+ macros, objects, extra_postargs, pp_opts, build = \
+ self._setup_compile(output_dir, macros, include_dirs, sources,
+ depends, extra_postargs)
+ cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)
+ display = "compile options: '%s'" % (' '.join(cc_args))
+ if extra_postargs:
+ display += "\nextra options: '%s'" % (' '.join(extra_postargs))
+ log.info(display)
+
+ # build any sources in same order as they were originally specified
+ # especially important for fortran .f90 files using modules
+ if isinstance(self, FCompiler):
+ objects_to_build = build.keys()
+ for obj in objects:
+ if obj in objects_to_build:
+ src, ext = build[obj]
+ if self.compiler_type=='absoft':
+ obj = cyg2win32(obj)
+ src = cyg2win32(src)
+ self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
+ else:
+ for obj, (src, ext) in build.items():
+ self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
+
+ # Return *all* object filenames, not just the ones we just built.
+ return objects
+
+CCompiler.compile = new.instancemethod(CCompiler_compile,None,CCompiler)
+
+def CCompiler_customize_cmd(self, cmd):
+ """ Customize compiler using distutils command.
+ """
+ log.info('customize %s using %s' % (self.__class__.__name__,
+ cmd.__class__.__name__))
+ if getattr(cmd,'include_dirs',None) is not None:
+ self.set_include_dirs(cmd.include_dirs)
+ if getattr(cmd,'define',None) is not None:
+ for (name,value) in cmd.define:
+ self.define_macro(name, value)
+ if getattr(cmd,'undef',None) is not None:
+ for macro in cmd.undef:
+ self.undefine_macro(macro)
+ if getattr(cmd,'libraries',None) is not None:
+ self.set_libraries(self.libraries + cmd.libraries)
+ if getattr(cmd,'library_dirs',None) is not None:
+ self.set_library_dirs(self.library_dirs + cmd.library_dirs)
+ if getattr(cmd,'rpath',None) is not None:
+ self.set_runtime_library_dirs(cmd.rpath)
+ if getattr(cmd,'link_objects',None) is not None:
+ self.set_link_objects(cmd.link_objects)
+ return
+
+CCompiler.customize_cmd = new.instancemethod(\
+ CCompiler_customize_cmd,None,CCompiler)
+
+def _compiler_to_string(compiler):
+ props = []
+ mx = 0
+ keys = compiler.executables.keys()
+ for key in ['version','libraries','library_dirs',
+ 'object_switch','compile_switch',
+ 'include_dirs','define','undef','rpath','link_objects']:
+ if key not in keys:
+ keys.append(key)
+ for key in keys:
+ if hasattr(compiler,key):
+ v = getattr(compiler, key)
+ mx = max(mx,len(key))
+ props.append((key,`v`))
+ lines = []
+ format = '%-' +`mx+1`+ 's = %s'
+ for prop in props:
+ lines.append(format % prop)
+ return '\n'.join(lines)
+
+def CCompiler_show_customization(self):
+ if 0:
+ for attrname in ['include_dirs','define','undef',
+ 'libraries','library_dirs',
+ 'rpath','link_objects']:
+ attr = getattr(self,attrname,None)
+ if not attr:
+ continue
+ log.info("compiler '%s' is set to %s" % (attrname,attr))
+ try: self.get_version()
+ except: pass
+ if log._global_log.threshold<2:
+ print '*'*80
+ print self.__class__
+ print _compiler_to_string(self)
+ print '*'*80
+
+CCompiler.show_customization = new.instancemethod(\
+ CCompiler_show_customization,None,CCompiler)
+
+
+def CCompiler_customize(self, dist, need_cxx=0):
+ # See FCompiler.customize for suggested usage.
+ log.info('customize %s' % (self.__class__.__name__))
+ customize_compiler(self)
+ if need_cxx:
+ if hasattr(self,'compiler') and self.compiler[0].find('gcc')>=0:
+ if sys.version[:3]>='2.3':
+ if not self.compiler_cxx:
+ self.compiler_cxx = [self.compiler[0].replace('gcc','g++')]\
+ + self.compiler[1:]
+ else:
+ self.compiler_cxx = [self.compiler[0].replace('gcc','g++')]\
+ + self.compiler[1:]
+ else:
+ log.warn('Missing compiler_cxx fix for '+self.__class__.__name__)
+ return
+
+CCompiler.customize = new.instancemethod(\
+ CCompiler_customize,None,CCompiler)
+
+def CCompiler_get_version(self, force=0, ok_status=[0]):
+ """ Compiler version. Returns None if compiler is not available. """
+ if not force and hasattr(self,'version'):
+ return self.version
+ if not (hasattr(self,'version_cmd') and
+ hasattr(self,'version_pattern')):
+ #log.warn('%s does not provide version_cmd and version_pattern attributes' \
+ # % (self.__class__))
+ return
+
+ cmd = ' '.join(self.version_cmd)
+ status, output = exec_command(cmd,use_tee=0)
+ version = None
+ if status in ok_status:
+ m = re.match(self.version_pattern,output)
+ if m:
+ version = m.group('version')
+ assert version,`version`
+ version = LooseVersion(version)
+ self.version = version
+ return version
+
+CCompiler.get_version = new.instancemethod(\
+ CCompiler_get_version,None,CCompiler)
+
+#if sys.platform == 'win32':
+# compiler_class['mingw32'] = ('mingw32ccompiler', 'Mingw32CCompiler',
+# "Mingw32 port of GNU C Compiler for Win32"\
+# "(for MSC built Python)")
+# if os.environ.get('OSTYPE','')=='msys' or \
+# os.environ.get('MSYSTEM','')=='MINGW32':
+# # On windows platforms, we want to default to mingw32 (gcc)
+# # because msvc can't build blitz stuff.
+# log.info('Setting mingw32 as default compiler for nt.')
+# ccompiler._default_compilers = (('nt', 'mingw32'),) \
+# + ccompiler._default_compilers
+
+
+_distutils_new_compiler = new_compiler
+def new_compiler (plat=None,
+ compiler=None,
+ verbose=0,
+ dry_run=0,
+ force=0):
+ # Try C compilers from scipy.distutils first.
+ if plat is None:
+ plat = os.name
+ try:
+ if compiler is None:
+ compiler = get_default_compiler(plat)
+ (module_name, class_name, long_description) = compiler_class[compiler]
+ except KeyError:
+ msg = "don't know how to compile C/C++ code on platform '%s'" % plat
+ if compiler is not None:
+ msg = msg + " with '%s' compiler" % compiler
+ raise DistutilsPlatformError, msg
+ module_name = "scipy.distutils." + module_name
+ try:
+ __import__ (module_name)
+ except ImportError, msg:
+ print msg,'in scipy.distutils, trying from distutils..'
+ module_name = module_name[6:]
+ try:
+ __import__(module_name)
+ except ImportError, msg:
+ raise DistutilsModuleError, \
+ "can't compile C/C++ code: unable to load module '%s'" % \
+ module_name
+ try:
+ module = sys.modules[module_name]
+ klass = vars(module)[class_name]
+ except KeyError:
+ raise DistutilsModuleError, \
+ ("can't compile C/C++ code: unable to find class '%s' " +
+ "in module '%s'") % (class_name, module_name)
+ compiler = klass(None, dry_run, force)
+ log.debug('new_fcompiler returns %s' % (klass))
+ return compiler
+
+ccompiler.new_compiler = new_compiler
+
+
+_distutils_gen_lib_options = gen_lib_options
+def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
+ r = _distutils_gen_lib_options(compiler, library_dirs,
+ runtime_library_dirs, libraries)
+ lib_opts = []
+ for i in r:
+ if type(i) is type([]):
+ lib_opts.extend(i)
+ else:
+ lib_opts.append(i)
+ return lib_opts
+ccompiler.gen_lib_options = gen_lib_options
+
+
+##Fix distutils.util.split_quoted:
+import re,string
+_wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace)
+_squote_re = re.compile(r"'(?:[^'\\]|\\.)*'")
+_dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')
+_has_white_re = re.compile(r'\s')
+def split_quoted(s):
+ s = string.strip(s)
+ words = []
+ pos = 0
+
+ while s:
+ m = _wordchars_re.match(s, pos)
+ end = m.end()
+ if end == len(s):
+ words.append(s[:end])
+ break
+
+ if s[end] in string.whitespace: # unescaped, unquoted whitespace: now
+ words.append(s[:end]) # we definitely have a word delimiter
+ s = string.lstrip(s[end:])
+ pos = 0
+
+ elif s[end] == '\\': # preserve whatever is being escaped;
+ # will become part of the current word
+ s = s[:end] + s[end+1:]
+ pos = end+1
+
+ else:
+ if s[end] == "'": # slurp singly-quoted string
+ m = _squote_re.match(s, end)
+ elif s[end] == '"': # slurp doubly-quoted string
+ m = _dquote_re.match(s, end)
+ else:
+ raise RuntimeError, \
+ "this can't happen (bad char '%c')" % s[end]
+
+ if m is None:
+ raise ValueError, \
+ "bad string (mismatched %s quotes?)" % s[end]
+
+ (beg, end) = m.span()
+ if _has_white_re.search(s[beg+1:end-1]):
+ s = s[:beg] + s[beg+1:end-1] + s[end:]
+ pos = m.end() - 2
+ else:
+ # Keeping quotes when a quoted word does not contain
+ # white-space. XXX: send a patch to distutils
+ pos = m.end()
+
+ if pos >= len(s):
+ words.append(s)
+ break
+
+ return words
+ccompiler.split_quoted = split_quoted
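
The ccompiler.py file above customizes distutils by monkey-patching: plain functions are attached to the stock CCompiler class at import time via new.instancemethod, so every concrete compiler subclass picks up the new spawn, compile, customize and friends. A minimal sketch of the same pattern on a modern Python, where plain attribute assignment is enough; the wrapper function below is invented for illustration and is not part of this package:

    # Sketch only: wrap CCompiler.spawn with a hypothetical logging wrapper.
    from distutils.ccompiler import CCompiler

    _original_spawn = CCompiler.spawn          # keep the stock implementation

    def CCompiler_logging_spawn(self, cmd):
        # announce the command, then delegate to the saved original
        print('spawning: %s' % ' '.join(cmd))
        return _original_spawn(self, cmd)

    # assigning a function to the class makes it a method on every subclass
    CCompiler.spawn = CCompiler_logging_spawn
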
diff --git a/distutils/command/__init__.py b/distutils/command/__init__.py
new file mode 100644
index 000000000..ac9243e95
--- /dev/null
+++ b/distutils/command/__init__.py
@@ -0,0 +1,31 @@
+"""distutils.command
+
+Package containing implementation of all the standard Distutils
+commands."""
+
+__revision__ = "$Id: __init__.py,v 1.3 2005/05/16 11:08:49 pearu Exp $"
+
+distutils_all = [ 'build_py',
+ 'build_scripts',
+ 'clean',
+ 'install_lib',
+ 'install_scripts',
+ 'bdist',
+ 'bdist_dumb',
+ 'bdist_wininst',
+ ]
+
+__import__('distutils.command',globals(),locals(),distutils_all)
+
+__all__ = ['build',
+ 'config_compiler',
+ 'config',
+ 'build_src',
+ 'build_ext',
+ 'build_clib',
+ 'install',
+ 'install_data',
+ 'install_headers',
+ 'bdist_rpm',
+ 'sdist',
+ ] + distutils_all
diff --git a/distutils/command/bdist_rpm.py b/distutils/command/bdist_rpm.py
new file mode 100644
index 000000000..04aaea33b
--- /dev/null
+++ b/distutils/command/bdist_rpm.py
@@ -0,0 +1,17 @@
+
+import os
+import sys
+from distutils.command.bdist_rpm import *
+from distutils.command.bdist_rpm import bdist_rpm as old_bdist_rpm
+
+class bdist_rpm(old_bdist_rpm):
+
+ def _make_spec_file(self):
+ spec_file = old_bdist_rpm._make_spec_file(self)
+ setup_py = os.path.basename(sys.argv[0])
+ if setup_py == 'setup.py':
+ return spec_file
+ new_spec_file = []
+ for line in spec_file:
+ line = line.replace('setup.py',setup_py)
+ new_spec_file.append(line)
+ return new_spec_file
diff --git a/distutils/command/build.py b/distutils/command/build.py
new file mode 100644
index 000000000..c7872b6ff
--- /dev/null
+++ b/distutils/command/build.py
@@ -0,0 +1,8 @@
+
+from distutils.command.build import build as old_build
+
+class build(old_build):
+
+ sub_commands = [('config_fc', lambda *args: 1),
+ ('build_src', old_build.has_ext_modules),
+ ] + old_build.sub_commands
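
The build command above only prepends two entries to distutils' standard sub_commands list; each entry pairs a command name with a predicate that is called with the command object to decide whether the sub-command runs (lambda *args: 1 means "always run"). A small sketch of that mechanism, mirroring distutils' Command.get_sub_commands; the predicates here are illustrative only:

    # Sketch of how a sub_commands list is evaluated (predicates are examples).
    sub_commands = [
        ('config_fc', lambda cmd: True),                                # always run
        ('build_src', lambda cmd: bool(cmd.distribution.ext_modules)),  # only with extensions
    ]

    def get_sub_commands(cmd):
        # mirrors distutils.cmd.Command.get_sub_commands
        return [name for (name, predicate) in sub_commands
                if predicate is None or predicate(cmd)]
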
diff --git a/distutils/command/build_clib.py b/distutils/command/build_clib.py
new file mode 100644
index 000000000..7f7c54d8d
--- /dev/null
+++ b/distutils/command/build_clib.py
@@ -0,0 +1,185 @@
+""" Modified version of build_clib that handles fortran source files.
+"""
+
+import os
+import string
+import sys
+import re
+from glob import glob
+from types import *
+from distutils.command.build_clib import build_clib as old_build_clib
+from distutils.command.build_clib import show_compilers
+
+from scipy.distutils import log
+from distutils.dep_util import newer_group
+from distutils.errors import DistutilsSetupError
+from scipy.distutils.misc_util import filter_sources, has_f_sources,\
+ has_cxx_sources, all_strings, get_lib_source_files
+
+class build_clib(old_build_clib):
+
+ description = "build C/C++/F libraries used by Python extensions"
+
+ user_options = old_build_clib.user_options + [
+ ('fcompiler=', None,
+ "specify the Fortran compiler type"),
+ ]
+
+ def initialize_options(self):
+ old_build_clib.initialize_options(self)
+ self.fcompiler = None
+ return
+
+ def finalize_options(self):
+ old_build_clib.finalize_options(self)
+ self.set_undefined_options('build_ext',
+ ('fcompiler', 'fcompiler'))
+ return
+
+ def have_f_sources(self):
+ for (lib_name, build_info) in self.libraries:
+ if has_f_sources(build_info.get('sources',[])):
+ return True
+ return False
+
+ def have_cxx_sources(self):
+ for (lib_name, build_info) in self.libraries:
+ if has_cxx_sources(build_info.get('sources',[])):
+ return True
+ return False
+
+ def run(self):
+ if not self.libraries:
+ return
+
+ # Make sure that library sources are complete.
+ for (lib_name, build_info) in self.libraries:
+ if not all_strings(build_info.get('sources',[])):
+ self.run_command('build_src')
+
+ from distutils.ccompiler import new_compiler
+ self.compiler = new_compiler(compiler=self.compiler,
+ dry_run=self.dry_run,
+ force=self.force)
+ self.compiler.customize(self.distribution,
+ need_cxx=self.have_cxx_sources())
+
+ libraries = self.libraries
+ self.libraries = None
+ self.compiler.customize_cmd(self)
+ self.libraries = libraries
+
+ self.compiler.show_customization()
+
+ if self.have_f_sources():
+ from scipy.distutils.fcompiler import new_fcompiler
+ self.fcompiler = new_fcompiler(compiler=self.fcompiler,
+ verbose=self.verbose,
+ dry_run=self.dry_run,
+ force=self.force)
+ self.fcompiler.customize(self.distribution)
+
+ libraries = self.libraries
+ self.libraries = None
+ self.fcompiler.customize_cmd(self)
+ self.libraries = libraries
+
+ self.fcompiler.show_customization()
+
+ self.build_libraries(self.libraries)
+ return
+
+ def get_source_files(self):
+ self.check_library_list(self.libraries)
+ filenames = []
+ for lib in self.libraries:
+ filenames.extend(get_lib_source_files(lib))
+ return filenames
+
+ def build_libraries(self, libraries):
+
+ compiler = self.compiler
+ fcompiler = self.fcompiler
+
+ for (lib_name, build_info) in libraries:
+ sources = build_info.get('sources')
+ if sources is None or type(sources) not in (ListType, TupleType):
+ raise DistutilsSetupError, \
+ ("in 'libraries' option (library '%s'), " +
+ "'sources' must be present and must be " +
+ "a list of source filenames") % lib_name
+ sources = list(sources)
+
+ lib_file = compiler.library_filename(lib_name,
+ output_dir=self.build_clib)
+
+ depends = sources + build_info.get('depends',[])
+ if not (self.force or newer_group(depends, lib_file, 'newer')):
+ log.debug("skipping '%s' library (up-to-date)", lib_name)
+ continue
+ else:
+ log.info("building '%s' library", lib_name)
+
+ macros = build_info.get('macros')
+ include_dirs = build_info.get('include_dirs')
+ extra_postargs = build_info.get('extra_compiler_args') or []
+
+ c_sources, cxx_sources, f_sources, fmodule_sources \
+ = filter_sources(sources)
+
+ if self.compiler.compiler_type=='msvc':
+ # this hack works around the msvc compiler attributes
+ # problem, msvc uses its own convention :(
+ c_sources += cxx_sources
+ cxx_sources = []
+
+ if fmodule_sources:
+ print 'XXX: Fortran 90 module support not implemented or tested'
+ f_sources.extend(fmodule_sources)
+
+ objects = []
+ if c_sources:
+ log.info("compiling C sources")
+ objects = compiler.compile(c_sources,
+ output_dir=self.build_temp,
+ macros=macros,
+ include_dirs=include_dirs,
+ debug=self.debug,
+ extra_postargs=extra_postargs)
+
+ if cxx_sources:
+ log.info("compiling C++ sources")
+ old_compiler = self.compiler.compiler_so[0]
+ self.compiler.compiler_so[0] = self.compiler.compiler_cxx[0]
+
+ cxx_objects = compiler.compile(cxx_sources,
+ output_dir=self.build_temp,
+ macros=macros,
+ include_dirs=include_dirs,
+ debug=self.debug,
+ extra_postargs=extra_postargs)
+ objects.extend(cxx_objects)
+
+ self.compiler.compiler_so[0] = old_compiler
+
+ if f_sources:
+ log.info("compiling Fortran sources")
+ f_objects = fcompiler.compile(f_sources,
+ output_dir=self.build_temp,
+ macros=macros,
+ include_dirs=include_dirs,
+ debug=self.debug,
+ extra_postargs=[])
+ objects.extend(f_objects)
+
+ self.compiler.create_static_lib(objects, lib_name,
+ output_dir=self.build_clib,
+ debug=self.debug)
+
+ clib_libraries = build_info.get('libraries',[])
+ for lname,binfo in libraries:
+ if lname in clib_libraries:
+ clib_libraries.extend(binfo[1].get('libraries',[]))
+ if clib_libraries:
+ build_info['libraries'] = clib_libraries
+
+ return
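
build_clib consumes the distribution's 'libraries' option as a list of (name, build_info) pairs; the keys read above are 'sources', 'depends', 'macros', 'include_dirs', 'extra_compiler_args' and 'libraries'. A hedged example of what one such entry might look like in a setup script; the library and file names are hypothetical:

    # Hypothetical 'libraries' entry of the shape build_libraries() expects.
    libraries = [
        ('fortran_utils', {
            'sources': ['lib/helpers.c', 'lib/dsolve.f'],   # C and Fortran may be mixed
            'depends': ['lib/helpers.h'],
            'macros': [('USE_UNDERSCORE', None)],
            'include_dirs': ['lib'],
            'extra_compiler_args': [],
        }),
    ]
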
diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py
new file mode 100644
index 000000000..09c4149a5
--- /dev/null
+++ b/distutils/command/build_ext.py
@@ -0,0 +1,342 @@
+""" Modified version of build_ext that handles fortran source files.
+"""
+
+import os
+import string
+import sys
+from glob import glob
+from types import *
+
+from distutils.dep_util import newer_group, newer
+from distutils.command.build_ext import build_ext as old_build_ext
+
+from scipy.distutils import log
+from scipy.distutils.misc_util import filter_sources, has_f_sources, \
+ has_cxx_sources, get_ext_source_files, all_strings
+from distutils.errors import DistutilsFileError, DistutilsSetupError
+
+class build_ext (old_build_ext):
+
+ description = "build C/C++/F extensions (compile/link to build directory)"
+
+ user_options = old_build_ext.user_options + [
+ ('fcompiler=', None,
+ "specify the Fortran compiler type"),
+ ]
+
+ def initialize_options(self):
+ old_build_ext.initialize_options(self)
+ self.fcompiler = None
+ return
+
+ def finalize_options(self):
+ old_build_ext.finalize_options(self)
+ self.set_undefined_options('config_fc',
+ ('fcompiler', 'fcompiler'))
+ return
+
+ def run(self):
+ if not self.extensions:
+ return
+
+ # Make sure that extension sources are complete.
+ for ext in self.extensions:
+ if not all_strings(ext.sources):
+ self.run_command('build_src')
+
+ if self.distribution.has_c_libraries():
+ build_clib = self.get_finalized_command('build_clib')
+ self.library_dirs.append(build_clib.build_clib)
+ else:
+ build_clib = None
+
+ # C libraries are not added to the list of
+ # extension libraries automatically, in order to prevent
+ # bogus linking commands. Extensions must
+ # explicitly specify the C libraries that they use.
+
+ # Determine if Fortran compiler is needed.
+ if build_clib and build_clib.fcompiler is not None:
+ need_f_compiler = 1
+ else:
+ need_f_compiler = 0
+ for ext in self.extensions:
+ if has_f_sources(ext.sources):
+ need_f_compiler = 1
+ break
+ if getattr(ext,'language','c') in ['f77','f90']:
+ need_f_compiler = 1
+ break
+
+ # Determine if C++ compiler is needed.
+ need_cxx_compiler = 0
+ for ext in self.extensions:
+ if has_cxx_sources(ext.sources):
+ need_cxx_compiler = 1
+ break
+ if getattr(ext,'language','c')=='c++':
+ need_cxx_compiler = 1
+ break
+
+ from distutils.ccompiler import new_compiler
+ self.compiler = new_compiler(compiler=self.compiler,
+ verbose=self.verbose,
+ dry_run=self.dry_run,
+ force=self.force)
+ self.compiler.customize(self.distribution,need_cxx=need_cxx_compiler)
+ self.compiler.customize_cmd(self)
+ self.compiler.show_customization()
+
+ # Initialize Fortran/C++ compilers if needed.
+ if need_f_compiler:
+ from scipy.distutils.fcompiler import new_fcompiler
+ self.fcompiler = new_fcompiler(compiler=self.fcompiler,
+ verbose=self.verbose,
+ dry_run=self.dry_run,
+ force=self.force)
+ self.fcompiler.customize(self.distribution)
+ self.fcompiler.customize_cmd(self)
+ self.fcompiler.show_customization()
+
+ # Build extensions
+ self.build_extensions()
+ return
+
+ def swig_sources(self, sources):
+ # Do nothing. SWIG sources have been handled by the build_src command.
+ return sources
+
+ def build_extension(self, ext):
+ sources = ext.sources
+ if sources is None or type(sources) not in (ListType, TupleType):
+ raise DistutilsSetupError, \
+ ("in 'ext_modules' option (extension '%s'), " +
+ "'sources' must be present and must be " +
+ "a list of source filenames") % ext.name
+ sources = list(sources)
+
+ if not sources:
+ return
+
+ fullname = self.get_ext_fullname(ext.name)
+ if self.inplace:
+ modpath = string.split(fullname, '.')
+ package = string.join(modpath[0:-1], '.')
+ base = modpath[-1]
+
+ build_py = self.get_finalized_command('build_py')
+ package_dir = build_py.get_package_dir(package)
+ ext_filename = os.path.join(package_dir,
+ self.get_ext_filename(base))
+ else:
+ ext_filename = os.path.join(self.build_lib,
+ self.get_ext_filename(fullname))
+ depends = sources + ext.depends
+
+ if not (self.force or newer_group(depends, ext_filename, 'newer')):
+ log.debug("skipping '%s' extension (up-to-date)", ext.name)
+ return
+ else:
+ log.info("building '%s' extension", ext.name)
+
+ extra_args = ext.extra_compile_args or []
+ macros = ext.define_macros[:]
+ for undef in ext.undef_macros:
+ macros.append((undef,))
+
+ clib_libraries = []
+ clib_library_dirs = []
+ if self.distribution.libraries:
+ for libname,build_info in self.distribution.libraries:
+ if libname in ext.libraries:
+ macros.extend(build_info.get('macros',[]))
+ clib_libraries.extend(build_info.get('libraries',[]))
+ clib_library_dirs.extend(build_info.get('library_dirs',[]))
+
+ c_sources, cxx_sources, f_sources, fmodule_sources = \
+ filter_sources(ext.sources)
+ if self.compiler.compiler_type=='msvc':
+ if cxx_sources:
+ # Needed to compile kiva.agg._agg extension.
+ extra_args.append('/Zm1000')
+ # this hack works around the msvc compiler attributes
+ # problem, msvc uses its own convention :(
+ c_sources += cxx_sources
+ cxx_sources = []
+
+
+ kws = {'depends':ext.depends}
+ output_dir = self.build_temp
+
+ c_objects = []
+ if c_sources:
+ log.info("compiling C sources")
+ c_objects = self.compiler.compile(c_sources,
+ output_dir=output_dir,
+ macros=macros,
+ include_dirs=ext.include_dirs,
+ debug=self.debug,
+ extra_postargs=extra_args,
+ **kws)
+ if cxx_sources:
+ log.info("compiling C++ sources")
+
+ old_compiler = self.compiler.compiler_so[0]
+ self.compiler.compiler_so[0] = self.compiler.compiler_cxx[0]
+
+ c_objects += self.compiler.compile(cxx_sources,
+ output_dir=output_dir,
+ macros=macros,
+ include_dirs=ext.include_dirs,
+ debug=self.debug,
+ extra_postargs=extra_args,
+ **kws)
+ self.compiler.compiler_so[0] = old_compiler
+
+ check_for_f90_modules = not not fmodule_sources
+
+ if f_sources or fmodule_sources:
+ extra_postargs = []
+ include_dirs = ext.include_dirs[:]
+ module_dirs = ext.module_dirs[:]
+
+ #if self.fcompiler.compiler_type=='ibm':
+ macros = []
+
+ if check_for_f90_modules:
+ module_build_dir = os.path.join(\
+ self.build_temp,os.path.dirname(\
+ self.get_ext_filename(fullname)))
+
+ self.mkpath(module_build_dir)
+ if self.fcompiler.module_dir_switch is None:
+ existing_modules = glob('*.mod')
+ extra_postargs += self.fcompiler.module_options(\
+ module_dirs,module_build_dir)
+
+ f_objects = []
+ if fmodule_sources:
+ log.info("compiling Fortran 90 module sources")
+ f_objects = self.fcompiler.compile(fmodule_sources,
+ output_dir=self.build_temp,
+ macros=macros,
+ include_dirs=include_dirs,
+ debug=self.debug,
+ extra_postargs=extra_postargs,
+ depends=ext.depends)
+
+ if check_for_f90_modules \
+ and self.fcompiler.module_dir_switch is None:
+ for f in glob('*.mod'):
+ if f in existing_modules:
+ continue
+ try:
+ self.move_file(f, module_build_dir)
+ except DistutilsFileError: # already exists in destination
+ os.remove(f)
+
+ if f_sources:
+ log.info("compiling Fortran sources")
+ f_objects += self.fcompiler.compile(f_sources,
+ output_dir=self.build_temp,
+ macros=macros,
+ include_dirs=include_dirs,
+ debug=self.debug,
+ extra_postargs=extra_postargs,
+ depends=ext.depends)
+ else:
+ f_objects = []
+
+ objects = c_objects + f_objects
+
+ if ext.extra_objects:
+ objects.extend(ext.extra_objects)
+ extra_args = ext.extra_link_args or []
+
+ try:
+ old_linker_so_0 = self.compiler.linker_so[0]
+ except:
+ pass
+
+ use_fortran_linker = getattr(ext,'language','c') in ['f77','f90']
+ c_libraries = []
+ c_library_dirs = []
+ if use_fortran_linker or f_sources:
+ use_fortran_linker = 1
+ elif self.distribution.has_c_libraries():
+ build_clib = self.get_finalized_command('build_clib')
+ f_libs = []
+ for (lib_name, build_info) in build_clib.libraries:
+ if has_f_sources(build_info.get('sources',[])):
+ f_libs.append(lib_name)
+ if lib_name in ext.libraries:
+ # XXX: how to determine if c_libraries contain
+ # fortran compiled sources?
+ c_libraries.extend(build_info.get('libraries',[]))
+ c_library_dirs.extend(build_info.get('library_dirs',[]))
+ for l in ext.libraries:
+ if l in f_libs:
+ use_fortran_linker = 1
+ break
+
+ # Always use system linker when using MSVC compiler.
+ if self.compiler.compiler_type=='msvc' and use_fortran_linker:
+ c_libraries.extend(self.fcompiler.libraries)
+ c_library_dirs.extend(self.fcompiler.library_dirs)
+ use_fortran_linker = 0
+
+ if use_fortran_linker:
+ if cxx_sources:
+ # XXX: Which linker should be used, Fortran or C++?
+ log.warn('mixing Fortran and C++ is untested')
+ link = self.fcompiler.link_shared_object
+ language = ext.language or self.fcompiler.detect_language(f_sources)
+ else:
+ link = self.compiler.link_shared_object
+ if sys.version[:3]>='2.3':
+ language = ext.language or self.compiler.detect_language(sources)
+ else:
+ language = ext.language
+ if cxx_sources:
+ self.compiler.linker_so[0] = self.compiler.compiler_cxx[0]
+
+ if sys.version[:3]>='2.3':
+ kws = {'target_lang':language}
+ else:
+ kws = {}
+
+ link(objects, ext_filename,
+ libraries=self.get_libraries(ext) + c_libraries + clib_libraries,
+ library_dirs=ext.library_dirs + c_library_dirs + clib_library_dirs,
+ runtime_library_dirs=ext.runtime_library_dirs,
+ extra_postargs=extra_args,
+ export_symbols=self.get_export_symbols(ext),
+ debug=self.debug,
+ build_temp=self.build_temp,**kws)
+
+ try:
+ self.compiler.linker_so[0] = old_linker_so_0
+ except:
+ pass
+
+ return
+
+ def get_source_files (self):
+ self.check_extensions_list(self.extensions)
+ filenames = []
+ for ext in self.extensions:
+ filenames.extend(get_ext_source_files(ext))
+ return filenames
+
+ def get_outputs (self):
+ self.check_extensions_list(self.extensions)
+
+ outputs = []
+ for ext in self.extensions:
+ if not ext.sources:
+ continue
+ fullname = self.get_ext_fullname(ext.name)
+ outputs.append(os.path.join(self.build_lib,
+ self.get_ext_filename(fullname)))
+ return outputs
+
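
build_ext decides whether a Fortran compiler and the Fortran linker are needed from the extension's sources (has_f_sources) and from its optional language attribute ('f77'/'f90'/'c++'). A sketch of an extension declaration that would exercise the Fortran path; the module and file names are invented, and it is assumed that the Extension subclass in extension.py accepts the stock distutils Extension keywords:

    # Hypothetical extension mixing C and Fortran sources.  The .f file makes
    # build_ext instantiate a Fortran compiler; language='f77' would force the
    # Fortran linker even for a pure-C source list.
    from scipy.distutils.extension import Extension

    ext = Extension('mypkg._solvers',
                    sources=['mypkg/solvers.c', 'mypkg/dsolve.f'],
                    libraries=['fortran_utils'],   # see the build_clib example above
                    language='f77')
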
diff --git a/distutils/command/build_py.py b/distutils/command/build_py.py
new file mode 100644
index 000000000..ab5bd8531
--- /dev/null
+++ b/distutils/command/build_py.py
@@ -0,0 +1,13 @@
+
+from distutils.command.build_py import build_py as old_build_py
+
+class build_py(old_build_py):
+
+ def find_package_modules(self, package, package_dir):
+ modules = old_build_py.find_package_modules(self, package, package_dir)
+
+ # Find build_src generated *.py files.
+ build_src = self.get_finalized_command('build_src')
+ modules += build_src.py_modules.get(package,[])
+
+ return modules
diff --git a/distutils/command/build_src.py b/distutils/command/build_src.py
new file mode 100644
index 000000000..be515c47e
--- /dev/null
+++ b/distutils/command/build_src.py
@@ -0,0 +1,550 @@
+""" Build swig, f2py, weave, sources.
+"""
+
+import os
+import re
+import copy
+
+from distutils.cmd import Command
+from distutils.command import build_ext, build_py
+from distutils.util import convert_path
+from distutils.dep_util import newer_group, newer
+
+from scipy.distutils import log
+from scipy.distutils.misc_util import fortran_ext_match, all_strings, dot_join
+from scipy.distutils.from_template import process_file as process_f_file
+from scipy.distutils.conv_template import process_file as process_c_file
+from scipy.distutils.extension import Extension
+from scipy.distutils.system_info import get_info, dict_append
+
+class build_src(build_ext.build_ext):
+
+ description = "build sources from SWIG, F2PY files or a function"
+
+ user_options = [
+ ('build-src=', 'd', "directory to \"build\" sources to"),
+ ('f2pyflags=', None, "additional flags to f2py"),
+ ('swigflags=', None, "additional flags to swig"),
+ ('force', 'f', "forcibly build everything (ignore file timestamps)"),
+ ('inplace', 'i',
+ "ignore build-lib and put compiled extensions into the source " +
+ "directory alongside your pure Python modules"),
+ ]
+
+ boolean_options = ['force','inplace']
+
+ help_options = []
+
+ def initialize_options(self):
+ self.extensions = None
+ self.package = None
+ self.py_modules = None
+ self.build_src = None
+ self.build_lib = None
+ self.build_base = None
+ self.force = None
+ self.inplace = None
+ self.package_dir = None
+ self.f2pyflags = None
+ self.swigflags = None
+ return
+
+ def finalize_options(self):
+ self.set_undefined_options('build',
+ ('build_base', 'build_base'),
+ ('build_lib', 'build_lib'),
+ ('force', 'force'))
+ if self.package is None:
+ self.package = self.distribution.ext_package
+ self.extensions = self.distribution.ext_modules
+ self.libraries = self.distribution.libraries or []
+ self.py_modules = self.distribution.py_modules
+ if self.build_src is None:
+ self.build_src = os.path.join(self.build_base, 'src')
+ if self.inplace is None:
+ build_ext = self.get_finalized_command('build_ext')
+ self.inplace = build_ext.inplace
+
+ # py_modules is used in build_py.find_package_modules
+ self.py_modules = {}
+
+ if self.f2pyflags is None:
+ self.f2pyflags = []
+ else:
+ self.f2pyflags = self.f2pyflags.split() # XXX spaces??
+
+ if self.swigflags is None:
+ self.swigflags = []
+ else:
+ self.swigflags = self.swigflags.split() # XXX spaces??
+ return
+
+ def run(self):
+ if not (self.extensions or self.libraries):
+ return
+ self.build_sources()
+
+ return
+
+ def build_sources(self):
+
+ for libname_info in self.libraries:
+ self.build_library_sources(*libname_info)
+
+ if self.extensions:
+ self.check_extensions_list(self.extensions)
+
+ for ext in self.extensions:
+ self.build_extension_sources(ext)
+
+ return
+
+ def build_library_sources(self, lib_name, build_info):
+ sources = list(build_info.get('sources',[]))
+
+ if not sources:
+ return
+
+ log.info('building library "%s" sources' % (lib_name))
+
+ sources = self.generate_sources(sources, (lib_name, build_info))
+
+ sources = self.template_sources(sources, (lib_name, build_info))
+
+ sources, h_files = self.filter_h_files(sources)
+
+ for f in h_files:
+ self.distribution.headers.append((lib_name,f))
+
+ build_info['sources'] = sources
+ return
+
+ def build_extension_sources(self, ext):
+
+ sources = list(ext.sources)
+
+ log.info('building extension "%s" sources' % (ext.name))
+
+ fullname = self.get_ext_fullname(ext.name)
+
+ modpath = fullname.split('.')
+ package = '.'.join(modpath[0:-1])
+
+
+ if self.inplace:
+ build_py = self.get_finalized_command('build_py')
+ self.ext_target_dir = build_py.get_package_dir(package)
+
+
+ sources = self.generate_sources(sources, ext)
+
+ sources = self.template_sources(sources, ext)
+
+ sources = self.swig_sources(sources, ext)
+
+ sources = self.f2py_sources(sources, ext)
+
+ sources, py_files = self.filter_py_files(sources)
+
+ if not self.py_modules.has_key(package):
+ self.py_modules[package] = []
+ modules = []
+ for f in py_files:
+ module = os.path.splitext(os.path.basename(f))[0]
+ modules.append((package, module, f))
+ self.py_modules[package] += modules
+
+ sources, h_files = self.filter_h_files(sources)
+
+ for f in h_files:
+ self.distribution.headers.append((package,f))
+
+ ext.sources = sources
+
+ return
+
+ def generate_sources(self, sources, extension):
+ new_sources = []
+ func_sources = []
+ for source in sources:
+ if type(source) is type(''):
+ new_sources.append(source)
+ else:
+ func_sources.append(source)
+ if not func_sources:
+ return new_sources
+ if self.inplace:
+ build_dir = self.ext_target_dir
+ else:
+ if type(extension) is type(()):
+ name = extension[0]
+ if not extension[1].has_key('include_dirs'):
+ extension[1]['include_dirs'] = []
+ incl_dirs = extension[1]['include_dirs']
+ else:
+ name = extension.name
+ incl_dirs = extension.include_dirs
+ if self.build_src not in incl_dirs:
+ incl_dirs.append(self.build_src)
+ build_dir = os.path.join(*([self.build_src]\
+ +name.split('.')[:-1]))
+ self.mkpath(build_dir)
+ for func in func_sources:
+ source = func(extension, build_dir)
+ if not source:
+ continue
+ if type(source) is type([]):
+ [log.info(" adding '%s' to sources." % (s)) for s in source]
+ new_sources.extend(source)
+ else:
+ log.info(" adding '%s' to sources." % (source))
+ new_sources.append(source)
+
+ return new_sources
+
+ def filter_py_files(self, sources):
+ return self.filter_files(sources,['.py'])
+
+ def filter_h_files(self, sources):
+ return self.filter_files(sources,['.h','.hpp','.inc'])
+
+ def filter_files(self, sources, exts = []):
+ new_sources = []
+ files = []
+ for source in sources:
+ (base, ext) = os.path.splitext(source)
+ if ext in exts:
+ files.append(source)
+ else:
+ new_sources.append(source)
+ return new_sources, files
+
+ def template_sources(self, sources, extension):
+ new_sources = []
+ if type(extension) is type(()):
+ depends = extension[1].get('depends')
+ include_dirs = extension[1].get('include_dirs')
+ else:
+ depends = extension.depends
+ include_dirs = extension.include_dirs
+ for source in sources:
+ (base, ext) = os.path.splitext(source)
+ if ext == '.src': # Template file
+ if self.inplace:
+ target_dir = os.path.dirname(base)
+ else:
+ target_dir = appendpath(self.build_src, os.path.dirname(base))
+ self.mkpath(target_dir)
+ target_file = os.path.join(target_dir,os.path.basename(base))
+ if (self.force or newer_group([source] + depends, target_file)):
+ if _f_pyf_ext_match(base):
+ log.info("from_template:> %s" % (target_file))
+ outstr = process_f_file(source)
+ else:
+ log.info("conv_template:> %s" % (target_file))
+ outstr = process_c_file(source)
+ fid = open(target_file,'w')
+ fid.write(outstr)
+ fid.close()
+ if _header_ext_match(target_file):
+ d = os.path.dirname(target_file)
+ if d not in include_dirs:
+ include_dirs.append(d)
+ new_sources.append(target_file)
+ else:
+ new_sources.append(source)
+ return new_sources
+
+ def f2py_sources(self, sources, extension):
+ new_sources = []
+ f2py_sources = []
+ f_sources = []
+ f2py_targets = {}
+ target_dirs = []
+ ext_name = extension.name.split('.')[-1]
+ skip_f2py = 0
+
+ for source in sources:
+ (base, ext) = os.path.splitext(source)
+ if ext == '.pyf': # F2PY interface file
+ if self.inplace:
+ target_dir = os.path.dirname(base)
+ else:
+ target_dir = appendpath(self.build_src, os.path.dirname(base))
+ if os.path.isfile(source):
+ name = get_f2py_modulename(source)
+ assert name==ext_name,'mismatch of extension names: '\
+ +source+' provides'\
+ ' '+`name`+' but expected '+`ext_name`
+ target_file = os.path.join(target_dir,name+'module.c')
+ else:
+ log.debug(' source %s does not exist: skipping f2py\'ing.' \
+ % (source))
+ name = ext_name
+ skip_f2py = 1
+ target_file = os.path.join(target_dir,name+'module.c')
+ if not os.path.isfile(target_file):
+ log.debug(' target %s does not exist:\n '\
+ 'Assuming %smodule.c was generated with '\
+ '"build_src --inplace" command.' \
+ % (target_file, name))
+ target_dir = os.path.dirname(base)
+ target_file = os.path.join(target_dir,name+'module.c')
+ assert os.path.isfile(target_file),`target_file`+' missing'
+ log.debug(' Yes! Using %s as up-to-date target.' \
+ % (target_file))
+ target_dirs.append(target_dir)
+ f2py_sources.append(source)
+ f2py_targets[source] = target_file
+ new_sources.append(target_file)
+ elif fortran_ext_match(ext):
+ f_sources.append(source)
+ else:
+ new_sources.append(source)
+
+ if not (f2py_sources or f_sources):
+ return new_sources
+
+ map(self.mkpath, target_dirs)
+
+ f2py_options = extension.f2py_options + self.f2pyflags
+
+ if self.distribution.libraries:
+ for name,build_info in self.distribution.libraries:
+ if name in extension.libraries:
+ f2py_options.extend(build_info.get('f2py_options',[]))
+
+ log.info("f2py options: %s" % (f2py_options))
+
+ if f2py_sources:
+ assert len(f2py_sources)==1,\
+ 'only one .pyf file is allowed per extension module but got'\
+ ' more:'+`f2py_sources`
+ source = f2py_sources[0]
+ target_file = f2py_targets[source]
+ target_dir = os.path.dirname(target_file) or '.'
+ depends = [source] + extension.depends
+ if (self.force or newer_group(depends, target_file,'newer')) \
+ and not skip_f2py:
+ log.info("f2py: %s" % (source))
+ import f2py2e
+ f2py2e.run_main(f2py_options + ['--build-dir',target_dir,source])
+ else:
+ log.debug(" skipping '%s' f2py interface (up-to-date)" % (source))
+ else:
+ #XXX TODO: --inplace support for sdist command
+ if type(extension) is type(()): name = extension[0]
+ else: name = extension.name
+ target_dir = os.path.join(*([self.build_src]\
+ +name.split('.')[:-1]))
+ target_file = os.path.join(target_dir,ext_name + 'module.c')
+ new_sources.append(target_file)
+ depends = f_sources + extension.depends
+ if (self.force or newer_group(depends, target_file, 'newer')) \
+ and not skip_f2py:
+ import f2py2e
+ log.info("f2py:> %s" % (target_file))
+ self.mkpath(target_dir)
+ f2py2e.run_main(f2py_options + ['--lower',
+ '--build-dir',target_dir]+\
+ ['-m',ext_name]+f_sources)
+ else:
+ log.debug(" skipping f2py fortran files for '%s' (up-to-date)"\
+ % (target_file))
+
+ assert os.path.isfile(target_file),`target_file`+' missing'
+
+ target_c = os.path.join(self.build_src,'fortranobject.c')
+ target_h = os.path.join(self.build_src,'fortranobject.h')
+ log.info(" adding '%s' to sources." % (target_c))
+ new_sources.append(target_c)
+ if self.build_src not in extension.include_dirs:
+ log.info(" adding '%s' to include_dirs." \
+ % (self.build_src))
+ extension.include_dirs.append(self.build_src)
+
+ if not skip_f2py:
+ import f2py2e
+ d = os.path.dirname(f2py2e.__file__)
+ source_c = os.path.join(d,'src','fortranobject.c')
+ source_h = os.path.join(d,'src','fortranobject.h')
+ if newer(source_c,target_c) or newer(source_h,target_h):
+ self.mkpath(os.path.dirname(target_c))
+ self.copy_file(source_c,target_c)
+ self.copy_file(source_h,target_h)
+ else:
+ assert os.path.isfile(target_c),`target_c` + ' missing'
+ assert os.path.isfile(target_h),`target_h` + ' missing'
+
+ for name_ext in ['-f2pywrappers.f','-f2pywrappers2.f90']:
+ filename = os.path.join(target_dir,ext_name + name_ext)
+ if os.path.isfile(filename):
+ log.info(" adding '%s' to sources." % (filename))
+ f_sources.append(filename)
+
+ return new_sources + f_sources
+
+ def swig_sources(self, sources, extension):
+ # Assuming SWIG 1.3.14 or later. See compatibility note in
+ # http://www.swig.org/Doc1.3/Python.html#Python_nn6
+
+ new_sources = []
+ swig_sources = []
+ swig_targets = {}
+ target_dirs = []
+ py_files = [] # swig generated .py files
+ target_ext = '.c'
+ typ = None
+ is_cpp = 0
+ skip_swig = 0
+ ext_name = extension.name.split('.')[-1]
+
+ for source in sources:
+ (base, ext) = os.path.splitext(source)
+ if ext == '.i': # SWIG interface file
+ if self.inplace:
+ target_dir = os.path.dirname(base)
+ py_target_dir = self.ext_target_dir
+ else:
+ target_dir = appendpath(self.build_src, os.path.dirname(base))
+ py_target_dir = target_dir
+ if os.path.isfile(source):
+ name = get_swig_modulename(source)
+ assert name==ext_name[1:],'mismatch of extension names: '\
+ +source+' provides'\
+ ' '+`name`+' but expected '+`ext_name[1:]`
+ if typ is None:
+ typ = get_swig_target(source)
+ is_cpp = typ=='c++'
+ if is_cpp:
+ target_ext = '.cpp'
+ else:
+ assert typ == get_swig_target(source),`typ`
+ target_file = os.path.join(target_dir,'%s_wrap%s' \
+ % (name, target_ext))
+ else:
+ log.debug(' source %s does not exist: skipping swig\'ing.' \
+ % (source))
+ name = ext_name[1:]
+ skip_swig = 1
+ target_file = _find_swig_target(target_dir, name)
+ if not os.path.isfile(target_file):
+ log.debug(' target %s does not exist:\n '\
+ 'Assuming %s_wrap.{c,cpp} was generated with '\
+ '"build_src --inplace" command.' \
+ % (target_file, name))
+ target_dir = os.path.dirname(base)
+ target_file = _find_swig_target(target_dir, name)
+ assert os.path.isfile(target_file),`target_file`+' missing'
+ log.debug(' Yes! Using %s as up-to-date target.' \
+ % (target_file))
+ target_dirs.append(target_dir)
+ new_sources.append(target_file)
+ py_files.append(os.path.join(py_target_dir, name+'.py'))
+ swig_sources.append(source)
+ swig_targets[source] = new_sources[-1]
+ else:
+ new_sources.append(source)
+
+ if not swig_sources:
+ return new_sources
+
+ if skip_swig:
+ return new_sources + py_files
+
+ map(self.mkpath, target_dirs)
+ swig = self.find_swig()
+ swig_cmd = [swig, "-python"]
+ if is_cpp:
+ swig_cmd.append('-c++')
+ for d in extension.include_dirs:
+ swig_cmd.append('-I'+d)
+ for source in swig_sources:
+ target = swig_targets[source]
+ depends = [source] + extension.depends
+ if self.force or newer_group(depends, target, 'newer'):
+ log.info("%s: %s" % (os.path.basename(swig) \
+ + (is_cpp and '++' or ''), source))
+ self.spawn(swig_cmd + self.swigflags \
+ + ["-o", target, '-outdir', py_target_dir, source])
+ else:
+ log.debug(" skipping '%s' swig interface (up-to-date)" \
+ % (source))
+
+ return new_sources + py_files
+
+def appendpath(prefix,path):
+ if os.path.isabs(path):
+ absprefix = os.path.abspath(prefix)
+ d = os.path.commonprefix([absprefix,path])
+ if os.path.join(absprefix[:len(d)],absprefix[len(d):])!=absprefix \
+ or os.path.join(path[:len(d)],path[len(d):])!=path:
+ # Handle invalid paths
+ d = os.path.dirname(d)
+ subpath = path[len(d):]
+ if os.path.isabs(subpath):
+ subpath = subpath[1:]
+ else:
+ subpath = path
+ return os.path.normpath(os.path.join(prefix, subpath))
+
+_f_pyf_ext_match = re.compile(r'.*[.](f90|f95|f77|for|ftn|f|pyf)\Z',re.I).match
+_header_ext_match = re.compile(r'.*[.](inc|h|hpp)\Z',re.I).match
+
+#### SWIG related auxiliary functions ####
+_swig_module_name_match = re.compile(r'\s*%module\s*(?P<name>[\w_]+)',
+ re.I).match
+_has_c_header = re.compile(r'-[*]-\s*c\s*-[*]-',re.I).search
+_has_cpp_header = re.compile(r'-[*]-\s*c[+][+]\s*-[*]-',re.I).search
+
+def get_swig_target(source):
+ f = open(source,'r')
+ result = 'c'
+ line = f.readline()
+ if _has_cpp_header(line):
+ result = 'c++'
+ if _has_c_header(line):
+ result = 'c'
+ f.close()
+ return result
+
+def get_swig_modulename(source):
+ f = open(source,'r')
+ f_readlines = getattr(f,'xreadlines',f.readlines)
+ for line in f_readlines():
+ m = _swig_module_name_match(line)
+ if m:
+ name = m.group('name')
+ break
+ f.close()
+ return name
+
+def _find_swig_target(target_dir,name):
+ for ext in ['.cpp','.c']:
+ target = os.path.join(target_dir,'%s_wrap%s' % (name, ext))
+ if os.path.isfile(target):
+ break
+ return target
+
+#### F2PY related auxiliary functions ####
+
+_f2py_module_name_match = re.compile(r'\s*python\s*module\s*(?P<name>[\w_]+)',
+ re.I).match
+_f2py_user_module_name_match = re.compile(r'\s*python\s*module\s*(?P<name>[\w_]*?'\
+ '__user__[\w_]*)',re.I).match
+
+def get_f2py_modulename(source):
+ name = None
+ f = open(source)
+ f_readlines = getattr(f,'xreadlines',f.readlines)
+ for line in f_readlines():
+ m = _f2py_module_name_match(line)
+ if m:
+ if _f2py_user_module_name_match(line): # skip *__user__* names
+ continue
+ name = m.group('name')
+ break
+ f.close()
+ return name
+
+##########################################
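
Besides plain file names, generate_sources() above accepts callables in a sources list: each one is called as func(extension, build_dir) and may return nothing, a single path, or a list of paths for the files it wrote. A hedged sketch of such a generator; the function, header name and contents are made up:

    # Hypothetical source generator of the kind generate_sources() invokes.
    import os

    def generate_config_h(ext, build_dir):
        # write a small header into the build directory and report it back
        target = os.path.join(build_dir, 'config.h')
        f = open(target, 'w')
        f.write('#define MYPKG_VERSION "0.4.0"\n')
        f.close()
        return target

    # It would then sit alongside ordinary file names, e.g.
    #   Extension('mypkg._core', sources=['mypkg/core.c', generate_config_h])
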
diff --git a/distutils/command/config.py b/distutils/command/config.py
new file mode 100644
index 000000000..aff79067e
--- /dev/null
+++ b/distutils/command/config.py
@@ -0,0 +1,63 @@
+# Added Fortran compiler support to config. Currently useful only for
+# the try_compile call. try_run works but is untested for most Fortran
+# compilers (they must define linker_exe first).
+# Pearu Peterson
+
+from distutils.command.config import config as old_config
+from distutils.command.config import LANG_EXT
+LANG_EXT['f77'] = '.f'
+LANG_EXT['f90'] = '.f90'
+
+class config(old_config):
+ old_config.user_options += [
+ ('fcompiler=', None,
+ "specify the Fortran compiler type"),
+ ]
+
+ def initialize_options(self):
+ self.fcompiler = None
+ old_config.initialize_options(self)
+ return
+
+ def finalize_options(self):
+ old_config.finalize_options(self)
+ f = self.distribution.get_command_obj('config_fc')
+ self.set_undefined_options('config_fc',
+ ('fcompiler', 'fcompiler'))
+ return
+
+ def _check_compiler (self):
+ old_config._check_compiler(self)
+ from scipy.distutils.fcompiler import FCompiler, new_fcompiler
+ if not isinstance(self.fcompiler, FCompiler):
+ self.fcompiler = new_fcompiler(compiler=self.fcompiler,
+ dry_run=self.dry_run, force=1)
+ self.fcompiler.customize(self.distribution)
+ self.fcompiler.customize_cmd(self)
+ self.fcompiler.show_customization()
+ return
+
+ def _wrap_method(self,mth,lang,args):
+ from distutils.ccompiler import CompileError
+ from distutils.errors import DistutilsExecError
+ save_compiler = self.compiler
+ if lang in ['f77','f90']:
+ self.compiler = self.fcompiler
+ try:
+ ret = mth(*((self,)+args))
+ except (DistutilsExecError,CompileError),msg:
+ self.compiler = save_compiler
+ raise CompileError
+ self.compiler = save_compiler
+ return ret
+
+ def _compile (self, body, headers, include_dirs, lang):
+ return self._wrap_method(old_config._compile,lang,
+ (body, headers, include_dirs, lang))
+
+ def _link (self, body,
+ headers, include_dirs,
+ libraries, library_dirs, lang):
+ return self._wrap_method(old_config._link,lang,
+ (body, headers, include_dirs,
+ libraries, library_dirs, lang))
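
Because _compile and _link above swap in the Fortran compiler when lang is 'f77' or 'f90', the stock try_compile/try_link helpers of the config command can be used to probe Fortran features. An illustrative sketch; the probe body and the surrounding command are hypothetical:

    # Sketch: probing for a working F77 compiler via the patched config command.
    simple_f77 = """
          program probe
          end
    """
    # Inside a custom command one might write, hypothetically:
    #     config_cmd = self.get_finalized_command('config')
    #     have_f77 = config_cmd.try_compile(simple_f77, lang='f77')
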
diff --git a/distutils/command/config_compiler.py b/distutils/command/config_compiler.py
new file mode 100644
index 000000000..0db601fc6
--- /dev/null
+++ b/distutils/command/config_compiler.py
@@ -0,0 +1,56 @@
+
+import sys
+from distutils.core import Command
+
+#XXX: Implement config_cc for enhancing C/C++ compiler options.
+#XXX: Linker flags
+
+class config_fc(Command):
+ """ Distutils command to hold user specified options
+ to Fortran compilers.
+
+ config_fc command is used by the FCompiler.customize() method.
+ """
+
+ user_options = [
+ ('fcompiler=',None,"specify Fortran compiler type"),
+ ('f77exec=', None, "specify F77 compiler command"),
+ ('f90exec=', None, "specify F90 compiler command"),
+ ('f77flags=',None,"specify F77 compiler flags"),
+ ('f90flags=',None,"specify F90 compiler flags"),
+ ('opt=',None,"specify optimization flags"),
+ ('arch=',None,"specify architecture specific optimization flags"),
+ ('debug','g',"compile with debugging information"),
+ ('noopt',None,"compile without optimization"),
+ ('noarch',None,"compile without arch-dependent optimization"),
+ ('help-fcompiler',None,"list available Fortran compilers"),
+ ]
+
+ boolean_options = ['debug','noopt','noarch','help-fcompiler']
+
+ def initialize_options(self):
+ self.fcompiler = None
+ self.f77exec = None
+ self.f90exec = None
+ self.f77flags = None
+ self.f90flags = None
+ self.opt = None
+ self.arch = None
+ self.debug = None
+ self.noopt = None
+ self.noarch = None
+ self.help_fcompiler = None
+ return
+
+ def finalize_options(self):
+ if self.help_fcompiler:
+ from scipy.distutils.fcompiler import show_fcompilers
+ show_fcompilers(self.distribution)
+ sys.exit()
+ return
+
+ def run(self):
+ # Do nothing.
+ return
+
+
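
config_fc itself does nothing at run time; it only stores user-supplied Fortran options, which FCompiler.customize() and the other commands read back via set_undefined_options. A usage sketch; the compiler name and flags are examples only:

    # Typical command lines (options come from the user_options table above):
    #   python setup.py config_fc --help-fcompiler
    #   python setup.py config_fc --fcompiler=gnu --noopt build
    #
    # and the hand-off other commands use, as seen in config.py and build_ext.py:
    #   self.set_undefined_options('config_fc', ('fcompiler', 'fcompiler'))
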
diff --git a/distutils/command/install.py b/distutils/command/install.py
new file mode 100644
index 000000000..64d613569
--- /dev/null
+++ b/distutils/command/install.py
@@ -0,0 +1,9 @@
+
+from distutils.command.install import *
+from distutils.command.install import install as old_install
+
+class install(old_install):
+
+ def finalize_options (self):
+ old_install.finalize_options(self)
+ self.install_lib = self.install_libbase
diff --git a/distutils/command/install_data.py b/distutils/command/install_data.py
new file mode 100644
index 000000000..e170ba4d8
--- /dev/null
+++ b/distutils/command/install_data.py
@@ -0,0 +1,14 @@
+from distutils.command.install_data import *
+from distutils.command.install_data import install_data as old_install_data
+
+# Data installer with improved intelligence over plain distutils:
+# data files are copied into the project directory instead of
+# being scattered willy-nilly.
+class install_data (old_install_data):
+
+ def finalize_options (self):
+ self.set_undefined_options('install',
+ ('install_lib', 'install_dir'),
+ ('root', 'root'),
+ ('force', 'force'),
+ )
diff --git a/distutils/command/install_headers.py b/distutils/command/install_headers.py
new file mode 100644
index 000000000..801c1a9d9
--- /dev/null
+++ b/distutils/command/install_headers.py
@@ -0,0 +1,21 @@
+import os
+from distutils.command.install import *
+from distutils.command.install_headers import install_headers as old_install_headers
+
+class install_headers (old_install_headers):
+
+ def run (self):
+ headers = self.distribution.headers
+ if not headers:
+ return
+
+ prefix = os.path.dirname(self.install_dir)
+ for header in headers:
+ if isinstance(header,tuple):
+ d = os.path.join(*([prefix]+header[0].split('.')))
+ header = header[1]
+ else:
+ d = self.install_dir
+ self.mkpath(d)
+ (out, _) = self.copy_file(header, d)
+ self.outfiles.append(out)
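
install_headers above accepts both plain paths and (dotted.package, path) tuples in distribution.headers; build_src appends tuples of the latter form for generated header files. A hedged example of the corresponding setup() keyword, with invented names:

    # Hypothetical 'headers' entries of the two shapes run() handles above:
    headers = [
        'mypkg/public_api.h',                  # copied straight into install_dir
        ('mypkg.core', 'build/src/config.h'),  # copied under dirname(install_dir)/mypkg/core/
    ]
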
diff --git a/distutils/command/sdist.py b/distutils/command/sdist.py
new file mode 100644
index 000000000..0b0c49d4b
--- /dev/null
+++ b/distutils/command/sdist.py
@@ -0,0 +1,22 @@
+
+from distutils.command.sdist import *
+from distutils.command.sdist import sdist as old_sdist
+from scipy.distutils.misc_util import get_data_files
+
+class sdist(old_sdist):
+
+ def add_defaults (self):
+ old_sdist.add_defaults(self)
+
+ if self.distribution.has_data_files():
+ for data in self.distribution.data_files:
+ self.filelist.extend(get_data_files(data))
+
+ if self.distribution.has_headers():
+ headers = []
+ for h in self.distribution.headers:
+ if isinstance(h,str): headers.append(h)
+ else: headers.append(h[1])
+ self.filelist.extend(headers)
+
+ return
diff --git a/distutils/conv_template.py b/distutils/conv_template.py
new file mode 100644
index 000000000..d3e2de357
--- /dev/null
+++ b/distutils/conv_template.py
@@ -0,0 +1,200 @@
+#!/usr/bin/python
+
+# Takes a templated file .xxx.src and produces the .xxx file, where .xxx is .i, .c, or .h,
+# using the following template rules:
+
+# /**begin repeat on a line by itself marks the beginning of a segment of code to be repeated
+# /**end repeat**/ on a line by itself marks its end
+
+# after the /**begin repeat and before the */
+# all the named templates are placed
+# these should all have the same number of replacements
+
+# in the main body, the names are used.
+# Each replace will use one entry from the list of named replacements
+
+# Note that all #..# forms in a block must have the same number of
+# comma-separated entries.
+
+__all__ = ['process_str', 'process_file']
+
+import string,os,sys
+if sys.version[:3]>='2.3':
+ import re
+else:
+ import pre as re
+ False = 0
+ True = 1
+
+def parse_structure(astr):
+ spanlist = []
+ # subroutines
+ ind = 0
+ while 1:
+ start = astr.find("/**begin repeat", ind)
+ if start == -1:
+ break
+ start2 = astr.find("*/",start)
+ start2 = astr.find("\n",start2)
+ fini1 = astr.find("/**end repeat**/",start2)
+ fini2 = astr.find("\n",fini1)
+ spanlist.append((start, start2+1, fini1, fini2+1))
+ ind = fini2
+ spanlist.sort()
+ return spanlist
+
+# return n copies of substr with template replacement
+_special_names = {}
+
+template_re = re.compile(r"@([\w]+)@")
+named_re = re.compile(r"#([\w]*)=([^#]*?)#")
+
+parenrep = re.compile(r"[(]([^)]*?)[)]\*(\d+)")
+def paren_repl(obj):
+ torep = obj.group(1)
+ numrep = obj.group(2)
+ return ','.join([torep]*int(numrep))
+
+plainrep = re.compile(r"([^*]+)\*(\d+)")
+
+def conv(astr):
+ # replaces all occurrences of '(a,b,c)*4' in astr
+ # with 'a,b,c,a,b,c,a,b,c,a,b,c'
+ astr = parenrep.sub(paren_repl,astr)
+    # replaces occurrences of 'xxx*3' with 'xxx,xxx,xxx'
+ astr = ','.join([plainrep.sub(paren_repl,x.strip()) for x in astr.split(',')])
+ return astr
+
+def unique_key(adict):
+    # This obtains a key that is not already present in the dictionary.
+    # It works by concatenating the first n letters of the existing keys
+    # and increasing n until a unique key is found -- not particularly quick.
+ allkeys = adict.keys()
+ done = False
+ n = 1
+ while not done:
+ newkey = "".join([x[:n] for x in allkeys])
+ if newkey in allkeys:
+ n += 1
+ else:
+ done = True
+ return newkey
+
+def namerepl(match):
+ global _names, _thissub
+ name = match.group(1)
+ return _names[name][_thissub]
+
+def expand_sub(substr,namestr):
+ global _names, _thissub
+ # find all named replacements
+ reps = named_re.findall(namestr)
+ _names = {}
+ _names.update(_special_names)
+ numsubs = None
+ for rep in reps:
+ name = rep[0].strip()
+ thelist = conv(rep[1])
+ _names[name] = thelist
+
+ # make lists out of string entries in name dictionary
+ for name in _names.keys():
+ entry = _names[name]
+ entrylist = entry.split(',')
+ _names[name] = entrylist
+ num = len(entrylist)
+ if numsubs is None:
+ numsubs = num
+ elif (numsubs != num):
+ print namestr
+ print substr
+ raise ValueError, "Mismatch in number to replace"
+
+ # now replace all keys for each of the lists
+ mystr = ''
+ for k in range(numsubs):
+ _thissub = k
+ mystr += template_re.sub(namerepl, substr)
+ mystr += "\n\n"
+ return mystr
+
+
+_head = \
+"""/* This file was autogenerated from a template DO NOT EDIT!!!!
+ Changes should be made to the original source (.src) file
+*/
+
+"""
+
+def get_line_header(str,beg):
+ extra = []
+ ind = beg-1
+ char = str[ind]
+ while (ind > 0) and (char != '\n'):
+ extra.insert(0,char)
+ ind = ind - 1
+ char = str[ind]
+ return ''.join(extra)
+
+def process_str(allstr):
+ newstr = allstr
+ writestr = _head
+
+ struct = parse_structure(newstr)
+    # parse_structure returns a sorted list with one tuple per begin-repeat
+    # section; each tuple holds the spans of the repeat header and of the
+    # body to be expanded.
+
+ oldend = 0
+ for sub in struct:
+ writestr += newstr[oldend:sub[0]]
+ expanded = expand_sub(newstr[sub[1]:sub[2]],newstr[sub[0]:sub[1]])
+ writestr += expanded
+ oldend = sub[3]
+
+
+ writestr += newstr[oldend:]
+ return writestr
+
+include_src_re = re.compile(r"(\n|\A)#include\s*['\"](?P<name>[\w\d./\\]+[.]src)['\"]",re.I)
+
+def resolve_includes(source):
+ d = os.path.dirname(source)
+ fid = open(source)
+ lines = []
+ for line in fid.readlines():
+ m = include_src_re.match(line)
+ if m:
+ fn = m.group('name')
+ if not os.path.isabs(fn):
+ fn = os.path.join(d,fn)
+ if os.path.isfile(fn):
+ print 'Including file',fn
+ lines.extend(resolve_includes(fn))
+ else:
+ lines.append(line)
+ else:
+ lines.append(line)
+ fid.close()
+ return lines
+
+def process_file(source):
+ lines = resolve_includes(source)
+ return process_str(''.join(lines))
+
+if __name__ == "__main__":
+
+ try:
+ file = sys.argv[1]
+ except IndexError:
+ fid = sys.stdin
+ outfile = sys.stdout
+ else:
+ fid = open(file,'r')
+ (base, ext) = os.path.splitext(file)
+ newname = base
+ outfile = open(newname,'w')
+
+ allstr = fid.read()
+ writestr = process_str(allstr)
+ outfile.write(writestr)
diff --git a/distutils/core.py b/distutils/core.py
new file mode 100644
index 000000000..37d536570
--- /dev/null
+++ b/distutils/core.py
@@ -0,0 +1,123 @@
+
+import types
+from distutils.core import *
+from distutils.core import setup as old_setup
+
+from scipy.distutils.extension import Extension
+from scipy.distutils.command import config
+from scipy.distutils.command import build
+from scipy.distutils.command import build_py
+from scipy.distutils.command import config_compiler
+from scipy.distutils.command import build_ext
+from scipy.distutils.command import build_clib
+from scipy.distutils.command import build_src
+from scipy.distutils.command import sdist
+from scipy.distutils.command import install_data
+from scipy.distutils.command import install_headers
+from scipy.distutils.command import install
+from scipy.distutils.command import bdist_rpm
+from scipy.distutils.misc_util import get_data_files
+
+scipy_cmdclass = {'build': build.build,
+ 'build_src': build_src.build_src,
+ 'config_fc': config_compiler.config_fc,
+ 'config': config.config,
+ 'build_ext': build_ext.build_ext,
+ 'build_py': build_py.build_py,
+ 'build_clib': build_clib.build_clib,
+ 'sdist': sdist.sdist,
+ 'install_data': install_data.install_data,
+ 'install_headers': install_headers.install_headers,
+ 'install': install.install,
+ 'bdist_rpm': bdist_rpm.bdist_rpm,
+ }
+
+def setup(**attr):
+
+ cmdclass = scipy_cmdclass.copy()
+
+ new_attr = attr.copy()
+ if new_attr.has_key('cmdclass'):
+ cmdclass.update(new_attr['cmdclass'])
+ new_attr['cmdclass'] = cmdclass
+
+    # Move (lib_name, build_info) entries from each extension's libraries
+    # list into the top-level libraries list.
+ libraries = []
+ for ext in new_attr.get('ext_modules',[]):
+ new_libraries = []
+ for item in ext.libraries:
+ if type(item) is type(()):
+ lib_name,build_info = item
+ _check_append_ext_library(libraries, item)
+ new_libraries.append(lib_name)
+ else:
+ assert type(item) is type(''),`item`
+ new_libraries.append(item)
+ ext.libraries = new_libraries
+ if libraries:
+ if not new_attr.has_key('libraries'):
+ new_attr['libraries'] = []
+ for item in libraries:
+ _check_append_library(new_attr['libraries'], item)
+
+ # sources in ext_modules or libraries may contain header files
+ if (new_attr.has_key('ext_modules') or new_attr.has_key('libraries')) \
+ and not new_attr.has_key('headers'):
+ new_attr['headers'] = []
+
+ # Expand directories in data_files to files
+ if new_attr.has_key('data_files'):
+ new_data_files = []
+ for data in new_attr['data_files']:
+ if type(data) is types.StringType:
+ new_data_files.append(get_data_files(data)[0])
+ else:
+ new_data_files.append((data[0],get_data_files(data)))
+ new_attr['data_files'] = new_data_files
+
+ return old_setup(**new_attr)
+
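+# Editorial example (illustrative only, not part of the original commit): a
+# hypothetical setup() call relying on the handling above; the
+# ('mylib', build_info) entry is moved out of the extension and into the
+# top-level 'libraries' list before old_setup is called.
+#
+#   from scipy.distutils.core import setup, Extension
+#   setup(name='pkg',
+#         ext_modules=[Extension('pkg._core', ['pkg/_core.c'],
+#                      libraries=[('mylib', {'sources': ['pkg/mylib.c']})])],
+#         data_files=['pkg/data'])
+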
+def _check_append_library(libraries, item):
+ import warnings
+ for libitem in libraries:
+ if type(libitem) is type(()):
+ if type(item) is type(()):
+ if item[0]==libitem[0]:
+ if item[1] is libitem[1]:
+ return
+ warnings.warn("[0] libraries list contains '%s' with"\
+ " different build_info" % (item[0]))
+ break
+ else:
+ if item==libitem[0]:
+ warnings.warn("[1] libraries list contains '%s' with"\
+ " no build_info" % (item[0]))
+ break
+ else:
+ if type(item) is type(()):
+ if item[0]==libitem:
+ warnings.warn("[2] libraries list contains '%s' with"\
+ " no build_info" % (item[0]))
+ break
+ else:
+ if item==libitem:
+ return
+ libraries.append(item)
+ return
+
+def _check_append_ext_library(libraries, (lib_name,build_info)):
+ import warnings
+ for item in libraries:
+ if type(item) is type(()):
+ if item[0]==lib_name:
+ if item[1] is build_info:
+ return
+ warnings.warn("[3] libraries list contains '%s' with"\
+ " different build_info" % (lib_name))
+ break
+ elif item==lib_name:
+ warnings.warn("[4] libraries list contains '%s' with"\
+ " no build_info" % (lib_name))
+ break
+ libraries.append((lib_name,build_info))
+ return
diff --git a/distutils/cpuinfo.py b/distutils/cpuinfo.py
new file mode 100644
index 000000000..677a99eb4
--- /dev/null
+++ b/distutils/cpuinfo.py
@@ -0,0 +1,675 @@
+#!/usr/bin/env python
+"""
+cpuinfo
+
+Copyright 2002 Pearu Peterson all rights reserved,
+Pearu Peterson <pearu@cens.ioc.ee>
+Permission to use, modify, and distribute this software is given under the
+terms of the SciPy (BSD style) license. See LICENSE.txt that came with
+this distribution for specifics.
+
+Note: This should be merged into proc at some point. Perhaps proc should
+be returning classes like this instead of using dictionaries.
+
+NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
+$Revision: 1.1 $
+$Date: 2005/04/09 19:29:34 $
+Pearu Peterson
+"""
+
+__version__ = "$Id: cpuinfo.py,v 1.1 2005/04/09 19:29:34 pearu Exp $"
+
+__all__ = ['cpu']
+
+import sys,string,re,types
+
+class cpuinfo_base:
+ """Holds CPU information and provides methods for requiring
+ the availability of various CPU features.
+ """
+
+ def _try_call(self,func):
+ try:
+ return func()
+ except:
+ pass
+
+ def __getattr__(self,name):
+ if name[0]!='_':
+ if hasattr(self,'_'+name):
+ attr = getattr(self,'_'+name)
+ if type(attr) is types.MethodType:
+ return lambda func=self._try_call,attr=attr : func(attr)
+ else:
+ return lambda : None
+ raise AttributeError,name
+
+ def _getNCPUs(self):
+ return 1
+
+ def _is_32bit(self):
+ return not self.is_64bit()
+
+class linux_cpuinfo(cpuinfo_base):
+
+ info = None
+
+ def __init__(self):
+ if self.info is not None:
+ return
+ info = []
+ try:
+ for line in open('/proc/cpuinfo').readlines():
+ name_value = map(string.strip,string.split(line,':',1))
+ if len(name_value)!=2:
+ continue
+ name,value = name_value
+ if not info or info[-1].has_key(name): # next processor
+ info.append({})
+ info[-1][name] = value
+ import commands
+ status,output = commands.getstatusoutput('uname -m')
+ if not status:
+ if not info: info.append({})
+ info[-1]['uname_m'] = string.strip(output)
+ except:
+ print sys.exc_value,'(ignoring)'
+ self.__class__.info = info
+
+ def _not_impl(self): pass
+
+ # Athlon
+
+ def _is_AMD(self):
+ return self.info[0]['vendor_id']=='AuthenticAMD'
+
+ def _is_AthlonK6_2(self):
+ return self._is_AMD() and self.info[0]['model'] == '2'
+
+ def _is_AthlonK6_3(self):
+ return self._is_AMD() and self.info[0]['model'] == '3'
+
+ def _is_AthlonK6(self):
+ return re.match(r'.*?AMD-K6',self.info[0]['model name']) is not None
+
+ def _is_AthlonK7(self):
+ return re.match(r'.*?AMD-K7',self.info[0]['model name']) is not None
+
+ def _is_AthlonMP(self):
+ return re.match(r'.*?Athlon\(tm\) MP\b',
+ self.info[0]['model name']) is not None
+
+ def _is_Athlon64(self):
+ return re.match(r'.*?Athlon\(tm\) 64\b',
+ self.info[0]['model name']) is not None
+
+ def _is_AthlonHX(self):
+ return re.match(r'.*?Athlon HX\b',
+ self.info[0]['model name']) is not None
+
+ def _is_Opteron(self):
+ return re.match(r'.*?Opteron\b',
+ self.info[0]['model name']) is not None
+
+ def _is_Hammer(self):
+ return re.match(r'.*?Hammer\b',
+ self.info[0]['model name']) is not None
+
+ # Alpha
+
+ def _is_Alpha(self):
+ return self.info[0]['cpu']=='Alpha'
+
+ def _is_EV4(self):
+ return self.is_Alpha() and self.info[0]['cpu model'] == 'EV4'
+
+ def _is_EV5(self):
+ return self.is_Alpha() and self.info[0]['cpu model'] == 'EV5'
+
+ def _is_EV56(self):
+ return self.is_Alpha() and self.info[0]['cpu model'] == 'EV56'
+
+ def _is_PCA56(self):
+ return self.is_Alpha() and self.info[0]['cpu model'] == 'PCA56'
+
+ # Intel
+
+ #XXX
+ _is_i386 = _not_impl
+
+ def _is_Intel(self):
+ return self.info[0]['vendor_id']=='GenuineIntel'
+
+ def _is_i486(self):
+ return self.info[0]['cpu']=='i486'
+
+ def _is_i586(self):
+ return self.is_Intel() and self.info[0]['cpu family'] == '5'
+
+ def _is_i686(self):
+ return self.is_Intel() and self.info[0]['cpu family'] == '6'
+
+ def _is_Celeron(self):
+ return re.match(r'.*?Celeron',
+ self.info[0]['model name']) is not None
+
+ def _is_Pentium(self):
+ return re.match(r'.*?Pentium',
+ self.info[0]['model name']) is not None
+
+ def _is_PentiumII(self):
+ return re.match(r'.*?Pentium.*?II\b',
+ self.info[0]['model name']) is not None
+
+ def _is_PentiumPro(self):
+ return re.match(r'.*?PentiumPro\b',
+ self.info[0]['model name']) is not None
+
+ def _is_PentiumMMX(self):
+ return re.match(r'.*?Pentium.*?MMX\b',
+ self.info[0]['model name']) is not None
+
+ def _is_PentiumIII(self):
+ return re.match(r'.*?Pentium.*?III\b',
+ self.info[0]['model name']) is not None
+
+ def _is_PentiumIV(self):
+ return re.match(r'.*?Pentium.*?(IV|4)\b',
+ self.info[0]['model name']) is not None
+
+ def _is_Itanium(self):
+ return re.match(r'.*?Itanium\b',
+ self.info[0]['model name']) is not None
+
+ def _is_XEON(self):
+ return re.match(r'.*?XEON\b',
+ self.info[0]['model name']) is not None
+
+
+ # Varia
+
+ def _is_singleCPU(self):
+ return len(self.info) == 1
+
+ def _getNCPUs(self):
+ return len(self.info)
+
+ def _has_fdiv_bug(self):
+ return self.info[0]['fdiv_bug']=='yes'
+
+ def _has_f00f_bug(self):
+ return self.info[0]['f00f_bug']=='yes'
+
+ def _has_mmx(self):
+ return re.match(r'.*?\bmmx\b',self.info[0]['flags']) is not None
+
+ def _has_sse(self):
+ return re.match(r'.*?\bsse\b',self.info[0]['flags']) is not None
+
+ def _has_sse2(self):
+ return re.match(r'.*?\bsse2\b',self.info[0]['flags']) is not None
+
+ def _has_sse3(self):
+ return re.match(r'.*?\bsse3\b',self.info[0]['flags']) is not None
+
+ def _has_3dnow(self):
+ return re.match(r'.*?\b3dnow\b',self.info[0]['flags']) is not None
+
+ def _has_3dnowext(self):
+ return re.match(r'.*?\b3dnowext\b',self.info[0]['flags']) is not None
+
+ def _is_64bit(self):
+ if self.is_Alpha():
+ return 1
+ if self.info[0].get('clflush size','')=='64':
+ return 1
+ if self.info[0]['uname_m']=='x86_64':
+ return 1
+ return 0
+
+class irix_cpuinfo(cpuinfo_base):
+
+ info = None
+
+ def __init__(self):
+ if self.info is not None:
+ return
+ info = []
+ try:
+ import commands
+ status,output = commands.getstatusoutput('sysconf')
+ if status not in [0,256]:
+ return
+ for line in output.split('\n'):
+ name_value = map(string.strip,string.split(line,' ',1))
+ if len(name_value)!=2:
+ continue
+ name,value = name_value
+ if not info:
+ info.append({})
+ info[-1][name] = value
+ except:
+ print sys.exc_value,'(ignoring)'
+ self.__class__.info = info
+
+ #print info
+ def _not_impl(self): pass
+
+ def _is_singleCPU(self):
+ return self.info[0].get('NUM_PROCESSORS') == '1'
+
+ def _getNCPUs(self):
+ return int(self.info[0].get('NUM_PROCESSORS'))
+
+ def __cputype(self,n):
+ return self.info[0].get('PROCESSORS').split()[0].lower() == 'r%s' % (n)
+ def _is_r2000(self): return self.__cputype(2000)
+ def _is_r3000(self): return self.__cputype(3000)
+ def _is_r3900(self): return self.__cputype(3900)
+ def _is_r4000(self): return self.__cputype(4000)
+ def _is_r4100(self): return self.__cputype(4100)
+ def _is_r4300(self): return self.__cputype(4300)
+ def _is_r4400(self): return self.__cputype(4400)
+ def _is_r4600(self): return self.__cputype(4600)
+ def _is_r4650(self): return self.__cputype(4650)
+ def _is_r5000(self): return self.__cputype(5000)
+ def _is_r6000(self): return self.__cputype(6000)
+ def _is_r8000(self): return self.__cputype(8000)
+ def _is_r10000(self): return self.__cputype(10000)
+ def _is_r12000(self): return self.__cputype(12000)
+ def _is_rorion(self): return self.__cputype('orion')
+
+ def get_ip(self):
+ try: return self.info[0].get('MACHINE')
+ except: pass
+ def __machine(self,n):
+ return self.info[0].get('MACHINE').lower() == 'ip%s' % (n)
+ def _is_IP19(self): return self.__machine(19)
+ def _is_IP20(self): return self.__machine(20)
+ def _is_IP21(self): return self.__machine(21)
+ def _is_IP22(self): return self.__machine(22)
+ def _is_IP22_4k(self): return self.__machine(22) and self._is_r4000()
+ def _is_IP22_5k(self): return self.__machine(22) and self._is_r5000()
+ def _is_IP24(self): return self.__machine(24)
+ def _is_IP25(self): return self.__machine(25)
+ def _is_IP26(self): return self.__machine(26)
+ def _is_IP27(self): return self.__machine(27)
+ def _is_IP28(self): return self.__machine(28)
+ def _is_IP30(self): return self.__machine(30)
+ def _is_IP32(self): return self.__machine(32)
+ def _is_IP32_5k(self): return self.__machine(32) and self._is_r5000()
+ def _is_IP32_10k(self): return self.__machine(32) and self._is_r10000()
+
+class darwin_cpuinfo(cpuinfo_base):
+
+ info = None
+
+ def __init__(self):
+ if self.info is not None:
+ return
+ info = []
+ try:
+ import commands
+ status,output = commands.getstatusoutput('arch')
+ if not status:
+ if not info: info.append({})
+ info[-1]['arch'] = string.strip(output)
+ status,output = commands.getstatusoutput('machine')
+ if not status:
+ if not info: info.append({})
+ info[-1]['machine'] = string.strip(output)
+ status,output = commands.getstatusoutput('sysctl hw')
+ if not status:
+ if not info: info.append({})
+ d = {}
+ for l in string.split(output,'\n'):
+ l = map(string.strip,string.split(l, '='))
+ if len(l)==2:
+ d[l[0]]=l[1]
+ info[-1]['sysctl_hw'] = d
+ except:
+ print sys.exc_value,'(ignoring)'
+ self.__class__.info = info
+
+ def _not_impl(self): pass
+
+ def _getNCPUs(self):
+ try: return int(self.info[0]['sysctl_hw']['hw.ncpu'])
+ except: return 1
+
+ def _is_Power_Macintosh(self):
+ return self.info[0]['sysctl_hw']['hw.machine']=='Power Macintosh'
+
+ def _is_i386(self):
+ return self.info[0]['arch']=='i386'
+ def _is_ppc(self):
+ return self.info[0]['arch']=='ppc'
+
+ def __machine(self,n):
+ return self.info[0]['machine'] == 'ppc%s'%n
+ def _is_ppc601(self): return self.__machine(601)
+ def _is_ppc602(self): return self.__machine(602)
+ def _is_ppc603(self): return self.__machine(603)
+ def _is_ppc603e(self): return self.__machine('603e')
+ def _is_ppc604(self): return self.__machine(604)
+ def _is_ppc604e(self): return self.__machine('604e')
+ def _is_ppc620(self): return self.__machine(620)
+ def _is_ppc630(self): return self.__machine(630)
+ def _is_ppc740(self): return self.__machine(740)
+ def _is_ppc7400(self): return self.__machine(7400)
+ def _is_ppc7450(self): return self.__machine(7450)
+ def _is_ppc750(self): return self.__machine(750)
+ def _is_ppc403(self): return self.__machine(403)
+ def _is_ppc505(self): return self.__machine(505)
+ def _is_ppc801(self): return self.__machine(801)
+ def _is_ppc821(self): return self.__machine(821)
+ def _is_ppc823(self): return self.__machine(823)
+ def _is_ppc860(self): return self.__machine(860)
+
+class sunos_cpuinfo(cpuinfo_base):
+
+ info = None
+
+ def __init__(self):
+ if self.info is not None:
+ return
+ info = []
+ try:
+ import commands
+ status,output = commands.getstatusoutput('arch')
+ if not status:
+ if not info: info.append({})
+ info[-1]['arch'] = string.strip(output)
+ status,output = commands.getstatusoutput('mach')
+ if not status:
+ if not info: info.append({})
+ info[-1]['mach'] = string.strip(output)
+ status,output = commands.getstatusoutput('uname -i')
+ if not status:
+ if not info: info.append({})
+ info[-1]['uname_i'] = string.strip(output)
+ status,output = commands.getstatusoutput('uname -X')
+ if not status:
+ if not info: info.append({})
+ d = {}
+ for l in string.split(output,'\n'):
+ l = map(string.strip,string.split(l, '='))
+ if len(l)==2:
+ d[l[0]]=l[1]
+ info[-1]['uname_X'] = d
+ status,output = commands.getstatusoutput('isainfo -b')
+ if not status:
+ if not info: info.append({})
+ info[-1]['isainfo_b'] = string.strip(output)
+ status,output = commands.getstatusoutput('isainfo -n')
+ if not status:
+ if not info: info.append({})
+ info[-1]['isainfo_n'] = string.strip(output)
+ status,output = commands.getstatusoutput('psrinfo -v 0')
+ if not status:
+ if not info: info.append({})
+ for l in string.split(output,'\n'):
+ m = re.match(r'\s*The (?P<p>[\w\d]+) processor operates at',l)
+ if m:
+ info[-1]['processor'] = m.group('p')
+ break
+ except:
+ print sys.exc_value,'(ignoring)'
+ self.__class__.info = info
+
+ def _not_impl(self): pass
+
+ def _is_32bit(self):
+ return self.info[0]['isainfo_b']=='32'
+ def _is_64bit(self):
+ return self.info[0]['isainfo_b']=='64'
+
+ def _is_i386(self):
+ return self.info[0]['isainfo_n']=='i386'
+ def _is_sparc(self):
+ return self.info[0]['isainfo_n']=='sparc'
+ def _is_sparcv9(self):
+ return self.info[0]['isainfo_n']=='sparcv9'
+
+ def _getNCPUs(self):
+ try: return int(self.info[0]['uname_X']['NumCPU'])
+ except: return 1
+
+ def _is_sun4(self):
+ return self.info[0]['arch']=='sun4'
+
+ def _is_SUNW(self):
+ return re.match(r'SUNW',self.info[0]['uname_i']) is not None
+ def _is_sparcstation5(self):
+ return re.match(r'.*SPARCstation-5',self.info[0]['uname_i']) is not None
+ def _is_ultra1(self):
+ return re.match(r'.*Ultra-1',self.info[0]['uname_i']) is not None
+ def _is_ultra250(self):
+ return re.match(r'.*Ultra-250',self.info[0]['uname_i']) is not None
+ def _is_ultra2(self):
+ return re.match(r'.*Ultra-2',self.info[0]['uname_i']) is not None
+ def _is_ultra30(self):
+ return re.match(r'.*Ultra-30',self.info[0]['uname_i']) is not None
+ def _is_ultra4(self):
+ return re.match(r'.*Ultra-4',self.info[0]['uname_i']) is not None
+ def _is_ultra5_10(self):
+ return re.match(r'.*Ultra-5_10',self.info[0]['uname_i']) is not None
+ def _is_ultra5(self):
+ return re.match(r'.*Ultra-5',self.info[0]['uname_i']) is not None
+ def _is_ultra60(self):
+ return re.match(r'.*Ultra-60',self.info[0]['uname_i']) is not None
+ def _is_ultra80(self):
+ return re.match(r'.*Ultra-80',self.info[0]['uname_i']) is not None
+ def _is_ultraenterprice(self):
+ return re.match(r'.*Ultra-Enterprise',self.info[0]['uname_i']) is not None
+ def _is_ultraenterprice10k(self):
+ return re.match(r'.*Ultra-Enterprise-10000',self.info[0]['uname_i']) is not None
+ def _is_sunfire(self):
+ return re.match(r'.*Sun-Fire',self.info[0]['uname_i']) is not None
+ def _is_ultra(self):
+ return re.match(r'.*Ultra',self.info[0]['uname_i']) is not None
+
+ def _is_cpusparcv7(self):
+ return self.info[0]['processor']=='sparcv7'
+ def _is_cpusparcv8(self):
+ return self.info[0]['processor']=='sparcv8'
+ def _is_cpusparcv9(self):
+ return self.info[0]['processor']=='sparcv9'
+
+class win32_cpuinfo(cpuinfo_base):
+
+ info = None
+ pkey = "HARDWARE\\DESCRIPTION\\System\\CentralProcessor"
+ # XXX: what does the value of
+ # HKEY_LOCAL_MACHINE\HARDWARE\DESCRIPTION\System\CentralProcessor\0
+ # mean?
+
+ def __init__(self):
+ if self.info is not None:
+ return
+ info = []
+ try:
+ #XXX: Bad style to use so long `try:...except:...`. Fix it!
+ import _winreg
+ pkey = "HARDWARE\\DESCRIPTION\\System\\CentralProcessor"
+ prgx = re.compile(r"family\s+(?P<FML>\d+)\s+model\s+(?P<MDL>\d+)"\
+ "\s+stepping\s+(?P<STP>\d+)",re.IGNORECASE)
+ chnd=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,pkey)
+ pnum=0
+ while 1:
+ try:
+ proc=_winreg.EnumKey(chnd,pnum)
+ except _winreg.error:
+ break
+ else:
+ pnum+=1
+ print proc
+ info.append({"Processor":proc})
+ phnd=_winreg.OpenKey(chnd,proc)
+ pidx=0
+ while True:
+ try:
+ name,value,vtpe=_winreg.EnumValue(phnd,pidx)
+ except _winreg.error:
+ break
+ else:
+ pidx=pidx+1
+ info[-1][name]=value
+ if name=="Identifier":
+ srch=prgx.search(value)
+ if srch:
+ info[-1]["Family"]=int(srch.group("FML"))
+ info[-1]["Model"]=int(srch.group("MDL"))
+ info[-1]["Stepping"]=int(srch.group("STP"))
+ except:
+ print sys.exc_value,'(ignoring)'
+ self.__class__.info = info
+
+ def _not_impl(self): pass
+
+ # Athlon
+
+ def _is_AMD(self):
+ return self.info[0]['VendorIdentifier']=='AuthenticAMD'
+
+ def _is_Am486(self):
+ return self.is_AMD() and self.info[0]['Family']==4
+
+ def _is_Am5x86(self):
+ return self.is_AMD() and self.info[0]['Family']==4
+
+ def _is_AMDK5(self):
+ return self.is_AMD() and self.info[0]['Family']==5 \
+ and self.info[0]['Model'] in [0,1,2,3]
+
+ def _is_AMDK6(self):
+ return self.is_AMD() and self.info[0]['Family']==5 \
+ and self.info[0]['Model'] in [6,7]
+
+ def _is_AMDK6_2(self):
+ return self.is_AMD() and self.info[0]['Family']==5 \
+ and self.info[0]['Model']==8
+
+ def _is_AMDK6_3(self):
+ return self.is_AMD() and self.info[0]['Family']==5 \
+ and self.info[0]['Model']==9
+
+ def _is_Athlon(self):
+ return self.is_AMD() and self.info[0]['Family']==6
+
+ def _is_Athlon64(self):
+ return self.is_AMD() and self.info[0]['Family']==15 \
+ and self.info[0]['Model']==4
+
+ def _is_Opteron(self):
+ return self.is_AMD() and self.info[0]['Family']==15 \
+ and self.info[0]['Model']==5
+
+ # Intel
+
+ def _is_Intel(self):
+ return self.info[0]['VendorIdentifier']=='GenuineIntel'
+
+ def _is_i386(self):
+ return self.info[0]['Family']==3
+
+ def _is_i486(self):
+ return self.info[0]['Family']==4
+
+ def _is_i586(self):
+ return self.is_Intel() and self.info[0]['Family']==5
+
+ def _is_i686(self):
+ return self.is_Intel() and self.info[0]['Family']==6
+
+ def _is_Pentium(self):
+ return self.is_Intel() and self.info[0]['Family']==5
+
+ def _is_PentiumMMX(self):
+ return self.is_Intel() and self.info[0]['Family']==5 \
+ and self.info[0]['Model']==4
+
+ def _is_PentiumPro(self):
+ return self.is_Intel() and self.info[0]['Family']==6 \
+ and self.info[0]['Model']==1
+
+ def _is_PentiumII(self):
+ return self.is_Intel() and self.info[0]['Family']==6 \
+ and self.info[0]['Model'] in [3,5,6]
+
+ def _is_PentiumIII(self):
+ return self.is_Intel() and self.info[0]['Family']==6 \
+ and self.info[0]['Model'] in [7,8,9,10,11]
+
+ def _is_PentiumIV(self):
+ return self.is_Intel() and self.info[0]['Family']==15
+
+ # Varia
+
+ def _is_singleCPU(self):
+ return len(self.info) == 1
+
+ def _getNCPUs(self):
+ return len(self.info)
+
+ def _has_mmx(self):
+ if self.is_Intel():
+ return (self.info[0]['Family']==5 and self.info[0]['Model']==4) \
+ or (self.info[0]['Family'] in [6,15])
+ elif self.is_AMD():
+ return self.info[0]['Family'] in [5,6,15]
+
+ def _has_sse(self):
+ if self.is_Intel():
+ return (self.info[0]['Family']==6 and \
+ self.info[0]['Model'] in [7,8,9,10,11]) \
+ or self.info[0]['Family']==15
+ elif self.is_AMD():
+ return (self.info[0]['Family']==6 and \
+ self.info[0]['Model'] in [6,7,8,10]) \
+ or self.info[0]['Family']==15
+
+ def _has_sse2(self):
+ return self.info[0]['Family']==15
+
+ def _has_3dnow(self):
+ # XXX: does only AMD have 3dnow??
+ return self.is_AMD() and self.info[0]['Family'] in [5,6,15]
+
+ def _has_3dnowext(self):
+ return self.is_AMD() and self.info[0]['Family'] in [6,15]
+
+if sys.platform[:5] == 'linux': # variations: linux2,linux-i386 (any others?)
+ cpuinfo = linux_cpuinfo
+elif sys.platform[:4] == 'irix':
+ cpuinfo = irix_cpuinfo
+elif sys.platform == 'darwin':
+ cpuinfo = darwin_cpuinfo
+elif sys.platform[:5] == 'sunos':
+ cpuinfo = sunos_cpuinfo
+elif sys.platform[:5] == 'win32':
+ cpuinfo = win32_cpuinfo
+elif sys.platform[:6] == 'cygwin':
+ cpuinfo = linux_cpuinfo
+#XXX: other OS's. Eg. use _winreg on Win32. Or os.uname on unices.
+else:
+ cpuinfo = cpuinfo_base
+
+cpu = cpuinfo()
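+
+# Editorial usage sketch (not part of the original commit): attribute access
+# is routed through cpuinfo_base.__getattr__, so unknown or failing checks
+# simply return None instead of raising.
+#
+#   from scipy.distutils.cpuinfo import cpu
+#   if cpu.is_Intel() and cpu.has_sse2():
+#       extra_compile_args = ['-msse2']   # hypothetical flag selection
+#   nprocs = cpu.getNCPUs()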
+
+if __name__ == "__main__":
+
+ cpu.is_blaa()
+ cpu.is_Intel()
+ cpu.is_Alpha()
+
+ print 'CPU information:',
+ for name in dir(cpuinfo):
+ if name[0]=='_' and name[1]!='_':
+ r = getattr(cpu,name[1:])()
+ if r:
+ if r!=1:
+ print '%s=%s' %(name[1:],r),
+ else:
+ print name[1:],
+ print
diff --git a/distutils/exec_command.py b/distutils/exec_command.py
new file mode 100644
index 000000000..6c5554bba
--- /dev/null
+++ b/distutils/exec_command.py
@@ -0,0 +1,645 @@
+#!/usr/bin/env python
+"""
+exec_command
+
+Implements an exec_command function that is (almost) equivalent to
+commands.getstatusoutput, except that on NT and DOS systems the
+returned status is actually correct (though the returned status
+values may differ by a constant factor). In addition, exec_command
+takes keyword arguments for (re-)defining environment variables.
+
+Provides functions:
+ exec_command --- execute command in a specified directory and
+ in the modified environment.
+ splitcmdline --- inverse of ' '.join(argv)
+ find_executable --- locate a command using info from environment
+ variable PATH. Equivalent to posix `which`
+ command.
+
+Author: Pearu Peterson <pearu@cens.ioc.ee>
+Created: 11 January 2003
+
+Requires: Python 2.x
+
+Successfully tested on:
+ os.name | sys.platform | comments
+ --------+--------------+----------
+ posix | linux2 | Debian (sid) Linux, Python 2.1.3+, 2.2.3+, 2.3.3
+ PyCrust 0.9.3, Idle 1.0.2
+ posix | linux2 | Red Hat 9 Linux, Python 2.1.3, 2.2.2, 2.3.2
+ posix | sunos5 | SunOS 5.9, Python 2.2, 2.3.2
+ posix | darwin | Darwin 7.2.0, Python 2.3
+ nt | win32 | Windows Me
+ Python 2.3(EE), Idle 1.0, PyCrust 0.7.2
+ Python 2.1.1 Idle 0.8
+ nt | win32 | Windows 98, Python 2.1.1. Idle 0.8
+ nt | win32 | Cygwin 98-4.10, Python 2.1.1(MSC) - echo tests
+ fail i.e. redefining environment variables may
+ not work. FIXED: don't use cygwin echo!
+ Comment: also `cmd /c echo` will not work
+                          but redefining environment variables does work.
+ posix | cygwin | Cygwin 98-4.10, Python 2.3.3(cygming special)
+ nt | win32 | Windows XP, Python 2.3.3
+
+Known bugs:
+- Tests that send messages to stderr fail when executed from an MSYS prompt
+  because the messages are lost at some point.
+"""
+
+__all__ = ['exec_command','find_executable']
+
+import os
+import re
+import sys
+import tempfile
+
+############################################################
+
+from log import _global_log as log
+
+############################################################
+
+def get_pythonexe():
+ pythonexe = sys.executable
+ if os.name in ['nt','dos']:
+ fdir,fn = os.path.split(pythonexe)
+ fn = fn.upper().replace('PYTHONW','PYTHON')
+ pythonexe = os.path.join(fdir,fn)
+ assert os.path.isfile(pythonexe),`pythonexe`+' is not a file'
+ return pythonexe
+
+############################################################
+
+def splitcmdline(line):
+ """ Inverse of ' '.join(sys.argv).
+ """
+ log.debug('splitcmdline(%r)' % (line))
+ lst = []
+ flag = 0
+ s,pc,cc = '','',''
+ for nc in line+' ':
+ if flag==0:
+ flag = (pc != '\\' and \
+ ((cc=='"' and 1) or (cc=="'" and 2) or \
+ (cc==' ' and pc!=' ' and -2))) or flag
+ elif flag==1:
+ flag = (cc=='"' and pc!='\\' and nc==' ' and -1) or flag
+ elif flag==2:
+ flag = (cc=="'" and pc!='\\' and nc==' ' and -1) or flag
+ if flag!=-2:
+ s += cc
+ if flag<0:
+ flag = 0
+ s = s.strip()
+ if s:
+ lst.append(s)
+ s = ''
+ pc,cc = cc,nc
+ else:
+ s = s.strip()
+ if s:
+ lst.append(s)
+ log.debug('splitcmdline -> %r' % (lst))
+ return lst
+
+def test_splitcmdline():
+ l = splitcmdline('a b cc')
+ assert l==['a','b','cc'],`l`
+ l = splitcmdline('a')
+ assert l==['a'],`l`
+ l = splitcmdline('a " b cc"')
+ assert l==['a','" b cc"'],`l`
+ l = splitcmdline('"a bcc" -h')
+ assert l==['"a bcc"','-h'],`l`
+ l = splitcmdline(r'"\"a \" bcc" -h')
+ assert l==[r'"\"a \" bcc"','-h'],`l`
+ l = splitcmdline(" 'a bcc' -h")
+ assert l==["'a bcc'",'-h'],`l`
+ l = splitcmdline(r"'\'a \' bcc' -h")
+ assert l==[r"'\'a \' bcc'",'-h'],`l`
+
+############################################################
+
+def find_executable(exe, path=None):
+ """ Return full path of a executable.
+ """
+ log.debug('find_executable(%r)' % exe)
+ orig_exe = exe
+ if path is None:
+ path = os.environ.get('PATH',os.defpath)
+ if os.name=='posix' and sys.version[:3]>'2.1':
+ realpath = os.path.realpath
+ else:
+ realpath = lambda a:a
+ if exe[0]=='"':
+ exe = exe[1:-1]
+ suffices = ['']
+ if os.name in ['nt','dos','os2']:
+ fn,ext = os.path.splitext(exe)
+ extra_suffices = ['.exe','.com','.bat']
+ if ext.lower() not in extra_suffices:
+ suffices = extra_suffices
+ if os.path.isabs(exe):
+ paths = ['']
+ else:
+ paths = map(os.path.abspath, path.split(os.pathsep))
+ if 0 and os.name == 'nt':
+ new_paths = []
+ cygwin_paths = []
+ for path in paths:
+ d,p = os.path.splitdrive(path)
+ if p.lower().find('cygwin') >= 0:
+ cygwin_paths.append(path)
+ else:
+ new_paths.append(path)
+ paths = new_paths + cygwin_paths
+ for path in paths:
+ fn = os.path.join(path,exe)
+ for s in suffices:
+ f_ext = fn+s
+ if not os.path.islink(f_ext):
+ # see comment below.
+ f_ext = realpath(f_ext)
+ if os.path.isfile(f_ext) and os.access(f_ext,os.X_OK):
+ log.debug('Found executable %s' % f_ext)
+ return f_ext
+ if os.path.islink(exe):
+ # Don't follow symbolic links. E.g. when using colorgcc then
+ # gcc -> /usr/bin/colorgcc
+ # g77 -> /usr/bin/colorgcc
+ pass
+ else:
+ exe = realpath(exe)
+    if not os.path.isfile(exe) or not os.access(exe,os.X_OK):
+ log.warn('Could not locate executable %s' % orig_exe)
+ return orig_exe
+ return exe
+
+############################################################
+
+def _preserve_environment( names ):
+ log.debug('_preserve_environment(%r)' % (names))
+ env = {}
+ for name in names:
+ env[name] = os.environ.get(name)
+ return env
+
+def _update_environment( **env ):
+ log.debug('_update_environment(...)')
+ for name,value in env.items():
+ os.environ[name] = value or ''
+
+def exec_command( command,
+ execute_in='', use_shell=None, use_tee = None,
+ _with_python = 1,
+ **env ):
+ """ Return (status,output) of executed command.
+
+ command is a concatenated string of executable and arguments.
+ The output contains both stdout and stderr messages.
+ The following special keyword arguments can be used:
+ use_shell - execute `sh -c command`
+ use_tee - pipe the output of command through tee
+ execute_in - before command `cd execute_in` and after `cd -`.
+
+ On NT, DOS systems the returned status is correct for external commands.
+ Wild cards will not work for non-posix systems or when use_shell=0.
+ """
+ log.debug('exec_command(%r,%s)' % (command,\
+ ','.join(['%s=%r'%kv for kv in env.items()])))
+
+ if use_tee is None:
+ use_tee = os.name=='posix'
+ if use_shell is None:
+ use_shell = os.name=='posix'
+ execute_in = os.path.abspath(execute_in)
+ oldcwd = os.path.abspath(os.getcwd())
+
+ if __name__[-12:] == 'exec_command':
+ exec_dir = os.path.dirname(os.path.abspath(__file__))
+ elif os.path.isfile('exec_command.py'):
+ exec_dir = os.path.abspath('.')
+ else:
+ exec_dir = os.path.abspath(sys.argv[0])
+ if os.path.isfile(exec_dir):
+ exec_dir = os.path.dirname(exec_dir)
+
+ if oldcwd!=execute_in:
+ os.chdir(execute_in)
+ log.debug('New cwd: %s' % execute_in)
+ else:
+ log.debug('Retaining cwd: %s' % oldcwd)
+
+ oldenv = _preserve_environment( env.keys() )
+ _update_environment( **env )
+
+ try:
+ # _exec_command is robust but slow, it relies on
+ # usable sys.std*.fileno() descriptors. If they
+ # are bad (like in win32 Idle, PyCrust environments)
+ # then _exec_command_python (even slower)
+ # will be used as a last resort.
+ #
+ # _exec_command_posix uses os.system and is faster
+ # but not on all platforms os.system will return
+ # a correct status.
+ if _with_python and (0 or sys.__stdout__.fileno()==-1):
+ st = _exec_command_python(command,
+ exec_command_dir = exec_dir,
+ **env)
+ elif os.name=='posix':
+ st = _exec_command_posix(command,
+ use_shell=use_shell,
+ use_tee=use_tee,
+ **env)
+ else:
+ st = _exec_command(command, use_shell=use_shell,
+ use_tee=use_tee,**env)
+ finally:
+ if oldcwd!=execute_in:
+ os.chdir(oldcwd)
+ log.debug('Restored cwd to %s' % oldcwd)
+ _update_environment(**oldenv)
+
+ return st
+
+def _exec_command_posix( command,
+ use_shell = None,
+ use_tee = None,
+ **env ):
+ log.debug('_exec_command_posix(...)')
+
+ if type(command) is type([]):
+ command_str = ' '.join(command)
+ else:
+ command_str = command
+
+ tmpfile = tempfile.mktemp()
+ stsfile = None
+ if use_tee:
+ stsfile = tempfile.mktemp()
+ filter = ''
+ if use_tee == 2:
+ filter = r'| tr -cd "\n" | tr "\n" "."; echo'
+ command_posix = '( %s ; echo $? > %s ) 2>&1 | tee %s %s'\
+ % (command_str,stsfile,tmpfile,filter)
+ else:
+ stsfile = tempfile.mktemp()
+ command_posix = '( %s ; echo $? > %s ) > %s 2>&1'\
+ % (command_str,stsfile,tmpfile)
+ #command_posix = '( %s ) > %s 2>&1' % (command_str,tmpfile)
+
+ log.debug('Running os.system(%r)' % (command_posix))
+ status = os.system(command_posix)
+
+ if use_tee:
+ if status:
+ # if command_tee fails then fall back to robust exec_command
+ log.warn('_exec_command_posix failed (status=%s)' % status)
+ return _exec_command(command, use_shell=use_shell, **env)
+
+ if stsfile is not None:
+ f = open(stsfile,'r')
+ status_text = f.read()
+ status = int(status_text)
+ f.close()
+ os.remove(stsfile)
+
+ f = open(tmpfile,'r')
+ text = f.read()
+ f.close()
+ os.remove(tmpfile)
+
+ if text[-1:]=='\n':
+ text = text[:-1]
+
+ return status, text
+
+
+def _exec_command_python(command,
+ exec_command_dir='', **env):
+ log.debug('_exec_command_python(...)')
+
+ python_exe = get_pythonexe()
+ cmdfile = tempfile.mktemp()
+ stsfile = tempfile.mktemp()
+ outfile = tempfile.mktemp()
+
+ f = open(cmdfile,'w')
+ f.write('import os\n')
+ f.write('import sys\n')
+ f.write('sys.path.insert(0,%r)\n' % (exec_command_dir))
+ f.write('from exec_command import exec_command\n')
+ f.write('del sys.path[0]\n')
+ f.write('cmd = %r\n' % command)
+ f.write('os.environ = %r\n' % (os.environ))
+ f.write('s,o = exec_command(cmd, _with_python=0, **%r)\n' % (env))
+ f.write('f=open(%r,"w")\nf.write(str(s))\nf.close()\n' % (stsfile))
+ f.write('f=open(%r,"w")\nf.write(o)\nf.close()\n' % (outfile))
+ f.close()
+
+ cmd = '%s %s' % (python_exe, cmdfile)
+ status = os.system(cmd)
+ assert not status,`cmd`+' failed'
+ os.remove(cmdfile)
+
+ f = open(stsfile,'r')
+ status = int(f.read())
+ f.close()
+ os.remove(stsfile)
+
+ f = open(outfile,'r')
+ text = f.read()
+ f.close()
+ os.remove(outfile)
+
+ return status, text
+
+def quote_arg(arg):
+ if arg[0]!='"' and ' ' in arg:
+ return '"%s"' % arg
+ return arg
+
+def _exec_command( command, use_shell=None, use_tee = None, **env ):
+ log.debug('_exec_command(...)')
+
+ if use_shell is None:
+ use_shell = os.name=='posix'
+ if use_tee is None:
+ use_tee = os.name=='posix'
+
+ using_command = 0
+ if use_shell:
+ # We use shell (unless use_shell==0) so that wildcards can be
+ # used.
+ sh = os.environ.get('SHELL','/bin/sh')
+ if type(command) is type([]):
+ argv = [sh,'-c',' '.join(command)]
+ else:
+ argv = [sh,'-c',command]
+ else:
+        # On NT and DOS we avoid using command.com, as its exit status is
+        # not related to the exit status of the command it runs.
+ if type(command) is type([]):
+ argv = command[:]
+ else:
+ argv = splitcmdline(command)
+
+ if hasattr(os,'spawnvpe'):
+ spawn_command = os.spawnvpe
+ else:
+ spawn_command = os.spawnve
+ argv[0] = find_executable(argv[0])
+ if not os.path.isfile(argv[0]):
+ log.warn('Executable %s does not exist' % (argv[0]))
+ if os.name in ['nt','dos']:
+ # argv[0] might be internal command
+ argv = [os.environ['COMSPEC'],'/C'] + argv
+ using_command = 1
+
+ # sys.__std*__ is used instead of sys.std* because environments
+    # like IDLE, PyCrust, etc. overwrite the sys.std* streams.
+ so_fileno = sys.__stdout__.fileno()
+ se_fileno = sys.__stderr__.fileno()
+ so_flush = sys.__stdout__.flush
+ se_flush = sys.__stderr__.flush
+ so_dup = os.dup(so_fileno)
+ se_dup = os.dup(se_fileno)
+
+ outfile = tempfile.mktemp()
+ fout = open(outfile,'w')
+ if using_command:
+ errfile = tempfile.mktemp()
+ ferr = open(errfile,'w')
+
+ log.debug('Running %s(%s,%r,%r,os.environ)' \
+ % (spawn_command.__name__,os.P_WAIT,argv[0],argv))
+
+ argv0 = argv[0]
+ if not using_command:
+ argv[0] = quote_arg(argv0)
+
+ so_flush()
+ se_flush()
+ os.dup2(fout.fileno(),so_fileno)
+ if using_command:
+ #XXX: disabled for now as it does not work from cmd under win32.
+ # Tests fail on msys
+ os.dup2(ferr.fileno(),se_fileno)
+ else:
+ os.dup2(fout.fileno(),se_fileno)
+ try:
+ status = spawn_command(os.P_WAIT,argv0,argv,os.environ)
+ except OSError,errmess:
+ status = 999
+ sys.stderr.write('%s: %s'%(errmess,argv[0]))
+
+ so_flush()
+ se_flush()
+ os.dup2(so_dup,so_fileno)
+ os.dup2(se_dup,se_fileno)
+
+ fout.close()
+ fout = open(outfile,'r')
+ text = fout.read()
+ fout.close()
+ os.remove(outfile)
+
+ if using_command:
+ ferr.close()
+ ferr = open(errfile,'r')
+ errmess = ferr.read()
+ ferr.close()
+ os.remove(errfile)
+ if errmess and not status:
+ # Not sure how to handle the case where errmess
+ # contains only warning messages and that should
+ # not be treated as errors.
+ #status = 998
+ if text:
+ text = text + '\n'
+ #text = '%sCOMMAND %r FAILED: %s' %(text,command,errmess)
+ text = text + errmess
+ print errmess
+ if text[-1:]=='\n':
+ text = text[:-1]
+ if status is None:
+ status = 0
+
+ if use_tee:
+ print text
+
+ return status, text
+
+
+def test_nt(**kws):
+ pythonexe = get_pythonexe()
+ echo = find_executable('echo')
+ using_cygwin_echo = echo != 'echo'
+ if using_cygwin_echo:
+ log.warn('Using cygwin echo in win32 environment is not supported')
+
+ s,o=exec_command(pythonexe\
+ +' -c "import os;print os.environ.get(\'AAA\',\'\')"')
+ assert s==0 and o=='',(s,o)
+
+ s,o=exec_command(pythonexe\
+ +' -c "import os;print os.environ.get(\'AAA\')"',
+ AAA='Tere')
+ assert s==0 and o=='Tere',(s,o)
+
+ os.environ['BBB'] = 'Hi'
+ s,o=exec_command(pythonexe\
+ +' -c "import os;print os.environ.get(\'BBB\',\'\')"')
+ assert s==0 and o=='Hi',(s,o)
+
+ s,o=exec_command(pythonexe\
+ +' -c "import os;print os.environ.get(\'BBB\',\'\')"',
+ BBB='Hey')
+ assert s==0 and o=='Hey',(s,o)
+
+ s,o=exec_command(pythonexe\
+ +' -c "import os;print os.environ.get(\'BBB\',\'\')"')
+ assert s==0 and o=='Hi',(s,o)
+ elif 0:
+ s,o=exec_command('echo Hello')
+ assert s==0 and o=='Hello',(s,o)
+
+ s,o=exec_command('echo a%AAA%')
+ assert s==0 and o=='a',(s,o)
+
+ s,o=exec_command('echo a%AAA%',AAA='Tere')
+ assert s==0 and o=='aTere',(s,o)
+
+ os.environ['BBB'] = 'Hi'
+ s,o=exec_command('echo a%BBB%')
+ assert s==0 and o=='aHi',(s,o)
+
+ s,o=exec_command('echo a%BBB%',BBB='Hey')
+ assert s==0 and o=='aHey', (s,o)
+ s,o=exec_command('echo a%BBB%')
+ assert s==0 and o=='aHi',(s,o)
+
+ s,o=exec_command('this_is_not_a_command')
+ assert s and o!='',(s,o)
+
+ s,o=exec_command('type not_existing_file')
+ assert s and o!='',(s,o)
+
+ s,o=exec_command('echo path=%path%')
+ assert s==0 and o!='',(s,o)
+
+ s,o=exec_command('%s -c "import sys;sys.stderr.write(sys.platform)"' \
+ % pythonexe)
+ assert s==0 and o=='win32',(s,o)
+
+ s,o=exec_command('%s -c "raise \'Ignore me.\'"' % pythonexe)
+ assert s==1 and o,(s,o)
+
+ s,o=exec_command('%s -c "import sys;sys.stderr.write(\'0\');sys.stderr.write(\'1\');sys.stderr.write(\'2\')"'\
+ % pythonexe)
+ assert s==0 and o=='012',(s,o)
+
+ s,o=exec_command('%s -c "import sys;sys.exit(15)"' % pythonexe)
+ assert s==15 and o=='',(s,o)
+
+ s,o=exec_command('%s -c "print \'Heipa\'"' % pythonexe)
+ assert s==0 and o=='Heipa',(s,o)
+
+ print 'ok'
+
+def test_posix(**kws):
+ s,o=exec_command("echo Hello",**kws)
+ assert s==0 and o=='Hello',(s,o)
+
+ s,o=exec_command('echo $AAA',**kws)
+ assert s==0 and o=='',(s,o)
+
+ s,o=exec_command('echo "$AAA"',AAA='Tere',**kws)
+ assert s==0 and o=='Tere',(s,o)
+
+
+ s,o=exec_command('echo "$AAA"',**kws)
+ assert s==0 and o=='',(s,o)
+
+ os.environ['BBB'] = 'Hi'
+ s,o=exec_command('echo "$BBB"',**kws)
+ assert s==0 and o=='Hi',(s,o)
+
+ s,o=exec_command('echo "$BBB"',BBB='Hey',**kws)
+ assert s==0 and o=='Hey',(s,o)
+
+ s,o=exec_command('echo "$BBB"',**kws)
+ assert s==0 and o=='Hi',(s,o)
+
+
+ s,o=exec_command('this_is_not_a_command',**kws)
+ assert s!=0 and o!='',(s,o)
+
+ s,o=exec_command('echo path=$PATH',**kws)
+ assert s==0 and o!='',(s,o)
+
+ s,o=exec_command('python -c "import sys,os;sys.stderr.write(os.name)"',**kws)
+ assert s==0 and o=='posix',(s,o)
+
+ s,o=exec_command('python -c "raise \'Ignore me.\'"',**kws)
+ assert s==1 and o,(s,o)
+
+ s,o=exec_command('python -c "import sys;sys.stderr.write(\'0\');sys.stderr.write(\'1\');sys.stderr.write(\'2\')"',**kws)
+ assert s==0 and o=='012',(s,o)
+
+ s,o=exec_command('python -c "import sys;sys.exit(15)"',**kws)
+ assert s==15 and o=='',(s,o)
+
+ s,o=exec_command('python -c "print \'Heipa\'"',**kws)
+ assert s==0 and o=='Heipa',(s,o)
+
+ print 'ok'
+
+def test_execute_in(**kws):
+ pythonexe = get_pythonexe()
+ tmpfile = tempfile.mktemp()
+ fn = os.path.basename(tmpfile)
+ tmpdir = os.path.dirname(tmpfile)
+ f = open(tmpfile,'w')
+ f.write('Hello')
+ f.close()
+
+ s,o = exec_command('%s -c "print \'Ignore the following IOError:\','\
+ 'open(%r,\'r\')"' % (pythonexe,fn),**kws)
+ assert s and o!='',(s,o)
+ s,o = exec_command('%s -c "print open(%r,\'r\').read()"' % (pythonexe,fn),
+ execute_in = tmpdir,**kws)
+ assert s==0 and o=='Hello',(s,o)
+ os.remove(tmpfile)
+ print 'ok'
+
+def test_svn(**kws):
+ s,o = exec_command(['svn','status'],**kws)
+ assert s,(s,o)
+ print 'svn ok'
+
+def test_cl(**kws):
+ if os.name=='nt':
+ s,o = exec_command(['cl','/V'],**kws)
+ assert s,(s,o)
+ print 'cl ok'
+
+if os.name=='posix':
+ test = test_posix
+elif os.name in ['nt','dos']:
+ test = test_nt
+else:
+ raise NotImplementedError,'exec_command tests for '+os.name
+
+############################################################
+
+if __name__ == "__main__":
+
+ test_splitcmdline()
+ test(use_tee=0)
+ test(use_tee=1)
+ test_execute_in(use_tee=0)
+ test_execute_in(use_tee=1)
+ test_svn(use_tee=1)
+ test_cl(use_tee=1)
diff --git a/distutils/extension.py b/distutils/extension.py
new file mode 100644
index 000000000..a1cab52f4
--- /dev/null
+++ b/distutils/extension.py
@@ -0,0 +1,74 @@
+"""distutils.extension
+
+Provides the Extension class, used to describe C/C++ extension
+modules in setup scripts.
+
+Overridden to support f2py.
+"""
+
+__revision__ = "$Id: extension.py,v 1.1 2005/04/09 19:29:34 pearu Exp $"
+
+from distutils.extension import Extension as old_Extension
+
+import re
+cxx_ext_re = re.compile(r'.*[.](cpp|cxx|cc)\Z',re.I).match
+fortran_pyf_ext_re = re.compile(r'.*[.](f90|f95|f77|for|ftn|f|pyf)\Z',re.I).match
+
+class Extension(old_Extension):
+ def __init__ (self, name, sources,
+ include_dirs=None,
+ define_macros=None,
+ undef_macros=None,
+ library_dirs=None,
+ libraries=None,
+ runtime_library_dirs=None,
+ extra_objects=None,
+ extra_compile_args=None,
+ extra_link_args=None,
+ export_symbols=None,
+ swig_opts=None,
+ depends=None,
+ language=None,
+ f2py_options=None,
+ module_dirs=None,
+ ):
+ old_Extension.__init__(self,name, [],
+ include_dirs,
+ define_macros,
+ undef_macros,
+ library_dirs,
+ libraries,
+ runtime_library_dirs,
+ extra_objects,
+ extra_compile_args,
+ extra_link_args,
+ export_symbols)
+ # Avoid assert statements checking that sources contains strings:
+ self.sources = sources
+
+ # Python 2.4 distutils new features
+ self.swig_opts = swig_opts or []
+
+ # Python 2.3 distutils new features
+ self.depends = depends or []
+ self.language = language
+
+ # scipy_distutils features
+ self.f2py_options = f2py_options or []
+ self.module_dirs = module_dirs or []
+
+ return
+
+ def has_cxx_sources(self):
+ for source in self.sources:
+ if cxx_ext_re(str(source)):
+ return True
+ return False
+
+ def has_f2py_sources(self):
+ for source in self.sources:
+ if fortran_pyf_ext_re(source):
+ return True
+ return False
+
+# class Extension
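+
+# Editorial example (illustrative, not part of the original commit): a
+# hypothetical f2py-based extension using the extra keyword arguments above.
+#
+#   ext = Extension('flib', sources=['flib.pyf', 'flib.f'],
+#                   f2py_options=['--no-wrap-functions'],
+#                   module_dirs=['build/f90modules'])
+#   ext.has_f2py_sources()   # -> True
+#   ext.has_cxx_sources()    # -> False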
diff --git a/distutils/fcompiler/__init__.py b/distutils/fcompiler/__init__.py
new file mode 100644
index 000000000..f4df64ac4
--- /dev/null
+++ b/distutils/fcompiler/__init__.py
@@ -0,0 +1,755 @@
+"""scipy.distutils.fcompiler
+
+Contains FCompiler, an abstract base class that defines the interface
+for the scipy.distutils Fortran compiler abstraction model.
+"""
+
+__all__ = ['FCompiler','new_fcompiler','show_fcompilers',
+ 'dummy_fortran_file']
+
+import os
+import sys
+import re
+from types import StringType,NoneType
+from distutils.sysconfig import get_config_var
+from distutils.fancy_getopt import FancyGetopt
+from distutils.errors import DistutilsModuleError,DistutilsArgError,\
+ DistutilsExecError,CompileError,LinkError,DistutilsPlatformError
+from distutils.util import split_quoted
+
+from scipy.distutils.ccompiler import CCompiler, gen_lib_options
+from scipy.distutils import log
+from scipy.distutils.command.config_compiler import config_fc
+from distutils.spawn import _nt_quote_args
+
+class FCompiler(CCompiler):
+ """ Abstract base class to define the interface that must be implemented
+ by real Fortran compiler classes.
+
+ Methods that subclasses may redefine:
+
+ get_version_cmd(), get_linker_so(), get_version()
+ get_flags(), get_flags_opt(), get_flags_arch(), get_flags_debug()
+ get_flags_f77(), get_flags_opt_f77(), get_flags_arch_f77(),
+ get_flags_debug_f77(), get_flags_f90(), get_flags_opt_f90(),
+ get_flags_arch_f90(), get_flags_debug_f90(),
+ get_flags_fix(), get_flags_linker_so(), get_flags_version()
+
+ DON'T call these methods (except get_version) after
+ constructing a compiler instance or inside any other method.
+ All methods, except get_version_cmd() and get_flags_version(), may
+ call get_version() method.
+
+    After constructing a compiler instance, always call the customize(dist=None)
+    method, which finalizes compiler construction and makes the following
+ attributes available:
+ compiler_f77
+ compiler_f90
+ compiler_fix
+ linker_so
+ archiver
+ ranlib
+ libraries
+ library_dirs
+ """
+
+
+ language_map = {'.f':'f77',
+ '.for':'f77',
+ '.F':'f77', # XXX: needs preprocessor
+ '.ftn':'f77',
+ '.f77':'f77',
+ '.f90':'f90',
+ '.F90':'f90', # XXX: needs preprocessor
+ '.f95':'f90',
+ }
+ language_order = ['f90','f77']
+
+ version_pattern = None
+
+ executables = {
+ 'version_cmd' : ["f77","-v"],
+ 'compiler_f77' : ["f77"],
+ 'compiler_f90' : ["f90"],
+ 'compiler_fix' : ["f90","-fixed"],
+ 'linker_so' : ["f90","-shared"],
+ 'linker_exe' : ["f90"],
+ 'archiver' : ["ar","-cr"],
+ 'ranlib' : None,
+ }
+
+ compile_switch = "-c"
+ object_switch = "-o " # Ending space matters! It will be stripped
+ # but if it is missing then object_switch
+ # will be prefixed to object file name by
+ # string concatenation.
+ library_switch = "-o " # Ditto!
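+    # Editorial illustration (matching _compile below): with "-o " the object
+    # arguments become ['-o', 'name.o']; with "-o" (no trailing space) they
+    # become ['-oname.o'].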
+
+    # Switch used to specify where module files are created and searched
+    # for by USE statements. Normally it is a string; here, too, the
+    # trailing space matters (see above).
+ module_dir_switch = None
+
+ # Switch to specify where module files are searched for USE statement.
+ module_include_switch = '-I'
+
+ pic_flags = [] # Flags to create position-independent code
+
+ src_extensions = ['.for','.ftn','.f77','.f','.f90','.f95','.F','.F90']
+ obj_extension = ".o"
+ shared_lib_extension = get_config_var('SO') # or .dll
+ static_lib_extension = ".a" # or .lib
+ static_lib_format = "lib%s%s" # or %s%s
+ shared_lib_format = "%s%s"
+ exe_extension = ""
+
+ ######################################################################
+ ## Methods that subclasses may redefine. But don't call these methods!
+    ## They are private to the FCompiler class and may return unexpected
+    ## results if used elsewhere. So, you have been warned.
+
+ def get_version_cmd(self):
+ """ Compiler command to print out version information. """
+ f77 = self.executables['compiler_f77']
+ if f77 is not None:
+ f77 = f77[0]
+ cmd = self.executables['version_cmd']
+ if cmd is not None:
+ cmd = cmd[0]
+ if cmd==f77:
+ cmd = self.compiler_f77[0]
+ else:
+ f90 = self.executables['compiler_f90']
+ if f90 is not None:
+ f90 = f90[0]
+ if cmd==f90:
+ cmd = self.compiler_f90[0]
+ return cmd
+
+ return cmd
+
+ def get_linker_so(self):
+ """ Linker command to build shared libraries. """
+ f77 = self.executables['compiler_f77']
+ if f77 is not None:
+ f77 = f77[0]
+ ln = self.executables['linker_so']
+ if ln is not None:
+ ln = ln[0]
+ if ln==f77:
+ ln = self.compiler_f77[0]
+ else:
+ f90 = self.executables['compiler_f90']
+ if f90 is not None:
+ f90 = f90[0]
+ if ln==f90:
+ ln = self.compiler_f90[0]
+ return ln
+
+ def get_linker_exe(self):
+ """ Linker command to build shared libraries. """
+ f77 = self.executables['compiler_f77']
+ if f77 is not None:
+ f77 = f77[0]
+ ln = self.executables.get('linker_exe')
+ if ln is not None:
+ ln = ln[0]
+ if ln==f77:
+ ln = self.compiler_f77[0]
+ else:
+ f90 = self.executables['compiler_f90']
+ if f90 is not None:
+ f90 = f90[0]
+ if ln==f90:
+ ln = self.compiler_f90[0]
+ return ln
+
+ def get_flags(self):
+ """ List of flags common to all compiler types. """
+ return [] + self.pic_flags
+ def get_flags_version(self):
+ """ List of compiler flags to print out version information. """
+ if self.executables['version_cmd']:
+ return self.executables['version_cmd'][1:]
+ return []
+ def get_flags_f77(self):
+ """ List of Fortran 77 specific flags. """
+ if self.executables['compiler_f77']:
+ return self.executables['compiler_f77'][1:]
+ return []
+ def get_flags_f90(self):
+ """ List of Fortran 90 specific flags. """
+ if self.executables['compiler_f90']:
+ return self.executables['compiler_f90'][1:]
+ return []
+ def get_flags_free(self):
+ """ List of Fortran 90 free format specific flags. """
+ return []
+ def get_flags_fix(self):
+ """ List of Fortran 90 fixed format specific flags. """
+ if self.executables['compiler_fix']:
+ return self.executables['compiler_fix'][1:]
+ return []
+ def get_flags_linker_so(self):
+ """ List of linker flags to build a shared library. """
+ if self.executables['linker_so']:
+ return self.executables['linker_so'][1:]
+ return []
+ def get_flags_linker_exe(self):
+ """ List of linker flags to build an executable. """
+ if self.executables['linker_exe']:
+ return self.executables['linker_exe'][1:]
+ return []
+ def get_flags_ar(self):
+ """ List of archiver flags. """
+ if self.executables['archiver']:
+ return self.executables['archiver'][1:]
+ return []
+ def get_flags_opt(self):
+ """ List of architecture independent compiler flags. """
+ return []
+ def get_flags_arch(self):
+ """ List of architecture dependent compiler flags. """
+ return []
+ def get_flags_debug(self):
+ """ List of compiler flags to compile with debugging information. """
+ return []
+
+ get_flags_opt_f77 = get_flags_opt_f90 = get_flags_opt
+ get_flags_arch_f77 = get_flags_arch_f90 = get_flags_arch
+ get_flags_debug_f77 = get_flags_debug_f90 = get_flags_debug
+
+ def get_libraries(self):
+ """ List of compiler libraries. """
+ return self.libraries[:]
+ def get_library_dirs(self):
+ """ List of compiler library directories. """
+ return self.library_dirs[:]
+
+ ############################################################
+
+ ## Public methods:
+
+ def customize(self, dist=None):
+ """ Customize Fortran compiler.
+
+ This method gets Fortran compiler specific information from
+ (i) class definition, (ii) environment, (iii) distutils config
+ files, and (iv) command line.
+
+        This method should always be called after constructing a
+        compiler instance, but not from __init__, because a Distribution
+        instance is needed for (iii) and (iv).
+ """
+ log.info('customize %s' % (self.__class__.__name__))
+ if dist is None:
+ # These hooks are for testing only!
+ from distutils.dist import Distribution
+ dist = Distribution()
+ dist.script_name = os.path.basename(sys.argv[0])
+ dist.script_args = ['config_fc'] + sys.argv[1:]
+ dist.cmdclass['config_fc'] = config_fc
+ dist.parse_config_files()
+ dist.parse_command_line()
+ conf = dist.get_option_dict('config_fc')
+ noopt = conf.get('noopt',[None,0])[1]
+ if 0: # change to `if 1:` when making release.
+ # Don't use architecture dependent compiler flags:
+ noarch = 1
+ else:
+ noarch = conf.get('noarch',[None,noopt])[1]
+ debug = conf.get('debug',[None,0])[1]
+
+
+ f77 = self.__get_cmd('compiler_f77','F77',(conf,'f77exec'))
+ f90 = self.__get_cmd('compiler_f90','F90',(conf,'f90exec'))
+ # Temporarily setting f77,f90 compilers so that
+ # version_cmd can use their executables.
+ if f77:
+ self.set_executables(compiler_f77=[f77])
+ if f90:
+ self.set_executables(compiler_f90=[f90])
+
+ # Must set version_cmd before others as self.get_flags*
+ # methods may call self.get_version.
+ vers_cmd = self.__get_cmd(self.get_version_cmd)
+ if vers_cmd:
+ vflags = self.__get_flags(self.get_flags_version)
+ self.set_executables(version_cmd=[vers_cmd]+vflags)
+
+ if f77:
+ f77flags = self.__get_flags(self.get_flags_f77,'F77FLAGS',
+ (conf,'f77flags'))
+ if f90:
+ f90flags = self.__get_flags(self.get_flags_f90,'F90FLAGS',
+ (conf,'f90flags'))
+ freeflags = self.__get_flags(self.get_flags_free,'FREEFLAGS',
+ (conf,'freeflags'))
+ # XXX Assuming that free format is default for f90 compiler.
+ fix = self.__get_cmd('compiler_fix','F90',(conf,'f90exec'))
+ if fix:
+ fixflags = self.__get_flags(self.get_flags_fix) + f90flags
+
+ oflags,aflags,dflags = [],[],[]
+ if not noopt:
+ oflags = self.__get_flags(self.get_flags_opt,'FOPT',(conf,'opt'))
+ if f77 and self.get_flags_opt is not self.get_flags_opt_f77:
+ f77flags += self.__get_flags(self.get_flags_opt_f77)
+ if f90 and self.get_flags_opt is not self.get_flags_opt_f90:
+ f90flags += self.__get_flags(self.get_flags_opt_f90)
+ if fix and self.get_flags_opt is not self.get_flags_opt_f90:
+ fixflags += self.__get_flags(self.get_flags_opt_f90)
+ if not noarch:
+ aflags = self.__get_flags(self.get_flags_arch,'FARCH',
+ (conf,'arch'))
+ if f77 and self.get_flags_arch is not self.get_flags_arch_f77:
+ f77flags += self.__get_flags(self.get_flags_arch_f77)
+ if f90 and self.get_flags_arch is not self.get_flags_arch_f90:
+ f90flags += self.__get_flags(self.get_flags_arch_f90)
+ if fix and self.get_flags_arch is not self.get_flags_arch_f90:
+ fixflags += self.__get_flags(self.get_flags_arch_f90)
+ if debug:
+ dflags = self.__get_flags(self.get_flags_debug,'FDEBUG')
+ if f77 and self.get_flags_debug is not self.get_flags_debug_f77:
+ f77flags += self.__get_flags(self.get_flags_debug_f77)
+ if f90 and self.get_flags_debug is not self.get_flags_debug_f90:
+ f90flags += self.__get_flags(self.get_flags_debug_f90)
+ if fix and self.get_flags_debug is not self.get_flags_debug_f90:
+ fixflags += self.__get_flags(self.get_flags_debug_f90)
+
+ fflags = self.__get_flags(self.get_flags,'FFLAGS') \
+ + dflags + oflags + aflags
+
+ if f77:
+ self.set_executables(compiler_f77=[f77]+f77flags+fflags)
+ if f90:
+ self.set_executables(compiler_f90=[f90]+freeflags+f90flags+fflags)
+ if fix:
+ self.set_executables(compiler_fix=[fix]+fixflags+fflags)
+ #XXX: Do we need LDSHARED->SOSHARED, LDFLAGS->SOFLAGS
+ linker_so = self.__get_cmd(self.get_linker_so,'LDSHARED')
+ if linker_so:
+ linker_so_flags = self.__get_flags(self.get_flags_linker_so,'LDFLAGS')
+ self.set_executables(linker_so=[linker_so]+linker_so_flags)
+
+ linker_exe = self.__get_cmd(self.get_linker_exe,'LD')
+ if linker_exe:
+ linker_exe_flags = self.__get_flags(self.get_flags_linker_exe,'LDFLAGS')
+ self.set_executables(linker_exe=[linker_exe]+linker_exe_flags)
+ ar = self.__get_cmd('archiver','AR')
+ if ar:
+ arflags = self.__get_flags(self.get_flags_ar,'ARFLAGS')
+ self.set_executables(archiver=[ar]+arflags)
+
+ ranlib = self.__get_cmd('ranlib','RANLIB')
+ if ranlib:
+ self.set_executables(ranlib=[ranlib])
+
+ self.set_library_dirs(self.get_library_dirs())
+ self.set_libraries(self.get_libraries())
+
+
+ verbose = conf.get('verbose',[None,0])[1]
+ if verbose:
+ self.dump_properties()
+ return
+
+ def dump_properties(self):
+ """ Print out the attributes of a compiler instance. """
+ props = []
+ for key in self.executables.keys() + \
+ ['version','libraries','library_dirs',
+ 'object_switch','compile_switch']:
+ if hasattr(self,key):
+ v = getattr(self,key)
+ props.append((key, None, '= '+`v`))
+ props.sort()
+
+ pretty_printer = FancyGetopt(props)
+ for l in pretty_printer.generate_help("%s instance properties:" \
+ % (self.__class__.__name__)):
+ if l[:4]==' --':
+ l = ' ' + l[4:]
+ print l
+ return
+
+ ###################
+
+ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+ """Compile 'src' to product 'obj'."""
+ if is_f_file(src) and not has_f90_header(src):
+ flavor = ':f77'
+ compiler = self.compiler_f77
+ elif is_free_format(src):
+ flavor = ':f90'
+ compiler = self.compiler_f90
+ if compiler is None:
+ raise DistutilsExecError, 'f90 not supported by '\
+ +self.__class__.__name__
+ else:
+ flavor = ':fix'
+ compiler = self.compiler_fix
+ if compiler is None:
+ raise DistutilsExecError, 'f90 (fixed) not supported by '\
+ +self.__class__.__name__
+ if self.object_switch[-1]==' ':
+ o_args = [self.object_switch.strip(),obj]
+ else:
+ o_args = [self.object_switch.strip()+obj]
+
+ assert self.compile_switch.strip()
+ s_args = [self.compile_switch, src]
+
+ if os.name == 'nt':
+ compiler = _nt_quote_args(compiler)
+ command = compiler + cc_args + s_args + o_args + extra_postargs
+
+ display = '%s: %s' % (os.path.basename(compiler[0]) + flavor,
+ src)
+ try:
+ self.spawn(command,display=display)
+ except DistutilsExecError, msg:
+ raise CompileError, msg
+
+ return
+
+ def module_options(self, module_dirs, module_build_dir):
+ options = []
+ if self.module_dir_switch is not None:
+ if self.module_dir_switch[-1]==' ':
+ options.extend([self.module_dir_switch.strip(),module_build_dir])
+ else:
+ options.append(self.module_dir_switch.strip()+module_build_dir)
+ else:
+ print 'XXX: module_build_dir=%r option ignored' % (module_build_dir)
+ print 'XXX: Fix module_dir_switch for ',self.__class__.__name__
+ if self.module_include_switch is not None:
+ for d in [module_build_dir]+module_dirs:
+ options.append('%s%s' % (self.module_include_switch, d))
+ else:
+ print 'XXX: module_dirs=%r option ignored' % (module_dirs)
+ print 'XXX: Fix module_include_switch for ',self.__class__.__name__
+ return options
+
+ def library_option(self, lib):
+ return "-l" + lib
+ def library_dir_option(self, dir):
+ return "-L" + dir
+
+ def link(self, target_desc, objects,
+ output_filename, output_dir=None, libraries=None,
+ library_dirs=None, runtime_library_dirs=None,
+ export_symbols=None, debug=0, extra_preargs=None,
+ extra_postargs=None, build_temp=None, target_lang=None):
+ objects, output_dir = self._fix_object_args(objects, output_dir)
+ libraries, library_dirs, runtime_library_dirs = \
+ self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
+
+ lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
+ libraries)
+ if type(output_dir) not in (StringType, NoneType):
+ raise TypeError, "'output_dir' must be a string or None"
+ if output_dir is not None:
+ output_filename = os.path.join(output_dir, output_filename)
+
+ if self._need_link(objects, output_filename):
+ if self.library_switch[-1]==' ':
+ o_args = [self.library_switch.strip(),output_filename]
+ else:
+ o_args = [self.library_switch.strip()+output_filename]
+
+ if type(self.objects) is type(''):
+ ld_args = objects + [self.objects]
+ else:
+ ld_args = objects + self.objects
+ ld_args = ld_args + lib_opts + o_args
+ if debug:
+ ld_args[:0] = ['-g']
+ if extra_preargs:
+ ld_args[:0] = extra_preargs
+ if extra_postargs:
+ ld_args.extend(extra_postargs)
+ self.mkpath(os.path.dirname(output_filename))
+ if target_desc == CCompiler.EXECUTABLE:
+ linker = self.linker_exe[:]
+ else:
+ linker = self.linker_so[:]
+ if os.name == 'nt':
+ linker = _nt_quote_args(linker)
+ command = linker + ld_args
+ try:
+ self.spawn(command)
+ except DistutilsExecError, msg:
+ raise LinkError, msg
+ else:
+ log.debug("skipping %s (up-to-date)", output_filename)
+ return
+
+
+ ## Private methods:
+
+ def __get_cmd(self, command, envvar=None, confvar=None):
+ if command is None:
+ var = None
+ elif type(command) is type(''):
+ var = self.executables[command]
+ if var is not None:
+ var = var[0]
+ else:
+ var = command()
+ if envvar is not None:
+ var = os.environ.get(envvar, var)
+ if confvar is not None:
+ var = confvar[0].get(confvar[1], [None,var])[1]
+ return var
+
+ def __get_flags(self, command, envvar=None, confvar=None):
+ if command is None:
+ var = []
+ elif type(command) is type(''):
+ var = self.executables[command][1:]
+ else:
+ var = command()
+ if envvar is not None:
+ var = os.environ.get(envvar, var)
+ if confvar is not None:
+ var = confvar[0].get(confvar[1], [None,var])[1]
+ if type(var) is type(''):
+ var = split_quoted(var)
+ return var
+
+ ## class FCompiler
+
+fcompiler_class = {'gnu':('gnu','GnuFCompiler',
+ "GNU Fortran Compiler"),
+ 'g95':('g95','G95FCompiler',
+ "GNU Fortran 95 Compiler"),
+ 'pg':('pg','PGroupFCompiler',
+ "Portland Group Fortran Compiler"),
+ 'absoft':('absoft','AbsoftFCompiler',
+ "Absoft Corp Fortran Compiler"),
+ 'mips':('mips','MipsFCompiler',
+ "MIPSpro Fortran Compiler"),
+ 'sun':('sun','SunFCompiler',
+ "Sun|Forte Fortran 95 Compiler"),
+ 'intel':('intel','IntelFCompiler',
+ "Intel Fortran Compiler for 32-bit apps"),
+ 'intelv':('intel','IntelVisualFCompiler',
+ "Intel Visual Fortran Compiler for 32-bit apps"),
+ 'intele':('intel','IntelItaniumFCompiler',
+ "Intel Fortran Compiler for Itanium apps"),
+ 'intelev':('intel','IntelItaniumVisualFCompiler',
+ "Intel Visual Fortran Compiler for Itanium apps"),
+ 'nag':('nag','NAGFCompiler',
+ "NAGWare Fortran 95 Compiler"),
+ 'compaq':('compaq','CompaqFCompiler',
+ "Compaq Fortran Compiler"),
+ 'compaqv':('compaq','CompaqVisualFCompiler',
+ "DIGITAL|Compaq Visual Fortran Compiler"),
+ 'vast':('vast','VastFCompiler',
+ "Pacific-Sierra Research Fortran 90 Compiler"),
+ 'hpux':('hpux','HPUXFCompiler',
+ "HP Fortran 90 Compiler"),
+ 'lahey':('lahey','LaheyFCompiler',
+ "Lahey/Fujitsu Fortran 95 Compiler"),
+ 'ibm':('ibm','IbmFCompiler',
+ "IBM XL Fortran Compiler"),
+ 'f':('f','FFCompiler',
+ "Fortran Company/NAG F Compiler"),
+ 'none':('none','NoneFCompiler',"Fake Fortran compiler")
+ }
+
+_default_compilers = (
+ # Platform mappings
+ ('win32',('gnu','intelv','absoft','compaqv','intelev','g95')),
+ ('cygwin.*',('gnu','intelv','absoft','compaqv','intelev','g95')),
+ ('linux.*',('gnu','intel','lahey','pg','absoft','nag','vast','compaq',
+ 'intele','g95')),
+ ('darwin.*',('nag','absoft','ibm','gnu','g95')),
+ ('sunos.*',('sun','gnu','g95')),
+ ('irix.*',('mips','gnu')),
+ ('aix.*',('ibm','gnu')),
+ # OS mappings
+ ('posix',('gnu',)),
+ ('nt',('gnu',)),
+ ('mac',('gnu',)),
+ )
+
+def _find_existing_fcompiler(compilers, osname=None, platform=None):
+ for compiler in compilers:
+ v = None
+ try:
+ c = new_fcompiler(plat=platform, compiler=compiler)
+ c.customize()
+ v = c.get_version()
+ except DistutilsModuleError:
+ pass
+ except Exception, msg:
+ log.warn(msg)
+ if v is not None:
+ return compiler
+ return
+
+def get_default_fcompiler(osname=None, platform=None):
+ """ Determine the default Fortran compiler to use for the given platform. """
+ if osname is None:
+ osname = os.name
+ if platform is None:
+ platform = sys.platform
+ matching_compilers = []
+ for pattern, compiler in _default_compilers:
+ if re.match(pattern, platform) is not None or \
+ re.match(pattern, osname) is not None:
+ if type(compiler) is type(()):
+ matching_compilers.extend(list(compiler))
+ else:
+ matching_compilers.append(compiler)
+ if not matching_compilers:
+ matching_compilers.append('gnu')
+ compiler = _find_existing_fcompiler(matching_compilers,
+ osname=osname,
+ platform=platform)
+ if compiler is not None:
+ return compiler
+ return matching_compilers[0]
+
+def new_fcompiler(plat=None,
+ compiler=None,
+ verbose=0,
+ dry_run=0,
+ force=0):
+ """ Generate an instance of some FCompiler subclass for the supplied
+ platform/compiler combination.
+ """
+ if plat is None:
+ plat = os.name
+ try:
+ if compiler is None:
+ compiler = get_default_fcompiler(plat)
+ (module_name, class_name, long_description) = fcompiler_class[compiler]
+ except KeyError:
+ msg = "don't know how to compile Fortran code on platform '%s'" % plat
+ if compiler is not None:
+ msg = msg + " with '%s' compiler." % compiler
+ msg = msg + " Supported compilers are: %s)" \
+ % (','.join(fcompiler_class.keys()))
+ raise DistutilsPlatformError, msg
+
+ try:
+ module_name = 'scipy.distutils.fcompiler.'+module_name
+ __import__ (module_name)
+ module = sys.modules[module_name]
+ klass = vars(module)[class_name]
+ except ImportError:
+ raise DistutilsModuleError, \
+ "can't compile Fortran code: unable to load module '%s'" % \
+ module_name
+ except KeyError:
+ raise DistutilsModuleError, \
+ ("can't compile Fortran code: unable to find class '%s' " +
+ "in module '%s'") % (class_name, module_name)
+ compiler = klass(None, dry_run, force)
+ log.debug('new_fcompiler returns %s' % (klass))
+ return compiler
+
+def show_fcompilers(dist = None):
+ """ Print list of available compilers (used by the "--help-fcompiler"
+ option to "config_fc").
+ """
+ if dist is None:
+ from distutils.dist import Distribution
+ dist = Distribution()
+ dist.script_name = os.path.basename(sys.argv[0])
+ dist.script_args = ['config_fc'] + sys.argv[1:]
+ dist.cmdclass['config_fc'] = config_fc
+ dist.parse_config_files()
+ dist.parse_command_line()
+
+ compilers = []
+ compilers_na = []
+ compilers_ni = []
+ for compiler in fcompiler_class.keys():
+ v = 'N/A'
+ try:
+ c = new_fcompiler(compiler=compiler)
+ c.customize(dist)
+ v = c.get_version()
+ except DistutilsModuleError:
+ pass
+ except Exception, msg:
+ log.warn(msg)
+ if v is None:
+ compilers_na.append(("fcompiler="+compiler, None,
+ fcompiler_class[compiler][2]))
+ elif v=='N/A':
+ compilers_ni.append(("fcompiler="+compiler, None,
+ fcompiler_class[compiler][2]))
+ else:
+ compilers.append(("fcompiler="+compiler, None,
+ fcompiler_class[compiler][2] + ' (%s)' % v))
+
+ compilers.sort()
+ compilers_na.sort()
+ pretty_printer = FancyGetopt(compilers)
+ pretty_printer.print_help("List of available Fortran compilers:")
+ pretty_printer = FancyGetopt(compilers_na)
+ pretty_printer.print_help("List of unavailable Fortran compilers:")
+ if compilers_ni:
+ pretty_printer = FancyGetopt(compilers_ni)
+ pretty_printer.print_help("List of unimplemented Fortran compilers:")
+ print "For compiler details, run 'config_fc --verbose' setup command."
+
+def dummy_fortran_file():
+ import atexit
+ import tempfile
+ dummy_name = tempfile.mktemp()+'__dummy'
+ dummy = open(dummy_name+'.f','w')
+ dummy.write(" subroutine dummy()\n end\n")
+ dummy.close()
+ def rm_file(name=dummy_name,log_threshold=log._global_log.threshold):
+ save_th = log._global_log.threshold
+ log.set_threshold(log_threshold)
+ try: os.remove(name+'.f'); log.debug('removed '+name+'.f')
+ except OSError: pass
+ try: os.remove(name+'.o'); log.debug('removed '+name+'.o')
+ except OSError: pass
+ log.set_threshold(save_th)
+ atexit.register(rm_file)
+ return dummy_name
+
+is_f_file = re.compile(r'.*[.](for|ftn|f77|f)\Z',re.I).match
+_has_f_header = re.compile(r'-[*]-\s*fortran\s*-[*]-',re.I).search
+_has_f90_header = re.compile(r'-[*]-\s*f90\s*-[*]-',re.I).search
+_has_fix_header = re.compile(r'-[*]-\s*fix\s*-[*]-',re.I).search
+_free_f90_start = re.compile(r'[^c*]\s*[^\s\d\t]',re.I).match
+def is_free_format(file):
+ """Check if file is in free format Fortran."""
+ # f90 allows both fixed and free format, assuming fixed unless
+ # signs of free format are detected.
+ result = 0
+ f = open(file,'r')
+ line = f.readline()
+ n = 15 # the number of non-comment lines to scan for hints
+ if _has_f_header(line):
+ n = 0
+ elif _has_f90_header(line):
+ n = 0
+ result = 1
+ while n>0 and line:
+ if line[0]!='!':
+ n -= 1
+ if (line[0]!='\t' and _free_f90_start(line[:5])) or line[-2:-1]=='&':
+ result = 1
+ break
+ line = f.readline()
+ f.close()
+ return result
+
+def has_f90_header(src):
+ f = open(src,'r')
+ line = f.readline()
+ f.close()
+ return _has_f90_header(line) or _has_fix_header(line)
+
+if __name__ == '__main__':
+ show_fcompilers()
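
The customize()/new_fcompiler() machinery above is only described in prose by its
docstrings, so here is a minimal usage sketch (an editorial illustration, not part of
the commit). It assumes the package is importable as scipy.distutils, as the imports
in these modules do, and that at least one supported Fortran compiler is installed.

# Editorial sketch: driving the FCompiler API defined above.
from scipy.distutils import log
from scipy.distutils.fcompiler import new_fcompiler, show_fcompilers

log.set_verbosity(2)

# Pick the default compiler for this platform (see get_default_fcompiler) and
# apply the (i) class / (ii) environment / (iii) config file / (iv) command
# line customization described in FCompiler.customize().
fc = new_fcompiler()
fc.customize()
print 'Fortran compiler version:', fc.get_version()

# Environment variables such as F77/F77FLAGS override the class defaults,
# e.g.:  F77=g77 F77FLAGS='-O2' python setup.py build
# To list every entry of fcompiler_class with its availability:
show_fcompilers()
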
diff --git a/distutils/fcompiler/absoft.py b/distutils/fcompiler/absoft.py
new file mode 100644
index 000000000..11bdf7065
--- /dev/null
+++ b/distutils/fcompiler/absoft.py
@@ -0,0 +1,128 @@
+
+# http://www.absoft.com/literature/osxuserguide.pdf
+# http://www.absoft.com/documentation.html
+
+# Notes:
+# - when using -g77 then use -DUNDERSCORE_G77 to compile f2py
+# generated extension modules (works for f2py v2.45.241_1936 and up)
+
+import os
+import sys
+
+from scipy.distutils.cpuinfo import cpu
+from scipy.distutils.fcompiler import FCompiler, dummy_fortran_file
+from scipy.distutils.misc_util import cyg2win32
+
+class AbsoftFCompiler(FCompiler):
+
+ compiler_type = 'absoft'
+ #version_pattern = r'FORTRAN 77 Compiler (?P<version>[^\s*,]*).*?Absoft Corp'
+ version_pattern = r'(f90:.*?Absoft Pro FORTRAN Version|FORTRAN 77 Compiler)'+\
+ r' (?P<version>[^\s*,]*)(.*?Absoft Corp|)'
+
+ # samt5735(8)$ f90 -V -c dummy.f
+ # f90: Copyright Absoft Corporation 1994-2002; Absoft Pro FORTRAN Version 8.0
+    # Note that fink installs g77 as f77, so we need to use f90 for detection.
+
+ executables = {
+ 'version_cmd' : ["f90", "-V -c %(fname)s.f -o %(fname)s.o" \
+ % {'fname':cyg2win32(dummy_fortran_file())}],
+ 'compiler_f77' : ["f77"],
+ 'compiler_fix' : ["f90"],
+ 'compiler_f90' : ["f90"],
+ 'linker_so' : ["f90"],
+ 'archiver' : ["ar", "-cr"],
+ 'ranlib' : ["ranlib"]
+ }
+
+ if os.name=='nt':
+ library_switch = '/out:' #No space after /out:!
+
+ module_dir_switch = None
+ module_include_switch = '-p'
+
+ def get_flags_linker_so(self):
+ if os.name=='nt':
+ opt = ['/dll']
+ else:
+ opt = ["-K","shared"]
+ return opt
+
+ def library_dir_option(self, dir):
+ if os.name=='nt':
+ return ['-link','/PATH:"%s"' % (dir)]
+ return "-L" + dir
+
+ def library_option(self, lib):
+ if os.name=='nt':
+ return '%s.lib' % (lib)
+ return "-l" + lib
+
+ def get_library_dirs(self):
+ opt = FCompiler.get_library_dirs(self)
+ d = os.environ.get('ABSOFT')
+ if d:
+ opt.append(os.path.join(d,'LIB'))
+ return opt
+
+ def get_libraries(self):
+ opt = FCompiler.get_libraries(self)
+ opt.extend(['fio','f90math','fmath'])
+ if os.name =='nt':
+ opt.append('COMDLG32')
+ return opt
+
+ def get_flags(self):
+ opt = FCompiler.get_flags(self)
+ if os.name != 'nt':
+ opt.extend(['-s'])
+ if self.get_version():
+ if self.get_version()>='8.2':
+ opt.append('-fpic')
+ return opt
+
+ def get_flags_f77(self):
+ opt = FCompiler.get_flags_f77(self)
+ opt.extend(['-N22','-N90','-N110'])
+ v = self.get_version()
+ if os.name == 'nt':
+ if v and v>='8.0':
+ opt.extend(['-f','-N15'])
+ else:
+ opt.append('-f')
+ if v:
+ if v<='4.6':
+ opt.append('-B108')
+ else:
+ # Though -N15 is undocumented, it works with
+ # Absoft 8.0 on Linux
+ opt.append('-N15')
+ return opt
+
+ def get_flags_f90(self):
+ opt = FCompiler.get_flags_f90(self)
+ opt.extend(["-YCFRL=1","-YCOM_NAMES=LCS","-YCOM_PFX","-YEXT_PFX",
+ "-YCOM_SFX=_","-YEXT_SFX=_","-YEXT_NAMES=LCS"])
+ if self.get_version():
+ if self.get_version()>'4.6':
+ opt.extend(["-YDEALLOC=ALL"])
+ return opt
+
+ def get_flags_fix(self):
+ opt = FCompiler.get_flags_fix(self)
+ opt.extend(["-YCFRL=1","-YCOM_NAMES=LCS","-YCOM_PFX","-YEXT_PFX",
+ "-YCOM_SFX=_","-YEXT_SFX=_","-YEXT_NAMES=LCS"])
+ opt.extend(["-f","fixed"])
+ return opt
+
+ def get_flags_opt(self):
+ opt = ['-O']
+ return opt
+
+if __name__ == '__main__':
+ from distutils import log
+ log.set_verbosity(2)
+ from scipy.distutils.fcompiler import new_fcompiler
+ compiler = new_fcompiler(compiler='absoft')
+ compiler.customize()
+ print compiler.get_version()
diff --git a/distutils/fcompiler/compaq.py b/distutils/fcompiler/compaq.py
new file mode 100644
index 000000000..7abb23ae2
--- /dev/null
+++ b/distutils/fcompiler/compaq.py
@@ -0,0 +1,94 @@
+
+#http://www.compaq.com/fortran/docs/
+
+import os
+import sys
+
+from scipy.distutils.cpuinfo import cpu
+from scipy.distutils.fcompiler import FCompiler
+
+class CompaqFCompiler(FCompiler):
+
+ compiler_type = 'compaq'
+ version_pattern = r'Compaq Fortran (?P<version>[^\s]*).*'
+
+ if sys.platform[:5]=='linux':
+ fc_exe = 'fort'
+ else:
+ fc_exe = 'f90'
+
+ executables = {
+ 'version_cmd' : [fc_exe, "-version"],
+ 'compiler_f77' : [fc_exe, "-f77rtl","-fixed"],
+ 'compiler_fix' : [fc_exe, "-fixed"],
+ 'compiler_f90' : [fc_exe],
+ 'linker_so' : [fc_exe],
+ 'archiver' : ["ar", "-cr"],
+ 'ranlib' : ["ranlib"]
+ }
+
+ module_dir_switch = '-module ' # not tested
+ module_include_switch = '-I'
+
+ def get_flags(self):
+ return ['-assume no2underscore','-nomixed_str_len_arg']
+ def get_flags_debug(self):
+ return ['-g','-check bounds']
+ def get_flags_opt(self):
+ return ['-O4','-align dcommons','-assume bigarrays',
+ '-assume nozsize','-math_library fast']
+ def get_flags_arch(self):
+ return ['-arch host', '-tune host']
+ def get_flags_linker_so(self):
+ if sys.platform[:5]=='linux':
+ return ['-shared']
+ return ['-shared','-Wl,-expect_unresolved,*']
+
+class CompaqVisualFCompiler(FCompiler):
+
+ compiler_type = 'compaqv'
+ version_pattern = r'(DIGITAL|Compaq) Visual Fortran Optimizing Compiler'\
+ ' Version (?P<version>[^\s]*).*'
+
+ compile_switch = '/compile_only'
+ object_switch = '/object:'
+ library_switch = '/OUT:' #No space after /OUT:!
+
+ static_lib_extension = ".lib"
+ static_lib_format = "%s%s"
+ module_dir_switch = '/module:'
+ module_include_switch = '/I'
+
+ ar_exe = 'lib.exe'
+ fc_exe = 'DF'
+ if sys.platform=='win32':
+ from distutils.msvccompiler import MSVCCompiler
+ ar_exe = MSVCCompiler().lib
+
+ executables = {
+ 'version_cmd' : ['DF', "/what"],
+ 'compiler_f77' : ['DF', "/f77rtl","/fixed"],
+ 'compiler_fix' : ['DF', "/fixed"],
+ 'compiler_f90' : ['DF'],
+ 'linker_so' : ['DF'],
+ 'archiver' : [ar_exe, "/OUT:"],
+ 'ranlib' : None
+ }
+
+ def get_flags(self):
+ return ['/nologo','/MD','/WX','/iface=(cref,nomixed_str_len_arg)',
+ '/names:lowercase','/assume:underscore']
+ def get_flags_opt(self):
+ return ['/Ox','/fast','/optimize:5','/unroll:0','/math_library:fast']
+ def get_flags_arch(self):
+ return ['/threads']
+ def get_flags_debug(self):
+ return ['/debug']
+
+if __name__ == '__main__':
+ from distutils import log
+ log.set_verbosity(2)
+ from fcompiler import new_fcompiler
+ compiler = new_fcompiler(compiler='compaq')
+ compiler.customize()
+ print compiler.get_version()
diff --git a/distutils/fcompiler/g95.py b/distutils/fcompiler/g95.py
new file mode 100644
index 000000000..a3bf374a8
--- /dev/null
+++ b/distutils/fcompiler/g95.py
@@ -0,0 +1,41 @@
+# http://g95.sourceforge.net/
+
+import os
+import sys
+
+from scipy.distutils.cpuinfo import cpu
+from scipy.distutils.fcompiler import FCompiler
+
+class G95FCompiler(FCompiler):
+
+ compiler_type = 'g95'
+ version_pattern = r'G95.*\(experimental\) \(g95!\) (?P<version>.*)\).*'
+
+ executables = {
+ 'version_cmd' : ["g95", "--version"],
+ 'compiler_f77' : ["g95", "-ffixed-form"],
+ 'compiler_fix' : ["g95", "-ffixed-form"],
+ 'compiler_f90' : ["g95"],
+ 'linker_so' : ["g95","-shared"],
+ 'archiver' : ["ar", "-cr"],
+ 'ranlib' : ["ranlib"]
+ }
+ pic_flags = ['-fpic']
+ module_dir_switch = '-fmod='
+ module_include_switch = '-I'
+
+ def get_flags(self):
+ return ['-fno-second-underscore']
+ def get_flags_opt(self):
+ return ['-O']
+ def get_flags_debug(self):
+ return ['-g']
+
+if __name__ == '__main__':
+ from distutils import log
+ log.set_verbosity(2)
+ from scipy.distutils.fcompiler import new_fcompiler
+ #compiler = new_fcompiler(compiler='g95')
+ compiler = G95FCompiler()
+ compiler.customize()
+ print compiler.get_version()
diff --git a/distutils/fcompiler/gnu.py b/distutils/fcompiler/gnu.py
new file mode 100644
index 000000000..764feebd9
--- /dev/null
+++ b/distutils/fcompiler/gnu.py
@@ -0,0 +1,210 @@
+
+import re
+import os
+import sys
+import warnings
+
+from scipy.distutils.cpuinfo import cpu
+from scipy.distutils.fcompiler import FCompiler
+from scipy.distutils.exec_command import exec_command, find_executable
+
+class GnuFCompiler(FCompiler):
+
+ compiler_type = 'gnu'
+ version_pattern = r'GNU Fortran ((\(GCC[^\)]*(\)\)|\)))|)\s*'\
+ '(?P<version>[^\s*\)]+)'
+
+ # 'g77 --version' results
+ # SunOS: GNU Fortran (GCC 3.2) 3.2 20020814 (release)
+ # Debian: GNU Fortran (GCC) 3.3.3 20040110 (prerelease) (Debian)
+ # GNU Fortran (GCC) 3.3.3 (Debian 20040401)
+ # GNU Fortran 0.5.25 20010319 (prerelease)
+ # Redhat: GNU Fortran (GCC 3.2.2 20030222 (Red Hat Linux 3.2.2-5)) 3.2.2 20030222 (Red Hat Linux 3.2.2-5)
+
+ for fc_exe in map(find_executable,['g77','f77']):
+ if os.path.isfile(fc_exe):
+ break
+ executables = {
+ 'version_cmd' : [fc_exe,"--version"],
+ 'compiler_f77' : [fc_exe,"-Wall","-fno-second-underscore"],
+ 'compiler_f90' : None,
+ 'compiler_fix' : None,
+ 'linker_so' : [fc_exe,"-Wall"],
+ 'archiver' : ["ar", "-cr"],
+ 'ranlib' : ["ranlib"],
+ 'linker_exe' : [fc_exe,"-Wall"]
+ }
+ module_dir_switch = None
+ module_include_switch = None
+
+ # Cygwin: f771: warning: -fPIC ignored for target (all code is position independent)
+ if os.name != 'nt' and sys.platform!='cygwin':
+ pic_flags = ['-fPIC']
+
+ #def get_linker_so(self):
+ # # win32 linking should be handled by standard linker
+ # # Darwin g77 cannot be used as a linker.
+ # #if re.match(r'(darwin)', sys.platform):
+ # # return
+ # return FCompiler.get_linker_so(self)
+
+ def get_flags_linker_so(self):
+ opt = []
+ if sys.platform=='darwin':
+ target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', None)
+ if target is None:
+ target = '10.3'
+ major, minor = target.split('.')
+ if int(minor) < 3:
+ minor = '3'
+ warnings.warn('Environment variable '
+ 'MACOSX_DEPLOYMENT_TARGET reset to 10.3')
+ os.environ['MACOSX_DEPLOYMENT_TARGET'] = '%s.%s' % (major,
+ minor)
+
+ opt.extend(['-undefined', 'dynamic_lookup', '-bundle'])
+ else:
+ opt.append("-shared")
+ if sys.platform[:5]=='sunos':
+ # SunOS often has dynamically loaded symbols defined in the
+            # static library libg2c.a. The linker doesn't like this. To
+ # ignore the problem, use the -mimpure-text flag. It isn't
+ # the safest thing, but seems to work. 'man gcc' says:
+ # ".. Instead of using -mimpure-text, you should compile all
+ # source code with -fpic or -fPIC."
+ opt.append('-mimpure-text')
+ return opt
+
+ def get_libgcc_dir(self):
+ status, output = exec_command('%s -print-libgcc-file-name' \
+ % (self.compiler_f77[0]),use_tee=0)
+ if not status:
+ return os.path.dirname(output)
+ return
+
+ def get_library_dirs(self):
+ opt = []
+ if sys.platform[:5] != 'linux':
+ d = self.get_libgcc_dir()
+ if d:
+ opt.append(d)
+ return opt
+
+ def get_libraries(self):
+ opt = []
+ d = self.get_libgcc_dir()
+ if d is not None:
+ g2c = 'g2c-pic'
+ f = self.static_lib_format % (g2c, self.static_lib_extension)
+ if not os.path.isfile(os.path.join(d,f)):
+ g2c = 'g2c'
+ else:
+ g2c = 'g2c'
+
+ if sys.platform=='win32':
+ opt.append('gcc')
+ if g2c is not None:
+ opt.append(g2c)
+ if sys.platform == 'darwin':
+ opt.append('cc_dynamic')
+ return opt
+
+ def get_flags_debug(self):
+ return ['-g']
+
+ def get_flags_opt(self):
+ if self.get_version()<='3.3.3':
+ # With this compiler version building Fortran BLAS/LAPACK
+ # with -O3 caused failures in lib.lapack heevr,syevr tests.
+ opt = ['-O2']
+ else:
+ opt = ['-O3']
+ opt.append('-funroll-loops')
+ return opt
+
+ def get_flags_arch(self):
+ opt = []
+ if sys.platform=='darwin':
+ if os.name != 'posix':
+ # this should presumably correspond to Apple
+ if cpu.is_ppc():
+ opt.append('-arch ppc')
+ elif cpu.is_i386():
+ opt.append('-arch i386')
+ for a in '601 602 603 603e 604 604e 620 630 740 7400 7450 750'\
+ '403 505 801 821 823 860'.split():
+ if getattr(cpu,'is_ppc%s'%a)():
+ opt.append('-mcpu='+a)
+ opt.append('-mtune='+a)
+ break
+ return opt
+ march_flag = 1
+ # 0.5.25 corresponds to 2.95.x
+ if self.get_version() == '0.5.26': # gcc 3.0
+ if cpu.is_AthlonK6():
+ opt.append('-march=k6')
+ elif cpu.is_AthlonK7():
+ opt.append('-march=athlon')
+ else:
+ march_flag = 0
+ # Note: gcc 3.2 on win32 has breakage with -march specified
+ elif self.get_version() >= '3.1.1' \
+ and not sys.platform=='win32': # gcc >= 3.1.1
+ if cpu.is_AthlonK6():
+ opt.append('-march=k6')
+ elif cpu.is_AthlonK6_2():
+ opt.append('-march=k6-2')
+ elif cpu.is_AthlonK6_3():
+ opt.append('-march=k6-3')
+ elif cpu.is_AthlonK7():
+ opt.append('-march=athlon')
+ elif cpu.is_AthlonMP():
+ opt.append('-march=athlon-mp')
+ # there's also: athlon-tbird, athlon-4, athlon-xp
+ elif cpu.is_PentiumIV():
+ opt.append('-march=pentium4')
+ elif cpu.is_PentiumIII():
+ opt.append('-march=pentium3')
+ elif cpu.is_PentiumII():
+ opt.append('-march=pentium2')
+ else:
+ march_flag = 0
+ if self.get_version() >= '3.4' and not march_flag:
+ march_flag = 1
+ if cpu.is_Opteron():
+ opt.append('-march=opteron')
+ elif cpu.is_Athlon64():
+ opt.append('-march=athlon64')
+ else:
+ march_flag = 0
+ if cpu.has_mmx(): opt.append('-mmmx')
+ if self.get_version() > '3.2.2':
+ if cpu.has_sse2(): opt.append('-msse2')
+ if cpu.has_sse(): opt.append('-msse')
+ if self.get_version() >= '3.4':
+ if cpu.has_sse3(): opt.append('-msse3')
+ if cpu.has_3dnow(): opt.append('-m3dnow')
+ else:
+ march_flag = 0
+ if march_flag:
+ pass
+ elif cpu.is_i686():
+ opt.append('-march=i686')
+ elif cpu.is_i586():
+ opt.append('-march=i586')
+ elif cpu.is_i486():
+ opt.append('-march=i486')
+ elif cpu.is_i386():
+ opt.append('-march=i386')
+ if cpu.is_Intel():
+ opt.extend(['-malign-double','-fomit-frame-pointer'])
+ return opt
+
+if __name__ == '__main__':
+ from distutils import log
+ log.set_verbosity(2)
+ from scipy.distutils.fcompiler import new_fcompiler
+ #compiler = new_fcompiler(compiler='gnu')
+ compiler = GnuFCompiler()
+ compiler.customize()
+ print compiler.get_version()
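
GnuFCompiler.get_flags_linker_so() above silently rewrites MACOSX_DEPLOYMENT_TARGET
on Mac OS X before choosing link flags; the short editorial sketch below (not part of
the commit) shows how to observe that behaviour. The 10.1 value is made up for the
example.

# Editorial sketch: inspecting the shared-library link flags chosen above.
import os
from scipy.distutils.fcompiler.gnu import GnuFCompiler

os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.1'   # hypothetical starting value
fc = GnuFCompiler()
print fc.get_flags_linker_so()
# On darwin this prints ['-undefined', 'dynamic_lookup', '-bundle'] and bumps
# MACOSX_DEPLOYMENT_TARGET to 10.3 with a warning; elsewhere it prints
# ['-shared'] (plus '-mimpure-text' on SunOS).
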
diff --git a/distutils/fcompiler/hpux.py b/distutils/fcompiler/hpux.py
new file mode 100644
index 000000000..95f326cea
--- /dev/null
+++ b/distutils/fcompiler/hpux.py
@@ -0,0 +1,41 @@
+import os
+import sys
+
+from scipy.distutils.cpuinfo import cpu
+from scipy.distutils.fcompiler import FCompiler
+
+class HPUXFCompiler(FCompiler):
+
+ compiler_type = 'hpux'
+ version_pattern = r'HP F90 (?P<version>[^\s*,]*)'
+
+ executables = {
+ 'version_cmd' : ["f90", "+version"],
+ 'compiler_f77' : ["f90"],
+ 'compiler_fix' : ["f90"],
+ 'compiler_f90' : ["f90"],
+ 'linker_so' : None,
+ 'archiver' : ["ar", "-cr"],
+ 'ranlib' : ["ranlib"]
+ }
+ module_dir_switch = None #XXX: fix me
+ module_include_switch = None #XXX: fix me
+ pic_flags = ['+pic=long']
+ def get_flags(self):
+ return self.pic_flags + ['+ppu']
+ def get_flags_opt(self):
+ return ['-O3']
+ def get_libraries(self):
+ return ['m']
+ def get_version(self, force=0, ok_status=[256,0]):
+ # XXX status==256 may indicate 'unrecognized option' or
+ # 'no input file'. So, version_cmd needs more work.
+ return FCompiler.get_version(self,force,ok_status)
+
+if __name__ == '__main__':
+ from distutils import log
+ log.set_verbosity(10)
+ from scipy.distutils.fcompiler import new_fcompiler
+ compiler = new_fcompiler(compiler='hpux')
+ compiler.customize()
+ print compiler.get_version()
diff --git a/distutils/fcompiler/ibm.py b/distutils/fcompiler/ibm.py
new file mode 100644
index 000000000..5fc59d1e6
--- /dev/null
+++ b/distutils/fcompiler/ibm.py
@@ -0,0 +1,80 @@
+import os
+import re
+import sys
+
+from scipy.distutils.fcompiler import FCompiler
+from distutils import log
+
+class IbmFCompiler(FCompiler):
+
+ compiler_type = 'ibm'
+ version_pattern = r'xlf\(1\)\s*IBM XL Fortran (Advanced Edition |)Version (?P<version>[^\s*]*)'
+
+ executables = {
+ 'version_cmd' : ["xlf"],
+ 'compiler_f77' : ["xlf"],
+ 'compiler_fix' : ["xlf90", "-qfixed"],
+ 'compiler_f90' : ["xlf90"],
+ 'linker_so' : ["xlf95"],
+ 'archiver' : ["ar", "-cr"],
+ 'ranlib' : ["ranlib"]
+ }
+
+ def get_version(self,*args,**kwds):
+ version = FCompiler.get_version(self,*args,**kwds)
+ xlf_dir = '/etc/opt/ibmcmp/xlf'
+ if version is None and os.path.isdir(xlf_dir):
+ # If the output of xlf does not contain version info
+ # (that's the case with xlf 8.1, for instance) then
+ # let's try another method:
+ l = os.listdir(xlf_dir)
+ l.sort()
+ l.reverse()
+ l = [d for d in l if os.path.isfile(os.path.join(xlf_dir,d,'xlf.cfg'))]
+            if l:
+ from distutils.version import LooseVersion
+ self.version = version = LooseVersion(l[0])
+ return version
+
+ def get_flags(self):
+ return ['-qextname']
+
+ def get_flags_debug(self):
+ return ['-g']
+
+ def get_flags_linker_so(self):
+ opt = []
+ if sys.platform=='darwin':
+ opt.append('-Wl,-bundle,-flat_namespace,-undefined,suppress')
+ else:
+ opt.append('-bshared')
+ version = self.get_version(ok_status=[0,40])
+ if version is not None:
+ import tempfile
+ xlf_cfg = '/etc/opt/ibmcmp/xlf/%s/xlf.cfg' % version
+ new_cfg = tempfile.mktemp()+'_xlf.cfg'
+ log.info('Creating '+new_cfg)
+ fi = open(xlf_cfg,'r')
+ fo = open(new_cfg,'w')
+ crt1_match = re.compile(r'\s*crt\s*[=]\s*(?P<path>.*)/crt1.o').match
+ for line in fi.readlines():
+ m = crt1_match(line)
+ if m:
+ fo.write('crt = %s/bundle1.o\n' % (m.group('path')))
+ else:
+ fo.write(line)
+ fi.close()
+ fo.close()
+ opt.append('-F'+new_cfg)
+ return opt
+
+ def get_flags_opt(self):
+ return ['-O5']
+
+if __name__ == '__main__':
+ from distutils import log
+ log.set_verbosity(2)
+ from scipy.distutils.fcompiler import new_fcompiler
+ compiler = new_fcompiler(compiler='ibm')
+ compiler.customize()
+ print compiler.get_version()
diff --git a/distutils/fcompiler/intel.py b/distutils/fcompiler/intel.py
new file mode 100644
index 000000000..2cd255d98
--- /dev/null
+++ b/distutils/fcompiler/intel.py
@@ -0,0 +1,174 @@
+# http://developer.intel.com/software/products/compilers/flin/
+
+import os
+import sys
+
+from scipy.distutils.cpuinfo import cpu
+from scipy.distutils.fcompiler import FCompiler, dummy_fortran_file
+from scipy.distutils.exec_command import find_executable
+
+class IntelFCompiler(FCompiler):
+
+ compiler_type = 'intel'
+ version_pattern = r'Intel\(R\) Fortran Compiler for 32-bit '\
+ 'applications, Version (?P<version>[^\s*]*)'
+
+ for fc_exe in map(find_executable,['ifort','ifc']):
+ if os.path.isfile(fc_exe):
+ break
+
+ executables = {
+ 'version_cmd' : [fc_exe, "-FI -V -c %(fname)s.f -o %(fname)s.o" \
+ % {'fname':dummy_fortran_file()}],
+ 'compiler_f77' : [fc_exe,"-72","-w90","-w95"],
+ 'compiler_fix' : [fc_exe,"-FI"],
+ 'compiler_f90' : [fc_exe],
+ 'linker_so' : [fc_exe,"-shared"],
+ 'archiver' : ["ar", "-cr"],
+ 'ranlib' : ["ranlib"]
+ }
+
+ pic_flags = ['-KPIC']
+ module_dir_switch = '-module ' # Don't remove ending space!
+ module_include_switch = '-I'
+
+ def get_flags(self):
+ opt = self.pic_flags + ["-cm"]
+ return opt
+
+ def get_flags_free(self):
+ return ["-FR"]
+
+ def get_flags_opt(self):
+ return ['-O3','-unroll']
+
+ def get_flags_arch(self):
+ opt = []
+ if cpu.has_fdiv_bug():
+ opt.append('-fdiv_check')
+ if cpu.has_f00f_bug():
+ opt.append('-0f_check')
+ if cpu.is_PentiumPro() or cpu.is_PentiumII():
+ opt.extend(['-tpp6','-xi'])
+ elif cpu.is_PentiumIII():
+ opt.append('-tpp6')
+ elif cpu.is_Pentium():
+ opt.append('-tpp5')
+ elif cpu.is_PentiumIV() or cpu.is_XEON():
+ opt.extend(['-tpp7','-xW'])
+ if cpu.has_mmx():
+ opt.append('-xM')
+ return opt
+
+ def get_flags_linker_so(self):
+ opt = FCompiler.get_flags_linker_so(self)
+ v = self.get_version()
+ if v and v >= '8.0':
+ opt.append('-nofor_main')
+ return opt
+
+class IntelItaniumFCompiler(IntelFCompiler):
+ compiler_type = 'intele'
+ version_pattern = r'Intel\(R\) Fortran 90 Compiler Itanium\(TM\) Compiler'\
+ ' for the Itanium\(TM\)-based applications,'\
+ ' Version (?P<version>[^\s*]*)'
+
+ for fc_exe in map(find_executable,['efort','efc','ifort']):
+ if os.path.isfile(fc_exe):
+ break
+
+ executables = {
+ 'version_cmd' : [fc_exe, "-FI -V -c %(fname)s.f -o %(fname)s.o" \
+ % {'fname':dummy_fortran_file()}],
+ 'compiler_f77' : [fc_exe,"-FI","-w90","-w95"],
+ 'compiler_fix' : [fc_exe,"-FI"],
+ 'compiler_f90' : [fc_exe],
+ 'linker_so' : [fc_exe,"-shared"],
+ 'archiver' : ["ar", "-cr"],
+ 'ranlib' : ["ranlib"]
+ }
+
+class IntelVisualFCompiler(FCompiler):
+
+ compiler_type = 'intelv'
+ version_pattern = r'Intel\(R\) Fortran Compiler for 32-bit applications, '\
+ 'Version (?P<version>[^\s*]*)'
+
+ ar_exe = 'lib.exe'
+ fc_exe = 'ifl'
+ if sys.platform=='win32':
+ from distutils.msvccompiler import MSVCCompiler
+ ar_exe = MSVCCompiler().lib
+
+ executables = {
+ 'version_cmd' : [fc_exe, "-FI -V -c %(fname)s.f -o %(fname)s.o" \
+ % {'fname':dummy_fortran_file()}],
+ 'compiler_f77' : [fc_exe,"-FI","-w90","-w95"],
+ 'compiler_fix' : [fc_exe,"-FI","-4L72","-w"],
+ 'compiler_f90' : [fc_exe],
+ 'linker_so' : [fc_exe,"-shared"],
+ 'archiver' : [ar_exe, "/verbose", "/OUT:"],
+ 'ranlib' : None
+ }
+
+ compile_switch = '/c '
+ object_switch = '/Fo' #No space after /Fo!
+ library_switch = '/OUT:' #No space after /OUT:!
+ module_dir_switch = '/module:' #No space after /module:
+ module_include_switch = '/I'
+
+ def get_flags(self):
+ opt = ['/nologo','/MD','/nbs','/Qlowercase','/us']
+ return opt
+
+ def get_flags_free(self):
+ return ["-FR"]
+
+ def get_flags_debug(self):
+ return ['/4Yb','/d2']
+
+ def get_flags_opt(self):
+ return ['/O3','/Qip','/Qipo','/Qipo_obj']
+
+ def get_flags_arch(self):
+ opt = []
+ if cpu.is_PentiumPro() or cpu.is_PentiumII():
+ opt.extend(['/G6','/Qaxi'])
+ elif cpu.is_PentiumIII():
+ opt.extend(['/G6','/QaxK'])
+ elif cpu.is_Pentium():
+ opt.append('/G5')
+ elif cpu.is_PentiumIV():
+ opt.extend(['/G7','/QaxW'])
+ if cpu.has_mmx():
+ opt.append('/QaxM')
+ return opt
+
+class IntelItaniumVisualFCompiler(IntelVisualFCompiler):
+
+ compiler_type = 'intelev'
+ version_pattern = r'Intel\(R\) Fortran 90 Compiler Itanium\(TM\) Compiler'\
+ ' for the Itanium\(TM\)-based applications,'\
+ ' Version (?P<version>[^\s*]*)'
+
+ fc_exe = 'efl' # XXX this is a wild guess
+ ar_exe = IntelVisualFCompiler.ar_exe
+
+ executables = {
+ 'version_cmd' : [fc_exe, "-FI -V -c %(fname)s.f -o %(fname)s.o" \
+ % {'fname':dummy_fortran_file()}],
+ 'compiler_f77' : [fc_exe,"-FI","-w90","-w95"],
+ 'compiler_fix' : [fc_exe,"-FI","-4L72","-w"],
+ 'compiler_f90' : [fc_exe],
+ 'linker_so' : [fc_exe,"-shared"],
+ 'archiver' : [ar_exe, "/verbose", "/OUT:"],
+ 'ranlib' : None
+ }
+
+if __name__ == '__main__':
+ from distutils import log
+ log.set_verbosity(2)
+ from scipy.distutils.fcompiler import new_fcompiler
+ compiler = new_fcompiler(compiler='intel')
+ compiler.customize()
+ print compiler.get_version()
diff --git a/distutils/fcompiler/lahey.py b/distutils/fcompiler/lahey.py
new file mode 100644
index 000000000..9b88cc264
--- /dev/null
+++ b/distutils/fcompiler/lahey.py
@@ -0,0 +1,46 @@
+import os
+import sys
+
+from scipy.distutils.cpuinfo import cpu
+from scipy.distutils.fcompiler import FCompiler
+
+class LaheyFCompiler(FCompiler):
+
+ compiler_type = 'lahey'
+ version_pattern = r'Lahey/Fujitsu Fortran 95 Compiler Release (?P<version>[^\s*]*)'
+
+ executables = {
+ 'version_cmd' : ["lf95", "--version"],
+ 'compiler_f77' : ["lf95", "--fix"],
+ 'compiler_fix' : ["lf95", "--fix"],
+ 'compiler_f90' : ["lf95"],
+ 'linker_so' : ["lf95","-shared"],
+ 'archiver' : ["ar", "-cr"],
+ 'ranlib' : ["ranlib"]
+ }
+
+ module_dir_switch = None #XXX Fix me
+ module_include_switch = None #XXX Fix me
+
+ def get_flags_opt(self):
+ return ['-O']
+ def get_flags_debug(self):
+ return ['-g','--chk','--chkglobal']
+ def get_library_dirs(self):
+ opt = []
+ d = os.environ.get('LAHEY')
+ if d:
+ opt.append(os.path.join(d,'lib'))
+ return opt
+ def get_libraries(self):
+ opt = []
+ opt.extend(['fj9f6', 'fj9i6', 'fj9ipp', 'fj9e6'])
+ return opt
+
+if __name__ == '__main__':
+ from distutils import log
+ log.set_verbosity(2)
+ from scipy.distutils.fcompiler import new_fcompiler
+ compiler = new_fcompiler(compiler='lahey')
+ compiler.customize()
+ print compiler.get_version()
diff --git a/distutils/fcompiler/mips.py b/distutils/fcompiler/mips.py
new file mode 100644
index 000000000..547e91423
--- /dev/null
+++ b/distutils/fcompiler/mips.py
@@ -0,0 +1,56 @@
+import os
+import sys
+
+from scipy.distutils.cpuinfo import cpu
+from scipy.distutils.fcompiler import FCompiler
+
+class MipsFCompiler(FCompiler):
+
+ compiler_type = 'mips'
+ version_pattern = r'MIPSpro Compilers: Version (?P<version>[^\s*,]*)'
+
+ executables = {
+ 'version_cmd' : ["f90", "-version"],
+ 'compiler_f77' : ["f77", "-f77"],
+ 'compiler_fix' : ["f90", "-fixedform"],
+ 'compiler_f90' : ["f90"],
+ 'linker_so' : ["f90","-shared"],
+ 'archiver' : ["ar", "-cr"],
+ 'ranlib' : None
+ }
+ module_dir_switch = None #XXX: fix me
+ module_include_switch = None #XXX: fix me
+ pic_flags = ['-KPIC']
+
+ def get_flags(self):
+ return self.pic_flags + ['-n32']
+ def get_flags_opt(self):
+ return ['-O3']
+ def get_flags_arch(self):
+ opt = []
+ for a in '19 20 21 22_4k 22_5k 24 25 26 27 28 30 32_5k 32_10k'.split():
+ if getattr(cpu,'is_IP%s'%a)():
+ opt.append('-TARG:platform=IP%s' % a)
+ break
+ return opt
+ def get_flags_arch_f77(self):
+ r = None
+ if cpu.is_r10000(): r = 10000
+ elif cpu.is_r12000(): r = 12000
+ elif cpu.is_r8000(): r = 8000
+ elif cpu.is_r5000(): r = 5000
+ elif cpu.is_r4000(): r = 4000
+ if r is not None:
+ return ['r%s' % (r)]
+ return []
+ def get_flags_arch_f90(self):
+ r = self.get_flags_arch_f77()
+ if r:
+ r[0] = '-' + r[0]
+ return r
+
+if __name__ == '__main__':
+ from scipy.distutils.fcompiler import new_fcompiler
+ compiler = new_fcompiler(compiler='mips')
+ compiler.customize()
+ print compiler.get_version()
diff --git a/distutils/fcompiler/nag.py b/distutils/fcompiler/nag.py
new file mode 100644
index 000000000..e17d972c4
--- /dev/null
+++ b/distutils/fcompiler/nag.py
@@ -0,0 +1,39 @@
+import os
+import sys
+
+from scipy.distutils.cpuinfo import cpu
+from scipy.distutils.fcompiler import FCompiler
+
+class NAGFCompiler(FCompiler):
+
+ compiler_type = 'nag'
+ version_pattern = r'NAGWare Fortran 95 compiler Release (?P<version>[^\s]*)'
+
+ executables = {
+ 'version_cmd' : ["f95", "-V"],
+ 'compiler_f77' : ["f95", "-fixed"],
+ 'compiler_fix' : ["f95", "-fixed"],
+ 'compiler_f90' : ["f95"],
+ 'linker_so' : ["f95"],
+ 'archiver' : ["ar", "-cr"],
+ 'ranlib' : ["ranlib"]
+ }
+
+ def get_flags_linker_so(self):
+ if sys.platform=='darwin':
+ return ['-unsharedf95','-Wl,-bundle,-flat_namespace,-undefined,suppress']
+ return ["-Wl,shared"]
+ def get_flags_opt(self):
+ return ['-O4']
+ def get_flags_arch(self):
+ return ['-target=native']
+ def get_flags_debug(self):
+ return ['-g','-gline','-g90','-nan','-C']
+
+if __name__ == '__main__':
+ from distutils import log
+ log.set_verbosity(2)
+ from scipy.distutils.fcompiler import new_fcompiler
+ compiler = new_fcompiler(compiler='nag')
+ compiler.customize()
+ print compiler.get_version()
diff --git a/distutils/fcompiler/pg.py b/distutils/fcompiler/pg.py
new file mode 100644
index 000000000..bfdf9752c
--- /dev/null
+++ b/distutils/fcompiler/pg.py
@@ -0,0 +1,42 @@
+
+# http://www.pgroup.com
+
+import os
+import sys
+
+from scipy.distutils.cpuinfo import cpu
+from scipy.distutils.fcompiler import FCompiler
+
+class PGroupFCompiler(FCompiler):
+
+ compiler_type = 'pg'
+ version_pattern = r'\s*pg(f77|f90|hpf) (?P<version>[\d.-]+).*'
+
+ executables = {
+ 'version_cmd' : ["pgf77", "-V 2>/dev/null"],
+ 'compiler_f77' : ["pgf77"],
+ 'compiler_fix' : ["pgf90", "-Mfixed"],
+ 'compiler_f90' : ["pgf90"],
+ 'linker_so' : ["pgf90","-shared","-fpic"],
+ 'archiver' : ["ar", "-cr"],
+ 'ranlib' : ["ranlib"]
+ }
+ pic_flags = ['-fpic']
+ module_dir_switch = '-module '
+ module_include_switch = '-I'
+
+ def get_flags(self):
+ opt = ['-Minform=inform','-Mnosecond_underscore']
+ return self.pic_flags + opt
+ def get_flags_opt(self):
+ return ['-fast']
+ def get_flags_debug(self):
+ return ['-g']
+
+if __name__ == '__main__':
+ from distutils import log
+ log.set_verbosity(2)
+ from scipy.distutils.fcompiler import new_fcompiler
+ compiler = new_fcompiler(compiler='pg')
+ compiler.customize()
+ print compiler.get_version()
diff --git a/distutils/fcompiler/sun.py b/distutils/fcompiler/sun.py
new file mode 100644
index 000000000..0c0599a06
--- /dev/null
+++ b/distutils/fcompiler/sun.py
@@ -0,0 +1,47 @@
+import os
+import sys
+
+from scipy.distutils.cpuinfo import cpu
+from scipy.distutils.fcompiler import FCompiler
+
+class SunFCompiler(FCompiler):
+
+ compiler_type = 'sun'
+ version_pattern = r'(f90|f95): (Sun|Forte Developer 7|WorkShop 6 update \d+) Fortran 95 (?P<version>[^\s]+).*'
+
+ executables = {
+ 'version_cmd' : ["f90", "-V"],
+ 'compiler_f77' : ["f90"],
+ 'compiler_fix' : ["f90", "-fixed"],
+ 'compiler_f90' : ["f90"],
+ 'linker_so' : ["f90","-Bdynamic","-G"],
+ 'archiver' : ["ar", "-cr"],
+ 'ranlib' : ["ranlib"]
+ }
+ module_dir_switch = '-moddir='
+ module_include_switch = '-M'
+ pic_flags = ['-xcode=pic32']
+
+ def get_flags_f77(self):
+ ret = ["-ftrap=%none"]
+ if (self.get_version() or '') >= '7':
+ ret.append("-f77")
+ else:
+ ret.append("-fixed")
+ return ret
+ def get_opt(self):
+ return ['-fast','-dalign']
+ def get_arch(self):
+ return ['-xtarget=generic']
+ def get_libraries(self):
+ opt = []
+ opt.extend(['fsu','sunmath','mvec','f77compat'])
+ return opt
+
+if __name__ == '__main__':
+ from distutils import log
+ log.set_verbosity(2)
+ from scipy.distutils.fcompiler import new_fcompiler
+ compiler = new_fcompiler(compiler='sun')
+ compiler.customize()
+ print compiler.get_version()
diff --git a/distutils/fcompiler/vast.py b/distutils/fcompiler/vast.py
new file mode 100644
index 000000000..1c6b40032
--- /dev/null
+++ b/distutils/fcompiler/vast.py
@@ -0,0 +1,50 @@
+import os
+import sys
+
+from scipy.distutils.cpuinfo import cpu
+from scipy.distutils.fcompiler.gnu import GnuFCompiler
+
+class VastFCompiler(GnuFCompiler):
+
+ compiler_type = 'vast'
+ version_pattern = r'\s*Pacific-Sierra Research vf90 '\
+ '(Personal|Professional)\s+(?P<version>[^\s]*)'
+
+    # VAST f90 does not support -o with -c, so object files are created
+    # in the current directory and then moved to the build directory.
+ object_switch = ' && function _mvfile { mv -v `basename $1` $1 ; } && _mvfile '
+
+ executables = {
+ 'version_cmd' : ["vf90", "-v"],
+ 'compiler_f77' : ["g77"],
+ 'compiler_fix' : ["f90", "-Wv,-ya"],
+ 'compiler_f90' : ["f90"],
+ 'linker_so' : ["f90"],
+ 'archiver' : ["ar", "-cr"],
+ 'ranlib' : ["ranlib"]
+ }
+ module_dir_switch = None #XXX Fix me
+ module_include_switch = None #XXX Fix me
+
+ def get_version_cmd(self):
+ f90 = self.compiler_f90[0]
+ d,b = os.path.split(f90)
+ vf90 = os.path.join(d,'v'+b)
+ return vf90
+
+ def get_flags_arch(self):
+ vast_version = self.get_version()
+ gnu = GnuFCompiler()
+ gnu.customize()
+ self.version = gnu.get_version()
+ opt = GnuFCompiler.get_flags_arch(self)
+ self.version = vast_version
+ return opt
+
+if __name__ == '__main__':
+ from distutils import log
+ log.set_verbosity(2)
+ from scipy.distutils.fcompiler import new_fcompiler
+ compiler = new_fcompiler(compiler='vast')
+ compiler.customize()
+ print compiler.get_version()
diff --git a/distutils/from_template.py b/distutils/from_template.py
new file mode 100644
index 000000000..96866bb9b
--- /dev/null
+++ b/distutils/from_template.py
@@ -0,0 +1,262 @@
+#!/usr/bin/python
+"""
+
+process_file(filename)
+
+ takes templated file .xxx.src and produces .xxx file where .xxx
+ is .pyf .f90 or .f using the following template rules:
+
+ '<..>' denotes a template.
+
+ All function and subroutine blocks in a source file with names that
+ contain '<..>' will be replicated according to the rules in '<..>'.
+
+  The number of comma-separated words in '<..>' will determine the number of
+ replicates.
+
+ '<..>' may have two different forms, named and short. For example,
+
+ named:
+ <p=d,s,z,c> where anywhere inside a block '<p>' will be replaced with
+ 'd', 's', 'z', and 'c' for each replicate of the block.
+
+ <_c> is already defined: <_c=s,d,c,z>
+ <_t> is already defined: <_t=real,double precision,complex,double complex>
+
+ short:
+ <s,d,c,z>, a short form of the named, useful when no <p> appears inside
+ a block.
+
+  In general, '<..>' contains a comma-separated list of arbitrary
+  expressions. If an expression must contain a comma, '<', or '>',
+  prepend that character with a backslash.
+
+  If an expression matches '\\<index>' then it will be replaced
+  by the <index>-th expression.
+
+ Note that all '<..>' forms in a block must have the same number of
+ comma-separated entries.
+
+ Predefined named template rules:
+ <prefix=s,d,c,z>
+ <ftype=real,double precision,complex,double complex>
+ <ftypereal=real,double precision,\\0,\\1>
+ <ctype=float,double,complex_float,complex_double>
+ <ctypereal=float,double,\\0,\\1>
+
+"""
+
+__all__ = ['process_str','process_file']
+
+import string,os,sys
+if sys.version[:3]>='2.3':
+ import re
+else:
+ import pre as re
+ False = 0
+ True = 1
+if sys.version[:5]=='2.2.1':
+ import re
+
+routine_start_re = re.compile(r'(\n|\A)(( (\$|\*))|)\s*(subroutine|function)\b',re.I)
+routine_end_re = re.compile(r'\n\s*end\s*(subroutine|function)\b.*(\n|\Z)',re.I)
+function_start_re = re.compile(r'\n (\$|\*)\s*function\b',re.I)
+
+def parse_structure(astr):
+ """ Return a list of tuples for each function or subroutine each
+ tuple is the start and end of a subroutine or function to be
+ expanded.
+ """
+
+ spanlist = []
+ ind = 0
+ while 1:
+ m = routine_start_re.search(astr,ind)
+ if m is None:
+ break
+ start = m.start()
+ if function_start_re.match(astr,start,m.end()):
+ while 1:
+ i = astr.rfind('\n',ind,start)
+ if i==-1:
+ break
+ start = i
+ if astr[i:i+7]!='\n $':
+ break
+ start += 1
+ m = routine_end_re.search(astr,m.end())
+ ind = end = m and m.end()-1 or len(astr)
+ spanlist.append((start,end))
+ return spanlist
+
+template_re = re.compile(r"<\s*(\w[\w\d]*)\s*>")
+named_re = re.compile(r"<\s*(\w[\w\d]*)\s*=\s*(.*?)\s*>")
+list_re = re.compile(r"<\s*((.*?))\s*>")
+
+def find_repl_patterns(astr):
+ reps = named_re.findall(astr)
+ names = {}
+ for rep in reps:
+ name = rep[0].strip() or unique_key(names)
+ repl = rep[1].replace('\,','@comma@')
+ thelist = conv(repl)
+ names[name] = thelist
+ return names
+
+item_re = re.compile(r"\A\\(?P<index>\d+)\Z")
+def conv(astr):
+ b = astr.split(',')
+ l = [x.strip() for x in b]
+ for i in range(len(l)):
+ m = item_re.match(l[i])
+ if m:
+ j = int(m.group('index'))
+ l[i] = l[j]
+ return ','.join(l)
+
+def unique_key(adict):
+ """ Obtain a unique key given a dictionary."""
+ allkeys = adict.keys()
+ done = False
+ n = 1
+ while not done:
+ newkey = '__l%s' % (n)
+ if newkey in allkeys:
+ n += 1
+ else:
+ done = True
+ return newkey
+
+
+template_name_re = re.compile(r'\A\s*(\w[\w\d]*)\s*\Z')
+def expand_sub(substr,names):
+ substr = substr.replace('\>','@rightarrow@')
+ substr = substr.replace('\<','@leftarrow@')
+ lnames = find_repl_patterns(substr)
+ substr = named_re.sub(r"<\1>",substr) # get rid of definition templates
+
+ def listrepl(mobj):
+ thelist = conv(mobj.group(1).replace('\,','@comma@'))
+ if template_name_re.match(thelist):
+ return "<%s>" % (thelist)
+ name = None
+ for key in lnames.keys(): # see if list is already in dictionary
+ if lnames[key] == thelist:
+ name = key
+ if name is None: # this list is not in the dictionary yet
+ name = unique_key(lnames)
+ lnames[name] = thelist
+ return "<%s>" % name
+
+ substr = list_re.sub(listrepl, substr) # convert all lists to named templates
+ # newnames are constructed as needed
+
+ numsubs = None
+ base_rule = None
+ rules = {}
+ for r in template_re.findall(substr):
+ if not rules.has_key(r):
+ thelist = lnames.get(r,names.get(r,None))
+ if thelist is None:
+ raise ValueError,'No replicates found for <%s>' % (r)
+ if not names.has_key(r) and not thelist.startswith('_'):
+ names[r] = thelist
+ rule = [i.replace('@comma@',',') for i in thelist.split(',')]
+ num = len(rule)
+
+ if numsubs is None:
+ numsubs = num
+ rules[r] = rule
+ base_rule = r
+ elif num == numsubs:
+ rules[r] = rule
+ else:
+ print "Mismatch in number of replacements (base <%s=%s>)"\
+ " for <%s=%s>. Ignoring." % (base_rule,
+ ','.join(rules[base_rule]),
+ r,thelist)
+ if not rules:
+ return substr
+
+ def namerepl(mobj):
+ name = mobj.group(1)
+ return rules.get(name,(k+1)*[name])[k]
+
+ newstr = ''
+ for k in range(numsubs):
+ newstr += template_re.sub(namerepl, substr) + '\n\n'
+
+ newstr = newstr.replace('@rightarrow@','>')
+ newstr = newstr.replace('@leftarrow@','<')
+ return newstr
+
+def process_str(allstr):
+ newstr = allstr
+ writestr = '' #_head # using _head will break free-format files
+
+ struct = parse_structure(newstr)
+
+ oldend = 0
+ names = {}
+ names.update(_special_names)
+ for sub in struct:
+ writestr += newstr[oldend:sub[0]]
+ names.update(find_repl_patterns(newstr[oldend:sub[0]]))
+ writestr += expand_sub(newstr[sub[0]:sub[1]],names)
+ oldend = sub[1]
+ writestr += newstr[oldend:]
+
+ return writestr
+
+include_src_re = re.compile(r"(\n|\A)\s*include\s*['\"](?P<name>[\w\d./\\]+[.]src)['\"]",re.I)
+
+def resolve_includes(source):
+ d = os.path.dirname(source)
+ fid = open(source)
+ lines = []
+ for line in fid.readlines():
+ m = include_src_re.match(line)
+ if m:
+ fn = m.group('name')
+ if not os.path.isabs(fn):
+ fn = os.path.join(d,fn)
+ if os.path.isfile(fn):
+ print 'Including file',fn
+ lines.extend(resolve_includes(fn))
+ else:
+ lines.append(line)
+ else:
+ lines.append(line)
+ fid.close()
+ return lines
+
+def process_file(source):
+ lines = resolve_includes(source)
+ return process_str(''.join(lines))
+
+_special_names = find_repl_patterns('''
+<_c=s,d,c,z>
+<_t=real,double precision,complex,double complex>
+<prefix=s,d,c,z>
+<ftype=real,double precision,complex,double complex>
+<ctype=float,double,complex_float,complex_double>
+<ftypereal=real,double precision,\\0,\\1>
+<ctypereal=float,double,\\0,\\1>
+''')
+
+if __name__ == "__main__":
+
+ try:
+ file = sys.argv[1]
+ except IndexError:
+ fid = sys.stdin
+ outfile = sys.stdout
+ else:
+ fid = open(file,'r')
+ (base, ext) = os.path.splitext(file)
+ newname = base
+ outfile = open(newname,'w')
+
+ allstr = fid.read()
+ writestr = process_str(allstr)
+ outfile.write(writestr)
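
To make the template rules documented at the top of from_template.py concrete, here
is a small worked example (an editorial sketch, not part of the commit). It assumes
the module is importable as scipy.distutils.from_template, like its sibling modules.

# Editorial sketch: expanding a '<..>' template with process_str().
from scipy.distutils.from_template import process_str

template = '''
      subroutine <prefix>axpy(n, a, x, y)
      <ftype> a, x(n), y(n)
      integer n
      end subroutine <prefix>axpy
'''
# <prefix> (s,d,c,z) and <ftype> (real, double precision, complex,
# double complex) are predefined rules, so the routine is replicated
# four times, e.g. "subroutine saxpy" with "real a, x(n), y(n)".
print process_str(template)
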
diff --git a/distutils/line_endings.py b/distutils/line_endings.py
new file mode 100644
index 000000000..4f30af06a
--- /dev/null
+++ b/distutils/line_endings.py
@@ -0,0 +1,75 @@
+""" Functions for converting from DOS to UNIX line endings
+"""
+
+import sys, re, os
+
+def dos2unix(file):
+ "Replace CRLF with LF in argument files. Print names of changed files."
+ if os.path.isdir(file):
+ print file, "Directory!"
+ return
+
+ data = open(file, "rb").read()
+ if '\0' in data:
+ print file, "Binary!"
+ return
+
+ newdata = re.sub("\r\n", "\n", data)
+ if newdata != data:
+ print 'dos2unix:', file
+ f = open(file, "wb")
+ f.write(newdata)
+ f.close()
+ return file
+ else:
+ print file, 'ok'
+
+def dos2unix_one_dir(modified_files,dir_name,file_names):
+ for file in file_names:
+ full_path = os.path.join(dir_name,file)
+ file = dos2unix(full_path)
+ if file is not None:
+ modified_files.append(file)
+
+def dos2unix_dir(dir_name):
+ modified_files = []
+ os.path.walk(dir_name,dos2unix_one_dir,modified_files)
+ return modified_files
+#----------------------------------
+
+def unix2dos(file):
+ "Replace LF with CRLF in argument files. Print names of changed files."
+ if os.path.isdir(file):
+ print file, "Directory!"
+ return
+
+ data = open(file, "rb").read()
+ if '\0' in data:
+ print file, "Binary!"
+ return
+ newdata = re.sub("\r\n", "\n", data)
+ newdata = re.sub("\n", "\r\n", newdata)
+ if newdata != data:
+ print 'unix2dos:', file
+ f = open(file, "wb")
+ f.write(newdata)
+ f.close()
+ return file
+ else:
+ print file, 'ok'
+
+def unix2dos_one_dir(modified_files,dir_name,file_names):
+ for file in file_names:
+ full_path = os.path.join(dir_name,file)
+        file = unix2dos(full_path)
+ if file is not None:
+ modified_files.append(file)
+
+def unix2dos_dir(dir_name):
+ modified_files = []
+ os.path.walk(dir_name,unix2dos_one_dir,modified_files)
+ return modified_files
+
+if __name__ == "__main__":
+ import sys
+ dos2unix_dir(sys.argv[1])
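
A short usage note for the helpers above (editorial, not part of the commit): the
*_dir functions walk a tree and return the names of the files they rewrote. The
directory name below is made up; the module is assumed to be importable as
scipy.distutils.line_endings.

# Editorial sketch: normalizing a checkout to UNIX line endings.
from scipy.distutils.line_endings import dos2unix_dir

changed = dos2unix_dir('some_package')   # hypothetical directory name
for name in changed:
    print 'converted:', name
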
diff --git a/distutils/log.py b/distutils/log.py
new file mode 100644
index 000000000..fa8981bbc
--- /dev/null
+++ b/distutils/log.py
@@ -0,0 +1,47 @@
+# Colored log, requires Python 2.3 or up.
+
+import sys
+from distutils.log import *
+from distutils.log import Log as old_Log
+from distutils.log import _global_log
+from scipy.distutils.misc_util import red_text, yellow_text, cyan_text
+
+
+def _fix_args(args,flag=1):
+ if type(args) is type(''):
+ return args.replace('%','%%')
+ if flag and type(args) is type(()):
+ return tuple([_fix_args(a,flag=0) for a in args])
+ return args
+
+class Log(old_Log):
+ def _log(self, level, msg, args):
+ if level>= self.threshold:
+ if args:
+ print _global_color_map[level](msg % _fix_args(args))
+ else:
+ print _global_color_map[level](msg)
+ sys.stdout.flush()
+_global_log.__class__ = Log
+
+def set_verbosity(v):
+ prev_level = _global_log.threshold
+ if v<0:
+ set_threshold(ERROR)
+ elif v == 0:
+ set_threshold(WARN)
+ elif v == 1:
+ set_threshold(INFO)
+ elif v >= 2:
+ set_threshold(DEBUG)
+ return {FATAL:-2,ERROR:-1,WARN:0,INFO:1,DEBUG:2}.get(prev_level,1)
+
+_global_color_map = {
+ DEBUG:cyan_text,
+ INFO:yellow_text,
+ WARN:red_text,
+ ERROR:red_text,
+ FATAL:red_text
+}
+
+set_verbosity(1)
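
The module above only recolours the standard distutils logger, so it is used exactly
like distutils.log; a tiny editorial sketch (not part of the commit) follows.

# Editorial sketch: using the colored logger defined above.
from scipy.distutils import log

log.set_verbosity(2)    # show DEBUG and up
log.debug('cyan when the terminal supports color')
log.info('yellow')
log.warn('red')
log.set_verbosity(0)    # back to WARN and up
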
diff --git a/distutils/misc_util.py b/distutils/misc_util.py
new file mode 100644
index 000000000..72b262d38
--- /dev/null
+++ b/distutils/misc_util.py
@@ -0,0 +1,614 @@
+import os
+import re
+import sys
+import imp
+import copy
+import types
+import glob
+
+def get_path(mod_name,parent_path=None):
+ """ Return path of the module.
+
+ Returned path is relative to parent_path when given,
+ otherwise it is absolute path.
+ """
+ if mod_name == '__main__':
+ d = os.path.abspath('.')
+ elif mod_name == '__builtin__':
+ #builtin if/then added by Pearu for use in core.run_setup.
+ d = os.path.dirname(os.path.abspath(sys.argv[0]))
+ else:
+ __import__(mod_name)
+ mod = sys.modules[mod_name]
+ file = mod.__file__
+ d = os.path.dirname(os.path.abspath(file))
+ if parent_path is not None:
+ pd = os.path.abspath(parent_path)
+ if pd==d[:len(pd)]:
+ d = d[len(pd)+1:]
+ return d or '.'
+
+# Hooks for colored terminal output.
+# See also http://www.livinglogic.de/Python/ansistyle
+def terminal_has_colors():
+ if sys.platform=='cygwin' and not os.environ.has_key('USE_COLOR'):
+ # Avoid importing curses that causes illegal operation
+ # with a message:
+ # PYTHON2 caused an invalid page fault in
+ # module CYGNURSES7.DLL as 015f:18bbfc28
+ # Details: Python 2.3.3 [GCC 3.3.1 (cygming special)]
+ # ssh to Win32 machine from debian
+ # curses.version is 2.2
+ # CYGWIN_98-4.10, release 1.5.7(0.109/3/2))
+ return 0
+ if hasattr(sys.stdout,'isatty') and sys.stdout.isatty():
+ try:
+ import curses
+ curses.setupterm()
+ if (curses.tigetnum("colors") >= 0
+ and curses.tigetnum("pairs") >= 0
+ and ((curses.tigetstr("setf") is not None
+ and curses.tigetstr("setb") is not None)
+ or (curses.tigetstr("setaf") is not None
+ and curses.tigetstr("setab") is not None)
+ or curses.tigetstr("scp") is not None)):
+ return 1
+ except Exception,msg:
+ pass
+ return 0
+
+if terminal_has_colors():
+ def red_text(s): return '\x1b[31m%s\x1b[0m'%s
+ def green_text(s): return '\x1b[32m%s\x1b[0m'%s
+ def yellow_text(s): return '\x1b[33m%s\x1b[0m'%s
+ def blue_text(s): return '\x1b[34m%s\x1b[0m'%s
+ def cyan_text(s): return '\x1b[36m%s\x1b[0m'%s
+else:
+ def red_text(s): return s
+ def green_text(s): return s
+ def yellow_text(s): return s
+ def cyan_text(s): return s
+ def blue_text(s): return s
+
+#########################
+
+def cyg2win32(path):
+ if sys.platform=='cygwin' and path.startswith('/cygdrive'):
+ path = path[10] + ':' + os.path.normcase(path[11:])
+ return path
+
+#########################
+
+#XXX need support for .C that is also C++
+cxx_ext_match = re.compile(r'.*[.](cpp|cxx|cc)\Z',re.I).match
+fortran_ext_match = re.compile(r'.*[.](f90|f95|f77|for|ftn|f)\Z',re.I).match
+f90_ext_match = re.compile(r'.*[.](f90|f95)\Z',re.I).match
+f90_module_name_match = re.compile(r'\s*module\s*(?P<name>[\w_]+)',re.I).match
+def _get_f90_modules(source):
+ """ Return a list of Fortran f90 module names that
+ given source file defines.
+ """
+ if not f90_ext_match(source):
+ return []
+ modules = []
+ f = open(source,'r')
+ f_readlines = getattr(f,'xreadlines',f.readlines)
+ for line in f_readlines():
+ m = f90_module_name_match(line)
+ if m:
+ name = m.group('name')
+ modules.append(name)
+ # break # XXX can we assume that there is one module per file?
+ f.close()
+ return modules
+
+def all_strings(lst):
+ """ Return True if all items in lst are string objects. """
+ for item in lst:
+ if type(item) is not types.StringType:
+ return False
+ return True
+
+def has_f_sources(sources):
+ """ Return True if sources contains Fortran files """
+ for source in sources:
+ if fortran_ext_match(source):
+ return True
+ return False
+
+def has_cxx_sources(sources):
+ """ Return True if sources contains C++ files """
+ for source in sources:
+ if cxx_ext_match(source):
+ return True
+ return False
+
+def filter_sources(sources):
+ """ Return four lists of filenames containing
+ C, C++, Fortran, and Fortran 90 module sources,
+ respectively.
+ """
+ c_sources = []
+ cxx_sources = []
+ f_sources = []
+ fmodule_sources = []
+ for source in sources:
+ if fortran_ext_match(source):
+ modules = _get_f90_modules(source)
+ if modules:
+ fmodule_sources.append(source)
+ else:
+ f_sources.append(source)
+ elif cxx_ext_match(source):
+ cxx_sources.append(source)
+ else:
+ c_sources.append(source)
+ return c_sources, cxx_sources, f_sources, fmodule_sources
+
+
+def _get_headers(directory_list):
+ # get *.h files from list of directories
+ headers = []
+ for dir in directory_list:
+ head = glob.glob(os.path.join(dir,"*.h")) #XXX: *.hpp files??
+ headers.extend(head)
+ return headers
+
+def _get_directories(list_of_sources):
+ # get unique directories from list of sources.
+ direcs = []
+ for file in list_of_sources:
+ dir = os.path.split(file)
+ if dir[0] != '' and not dir[0] in direcs:
+ direcs.append(dir[0])
+ return direcs
+
+def get_dependencies(sources):
+ #XXX scan sources for include statements
+ return _get_headers(_get_directories(sources))
+
+def is_local_src_dir(directory):
+ """ Return true if directory is local directory.
+ """
+ abs_dir = os.path.abspath(directory)
+ c = os.path.commonprefix([os.getcwd(),abs_dir])
+ new_dir = abs_dir[len(c):].split(os.sep)
+ if new_dir and not new_dir[0]:
+ new_dir = new_dir[1:]
+ if new_dir and new_dir[0]=='build':
+ return False
+ new_dir = os.sep.join(new_dir)
+ return os.path.isdir(new_dir)
+
+def _gsf_visit_func(filenames,dirname,names):
+ if os.path.basename(dirname) in ['CVS','.svn','build']:
+ names[:] = []
+ return
+ for name in names:
+ if name[-1] in "~#":
+ continue
+ fullname = os.path.join(dirname,name)
+ ext = os.path.splitext(fullname)[1]
+ if ext and ext in ['.pyc','.o']:
+ continue
+ if os.path.isfile(fullname):
+ filenames.append(fullname)
+
+def get_ext_source_files(ext):
+ # Get sources and any include files in the same directory.
+ filenames = []
+ sources = filter(lambda s:type(s) is types.StringType,ext.sources)
+ filenames.extend(sources)
+ filenames.extend(get_dependencies(sources))
+ for d in ext.depends:
+ if is_local_src_dir(d):
+ os.path.walk(d,_gsf_visit_func,filenames)
+ elif os.path.isfile(d):
+ filenames.append(d)
+ return filenames
+
+def get_lib_source_files(lib):
+ filenames = []
+ sources = lib[1].get('sources',[])
+ sources = filter(lambda s:type(s) is types.StringType,sources)
+ filenames.extend(sources)
+ filenames.extend(get_dependencies(sources))
+ depends = lib[1].get('depends',[])
+ for d in depends:
+ if is_local_src_dir(d):
+ os.path.walk(d,_gsf_visit_func,filenames)
+ elif os.path.isfile(d):
+ filenames.append(d)
+ return filenames
+
+def get_data_files(data):
+ if type(data) is types.StringType:
+ return [data]
+ sources = data[1]
+ filenames = []
+ for s in sources:
+ if is_local_src_dir(s):
+ os.path.walk(s,_gsf_visit_func,filenames)
+ elif os.path.isfile(s):
+ filenames.append(s)
+ return filenames
+
+def dot_join(*args):
+ return '.'.join(filter(None,args))
+
+def get_frame(level=0):
+ try:
+ return sys._getframe(level+1)
+ except AttributeError:
+ frame = sys.exc_info()[2].tb_frame
+ for i in range(level+1):
+ frame = frame.f_back
+ return frame
+
+######################
+
+class Configuration:
+
+ _list_keys = ['packages','ext_modules','data_files','include_dirs',
+ 'libraries','headers','scripts']
+ _dict_keys = ['package_dir']
+
+ def __init__(self,
+ package_name=None,
+ parent_name=None,
+ top_path=None,
+ package_path=None,
+ **attrs):
+ """ Construct configuration instance of a package.
+ """
+ self.name = dot_join(parent_name, package_name)
+
+ caller_frame = get_frame(1)
+ caller_name = eval('__name__',caller_frame.f_globals,caller_frame.f_locals)
+
+ self.local_path = get_path(caller_name, top_path)
+ if top_path is None:
+ top_path = self.local_path
+ if package_path is None:
+ package_path = self.local_path
+ elif os.path.isdir(os.path.join(self.local_path,package_path)):
+ package_path = os.path.join(self.local_path,package_path)
+ self.top_path = top_path
+
+ self.list_keys = copy.copy(self._list_keys)
+ self.dict_keys = copy.copy(self._dict_keys)
+
+ for n in self.list_keys:
+ setattr(self,n,copy.copy(attrs.get(n,[])))
+
+ for n in self.dict_keys:
+ setattr(self,n,copy.copy(attrs.get(n,{})))
+
+ known_keys = self.list_keys + self.dict_keys
+ self.extra_keys = []
+ for n in attrs.keys():
+ if n in known_keys:
+ continue
+ a = attrs[n]
+ setattr(self,n,a)
+ if type(a) is types.ListType:
+ self.list_keys.append(n)
+ elif type(a) is types.DictType:
+ self.dict_keys.append(n)
+ else:
+ self.extra_keys.append(n)
+
+
+ if os.path.exists(os.path.join(package_path,'__init__.py')):
+ self.packages.append(self.name)
+ self.package_dir[self.name] = package_path
+ return
+
+ def todict(self):
+ """ Return configuration distionary suitable for passing
+ to distutils.core.setup() function.
+ """
+ d = {}
+ for n in self.list_keys + self.dict_keys + self.extra_keys:
+ a = getattr(self,n)
+ if a:
+ d[n] = a
+ if self.name:
+ d['name'] = self.name
+ return d
+
+ def add_subpackage(self,subpackage_name,subpackage_path=None):
+ """ Add subpackage configuration.
+ """
+ assert '.' not in subpackage_name,`subpackage_name`
+ if subpackage_path is None:
+ subpackage_path = os.path.join(self.local_path,subpackage_name)
+ setup_py = os.path.join(subpackage_path,'setup_%s.py' % (subpackage_name))
+ if not os.path.isfile(setup_py):
+ setup_py = os.path.join(subpackage_path,'setup.py')
+ if not os.path.isfile(setup_py):
+ print 'Assuming default configuration '\
+ '(%s/{setup_%s,setup}.py was not found)' \
+ % (os.path.dirname(setup_py),subpackage_name)
+ name = dot_join(self.name, subpackage_name)
+ self.packages.append(name)
+ self.package_dir[name] = subpackage_path
+ return
+
+ # In case setup_py imports local modules:
+ sys.path.insert(0,os.path.dirname(setup_py))
+
+ try:
+ info = (open(setup_py),setup_py,('.py','U',1))
+ setup_name = os.path.splitext(os.path.basename(setup_py))[0]
+ n = dot_join(self.name,setup_name)
+ setup_module = imp.load_module('_'.join(n.split('.')),*info)
+ if not hasattr(setup_module,'configuration'):
+ print 'Assuming default configuration '\
+ '(%s does not define configuration())' % (setup_module)
+ name = dot_join(self.name, subpackage_name)
+ self.packages.append(name)
+ self.package_dir[name] = subpackage_path
+ else:
+ args = (self.name,)
+ if setup_module.configuration.func_code.co_argcount>1:
+ args = args + (self.top_path,)
+ config = setup_module.configuration(*args)
+ if not config:
+ print 'No configuration returned, assuming unavailable.'
+ else:
+ if isinstance(config,Configuration):
+ config = config.todict()
+ self.dict_append(**config)
+ finally:
+ del sys.path[0]
+ return
+
+ def add_data_dir(self,data_path):
+ """ Add files under data_path to data_files list.
+ """
+ path = os.path.join(self.local_path,data_path)
+ filenames = []
+ os.path.walk(path, _gsf_visit_func,filenames)
+ self.add_data_files(*filenames)
+ return
+
+ def add_data_files(self,*files):
+ data_dict = {}
+ for f in files:
+ lf = f[len(self.local_path)+1:]
+ d = os.path.dirname(lf)
+ d = os.path.join(*(self.name.split('.')+[d]))
+ if not data_dict.has_key(d):
+ data_dict[d] = [f]
+ else:
+ data_dict[d].append(f)
+ self.data_files.extend(data_dict.items())
+ return
+
+ def add_include_dirs(self,*paths):
+ self.include_dirs.extend(self._fix_paths(paths))
+
+ def add_headers(self,*paths):
+ paths = self._fix_paths(paths)
+ self.headers.extend([(self.name,p) for p in paths])
+
+ def _fix_paths(self,paths):
+ new_paths = []
+ for n in paths:
+ if isinstance(n,str):
+ if '*' in n or '?' in n:
+ p = glob.glob(n)
+ p2 = glob.glob(os.path.join(self.local_path,n))
+ if p2:
+ new_paths.extend(p2)
+ elif p:
+ new_paths.extend(p)
+ else:
+ new_paths.append(n)
+ else:
+ n2 = os.path.join(self.local_path,n)
+ if os.path.exists(n2):
+ new_paths.append(n2)
+ else:
+ new_paths.append(n)
+ else:
+ new_paths.append(n)
+ return new_paths
+
+ def paths(self,*paths):
+ return self._fix_paths(paths)
+
+ def add_extension(self,name,sources,**kw):
+ ext_args = copy.copy(kw)
+ ext_args['name'] = dot_join(self.name,name)
+ ext_args['sources'] = sources
+
+ for k in ext_args.keys():
+ v = ext_args[k]
+ if k in ['sources','depends']:
+ new_v = self._fix_paths(v)
+ ext_args[k] = new_v
+
+ from scipy.distutils.core import Extension
+ ext = Extension(**ext_args)
+ self.ext_modules.append(ext)
+ return ext
+
+ def add_library(self,name,sources,**build_info):
+ """
+ Valid keywords for build_info:
+ depends
+ macros
+ include_dirs
+ extra_compiler_args
+ f2py_options
+ """
+ build_info = copy.copy(build_info)
+ name = name + '@' + self.name
+ build_info['sources'] = sources
+
+ for k in build_info.keys():
+ v = build_info[k]
+ if k in ['sources','depends']:
+ new_v = self._fix_paths(v)
+ build_info[k] = new_v
+ self.libraries.append((name,build_info))
+ return
+
+ def dict_append(self,**dict):
+ for key in self.list_keys:
+ a = getattr(self,key)
+ a.extend(dict.get(key,[]))
+ for key in self.dict_keys:
+ a = getattr(self,key)
+ a.update(dict.get(key,{}))
+ known_keys = self.list_keys + self.dict_keys + self.extra_keys
+ for key in dict.keys():
+ if key not in known_keys and not hasattr(self,key):
+ print 'Inheriting attribute %r from %r' \
+ % (key,dict.get('name','?'))
+ setattr(self,key,dict[key])
+ self.extra_keys.append(key)
+ return
+
+ def __str__(self):
+ known_keys = self.list_keys + self.dict_keys + self.extra_keys
+ s = '<'+5*'-' + '\n'
+ s += 'Configuration of '+self.name+':\n'
+ for k in known_keys:
+ a = getattr(self,k,None)
+ if a:
+ s += '%s = %r\n' % (k,a)
+ s += 5*'-' + '>'
+ return s
+
+ def get_config_cmd(self):
+ cmd = get_cmd('config')
+ cmd.ensure_finalized()
+ cmd.dump_source = 0
+ cmd.noisy = 0
+ old_path = os.environ.get('PATH')
+ if old_path:
+ path = os.pathsep.join(['.',old_path])
+ os.environ['PATH'] = path
+ return cmd
+
+ def get_build_temp_dir(self):
+ cmd = get_cmd('build')
+ cmd.ensure_finalized()
+ return cmd.build_temp
+
+ def have_f77c(self):
+ """ Check for availability of Fortran 77 compiler.
+ Use it inside a source-generating function to ensure that
+ the setup distribution instance has been initialized.
+ """
+ simple_fortran_subroutine = '''
+ subroutine simple
+ end
+ '''
+ config_cmd = self.get_config_cmd()
+ flag = config_cmd.try_compile(simple_fortran_subroutine,lang='f77')
+ return flag
+
+ def have_f90c(self):
+ """ Check for availability of Fortran 90 compiler.
+ Use it inside a source-generating function to ensure that
+ the setup distribution instance has been initialized.
+ """
+ simple_fortran_subroutine = '''
+ subroutine simple
+ end
+ '''
+ config_cmd = self.get_config_cmd()
+ flag = config_cmd.try_compile(simple_fortran_subroutine,lang='f90')
+ return flag
+
+def get_cmd(cmdname,_cache={}):
+ if not _cache.has_key(cmdname):
+ import distutils.core
+ dist = distutils.core._setup_distribution
+ if dist is None:
+ from distutils.errors import DistutilsInternalError
+ raise DistutilsInternalError,\
+ 'setup distribution instance not initialized'
+ cmd = dist.get_command_obj(cmdname)
+ _cache[cmdname] = cmd
+ return _cache[cmdname]
+
+#########################
+
+def dict_append(d,**kws):
+ for k,v in kws.items():
+ if d.has_key(k):
+ d[k].extend(v)
+ else:
+ d[k] = v
+
+def generate_config_py(extension, build_dir):
+ """ Generate <package>/config.py file containing system_info
+ information used during building the package.
+
+ Usage:\
+ ext = Extension(dot_join(config['name'],'config'),
+ sources=[generate_config_py])
+ config['ext_modules'].append(ext)
+ """
+ from scipy.distutils.system_info import system_info
+ from distutils.dir_util import mkpath
+ target = os.path.join(*([build_dir]+extension.name.split('.'))) + '.py'
+ mkpath(os.path.dirname(target))
+ f = open(target,'w')
+ f.write('# This file is generated by %s\n' % (os.path.abspath(sys.argv[0])))
+ f.write('# It contains system_info results at the time of building this package.\n')
+ f.write('__all__ = ["get_info","show"]\n\n')
+ for k,i in system_info.saved_results.items():
+ f.write('%s=%r\n' % (k,i))
+ f.write('\ndef get_info(name): g=globals(); return g.get(name,g.get(name+"_info",{}))\n')
+ f.write('''
+def show():
+ for name,info_dict in globals().items():
+ if name[0]=="_" or type(info_dict) is not type({}): continue
+ print name+":"
+ if not info_dict:
+ print " NOT AVAILABLE"
+ for k,v in info_dict.items():
+ v = str(v)
+ if k==\'sources\' and len(v)>200: v = v[:60]+\' ...\\n... \'+v[-60:]
+ print \' %s = %s\'%(k,v)
+ print
+ return
+ ''')
+
+ f.close()
+ return target
+
+def generate_svn_version_py(extension, build_dir):
+ """ Generate __svn_version__.py file containing SVN
+ revision number of a module.
+
+ To use, add the following codelet to setup
+ configuration(..) function
+
+ ext = Extension(dot_join(config['name'],'__svn_version__'),
+ sources=[generate_svn_version_py])
+ ext.local_path = local_path
+ config['ext_modules'].append(ext)
+
+ """
+ from distutils import dep_util
+ local_path = extension.local_path
+ target = os.path.join(build_dir, '__svn_version__.py')
+ entries = os.path.join(local_path,'.svn','entries')
+ if os.path.isfile(entries):
+ if not dep_util.newer(entries, target):
+ return target
+ elif os.path.isfile(target):
+ return target
+
+ revision = get_svn_revision(local_path)
+ f = open(target,'w')
+ f.write('revision=%s\n' % (revision))
+ f.close()
+ return target
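To make the Configuration API above concrete, here is a minimal, hypothetical package setup script built on it; the package name 'mypkg' and its source files are invented for illustration only (compare the real distutils/setup.py that follows).

# Illustrative sketch; 'mypkg' and its sources do not exist in this patch.
from scipy.distutils.misc_util import Configuration
from scipy.distutils.core import setup

def configuration(parent_package='', top_path=None):
    config = Configuration('mypkg', parent_package, top_path)
    config.add_data_dir('tests')                               # ship mypkg/tests/* as data
    config.add_extension('_hello', sources=['hellomodule.c'])  # C extension mypkg._hello
    return config

if __name__ == '__main__':
    setup(**configuration(top_path='').todict())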
diff --git a/distutils/setup.py b/distutils/setup.py
new file mode 100644
index 000000000..a7e1ad114
--- /dev/null
+++ b/distutils/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+from scipy.distutils.core import setup
+from scipy.distutils.misc_util import Configuration
+
+def configuration(parent_package='',top_path=None):
+ config = Configuration('distutils',parent_package,top_path)
+ config.add_subpackage('command')
+ config.add_subpackage('fcompiler')
+ config.add_data_dir('tests')
+ return config.todict()
+
+if __name__ == '__main__':
+ setup(**configuration(top_path=''))
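The system_info module introduced next is what supplies library information to setup scripts like the one above. A hedged sketch of the intended flow, using only names defined in the file below; the 'mypkg' extension and its source file are again hypothetical.

# Sketch: feed system_info results into an extension build.
from scipy.distutils.system_info import get_info
from scipy.distutils.core import Extension

lapack = get_info('lapack_opt')      # {} means the resource was not found
if not lapack:
    raise SystemExit('no usable LAPACK/ATLAS found; see site.cfg')

ext = Extension('mypkg.flinalg',
                sources=['flinalgmodule.c'],
                **lapack)            # libraries, library_dirs, define_macros, ...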
diff --git a/distutils/system_info.py b/distutils/system_info.py
new file mode 100644
index 000000000..2492a0c89
--- /dev/null
+++ b/distutils/system_info.py
@@ -0,0 +1,1494 @@
+#!/usr/bin/env python
+"""
+This file defines a set of system_info classes for getting
+information about various resources (libraries, library directories,
+include directories, etc.) in the system. Currently, the following
+classes are available:
+
+ atlas_info
+ atlas_threads_info
+ atlas_blas_info
+ atlas_blas_threads_info
+ lapack_atlas_info
+ blas_info
+ lapack_info
+ blas_opt_info # usage recommended
+ lapack_opt_info # usage recommended
+ fftw_info,dfftw_info,sfftw_info
+ fftw_threads_info,dfftw_threads_info,sfftw_threads_info
+ djbfft_info
+ x11_info
+ lapack_src_info
+ blas_src_info
+ numpy_info
+ numarray_info
+ boost_python_info
+ agg2_info
+ wx_info
+ gdk_pixbuf_xlib_2_info
+ gdk_pixbuf_2_info
+ gdk_x11_2_info
+ gtkp_x11_2_info
+ gtkp_2_info
+ xft_info
+ freetype2_info
+
+Usage:
+ info_dict = get_info(<name>)
+ where <name> is a string 'atlas','x11','fftw','lapack','blas',
+ 'lapack_src', 'blas_src', etc. For a complete list of allowed names,
+ see the definition of the get_info() function below.
+
+ The returned info_dict is a dictionary which is compatible with
+ distutils.setup keyword arguments. If info_dict == {}, then the
+ requested resource is not available (system_info could not find it).
+
+ Several *_info classes specify an environment variable for overriding
+ the location of the corresponding software. When such an environment
+ variable is set to 'None', the software is ignored, even when it
+ is available on the system.
+
+Global parameters:
+ system_info.search_static_first - search static libraries (.a)
+ in precedence to shared ones (.so, .sl) if enabled.
+ system_info.verbosity - output the results to stdout if enabled.
+
+The file 'site.cfg' in the same directory as this module is read
+for configuration options. The format is that used by ConfigParser (i.e.,
+Windows .INI style). The section DEFAULT has options that are the default
+ for each section. The available sections are fftw, atlas, and x11. Appropriate
+defaults are used if nothing is specified.
+
+The order of finding the locations of resources is the following:
+ 1. environment variable
+ 2. section in site.cfg
+ 3. DEFAULT section in site.cfg
+Only the first complete match is returned.
+
+Example:
+----------
+[DEFAULT]
+library_dirs = /usr/lib:/usr/local/lib:/opt/lib
+include_dirs = /usr/include:/usr/local/include:/opt/include
+src_dirs = /usr/local/src:/opt/src
+# search static libraries (.a) in preference to shared ones (.so)
+search_static_first = 0
+
+[fftw]
+fftw_libs = rfftw, fftw
+fftw_opt_libs = rfftw_threaded, fftw_threaded
+# if the above aren't found, look for {s,d}fftw_libs and {s,d}fftw_opt_libs
+
+[atlas]
+library_dirs = /usr/lib/3dnow:/usr/lib/3dnow/atlas
+# for overriding the names of the atlas libraries
+atlas_libs = lapack, f77blas, cblas, atlas
+
+[x11]
+library_dirs = /usr/X11R6/lib
+include_dirs = /usr/X11R6/include
+----------
+
+Authors:
+ Pearu Peterson <pearu@cens.ioc.ee>, February 2002
+ David M. Cooke <cookedm@physics.mcmaster.ca>, April 2002
+
+Copyright 2002 Pearu Peterson all rights reserved,
+Pearu Peterson <pearu@cens.ioc.ee>
+Permission to use, modify, and distribute this software is given under the
+terms of the SciPy (BSD style) license. See LICENSE.txt that came with
+this distribution for specifics.
+
+NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
+"""
+
+__revision__ = '$Id: system_info.py,v 1.1 2005/04/09 19:29:35 pearu Exp $'
+import sys,os,re,types
+import warnings
+from distutils.errors import DistutilsError
+from glob import glob
+import ConfigParser
+from exec_command import find_executable, exec_command, get_pythonexe
+
+from distutils.sysconfig import get_config_vars
+
+if sys.platform == 'win32':
+ default_lib_dirs = ['C:\\'] # probably not very helpful...
+ default_include_dirs = []
+ default_src_dirs = ['.']
+ default_x11_lib_dirs = []
+ default_x11_include_dirs = []
+else:
+ default_lib_dirs = ['/usr/local/lib', '/opt/lib', '/usr/lib',
+ '/sw/lib']
+ default_include_dirs = ['/usr/local/include',
+ '/opt/include', '/usr/include',
+ '/sw/include']
+ default_src_dirs = ['.','/usr/local/src', '/opt/src','/sw/src']
+ default_x11_lib_dirs = ['/usr/X11R6/lib','/usr/X11/lib','/usr/lib']
+ default_x11_include_dirs = ['/usr/X11R6/include','/usr/X11/include',
+ '/usr/include']
+
+if os.path.join(sys.prefix, 'lib') not in default_lib_dirs:
+ default_lib_dirs.insert(0,os.path.join(sys.prefix, 'lib'))
+ default_include_dirs.append(os.path.join(sys.prefix, 'include'))
+ default_src_dirs.append(os.path.join(sys.prefix, 'src'))
+
+default_lib_dirs = filter(os.path.isdir, default_lib_dirs)
+default_include_dirs = filter(os.path.isdir, default_include_dirs)
+default_src_dirs = filter(os.path.isdir, default_src_dirs)
+
+so_ext = get_config_vars('SO')[0] or ''
+
+def get_info(name,notfound_action=0):
+ """
+ notfound_action:
+ 0 - do nothing
+ 1 - display warning message
+ 2 - raise error
+ """
+ cl = {'atlas':atlas_info, # use lapack_opt or blas_opt instead
+ 'atlas_threads':atlas_threads_info, # ditto
+ 'atlas_blas':atlas_blas_info,
+ 'atlas_blas_threads':atlas_blas_threads_info,
+ 'lapack_atlas':lapack_atlas_info, # use lapack_opt instead
+ 'lapack_atlas_threads':lapack_atlas_threads_info, # ditto
+ 'x11':x11_info,
+ 'fftw':fftw_info,
+ 'dfftw':dfftw_info,
+ 'sfftw':sfftw_info,
+ 'fftw_threads':fftw_threads_info,
+ 'dfftw_threads':dfftw_threads_info,
+ 'sfftw_threads':sfftw_threads_info,
+ 'djbfft':djbfft_info,
+ 'blas':blas_info, # use blas_opt instead
+ 'lapack':lapack_info, # use lapack_opt instead
+ 'lapack_src':lapack_src_info,
+ 'blas_src':blas_src_info,
+ 'numpy':numpy_info,
+ 'numeric':numpy_info, # alias to numpy, for build_ext --backends support
+ 'numarray':numarray_info,
+ 'lapack_opt':lapack_opt_info,
+ 'blas_opt':blas_opt_info,
+ 'boost_python':boost_python_info,
+ 'agg2':agg2_info,
+ 'wx':wx_info,
+ 'gdk_pixbuf_xlib_2':gdk_pixbuf_xlib_2_info,
+ 'gdk-pixbuf-xlib-2.0':gdk_pixbuf_xlib_2_info,
+ 'gdk_pixbuf_2':gdk_pixbuf_2_info,
+ 'gdk-pixbuf-2.0':gdk_pixbuf_2_info,
+ 'gdk':gdk_info,
+ 'gdk_2':gdk_2_info,
+ 'gdk-2.0':gdk_2_info,
+ 'gdk_x11_2':gdk_x11_2_info,
+ 'gdk-x11-2.0':gdk_x11_2_info,
+ 'gtkp_x11_2':gtkp_x11_2_info,
+ 'gtk+-x11-2.0':gtkp_x11_2_info,
+ 'gtkp_2':gtkp_2_info,
+ 'gtk+-2.0':gtkp_2_info,
+ 'xft':xft_info,
+ 'freetype2':freetype2_info,
+ }.get(name.lower(),system_info)
+ return cl().get_info(notfound_action)
+
+class NotFoundError(DistutilsError):
+ """Some third-party program or library is not found."""
+
+class AtlasNotFoundError(NotFoundError):
+ """
+ Atlas (http://math-atlas.sourceforge.net/) libraries not found.
+ Directories to search for the libraries can be specified in the
+ scipy_distutils/site.cfg file (section [atlas]) or by setting
+ the ATLAS environment variable."""
+
+class LapackNotFoundError(NotFoundError):
+ """
+ Lapack (http://www.netlib.org/lapack/) libraries not found.
+ Directories to search for the libraries can be specified in the
+ scipy_distutils/site.cfg file (section [lapack]) or by setting
+ the LAPACK environment variable."""
+
+class LapackSrcNotFoundError(LapackNotFoundError):
+ """
+ Lapack (http://www.netlib.org/lapack/) sources not found.
+ Directories to search for the sources can be specified in the
+ scipy_distutils/site.cfg file (section [lapack_src]) or by setting
+ the LAPACK_SRC environment variable."""
+
+class BlasNotFoundError(NotFoundError):
+ """
+ Blas (http://www.netlib.org/blas/) libraries not found.
+ Directories to search for the libraries can be specified in the
+ scipy_distutils/site.cfg file (section [blas]) or by setting
+ the BLAS environment variable."""
+
+class BlasSrcNotFoundError(BlasNotFoundError):
+ """
+ Blas (http://www.netlib.org/blas/) sources not found.
+ Directories to search for the sources can be specified in the
+ scipy_distutils/site.cfg file (section [blas_src]) or by setting
+ the BLAS_SRC environment variable."""
+
+class FFTWNotFoundError(NotFoundError):
+ """
+ FFTW (http://www.fftw.org/) libraries not found.
+ Directories to search for the libraries can be specified in the
+ scipy_distutils/site.cfg file (section [fftw]) or by setting
+ the FFTW environment variable."""
+
+class DJBFFTNotFoundError(NotFoundError):
+ """
+ DJBFFT (http://cr.yp.to/djbfft.html) libraries not found.
+ Directories to search for the libraries can be specified in the
+ scipy_distutils/site.cfg file (section [djbfft]) or by setting
+ the DJBFFT environment variable."""
+
+class F2pyNotFoundError(NotFoundError):
+ """
+ f2py2e (http://cens.ioc.ee/projects/f2py2e/) module not found.
+ Get it from above location, install it, and retry setup.py."""
+
+class NumericNotFoundError(NotFoundError):
+ """
+ Numeric (http://www.numpy.org/) module not found.
+ Get it from above location, install it, and retry setup.py."""
+
+class X11NotFoundError(NotFoundError):
+ """X11 libraries not found."""
+
+class system_info:
+
+ """ get_info() is the only public method. Don't use others.
+ """
+ section = 'DEFAULT'
+ dir_env_var = None
+ search_static_first = 0 # XXX: disabled by default, may disappear in
+ # future unless it is proved to be useful.
+ verbosity = 1
+ saved_results = {}
+
+ notfounderror = NotFoundError
+
+ def __init__ (self,
+ default_lib_dirs=default_lib_dirs,
+ default_include_dirs=default_include_dirs,
+ verbosity = 1,
+ ):
+ self.__class__.info = {}
+ self.local_prefixes = []
+ defaults = {}
+ defaults['libraries'] = ''
+ defaults['library_dirs'] = os.pathsep.join(default_lib_dirs)
+ defaults['include_dirs'] = os.pathsep.join(default_include_dirs)
+ defaults['src_dirs'] = os.pathsep.join(default_src_dirs)
+ defaults['search_static_first'] = str(self.search_static_first)
+ self.cp = ConfigParser.ConfigParser(defaults)
+ try:
+ f = __file__
+ except NameError,msg:
+ f = sys.argv[0]
+ cf = os.path.join(os.path.split(os.path.abspath(f))[0],
+ 'site.cfg')
+ self.cp.read([cf])
+ if not self.cp.has_section(self.section):
+ self.cp.add_section(self.section)
+ self.search_static_first = self.cp.getboolean(self.section,
+ 'search_static_first')
+ assert isinstance(self.search_static_first, type(0))
+
+ def calc_libraries_info(self):
+ libs = self.get_libraries()
+ dirs = self.get_lib_dirs()
+ info = {}
+ for lib in libs:
+ i = None
+ for d in dirs:
+ i = self.check_libs(d,[lib])
+ if i is not None:
+ break
+ if i is not None:
+ dict_append(info,**i)
+ else:
+ print 'Library %s was not found. Ignoring' % (lib)
+ return info
+
+ def set_info(self,**info):
+ if info:
+ lib_info = self.calc_libraries_info()
+ dict_append(info,**lib_info)
+ self.saved_results[self.__class__.__name__] = info
+
+ def has_info(self):
+ return self.saved_results.has_key(self.__class__.__name__)
+
+ def get_info(self,notfound_action=0):
+ """ Return a dictonary with items that are compatible
+ with scipy_distutils.setup keyword arguments.
+ """
+ flag = 0
+ if not self.has_info():
+ flag = 1
+ if self.verbosity>0:
+ print self.__class__.__name__ + ':'
+ if hasattr(self, 'calc_info'):
+ self.calc_info()
+ if notfound_action:
+ if not self.has_info():
+ if notfound_action==1:
+ warnings.warn(self.notfounderror.__doc__)
+ elif notfound_action==2:
+ raise self.notfounderror,self.notfounderror.__doc__
+ else:
+ raise ValueError,`notfound_action`
+
+ if self.verbosity>0:
+ if not self.has_info():
+ print ' NOT AVAILABLE'
+ self.set_info()
+ else:
+ print ' FOUND:'
+
+ res = self.saved_results.get(self.__class__.__name__)
+ if self.verbosity>0 and flag:
+ for k,v in res.items():
+ v = str(v)
+ if k=='sources' and len(v)>200: v = v[:60]+' ...\n... '+v[-60:]
+ print ' %s = %s'%(k,v)
+ print
+
+ return res
+
+ def get_paths(self, section, key):
+ dirs = self.cp.get(section, key).split(os.pathsep)
+ env_var = self.dir_env_var
+ if env_var:
+ if type(env_var) is type([]):
+ e0 = env_var[-1]
+ for e in env_var:
+ if os.environ.has_key(e):
+ e0 = e
+ break
+ if not env_var[0]==e0:
+ print 'Setting %s=%s' % (env_var[0],e0)
+ env_var = e0
+ if env_var and os.environ.has_key(env_var):
+ d = os.environ[env_var]
+ if d=='None':
+ print 'Disabled',self.__class__.__name__,'(%s is None)' \
+ % (self.dir_env_var)
+ return []
+ if os.path.isfile(d):
+ dirs = [os.path.dirname(d)] + dirs
+ l = getattr(self,'_lib_names',[])
+ if len(l)==1:
+ b = os.path.basename(d)
+ b = os.path.splitext(b)[0]
+ if b[:3]=='lib':
+ print 'Replacing _lib_names[0]==%r with %r' \
+ % (self._lib_names[0], b[3:])
+ self._lib_names[0] = b[3:]
+ else:
+ ds = d.split(os.pathsep)
+ ds2 = []
+ for d in ds:
+ if os.path.isdir(d):
+ ds2.append(d)
+ for dd in ['include','lib']:
+ d1 = os.path.join(d,dd)
+ if os.path.isdir(d1):
+ ds2.append(d1)
+ dirs = ds2 + dirs
+ default_dirs = self.cp.get('DEFAULT', key).split(os.pathsep)
+ dirs.extend(default_dirs)
+ ret = []
+ [ret.append(d) for d in dirs if os.path.isdir(d) and d not in ret]
+ if self.verbosity>1:
+ print '(',key,'=',':'.join(ret),')'
+ return ret
+
+ def get_lib_dirs(self, key='library_dirs'):
+ return self.get_paths(self.section, key)
+
+ def get_include_dirs(self, key='include_dirs'):
+ return self.get_paths(self.section, key)
+
+ def get_src_dirs(self, key='src_dirs'):
+ return self.get_paths(self.section, key)
+
+ def get_libs(self, key, default):
+ try:
+ libs = self.cp.get(self.section, key)
+ except ConfigParser.NoOptionError:
+ if not default:
+ return []
+ if type(default) is type(''):
+ return [default]
+ return default
+ return [b for b in [a.strip() for a in libs.split(',')] if b]
+
+ def get_libraries(self, key='libraries'):
+ return self.get_libs(key,'')
+
+ def check_libs(self,lib_dir,libs,opt_libs =[]):
+ """ If static or shared libraries are available then return
+ their info dictionary. """
+ if self.search_static_first:
+ exts = ['.a',so_ext]
+ else:
+ exts = [so_ext,'.a']
+ if sys.platform=='cygwin':
+ exts.append('.dll.a')
+ for ext in exts:
+ info = self._check_libs(lib_dir,libs,opt_libs,ext)
+ if info is not None: return info
+ return
+
+ def _lib_list(self, lib_dir, libs, ext):
+ assert type(lib_dir) is type('')
+ liblist = []
+ for l in libs:
+ p = self.combine_paths(lib_dir, 'lib'+l+ext)
+ if p:
+ assert len(p)==1
+ liblist.append(p[0])
+ return liblist
+
+ def _extract_lib_names(self,libs):
+ return [os.path.splitext(os.path.basename(p))[0][3:] \
+ for p in libs]
+
+ def _check_libs(self,lib_dir,libs, opt_libs, ext):
+ found_libs = self._lib_list(lib_dir, libs, ext)
+ if len(found_libs) == len(libs):
+ found_libs = self._extract_lib_names(found_libs)
+ info = {'libraries' : found_libs, 'library_dirs' : [lib_dir]}
+ opt_found_libs = self._lib_list(lib_dir, opt_libs, ext)
+ if len(opt_found_libs) == len(opt_libs):
+ opt_found_libs = self._extract_lib_names(opt_found_libs)
+ info['libraries'].extend(opt_found_libs)
+ return info
+
+ def combine_paths(self,*args):
+ return combine_paths(*args,**{'verbosity':self.verbosity})
+
+class fftw_info(system_info):
+ section = 'fftw'
+ dir_env_var = 'FFTW'
+ libs = ['rfftw', 'fftw']
+ includes = ['fftw.h','rfftw.h']
+ macros = [('SCIPY_FFTW_H',None)]
+ notfounderror = FFTWNotFoundError
+
+ def __init__(self):
+ system_info.__init__(self)
+
+ def calc_info(self):
+ lib_dirs = self.get_lib_dirs()
+ incl_dirs = self.get_include_dirs()
+ incl_dir = None
+ libs = self.get_libs(self.section+'_libs', self.libs)
+ info = None
+ for d in lib_dirs:
+ r = self.check_libs(d,libs)
+ if r is not None:
+ info = r
+ break
+ if info is not None:
+ flag = 0
+ for d in incl_dirs:
+ if len(self.combine_paths(d,self.includes))==2:
+ dict_append(info,include_dirs=[d])
+ flag = 1
+ incl_dirs = [d]
+ incl_dir = d
+ break
+ if flag:
+ dict_append(info,define_macros=self.macros)
+ else:
+ info = None
+ if info is not None:
+ self.set_info(**info)
+
+class dfftw_info(fftw_info):
+ section = 'fftw'
+ dir_env_var = 'FFTW'
+ libs = ['drfftw','dfftw']
+ includes = ['dfftw.h','drfftw.h']
+ macros = [('SCIPY_DFFTW_H',None)]
+
+class sfftw_info(fftw_info):
+ section = 'fftw'
+ dir_env_var = 'FFTW'
+ libs = ['srfftw','sfftw']
+ includes = ['sfftw.h','srfftw.h']
+ macros = [('SCIPY_SFFTW_H',None)]
+
+class fftw_threads_info(fftw_info):
+ section = 'fftw'
+ dir_env_var = 'FFTW'
+ libs = ['rfftw_threads','fftw_threads']
+ includes = ['fftw_threads.h','rfftw_threads.h']
+ macros = [('SCIPY_FFTW_THREADS_H',None)]
+
+class dfftw_threads_info(fftw_info):
+ section = 'fftw'
+ dir_env_var = 'FFTW'
+ libs = ['drfftw_threads','dfftw_threads']
+ includes = ['dfftw_threads.h','drfftw_threads.h']
+ macros = [('SCIPY_DFFTW_THREADS_H',None)]
+
+class sfftw_threads_info(fftw_info):
+ section = 'fftw'
+ dir_env_var = 'FFTW'
+ libs = ['srfftw_threads','sfftw_threads']
+ includes = ['sfftw_threads.h','srfftw_threads.h']
+ macros = [('SCIPY_SFFTW_THREADS_H',None)]
+
+class djbfft_info(system_info):
+ section = 'djbfft'
+ dir_env_var = 'DJBFFT'
+ notfounderror = DJBFFTNotFoundError
+
+ def get_paths(self, section, key):
+ pre_dirs = system_info.get_paths(self, section, key)
+ dirs = []
+ for d in pre_dirs:
+ dirs.extend(self.combine_paths(d,['djbfft'])+[d])
+ return [ d for d in dirs if os.path.isdir(d) ]
+
+ def calc_info(self):
+ lib_dirs = self.get_lib_dirs()
+ incl_dirs = self.get_include_dirs()
+ info = None
+ for d in lib_dirs:
+ p = self.combine_paths (d,['djbfft.a'])
+ if p:
+ info = {'extra_objects':p}
+ break
+ p = self.combine_paths (d,['libdjbfft.a'])
+ if p:
+ info = {'libraries':['djbfft'],'library_dirs':[d]}
+ break
+ if info is None:
+ return
+ for d in incl_dirs:
+ if len(self.combine_paths(d,['fftc8.h','fftfreq.h']))==2:
+ dict_append(info,include_dirs=[d],
+ define_macros=[('SCIPY_DJBFFT_H',None)])
+ self.set_info(**info)
+ return
+ return
+
+class atlas_info(system_info):
+ section = 'atlas'
+ dir_env_var = 'ATLAS'
+ _lib_names = ['f77blas','cblas']
+ if sys.platform[:7]=='freebsd':
+ _lib_atlas = ['atlas_r']
+ _lib_lapack = ['alapack_r']
+ else:
+ _lib_atlas = ['atlas']
+ _lib_lapack = ['lapack']
+
+ notfounderror = AtlasNotFoundError
+
+ def get_paths(self, section, key):
+ pre_dirs = system_info.get_paths(self, section, key)
+ dirs = []
+ for d in pre_dirs:
+ dirs.extend(self.combine_paths(d,['atlas*','ATLAS*',
+ 'sse','3dnow','sse2'])+[d])
+ return [ d for d in dirs if os.path.isdir(d) ]
+
+ def calc_info(self):
+ lib_dirs = self.get_lib_dirs()
+ info = {}
+ atlas_libs = self.get_libs('atlas_libs',
+ self._lib_names + self._lib_atlas)
+ lapack_libs = self.get_libs('lapack_libs',self._lib_lapack)
+ atlas = None
+ lapack = None
+ atlas_1 = None
+ for d in lib_dirs:
+ atlas = self.check_libs(d,atlas_libs,[])
+ lapack_atlas = self.check_libs(d,['lapack_atlas'],[])
+ if atlas is not None:
+ lib_dirs2 = self.combine_paths(d,['atlas*','ATLAS*'])+[d]
+ for d2 in lib_dirs2:
+ lapack = self.check_libs(d2,lapack_libs,[])
+ if lapack is not None:
+ break
+ else:
+ lapack = None
+ if lapack is not None:
+ break
+ if atlas:
+ atlas_1 = atlas
+ print self.__class__
+ if atlas is None:
+ atlas = atlas_1
+ if atlas is None:
+ return
+ include_dirs = self.get_include_dirs()
+ h = (self.combine_paths(lib_dirs+include_dirs,'cblas.h') or [None])[0]
+ if h:
+ h = os.path.dirname(h)
+ dict_append(info,include_dirs=[h])
+ info['language'] = 'c'
+ if lapack is not None:
+ dict_append(info,**lapack)
+ dict_append(info,**atlas)
+ elif 'lapack_atlas' in atlas['libraries']:
+ dict_append(info,**atlas)
+ dict_append(info,define_macros=[('ATLAS_WITH_LAPACK_ATLAS',None)])
+ self.set_info(**info)
+ return
+ else:
+ dict_append(info,**atlas)
+ dict_append(info,define_macros=[('ATLAS_WITHOUT_LAPACK',None)])
+ message = """
+*********************************************************************
+ Could not find lapack library within the ATLAS installation.
+*********************************************************************
+"""
+ warnings.warn(message)
+ self.set_info(**info)
+ return
+
+ # Check if lapack library is complete, only warn if it is not.
+ lapack_dir = lapack['library_dirs'][0]
+ lapack_name = lapack['libraries'][0]
+ lapack_lib = None
+ for e in ['.a',so_ext]:
+ fn = os.path.join(lapack_dir,'lib'+lapack_name+e)
+ if os.path.exists(fn):
+ lapack_lib = fn
+ break
+ if lapack_lib is not None:
+ sz = os.stat(lapack_lib)[6]
+ if sz <= 4000*1024:
+ message = """
+*********************************************************************
+ Lapack library (from ATLAS) is probably incomplete:
+ size of %s is %sk (expected >4000k)
+
+ Follow the instructions in the KNOWN PROBLEMS section of the file
+ scipy/INSTALL.txt.
+*********************************************************************
+""" % (lapack_lib,sz/1024)
+ warnings.warn(message)
+ else:
+ info['language'] = 'f77'
+
+ self.set_info(**info)
+
+class atlas_blas_info(atlas_info):
+ _lib_names = ['f77blas','cblas']
+
+ def calc_info(self):
+ lib_dirs = self.get_lib_dirs()
+ info = {}
+ atlas_libs = self.get_libs('atlas_libs',
+ self._lib_names + self._lib_atlas)
+ atlas = None
+ for d in lib_dirs:
+ atlas = self.check_libs(d,atlas_libs,[])
+ if atlas is not None:
+ break
+ if atlas is None:
+ return
+ include_dirs = self.get_include_dirs()
+ h = (self.combine_paths(lib_dirs+include_dirs,'cblas.h') or [None])[0]
+ if h:
+ h = os.path.dirname(h)
+ dict_append(info,include_dirs=[h])
+ info['language'] = 'c'
+
+ dict_append(info,**atlas)
+
+ self.set_info(**info)
+ return
+
+
+class atlas_threads_info(atlas_info):
+ dir_env_var = ['PTATLAS','ATLAS']
+ _lib_names = ['ptf77blas','ptcblas']
+
+class atlas_blas_threads_info(atlas_blas_info):
+ dir_env_var = ['PTATLAS','ATLAS']
+ _lib_names = ['ptf77blas','ptcblas']
+
+class lapack_atlas_info(atlas_info):
+ _lib_names = ['lapack_atlas'] + atlas_info._lib_names
+
+class lapack_atlas_threads_info(atlas_threads_info):
+ _lib_names = ['lapack_atlas'] + atlas_threads_info._lib_names
+
+class lapack_info(system_info):
+ section = 'lapack'
+ dir_env_var = 'LAPACK'
+ _lib_names = ['lapack']
+ notfounderror = LapackNotFoundError
+
+ def calc_info(self):
+ lib_dirs = self.get_lib_dirs()
+
+ lapack_libs = self.get_libs('lapack_libs', self._lib_names)
+ for d in lib_dirs:
+ lapack = self.check_libs(d,lapack_libs,[])
+ if lapack is not None:
+ info = lapack
+ break
+ else:
+ return
+ info['language'] = 'f77'
+ self.set_info(**info)
+
+class lapack_src_info(system_info):
+ section = 'lapack_src'
+ dir_env_var = 'LAPACK_SRC'
+ notfounderror = LapackSrcNotFoundError
+
+ def get_paths(self, section, key):
+ pre_dirs = system_info.get_paths(self, section, key)
+ dirs = []
+ for d in pre_dirs:
+ dirs.extend([d] + self.combine_paths(d,['LAPACK*/SRC','SRC']))
+ return [ d for d in dirs if os.path.isdir(d) ]
+
+ def calc_info(self):
+ src_dirs = self.get_src_dirs()
+ src_dir = ''
+ for d in src_dirs:
+ if os.path.isfile(os.path.join(d,'dgesv.f')):
+ src_dir = d
+ break
+ if not src_dir:
+ #XXX: Get sources from netlib. Maybe ask first.
+ return
+ # The following is extracted from LAPACK-3.0/SRC/Makefile
+ allaux='''
+ ilaenv ieeeck lsame lsamen xerbla
+ ''' # *.f
+ laux = '''
+ bdsdc bdsqr disna labad lacpy ladiv lae2 laebz laed0 laed1
+ laed2 laed3 laed4 laed5 laed6 laed7 laed8 laed9 laeda laev2
+ lagtf lagts lamch lamrg lanst lapy2 lapy3 larnv larrb larre
+ larrf lartg laruv las2 lascl lasd0 lasd1 lasd2 lasd3 lasd4
+ lasd5 lasd6 lasd7 lasd8 lasd9 lasda lasdq lasdt laset lasq1
+ lasq2 lasq3 lasq4 lasq5 lasq6 lasr lasrt lassq lasv2 pttrf
+ stebz stedc steqr sterf
+ ''' # [s|d]*.f
+ lasrc = '''
+ gbbrd gbcon gbequ gbrfs gbsv gbsvx gbtf2 gbtrf gbtrs gebak
+ gebal gebd2 gebrd gecon geequ gees geesx geev geevx gegs gegv
+ gehd2 gehrd gelq2 gelqf gels gelsd gelss gelsx gelsy geql2
+ geqlf geqp3 geqpf geqr2 geqrf gerfs gerq2 gerqf gesc2 gesdd
+ gesv gesvd gesvx getc2 getf2 getrf getri getrs ggbak ggbal
+ gges ggesx ggev ggevx ggglm gghrd gglse ggqrf ggrqf ggsvd
+ ggsvp gtcon gtrfs gtsv gtsvx gttrf gttrs gtts2 hgeqz hsein
+ hseqr labrd lacon laein lags2 lagtm lahqr lahrd laic1 lals0
+ lalsa lalsd langb lange langt lanhs lansb lansp lansy lantb
+ lantp lantr lapll lapmt laqgb laqge laqp2 laqps laqsb laqsp
+ laqsy lar1v lar2v larf larfb larfg larft larfx largv larrv
+ lartv larz larzb larzt laswp lasyf latbs latdf latps latrd
+ latrs latrz latzm lauu2 lauum pbcon pbequ pbrfs pbstf pbsv
+ pbsvx pbtf2 pbtrf pbtrs pocon poequ porfs posv posvx potf2
+ potrf potri potrs ppcon ppequ pprfs ppsv ppsvx pptrf pptri
+ pptrs ptcon pteqr ptrfs ptsv ptsvx pttrs ptts2 spcon sprfs
+ spsv spsvx sptrf sptri sptrs stegr stein sycon syrfs sysv
+ sysvx sytf2 sytrf sytri sytrs tbcon tbrfs tbtrs tgevc tgex2
+ tgexc tgsen tgsja tgsna tgsy2 tgsyl tpcon tprfs tptri tptrs
+ trcon trevc trexc trrfs trsen trsna trsyl trti2 trtri trtrs
+ tzrqf tzrzf
+ ''' # [s|c|d|z]*.f
+ sd_lasrc = '''
+ laexc lag2 lagv2 laln2 lanv2 laqtr lasy2 opgtr opmtr org2l
+ org2r orgbr orghr orgl2 orglq orgql orgqr orgr2 orgrq orgtr
+ orm2l orm2r ormbr ormhr orml2 ormlq ormql ormqr ormr2 ormr3
+ ormrq ormrz ormtr rscl sbev sbevd sbevx sbgst sbgv sbgvd sbgvx
+ sbtrd spev spevd spevx spgst spgv spgvd spgvx sptrd stev stevd
+ stevr stevx syev syevd syevr syevx sygs2 sygst sygv sygvd
+ sygvx sytd2 sytrd
+ ''' # [s|d]*.f
+ cz_lasrc = '''
+ bdsqr hbev hbevd hbevx hbgst hbgv hbgvd hbgvx hbtrd hecon heev
+ heevd heevr heevx hegs2 hegst hegv hegvd hegvx herfs hesv
+ hesvx hetd2 hetf2 hetrd hetrf hetri hetrs hpcon hpev hpevd
+ hpevx hpgst hpgv hpgvd hpgvx hprfs hpsv hpsvx hptrd hptrf
+ hptri hptrs lacgv lacp2 lacpy lacrm lacrt ladiv laed0 laed7
+ laed8 laesy laev2 lahef lanhb lanhe lanhp lanht laqhb laqhe
+ laqhp larcm larnv lartg lascl laset lasr lassq pttrf rot spmv
+ spr stedc steqr symv syr ung2l ung2r ungbr unghr ungl2 unglq
+ ungql ungqr ungr2 ungrq ungtr unm2l unm2r unmbr unmhr unml2
+ unmlq unmql unmqr unmr2 unmr3 unmrq unmrz unmtr upgtr upmtr
+ ''' # [c|z]*.f
+ #######
+ sclaux = laux + ' econd ' # s*.f
+ dzlaux = laux + ' secnd ' # d*.f
+ slasrc = lasrc + sd_lasrc # s*.f
+ dlasrc = lasrc + sd_lasrc # d*.f
+ clasrc = lasrc + cz_lasrc + ' srot srscl ' # c*.f
+ zlasrc = lasrc + cz_lasrc + ' drot drscl ' # z*.f
+ oclasrc = ' icmax1 scsum1 ' # *.f
+ ozlasrc = ' izmax1 dzsum1 ' # *.f
+ sources = ['s%s.f'%f for f in (sclaux+slasrc).split()] \
+ + ['d%s.f'%f for f in (dzlaux+dlasrc).split()] \
+ + ['c%s.f'%f for f in (clasrc).split()] \
+ + ['z%s.f'%f for f in (zlasrc).split()] \
+ + ['%s.f'%f for f in (allaux+oclasrc+ozlasrc).split()]
+ sources = [os.path.join(src_dir,f) for f in sources]
+ #XXX: should we check here actual existence of source files?
+ info = {'sources':sources,'language':'f77'}
+ self.set_info(**info)
+
+atlas_version_c_text = r'''
+/* This file is generated from scipy_distutils/system_info.py */
+#ifdef __CPLUSPLUS__
+extern "C" {
+#endif
+#include "Python.h"
+static PyMethodDef module_methods[] = { {NULL,NULL} };
+DL_EXPORT(void) initatlas_version(void) {
+ void ATL_buildinfo(void);
+ ATL_buildinfo();
+ Py_InitModule("atlas_version", module_methods);
+}
+#ifdef __CPLUSPLUS__
+}
+#endif
+'''
+
+def get_atlas_version(**config):
+ from core import Extension, setup
+ from misc_util import get_build_temp
+ import log
+ magic = hex(hash(`config`))
+ def atlas_version_c(extension, build_dir,magic=magic):
+ source = os.path.join(build_dir,'atlas_version_%s.c' % (magic))
+ if os.path.isfile(source):
+ from distutils.dep_util import newer
+ if newer(source,__file__):
+ return source
+ f = open(source,'w')
+ f.write(atlas_version_c_text)
+ f.close()
+ return source
+ ext = Extension('atlas_version',
+ sources=[atlas_version_c],
+ **config)
+ extra_args = ['--build-lib',get_build_temp()]
+ for a in sys.argv:
+ if re.match('[-][-]compiler[=]',a):
+ extra_args.append(a)
+ try:
+ dist = setup(ext_modules=[ext],
+ script_name = 'get_atlas_version',
+ script_args = ['build_src','build_ext']+extra_args)
+ except Exception,msg:
+ print "##### msg: %s" % msg
+ if not msg:
+ msg = "Unknown Exception"
+ log.warn(msg)
+ return None
+
+ from distutils.sysconfig import get_config_var
+ so_ext = get_config_var('SO')
+ build_ext = dist.get_command_obj('build_ext')
+ target = os.path.join(build_ext.build_lib,'atlas_version'+so_ext)
+ cmd = [get_pythonexe(),'-c',
+ '"import imp;imp.load_dynamic(\\"atlas_version\\",\\"%s\\")"'\
+ % (os.path.basename(target))]
+ s,o = exec_command(cmd,execute_in=os.path.dirname(target),use_tee=0)
+ atlas_version = None
+ if not s:
+ m = re.search(r'ATLAS version (?P<version>\d+[.]\d+[.]\d+)',o)
+ if m:
+ atlas_version = m.group('version')
+ if atlas_version is None:
+ if re.search(r'undefined symbol: ATL_buildinfo',o,re.M):
+ atlas_version = '3.2.1_pre3.3.6'
+ else:
+ print 'Command:',' '.join(cmd)
+ print 'Status:',s
+ print 'Output:',o
+ return atlas_version
+
+
+class lapack_opt_info(system_info):
+
+ def calc_info(self):
+
+ if sys.platform=='darwin' and not os.environ.get('ATLAS',None):
+ args = []
+ link_args = []
+ if os.path.exists('/System/Library/Frameworks/Accelerate.framework/'):
+ args.extend(['-faltivec','-framework','Accelerate'])
+ link_args.extend(['-Wl,-framework','-Wl,Accelerate'])
+ elif os.path.exists('/System/Library/Frameworks/vecLib.framework/'):
+ args.extend(['-faltivec','-framework','vecLib'])
+ link_args.extend(['-Wl,-framework','-Wl,vecLib'])
+ if args:
+ self.set_info(extra_compile_args=args,
+ extra_link_args=link_args,
+ define_macros=[('NO_ATLAS_INFO',3)])
+ return
+
+ atlas_info = get_info('atlas_threads')
+ if not atlas_info:
+ atlas_info = get_info('atlas')
+ #atlas_info = {} ## uncomment for testing
+ atlas_version = None
+ need_lapack = 0
+ need_blas = 0
+ info = {}
+ if atlas_info:
+ version_info = atlas_info.copy()
+ atlas_version = get_atlas_version(**version_info)
+ if not atlas_info.has_key('define_macros'):
+ atlas_info['define_macros'] = []
+ if atlas_version is None:
+ atlas_info['define_macros'].append(('NO_ATLAS_INFO',2))
+ else:
+ atlas_info['define_macros'].append(('ATLAS_INFO',
+ '"\\"%s\\""' % atlas_version))
+ if atlas_version=='3.2.1_pre3.3.6':
+ atlas_info['define_macros'].append(('NO_ATLAS_INFO',4))
+ l = atlas_info.get('define_macros',[])
+ if ('ATLAS_WITH_LAPACK_ATLAS',None) in l \
+ or ('ATLAS_WITHOUT_LAPACK',None) in l:
+ need_lapack = 1
+ info = atlas_info
+ else:
+ warnings.warn(AtlasNotFoundError.__doc__)
+ need_blas = 1
+ need_lapack = 1
+ dict_append(info,define_macros=[('NO_ATLAS_INFO',1)])
+
+ if need_lapack:
+ lapack_info = get_info('lapack')
+ #lapack_info = {} ## uncomment for testing
+ if lapack_info:
+ dict_append(info,**lapack_info)
+ else:
+ warnings.warn(LapackNotFoundError.__doc__)
+ lapack_src_info = get_info('lapack_src')
+ if not lapack_src_info:
+ warnings.warn(LapackSrcNotFoundError.__doc__)
+ return
+ dict_append(info,libraries=[('flapack_src',lapack_src_info)])
+
+ if need_blas:
+ blas_info = get_info('blas')
+ #blas_info = {} ## uncomment for testing
+ if blas_info:
+ dict_append(info,**blas_info)
+ else:
+ warnings.warn(BlasNotFoundError.__doc__)
+ blas_src_info = get_info('blas_src')
+ if not blas_src_info:
+ warnings.warn(BlasSrcNotFoundError.__doc__)
+ return
+ dict_append(info,libraries=[('fblas_src',blas_src_info)])
+
+ self.set_info(**info)
+ return
+
+
+class blas_opt_info(system_info):
+
+ def calc_info(self):
+
+ if sys.platform=='darwin' and not os.environ.get('ATLAS',None):
+ args = []
+ link_args = []
+ if os.path.exists('/System/Library/Frameworks/Accelerate.framework/'):
+ args.extend(['-faltivec','-framework','Accelerate'])
+ link_args.extend(['-Wl,-framework','-Wl,Accelerate'])
+ elif os.path.exists('/System/Library/Frameworks/vecLib.framework/'):
+ args.extend(['-faltivec','-framework','vecLib'])
+ link_args.extend(['-Wl,-framework','-Wl,vecLib'])
+ if args:
+ self.set_info(extra_compile_args=args,
+ extra_link_args=link_args,
+ define_macros=[('NO_ATLAS_INFO',3)])
+ return
+
+ atlas_info = get_info('atlas_blas_threads')
+ if not atlas_info:
+ atlas_info = get_info('atlas_blas')
+ atlas_version = None
+ need_blas = 0
+ info = {}
+ if atlas_info:
+ version_info = atlas_info.copy()
+ atlas_version = get_atlas_version(**version_info)
+ if not atlas_info.has_key('define_macros'):
+ atlas_info['define_macros'] = []
+ if atlas_version is None:
+ atlas_info['define_macros'].append(('NO_ATLAS_INFO',2))
+ else:
+ atlas_info['define_macros'].append(('ATLAS_INFO',
+ '"\\"%s\\""' % atlas_version))
+ info = atlas_info
+ else:
+ warnings.warn(AtlasNotFoundError.__doc__)
+ need_blas = 1
+ dict_append(info,define_macros=[('NO_ATLAS_INFO',1)])
+
+ if need_blas:
+ blas_info = get_info('blas')
+ if blas_info:
+ dict_append(info,**blas_info)
+ else:
+ warnings.warn(BlasNotFoundError.__doc__)
+ blas_src_info = get_info('blas_src')
+ if not blas_src_info:
+ warnings.warn(BlasSrcNotFoundError.__doc__)
+ return
+ dict_append(info,libraries=[('fblas_src',blas_src_info)])
+
+ self.set_info(**info)
+ return
+
+
+class blas_info(system_info):
+ section = 'blas'
+ dir_env_var = 'BLAS'
+ _lib_names = ['blas']
+ notfounderror = BlasNotFoundError
+
+ def calc_info(self):
+ lib_dirs = self.get_lib_dirs()
+
+ blas_libs = self.get_libs('blas_libs', self._lib_names)
+ for d in lib_dirs:
+ blas = self.check_libs(d,blas_libs,[])
+ if blas is not None:
+ info = blas
+ break
+ else:
+ return
+ info['language'] = 'f77' # XXX: is it generally true?
+ self.set_info(**info)
+
+
+class blas_src_info(system_info):
+ section = 'blas_src'
+ dir_env_var = 'BLAS_SRC'
+ notfounderror = BlasSrcNotFoundError
+
+ def get_paths(self, section, key):
+ pre_dirs = system_info.get_paths(self, section, key)
+ dirs = []
+ for d in pre_dirs:
+ dirs.extend([d] + self.combine_paths(d,['blas']))
+ return [ d for d in dirs if os.path.isdir(d) ]
+
+ def calc_info(self):
+ src_dirs = self.get_src_dirs()
+ src_dir = ''
+ for d in src_dirs:
+ if os.path.isfile(os.path.join(d,'daxpy.f')):
+ src_dir = d
+ break
+ if not src_dir:
+ #XXX: Get sources from netlib. Maybe ask first.
+ return
+ blas1 = '''
+ caxpy csscal dnrm2 dzasum saxpy srotg zdotc ccopy cswap drot
+ dznrm2 scasum srotm zdotu cdotc dasum drotg icamax scnrm2
+ srotmg zdrot cdotu daxpy drotm idamax scopy sscal zdscal crotg
+ dcabs1 drotmg isamax sdot sswap zrotg cscal dcopy dscal izamax
+ snrm2 zaxpy zscal csrot ddot dswap sasum srot zcopy zswap
+ '''
+ blas2 = '''
+ cgbmv chpmv ctrsv dsymv dtrsv sspr2 strmv zhemv ztpmv cgemv
+ chpr dgbmv dsyr lsame ssymv strsv zher ztpsv cgerc chpr2 dgemv
+ dsyr2 sgbmv ssyr xerbla zher2 ztrmv cgeru ctbmv dger dtbmv
+ sgemv ssyr2 zgbmv zhpmv ztrsv chbmv ctbsv dsbmv dtbsv sger
+ stbmv zgemv zhpr chemv ctpmv dspmv dtpmv ssbmv stbsv zgerc
+ zhpr2 cher ctpsv dspr dtpsv sspmv stpmv zgeru ztbmv cher2
+ ctrmv dspr2 dtrmv sspr stpsv zhbmv ztbsv
+ '''
+ blas3 = '''
+ cgemm csymm ctrsm dsyrk sgemm strmm zhemm zsyr2k chemm csyr2k
+ dgemm dtrmm ssymm strsm zher2k zsyrk cher2k csyrk dsymm dtrsm
+ ssyr2k zherk ztrmm cherk ctrmm dsyr2k ssyrk zgemm zsymm ztrsm
+ '''
+ sources = [os.path.join(src_dir,f+'.f') \
+ for f in (blas1+blas2+blas3).split()]
+ #XXX: should we check here actual existence of source files?
+ info = {'sources':sources,'language':'f77'}
+ self.set_info(**info)
+
+class x11_info(system_info):
+ section = 'x11'
+ notfounderror = X11NotFoundError
+
+ def __init__(self):
+ system_info.__init__(self,
+ default_lib_dirs=default_x11_lib_dirs,
+ default_include_dirs=default_x11_include_dirs)
+
+ def calc_info(self):
+ if sys.platform in ['win32']:
+ return
+ lib_dirs = self.get_lib_dirs()
+ include_dirs = self.get_include_dirs()
+ x11_libs = self.get_libs('x11_libs', ['X11'])
+ for lib_dir in lib_dirs:
+ info = self.check_libs(lib_dir, x11_libs, [])
+ if info is not None:
+ break
+ else:
+ return
+ inc_dir = None
+ for d in include_dirs:
+ if self.combine_paths(d, 'X11/X.h'):
+ inc_dir = d
+ break
+ if inc_dir is not None:
+ dict_append(info, include_dirs=[inc_dir])
+ self.set_info(**info)
+
+class numpy_info(system_info):
+ section = 'numpy'
+ modulename = 'Numeric'
+ notfounderror = NumericNotFoundError
+
+ def __init__(self):
+ from distutils.sysconfig import get_python_inc
+ include_dirs = []
+ try:
+ module = __import__(self.modulename)
+ prefix = []
+ for name in module.__file__.split(os.sep):
+ if name=='lib':
+ break
+ prefix.append(name)
+ include_dirs.append(get_python_inc(prefix=os.sep.join(prefix)))
+ except ImportError:
+ pass
+ py_incl_dir = get_python_inc()
+ include_dirs.append(py_incl_dir)
+ for d in default_include_dirs:
+ d = os.path.join(d, os.path.basename(py_incl_dir))
+ if d not in include_dirs:
+ include_dirs.append(d)
+ system_info.__init__(self,
+ default_lib_dirs=[],
+ default_include_dirs=include_dirs)
+
+ def calc_info(self):
+ try:
+ module = __import__(self.modulename)
+ except ImportError:
+ return
+ info = {}
+ macros = [(self.modulename.upper()+'_VERSION',
+ '"\\"%s\\""' % (module.__version__)),
+ (self.modulename.upper(),None)]
+## try:
+## macros.append(
+## (self.modulename.upper()+'_VERSION_HEX',
+## hex(vstr2hex(module.__version__))),
+## )
+## except Exception,msg:
+## print msg
+ dict_append(info, define_macros = macros)
+ include_dirs = self.get_include_dirs()
+ inc_dir = None
+ for d in include_dirs:
+ if self.combine_paths(d,
+ os.path.join(self.modulename,
+ 'arrayobject.h')):
+ inc_dir = d
+ break
+ if inc_dir is not None:
+ dict_append(info, include_dirs=[inc_dir])
+ if info:
+ self.set_info(**info)
+ return
+
+class numarray_info(numpy_info):
+ section = 'numarray'
+ modulename = 'numarray'
+
+class boost_python_info(system_info):
+ section = 'boost_python'
+ dir_env_var = 'BOOST'
+
+ def get_paths(self, section, key):
+ pre_dirs = system_info.get_paths(self, section, key)
+ dirs = []
+ for d in pre_dirs:
+ dirs.extend([d] + self.combine_paths(d,['boost*']))
+ return [ d for d in dirs if os.path.isdir(d) ]
+
+ def calc_info(self):
+ from distutils.sysconfig import get_python_inc
+ src_dirs = self.get_src_dirs()
+ src_dir = ''
+ for d in src_dirs:
+ if os.path.isfile(os.path.join(d,'libs','python','src','module.cpp')):
+ src_dir = d
+ break
+ if not src_dir:
+ return
+ py_incl_dir = get_python_inc()
+ srcs_dir = os.path.join(src_dir,'libs','python','src')
+ bpl_srcs = glob(os.path.join(srcs_dir,'*.cpp'))
+ bpl_srcs += glob(os.path.join(srcs_dir,'*','*.cpp'))
+ info = {'libraries':[('boost_python_src',{'include_dirs':[src_dir,py_incl_dir],
+ 'sources':bpl_srcs})],
+ 'include_dirs':[src_dir],
+ }
+ if info:
+ self.set_info(**info)
+ return
+
+class agg2_info(system_info):
+ section = 'agg2'
+ dir_env_var = 'AGG2'
+
+ def get_paths(self, section, key):
+ pre_dirs = system_info.get_paths(self, section, key)
+ dirs = []
+ for d in pre_dirs:
+ dirs.extend([d] + self.combine_paths(d,['agg2*']))
+ return [ d for d in dirs if os.path.isdir(d) ]
+
+ def calc_info(self):
+ src_dirs = self.get_src_dirs()
+ src_dir = ''
+ for d in src_dirs:
+ if os.path.isfile(os.path.join(d,'src','agg_affine_matrix.cpp')):
+ src_dir = d
+ break
+ if not src_dir:
+ return
+ if sys.platform=='win32':
+ agg2_srcs = glob(os.path.join(src_dir,'src','platform','win32','agg_win32_bmp.cpp'))
+ else:
+ agg2_srcs = glob(os.path.join(src_dir,'src','*.cpp'))
+ agg2_srcs += [os.path.join(src_dir,'src','platform','X11','agg_platform_support.cpp')]
+
+ info = {'libraries':[('agg2_src',{'sources':agg2_srcs,
+ 'include_dirs':[os.path.join(src_dir,'include')],
+ })],
+ 'include_dirs':[os.path.join(src_dir,'include')],
+ }
+ if info:
+ self.set_info(**info)
+ return
+
+class _pkg_config_info(system_info):
+ section = None
+ config_env_var = 'PKG_CONFIG'
+ default_config_exe = 'pkg-config'
+ append_config_exe = ''
+ version_macro_name = None
+ release_macro_name = None
+ version_flag = '--modversion'
+ cflags_flag = '--cflags'
+
+ def get_config_exe(self):
+ if os.environ.has_key(self.config_env_var):
+ return os.environ[self.config_env_var]
+ return self.default_config_exe
+ def get_config_output(self, config_exe, option):
+ s,o = exec_command(config_exe+' '+self.append_config_exe+' '+option,use_tee=0)
+ if not s:
+ return o
+
+ def calc_info(self):
+ config_exe = find_executable(self.get_config_exe())
+ if not os.path.isfile(config_exe):
+ print 'File not found: %s. Cannot determine %s info.' \
+ % (config_exe, self.section)
+ return
+ info = {}
+ macros = []
+ libraries = []
+ library_dirs = []
+ include_dirs = []
+ extra_link_args = []
+ extra_compile_args = []
+ version = self.get_config_output(config_exe,self.version_flag)
+ if version:
+ macros.append((self.__class__.__name__.split('.')[-1].upper(),
+ '"\\"%s\\""' % (version)))
+ if self.version_macro_name:
+ macros.append((self.version_macro_name+'_%s' % (version.replace('.','_')),None))
+ if self.release_macro_name:
+ release = self.get_config_output(config_exe,'--release')
+ if release:
+ macros.append((self.release_macro_name+'_%s' % (release.replace('.','_')),None))
+ opts = self.get_config_output(config_exe,'--libs')
+ if opts:
+ for opt in opts.split():
+ if opt[:2]=='-l':
+ libraries.append(opt[2:])
+ elif opt[:2]=='-L':
+ library_dirs.append(opt[2:])
+ else:
+ extra_link_args.append(opt)
+ opts = self.get_config_output(config_exe,self.cflags_flag)
+ if opts:
+ for opt in opts.split():
+ if opt[:2]=='-I':
+ include_dirs.append(opt[2:])
+ elif opt[:2]=='-D':
+ if '=' in opt:
+ n,v = opt[2:].split('=')
+ macros.append((n,v))
+ else:
+ macros.append((opt[2:],None))
+ else:
+ extra_compile_args.append(opt)
+ if macros: dict_append(info, define_macros = macros)
+ if libraries: dict_append(info, libraries = libraries)
+ if library_dirs: dict_append(info, library_dirs = library_dirs)
+ if include_dirs: dict_append(info, include_dirs = include_dirs)
+ if extra_link_args: dict_append(info, extra_link_args = extra_link_args)
+ if extra_compile_args: dict_append(info, extra_compile_args = extra_compile_args)
+ if info:
+ self.set_info(**info)
+ return
+
+class wx_info(_pkg_config_info):
+ section = 'wx'
+ config_env_var = 'WX_CONFIG'
+ default_config_exe = 'wx-config'
+ append_config_exe = ''
+ version_macro_name = 'WX_VERSION'
+ release_macro_name = 'WX_RELEASE'
+ version_flag = '--version'
+ cflags_flag = '--cxxflags'
+
+class gdk_pixbuf_xlib_2_info(_pkg_config_info):
+ section = 'gdk_pixbuf_xlib_2'
+ append_config_exe = 'gdk-pixbuf-xlib-2.0'
+ version_macro_name = 'GDK_PIXBUF_XLIB_VERSION'
+
+class gdk_pixbuf_2_info(_pkg_config_info):
+ section = 'gdk_pixbuf_2'
+ append_config_exe = 'gdk-pixbuf-2.0'
+ version_macro_name = 'GDK_PIXBUF_VERSION'
+
+class gdk_x11_2_info(_pkg_config_info):
+ section = 'gdk_x11_2'
+ append_config_exe = 'gdk-x11-2.0'
+ version_macro_name = 'GDK_X11_VERSION'
+
+class gdk_2_info(_pkg_config_info):
+ section = 'gdk_2'
+ append_config_exe = 'gdk-2.0'
+ version_macro_name = 'GDK_VERSION'
+
+class gdk_info(_pkg_config_info):
+ section = 'gdk'
+ append_config_exe = 'gdk'
+ version_macro_name = 'GDK_VERSION'
+
+class gtkp_x11_2_info(_pkg_config_info):
+ section = 'gtkp_x11_2'
+ append_config_exe = 'gtk+-x11-2.0'
+ version_macro_name = 'GTK_X11_VERSION'
+
+
+class gtkp_2_info(_pkg_config_info):
+ section = 'gtkp_2'
+ append_config_exe = 'gtk+-2.0'
+ version_macro_name = 'GTK_VERSION'
+
+class xft_info(_pkg_config_info):
+ section = 'xft'
+ append_config_exe = 'xft'
+ version_macro_name = 'XFT_VERSION'
+
+class freetype2_info(_pkg_config_info):
+ section = 'freetype2'
+ append_config_exe = 'freetype2'
+ version_macro_name = 'FREETYPE2_VERSION'
+
+## def vstr2hex(version):
+## bits = []
+## n = [24,16,8,4,0]
+## r = 0
+## for s in version.split('.'):
+## r |= int(s) << n[0]
+## del n[0]
+## return r
+
+#--------------------------------------------------------------------
+
+def combine_paths(*args,**kws):
+ """ Return a list of existing paths composed by all combinations of
+ items from arguments.
+ """
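+    # For example (hypothetical layout), combine_paths(['/usr','/opt'],'include')
+    # globs /usr/include and /opt/include and returns whichever of them exist;
+    # each positional argument supplies the alternatives for one path component.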
+ r = []
+ for a in args:
+ if not a: continue
+ if type(a) is types.StringType:
+ a = [a]
+ r.append(a)
+ args = r
+ if not args: return []
+ if len(args)==1:
+ result = reduce(lambda a,b:a+b,map(glob,args[0]),[])
+ elif len (args)==2:
+ result = []
+ for a0 in args[0]:
+ for a1 in args[1]:
+ result.extend(glob(os.path.join(a0,a1)))
+ else:
+ result = combine_paths(*(combine_paths(args[0],args[1])+args[2:]))
+ verbosity = kws.get('verbosity',1)
+ if verbosity>1 and result:
+ print '(','paths:',','.join(result),')'
+ return result
+
+language_map = {'c':0,'c++':1,'f77':2,'f90':3}
+inv_language_map = {0:'c',1:'c++',2:'f77',3:'f90'}
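+# dict_append merges keyword arguments into an existing info dictionary:
+# library_dirs, include_dirs and define_macros are extended without
+# duplicates, other list values are simply extended, and of any 'language'
+# values seen the "highest" one (c < c++ < f77 < f90) wins, presumably so
+# mixed-language builds keep the most demanding linker requirement.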
+def dict_append(d,**kws):
+ languages = []
+ for k,v in kws.items():
+ if k=='language':
+ languages.append(v)
+ continue
+ if d.has_key(k):
+ if k in ['library_dirs','include_dirs','define_macros']:
+ [d[k].append(vv) for vv in v if vv not in d[k]]
+ else:
+ d[k].extend(v)
+ else:
+ d[k] = v
+ if languages:
+ l = inv_language_map[max([language_map.get(l,0) for l in languages])]
+ d['language'] = l
+ return
+
+def show_all():
+ import system_info
+ import pprint
+ match_info = re.compile(r'.*?_info').match
+ show_only = []
+ for n in sys.argv[1:]:
+ if n[-5:] != '_info':
+ n = n + '_info'
+ show_only.append(n)
+ show_all = not show_only
+ for n in filter(match_info,dir(system_info)):
+ if n in ['system_info','get_info']: continue
+ if not show_all:
+ if n not in show_only: continue
+ del show_only[show_only.index(n)]
+ c = getattr(system_info,n)()
+ c.verbosity = 2
+ r = c.get_info()
+ if show_only:
+ print 'Info classes not defined:',','.join(show_only)
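+
+# Running this module directly, e.g. 'python system_info.py lapack atlas',
+# prints the resolved info dictionary of each named *_info class (or of every
+# *_info class defined in this module when no names are given).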
+if __name__ == "__main__":
+ show_all()
diff --git a/distutils/tests/f2py_ext/__init__.py b/distutils/tests/f2py_ext/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/distutils/tests/f2py_ext/__init__.py
diff --git a/distutils/tests/f2py_ext/setup.py b/distutils/tests/f2py_ext/setup.py
new file mode 100644
index 000000000..6b786a97e
--- /dev/null
+++ b/distutils/tests/f2py_ext/setup.py
@@ -0,0 +1,12 @@
+
+import os
+from scipy.distutils.core import setup, Extension
+
+ext = Extension('f2py_ext.fib2',['src/fib2.pyf','src/fib1.f'])
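+
+# The .pyf signature file is processed by f2py via the build_src command,
+# which generates the C wrapper sources that are then compiled together with
+# fib1.f into the f2py_ext.fib2 extension module.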
+
+setup(
+ name = 'f2py_ext',
+ ext_modules = [ext],
+ packages = ['f2py_ext.tests','f2py_ext'],
+ package_dir = {'f2py_ext':'.'})
+
diff --git a/distutils/tests/f2py_ext/src/fib1.f b/distutils/tests/f2py_ext/src/fib1.f
new file mode 100644
index 000000000..cfbb1eea0
--- /dev/null
+++ b/distutils/tests/f2py_ext/src/fib1.f
@@ -0,0 +1,18 @@
+C FILE: FIB1.F
+ SUBROUTINE FIB(A,N)
+C
+C CALCULATE FIRST N FIBONACCI NUMBERS
+C
+ INTEGER N
+ REAL*8 A(N)
+ DO I=1,N
+ IF (I.EQ.1) THEN
+ A(I) = 0.0D0
+ ELSEIF (I.EQ.2) THEN
+ A(I) = 1.0D0
+ ELSE
+ A(I) = A(I-1) + A(I-2)
+ ENDIF
+ ENDDO
+ END
+C END FILE FIB1.F
diff --git a/distutils/tests/f2py_ext/src/fib2.pyf b/distutils/tests/f2py_ext/src/fib2.pyf
new file mode 100644
index 000000000..90a8cf00c
--- /dev/null
+++ b/distutils/tests/f2py_ext/src/fib2.pyf
@@ -0,0 +1,9 @@
+! -*- f90 -*-
+python module fib2
+ interface
+ subroutine fib(a,n)
+ real*8 dimension(n),intent(out),depend(n) :: a
+ integer intent(in) :: n
+ end subroutine fib
+ end interface
+end python module fib2
diff --git a/distutils/tests/f2py_ext/tests/test_fib2.py b/distutils/tests/f2py_ext/tests/test_fib2.py
new file mode 100644
index 000000000..633e2ba20
--- /dev/null
+++ b/distutils/tests/f2py_ext/tests/test_fib2.py
@@ -0,0 +1,13 @@
+import sys
+from scipy.base.testing import *
+set_package_path()
+from f2py_ext import fib2
+del sys.path[0]
+
+class test_fib2(ScipyTestCase):
+
+ def check_fib(self):
+ assert_array_equal(fib2.fib(6),[0,1,1,2,3,5])
+
+if __name__ == "__main__":
+ ScipyTest(fib2).run()
diff --git a/distutils/tests/f2py_f90_ext/__init__.py b/distutils/tests/f2py_f90_ext/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/distutils/tests/f2py_f90_ext/__init__.py
diff --git a/distutils/tests/f2py_f90_ext/include/body.f90 b/distutils/tests/f2py_f90_ext/include/body.f90
new file mode 100644
index 000000000..90b44e29d
--- /dev/null
+++ b/distutils/tests/f2py_f90_ext/include/body.f90
@@ -0,0 +1,5 @@
+ subroutine bar13(a)
+ !f2py intent(out) a
+ integer a
+ a = 13
+ end subroutine bar13
diff --git a/distutils/tests/f2py_f90_ext/setup.py b/distutils/tests/f2py_f90_ext/setup.py
new file mode 100644
index 000000000..f3ab45045
--- /dev/null
+++ b/distutils/tests/f2py_f90_ext/setup.py
@@ -0,0 +1,16 @@
+
+import os
+from scipy.distutils.core import setup, Extension
+
+package = 'f2py_f90_ext'
+
+ext = Extension(package+'.foo',['src/foo_free.f90'],
+ include_dirs=['include'],
+ f2py_options=['--include_paths','include'])
+
+setup(
+ name = package,
+ ext_modules = [ext],
+ packages = [package+'.tests',package],
+ package_dir = {package:'.'})
+
diff --git a/distutils/tests/f2py_f90_ext/src/foo_free.f90 b/distutils/tests/f2py_f90_ext/src/foo_free.f90
new file mode 100644
index 000000000..c7713be59
--- /dev/null
+++ b/distutils/tests/f2py_f90_ext/src/foo_free.f90
@@ -0,0 +1,6 @@
+module foo_free
+contains
+
+include "body.f90"
+
+end module foo_free
diff --git a/distutils/tests/f2py_f90_ext/tests/test_foo.py b/distutils/tests/f2py_f90_ext/tests/test_foo.py
new file mode 100644
index 000000000..544f94ab4
--- /dev/null
+++ b/distutils/tests/f2py_f90_ext/tests/test_foo.py
@@ -0,0 +1,13 @@
+import sys
+from scipy.base.testing import *
+set_package_path()
+from f2py_f90_ext import foo
+del sys.path[0]
+
+class test_foo(ScipyTestCase):
+
+ def check_foo_free(self):
+ assert_equal(foo.foo_free.bar13(),13)
+
+if __name__ == "__main__":
+ ScipyTest().run()
diff --git a/distutils/tests/gen_ext/__init__.py b/distutils/tests/gen_ext/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/distutils/tests/gen_ext/__init__.py
diff --git a/distutils/tests/gen_ext/setup.py b/distutils/tests/gen_ext/setup.py
new file mode 100644
index 000000000..7b12c1f55
--- /dev/null
+++ b/distutils/tests/gen_ext/setup.py
@@ -0,0 +1,47 @@
+
+import os
+from scipy.distutils.core import setup, Extension
+from distutils.dep_util import newer
+
+fib3_f = '''
+C FILE: FIB3.F
+ SUBROUTINE FIB(A,N)
+C
+C CALCULATE FIRST N FIBONACCI NUMBERS
+C
+ INTEGER N
+ REAL*8 A(N)
+Cf2py intent(in) n
+Cf2py intent(out) a
+Cf2py depend(n) a
+ DO I=1,N
+ IF (I.EQ.1) THEN
+ A(I) = 0.0D0
+ ELSEIF (I.EQ.2) THEN
+ A(I) = 1.0D0
+ ELSE
+ A(I) = A(I-1) + A(I-2)
+ ENDIF
+ ENDDO
+ END
+C END FILE FIB3.F
+'''
+
+package = 'gen_ext'
+
+def source_func(ext, src_dir):
+ source = os.path.join(src_dir,'fib3.f')
+ if newer(__file__, source):
+ f = open(source,'w')
+ f.write(fib3_f)
+ f.close()
+ return [source]
+
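+# build_src also accepts a callable in the sources list: it is called with the
+# extension and the build directory and must return the list of generated
+# source files -- here fib3.f, rewritten only when this setup.py is newer.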
+ext = Extension(package+'.fib3',[source_func])
+
+setup(
+ name = package,
+ ext_modules = [ext],
+ packages = [package+'.tests',package],
+ package_dir = {package:'.'})
+
diff --git a/distutils/tests/gen_ext/tests/test_fib3.py b/distutils/tests/gen_ext/tests/test_fib3.py
new file mode 100644
index 000000000..c8ee2441c
--- /dev/null
+++ b/distutils/tests/gen_ext/tests/test_fib3.py
@@ -0,0 +1,13 @@
+import sys
+from scipy.base.testing import *
+set_package_path()
+from gen_ext import fib3
+del sys.path[0]
+
+class test_fib3(ScipyTestCase):
+
+ def check_fib(self):
+ assert_array_equal(fib3.fib(6),[0,1,1,2,3,5])
+
+if __name__ == "__main__":
+ ScipyTest().run()
diff --git a/distutils/tests/swig_ext/__init__.py b/distutils/tests/swig_ext/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/distutils/tests/swig_ext/__init__.py
diff --git a/distutils/tests/swig_ext/setup.py b/distutils/tests/swig_ext/setup.py
new file mode 100644
index 000000000..b6fe8eed4
--- /dev/null
+++ b/distutils/tests/swig_ext/setup.py
@@ -0,0 +1,14 @@
+
+import os
+from scipy.distutils.core import setup, Extension
+
+ext_c = Extension('swig_ext._example',['src/example.i','src/example.c'])
+ext_cpp = Extension('swig_ext._example2',['src/zoo.i','src/zoo.cc'],
+ depends=['src/zoo.h'],include_dirs=['src'])
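+
+# build_src runs SWIG on the .i interface files to generate the wrapper
+# sources for the _example (C) and _example2 (C++) modules before the normal
+# compile step.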
+
+setup(
+ name = 'swig_ext',
+ ext_modules = [ext_c,ext_cpp],
+ packages = ['swig_ext.tests','swig_ext'],
+ package_dir = {'swig_ext':'.'})
+
diff --git a/distutils/tests/swig_ext/src/example.c b/distutils/tests/swig_ext/src/example.c
new file mode 100644
index 000000000..7bbb661dd
--- /dev/null
+++ b/distutils/tests/swig_ext/src/example.c
@@ -0,0 +1,14 @@
+/* File : example.c */
+
+double My_variable = 3.0;
+
+/* Compute factorial of n */
+int fact(int n) {
+ if (n <= 1) return 1;
+ else return n*fact(n-1);
+}
+
+/* Compute n mod m */
+int my_mod(int n, int m) {
+ return(n % m);
+}
diff --git a/distutils/tests/swig_ext/src/example.i b/distutils/tests/swig_ext/src/example.i
new file mode 100644
index 000000000..6d61062b3
--- /dev/null
+++ b/distutils/tests/swig_ext/src/example.i
@@ -0,0 +1,11 @@
+/* -*- c -*- */
+
+/* File : example.i */
+%module example
+%{
+/* Put headers and other declarations here */
+%}
+
+extern double My_variable;
+extern int fact(int);
+extern int my_mod(int n, int m);
diff --git a/distutils/tests/swig_ext/src/zoo.cc b/distutils/tests/swig_ext/src/zoo.cc
new file mode 100644
index 000000000..0a643d1e5
--- /dev/null
+++ b/distutils/tests/swig_ext/src/zoo.cc
@@ -0,0 +1,23 @@
+#include "zoo.h"
+#include <cstdio>
+#include <cstring>
+
+Zoo::Zoo()
+{
+ n = 0;
+}
+
+void Zoo::shut_up(char *animal)
+{
+ if (n < 10) {
+ strcpy(animals[n], animal);
+ n++;
+ }
+}
+
+void Zoo::display()
+{
+ int i;
+ for(i = 0; i < n; i++)
+ printf("%s\n", animals[i]);
+}
diff --git a/distutils/tests/swig_ext/src/zoo.h b/distutils/tests/swig_ext/src/zoo.h
new file mode 100644
index 000000000..cb26e6cef
--- /dev/null
+++ b/distutils/tests/swig_ext/src/zoo.h
@@ -0,0 +1,9 @@
+
+class Zoo{
+ int n;
+ char animals[10][50];
+public:
+ Zoo();
+ void shut_up(char *animal);
+ void display();
+};
diff --git a/distutils/tests/swig_ext/src/zoo.i b/distutils/tests/swig_ext/src/zoo.i
new file mode 100644
index 000000000..a029c03e8
--- /dev/null
+++ b/distutils/tests/swig_ext/src/zoo.i
@@ -0,0 +1,10 @@
+// -*- c++ -*-
+// Example copied from http://linuxgazette.net/issue49/pramode.html
+
+%module example2
+
+%{
+#include "zoo.h"
+%}
+
+%include "zoo.h"
diff --git a/distutils/tests/swig_ext/tests/test_example.py b/distutils/tests/swig_ext/tests/test_example.py
new file mode 100644
index 000000000..baedec642
--- /dev/null
+++ b/distutils/tests/swig_ext/tests/test_example.py
@@ -0,0 +1,18 @@
+import sys
+from scipy.base.testing import *
+set_package_path()
+from swig_ext import example
+del sys.path[0]
+
+class test_example(ScipyTestCase):
+
+ def check_fact(self):
+ assert_equal(example.fact(10),3628800)
+
+ def check_cvar(self):
+ assert_equal(example.cvar.My_variable,3.0)
+ example.cvar.My_variable = 5
+ assert_equal(example.cvar.My_variable,5.0)
+
+if __name__ == "__main__":
+ ScipyTest().run()
diff --git a/distutils/tests/swig_ext/tests/test_example2.py b/distutils/tests/swig_ext/tests/test_example2.py
new file mode 100644
index 000000000..af066be68
--- /dev/null
+++ b/distutils/tests/swig_ext/tests/test_example2.py
@@ -0,0 +1,17 @@
+import sys
+from scipy.base.testing import *
+set_package_path()
+from swig_ext import example2
+del sys.path[0]
+
+class test_example2(ScipyTestCase):
+
+ def check_zoo(self):
+ z = example2.Zoo()
+ z.shut_up('Tiger')
+ z.shut_up('Lion')
+ z.display()
+
+
+if __name__ == "__main__":
+ ScipyTest().run()
diff --git a/distutils/tests/test_build_src.py b/distutils/tests/test_build_src.py
new file mode 100644
index 000000000..a2e5a2cec
--- /dev/null
+++ b/distutils/tests/test_build_src.py
@@ -0,0 +1,31 @@
+import sys
+from scipy.base.testing import *
+from scipy.distutils.command.build_src import appendpath
+from os.path import join
+
+class test_appendpath(ScipyTestCase):
+
+ def check_1(self):
+ assert_equal(appendpath('prefix','name'),join('prefix','name'))
+ assert_equal(appendpath('/prefix','name'),join('/prefix','name'))
+ assert_equal(appendpath('/prefix','/name'),join('/prefix','name'))
+ assert_equal(appendpath('prefix','/name'),join('prefix','name'))
+
+ def check_2(self):
+ assert_equal(appendpath('prefix/sub','name'),
+ join('prefix','sub','name'))
+ assert_equal(appendpath('prefix/sub','sup/name'),
+ join('prefix','sub','sup','name'))
+ assert_equal(appendpath('/prefix/sub','/prefix/name'),
+ join('/prefix','sub','name'))
+
+ def check_3(self):
+ assert_equal(appendpath('/prefix/sub','/prefix/sup/name'),
+ join('/prefix','sub','sup','name'))
+ assert_equal(appendpath('/prefix/sub/sub2','/prefix/sup/sup2/name'),
+ join('/prefix','sub','sub2','sup','sup2','name'))
+ assert_equal(appendpath('/prefix/sub/sub2','/prefix/sub/sup/name'),
+ join('/prefix','sub','sub2','sup','name'))
+
+if __name__ == "__main__":
+ ScipyTest().run()
diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py
new file mode 100644
index 000000000..214763cef
--- /dev/null
+++ b/distutils/unixccompiler.py
@@ -0,0 +1,65 @@
+"""
+unixccompiler - can handle very long argument lists for ar.
+"""
+
+import os
+import sys
+import new
+
+from distutils.errors import DistutilsExecError, LinkError, CompileError, LibError
+from distutils.unixccompiler import *
+
+
+import log
+
+# Note that UnixCCompiler._compile appeared in Python 2.3
+def UnixCCompiler__compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+ display = '%s: %s' % (os.path.basename(self.compiler_so[0]),src)
+ try:
+ self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
+ extra_postargs, display = display)
+ except DistutilsExecError, msg:
+ raise CompileError, msg
+UnixCCompiler._compile = new.instancemethod(UnixCCompiler__compile,
+ None,
+ UnixCCompiler)
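+# The override above only changes the progress line passed to spawn(); the
+# compile command itself is the same one stock distutils would issue.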
+
+
+def UnixCCompiler_create_static_lib(self, objects, output_libname,
+                                    output_dir=None, debug=0, target_lang=None):
+ objects, output_dir = self._fix_object_args(objects, output_dir)
+
+ output_filename = \
+ self.library_filename(output_libname, output_dir=output_dir)
+
+ if self._need_link(objects, output_filename):
+ self.mkpath(os.path.dirname(output_filename))
+ tmp_objects = objects + self.objects
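+        # Feed the archiver at most 50 object files per invocation so the
+        # command line stays within the OS argument-length limit -- the reason
+        # this create_static_lib override exists (see the module docstring).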
+ while tmp_objects:
+ objects = tmp_objects[:50]
+ tmp_objects = tmp_objects[50:]
+ display = '%s: adding %d object files to %s' % (os.path.basename(self.archiver[0]),
+ len(objects),output_filename)
+ self.spawn(self.archiver + [output_filename] + objects,
+ display = display)
+
+        # Not many Unices require ranlib anymore -- SunOS 4.x is, I
+        # think, the only major Unix that still does.  Maybe we need some
+ # platform intelligence here to skip ranlib if it's not
+ # needed -- or maybe Python's configure script took care of
+ # it for us, hence the check for leading colon.
+ if self.ranlib:
+ display = '%s:@ %s' % (os.path.basename(self.ranlib[0]),
+ output_filename)
+ try:
+ self.spawn(self.ranlib + [output_filename],
+ display = display)
+ except DistutilsExecError, msg:
+ raise LibError, msg
+ else:
+ log.debug("skipping %s (up-to-date)", output_filename)
+ return
+
+UnixCCompiler.create_static_lib = \
+    new.instancemethod(UnixCCompiler_create_static_lib,
+                       None, UnixCCompiler)