Diffstat (limited to 'build/generator/gen_base.py')
-rw-r--r--  build/generator/gen_base.py  222
1 file changed, 212 insertions(+), 10 deletions(-)
diff --git a/build/generator/gen_base.py b/build/generator/gen_base.py
index 19b0dc3..02c8ed2 100644
--- a/build/generator/gen_base.py
+++ b/build/generator/gen_base.py
@@ -22,6 +22,7 @@
# gen_base.py -- infrastructure for generating makefiles, dependencies, etc.
#
+import collections
import os
import sys
import glob
@@ -75,7 +76,7 @@ class GeneratorBase:
# Now read and parse build.conf
parser = configparser.ConfigParser()
- parser.read(fname)
+ parser.readfp(open(fname))
self.conf = build_path(os.path.abspath(fname))
@@ -239,6 +240,178 @@ class GeneratorBase:
except: pass
os.rename(new_hdrfile, hdrfile)
+ def write_file_if_changed(self, fname, new_contents):
+ """Rewrite the file if NEW_CONTENTS are different than its current content.
+
+ If you have your windows projects open and generate the projects
+ it's not a small thing for windows to re-read all projects so
+ only update those that have changed.
+
+ Under Python >=3, NEW_CONTENTS must be a 'str', not a 'bytes'.
+ """
+ if sys.version_info[0] >= 3:
+ new_contents = new_contents.encode()
+
+ try:
+ old_contents = open(fname, 'rb').read()
+ except IOError:
+ old_contents = None
+ if old_contents != new_contents:
+ open(fname, 'wb').write(new_contents)
+ print("Wrote: %s" % fname)
+
+
+ def write_errno_table(self):
+ # ### We generate errorcode.inc at autogen.sh time (here!).
+ # ###
+ # ### Currently it's only used by maintainer-mode builds. If this
+ # ### functionality ever moves to release builds, it will have to move
+ # ### to configure-time (but remember that Python cannot be assumed to
+ # ### be available from 'configure').
+ import errno
+
+ lines = [
+ '/* This file was generated by build/generator/gen_base.py */',
+ ''
+ ]
+
+ def write_struct(name, codes):
+ lines.extend([
+ 'static struct {',
+ ' int errcode;',
+ ' const char *errname;',
+ '} %s[] = {' % (name,),
+ ])
+
+ for num, val in sorted(codes):
+ lines.extend([
+ ' { %d, "%s" },' % (num, val),
+ ])
+
+ # Remove ',' for c89 compatibility
+ lines[-1] = lines[-1][0:-1]
+
+ lines.extend([
+ '};',
+ '',
+ ])
+
+ write_struct('svn__errno', errno.errorcode.items())
+
+ # Fetch and write apr_errno.h codes.
+ aprerr = []
+ for line in open(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),
+ 'tools', 'dev', 'aprerr.txt')):
+ # aprerr.txt parsing duplicated in which-error.py
+ if line.startswith('#'):
+ continue
+ key, _, val = line.split()
+ aprerr += [(int(val), key)]
+ write_struct('svn__apr_errno', aprerr)
+ aprdict = dict(aprerr)
+ del aprerr
+
+ ## sanity check
+ intersection = set(errno.errorcode.keys()) & set(aprdict.keys())
+ # list() materializes the filter so the emptiness test and %r below
+ # behave the same under Python 3 as under Python 2.
+ intersection = list(filter(lambda x: errno.errorcode[x] != aprdict[x],
+ intersection))
+ if self.errno_filter(intersection):
+ print("WARNING: errno intersects APR error codes; "
+ "runtime computation of symbolic error names for the following numeric codes might be wrong: "
+ "%r" % (intersection,))
+
+ self.write_file_if_changed('subversion/libsvn_subr/errorcode.inc',
+ '\n'.join(lines))
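The aprerr.txt format is not shown in this change; a standalone sketch of the parsing above, assuming lines of the form 'NAME = NUMBER' with '#' starting comment lines (the names and codes below are invented):

    sample = """\
    # symbolic name / separator / numeric value
    APR_EXAMPLE_ONE = 70001
    APR_EXAMPLE_TWO = 70002
    """

    aprerr = []
    for line in sample.splitlines():
        line = line.strip()
        if not line or line.startswith('#'):
            continue
        key, _, val = line.split()          # middle field is discarded
        aprerr.append((int(val), key))

    print(sorted(aprerr))   # [(70001, 'APR_EXAMPLE_ONE'), (70002, 'APR_EXAMPLE_TWO')]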
+
+ def errno_filter(self, codes):
+ return codes
+
+ class FileSectionOptionEnum(object):
+ # These are accessed via getattr() later on
+ file = object()
+ section = object()
+ option = object()
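A standalone sketch of the sentinel pattern used here: distinct object() instances compared by identity and looked up by name with getattr():

    class FileSectionOptionEnum(object):
        # Each attribute is a unique object(), so 'is' distinguishes the
        # three kinds without needing an enum module.
        file = object()
        section = object()
        option = object()

    kind = getattr(FileSectionOptionEnum, 'section')   # looked up by name
    print(kind is FileSectionOptionEnum.section)       # True
    print(kind is FileSectionOptionEnum.option)        # False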
+
+ def _client_configuration_defines(self):
+ """Return an iterator over SVN_CONFIG_* #define's in the "Client
+ configuration files strings" section of svn_config.h."""
+
+ pattern = re.compile(
+ r'^\s*#\s*define\s+'
+ r'(?P<macro>SVN_CONFIG_(?P<kind>CATEGORY|SECTION|OPTION)_[A-Z0-9a-z_]+)'
+ )
+ kind = {
+ 'CATEGORY': self.FileSectionOptionEnum.file,
+ 'SECTION': self.FileSectionOptionEnum.section,
+ 'OPTION': self.FileSectionOptionEnum.option,
+ }
+
+ fname = os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),
+ 'subversion', 'include', 'svn_config.h')
+ lines = iter(open(fname))
+ for line in lines:
+ if "@name Client configuration files strings" in line:
+ break
+ else:
+ raise Exception("Unable to parse svn_config.h")
+
+ for line in lines:
+ if "@{" in line:
+ break
+ else:
+ raise Exception("Unable to parse svn_config.h")
+
+ for line in lines:
+ if "@}" in line:
+ break
+ match = pattern.match(line)
+ if match:
+ yield (
+ match.group('macro'),
+ kind[match.group('kind')],
+ )
+ else:
+ raise Exception("Unable to parse svn_config.h")
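A standalone sketch of the scanning pattern above, where one shared iterator is advanced by consecutive for/else loops; the header text and macro are invented:

    text = [
        'ignored preamble',
        ' * @name Client configuration files strings',
        ' * @{',
        '#define SVN_CONFIG_EXAMPLE "example"',   # hypothetical macro
        ' * @}',
    ]
    lines = iter(text)

    # 'else' fires only if the marker is never found (no 'break').
    for line in lines:
        if "@name Client configuration files strings" in line:
            break
    else:
        raise Exception("marker not found")

    # Resumes where the previous loop stopped.
    for line in lines:
        if "@{" in line:
            break
    else:
        raise Exception("marker not found")

    for line in lines:
        if "@}" in line:
            break
        print("body line:", line)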
+
+ def write_config_keys(self):
+ groupby = collections.defaultdict(list)
+ empty_sections = []
+ previous = (None, None)
+ for macro, kind in self._client_configuration_defines():
+ if kind is previous[1] is self.FileSectionOptionEnum.section:
+ empty_sections.append(previous[0])
+ groupby[kind].append(macro)
+ previous = (macro, kind)
+ else:
+ # If the last (macro, kind) is a section, then it's an empty section.
+ if kind is self.FileSectionOptionEnum.section:
+ empty_sections.append(macro)
+
+ lines = []
+ lines.append('/* Automatically generated by %s:write_config_keys() */'
+ % (__file__,))
+ lines.append('')
+
+ for kind in ('file', 'section', 'option'):
+ macros = groupby[getattr(self.FileSectionOptionEnum, kind)]
+ lines.append('static const char *svn__valid_config_%ss[] = {' % (kind,))
+ for macro in macros:
+ lines.append(' %s,' % (macro,))
+ # Remove ',' for c89 compatibility
+ lines[-1] = lines[-1][0:-1]
+ lines.append('};')
+ lines.append('')
+
+ lines.append('static const char *svn__empty_config_sections[] = {')
+ for section in empty_sections:
+ lines.append(' %s,' % (section,))
+ # Remove ',' for c89 compatibility
+ lines[-1] = lines[-1][0:-1]
+ lines.append('};')
+ lines.append('')
+
+ self.write_file_if_changed('subversion/libsvn_subr/config_keys.inc',
+ '\n'.join(lines))
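A standalone sketch of the grouping and trailing-comma trimming used by write_config_keys(), with invented macro names:

    import collections

    groupby = collections.defaultdict(list)   # missing keys start as empty lists
    for macro, kind in [('SVN_CONFIG_EXAMPLE_A', 'option'),
                        ('SVN_CONFIG_EXAMPLE_B', 'option'),
                        ('SVN_CONFIG_EXAMPLE_S', 'section')]:
        groupby[kind].append(macro)

    lines = ['static const char *svn__valid_config_options[] = {']
    for macro in groupby['option']:
        lines.append('  %s,' % (macro,))
    lines[-1] = lines[-1][:-1]   # drop the final ',' for C89 compatibility
    lines.append('};')
    print('\n'.join(lines))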
class DependencyGraph:
"""Record dependencies between build items.
@@ -259,6 +432,10 @@ class DependencyGraph:
else:
self.deps[type][target] = [ source ]
+ def remove(self, type, target, source):
+ if target in self.deps[type] and source in self.deps[type][target]:
+ self.deps[type][target].remove(source)
+
def bulk_add(self, type, target, sources):
if target in self.deps[type]:
self.deps[type][target].extend(sources)
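A standalone sketch of the dict-of-lists structure behind add()/remove() above; the type, target and source values are illustrative:

    # deps maps dependency type -> target -> list of sources
    deps = {'DT_LINK': {}}

    def add(type, target, source):
        deps[type].setdefault(target, []).append(source)

    def remove(type, target, source):
        # Tolerate unknown targets/sources, exactly like the method above.
        if target in deps[type] and source in deps[type][target]:
            deps[type][target].remove(source)

    add('DT_LINK', 'svn', 'libsvn_client')
    remove('DT_LINK', 'svn', 'libsvn_client')   # removed
    remove('DT_LINK', 'svn', 'not-there')       # silently ignored
    print(deps)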
@@ -429,13 +606,14 @@ class TargetLinked(Target):
# the specified install area depends upon this target
self.gen_obj.graph.add(DT_INSTALL, self.install, self)
- sources = sorted(_collect_paths(self.sources or '*.c' or '*.cpp', self.path))
+ sources = sorted(_collect_paths(self.sources, self.path))
- for srcs, reldir in sources:
- for src in srcs.split(" "):
+ for src, reldir in sources:
if glob.glob(src):
if src[-2:] == '.c':
objname = src[:-2] + self.objext
+ elif src[-3:] == '.cc':
+ objname = src[:-3] + self.objext
elif src[-4:] == '.cpp':
objname = src[:-4] + self.objext
else:
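A standalone sketch of the suffix-to-object-name mapping this hunk extends to cover '.cc', assuming a hypothetical '.obj' object extension:

    def objname_for(src, objext='.obj'):
        # Strip the recognized source suffix and append the object extension.
        if src[-2:] == '.c':
            return src[:-2] + objext
        elif src[-3:] == '.cc':
            return src[:-3] + objext
        elif src[-4:] == '.cpp':
            return src[:-4] + objext
        raise ValueError('unknown file extension on ' + src)

    print(objname_for('subversion/libsvn_subr/error.c'))   # .../error.obj
    print(objname_for('wrapper.cc'))                        # wrapper.obj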
@@ -470,6 +648,8 @@ class TargetExe(TargetLinked):
self.manpages = options.get('manpages', '')
self.testing = options.get('testing')
+ self.msvc_force_static = options.get('msvc-force-static') == 'yes'
+
def add_dependencies(self):
TargetLinked.add_dependencies(self)
@@ -519,6 +699,11 @@ class TargetLib(TargetLinked):
self.msvc_fake = options.get('msvc-fake') == 'yes' # has fake target
self.msvc_export = options.get('msvc-export', '').split()
+ def disable_shared(self):
+ "tries to disable building as a shared library,"
+
+ self.msvc_static = True
+
class TargetApacheMod(TargetLib):
def __init__(self, name, options, gen_obj):
@@ -580,7 +765,6 @@ class TargetSWIG(TargetLib):
TargetLib.__init__(self, name, options, gen_obj)
self.lang = lang
self.desc = self.desc + ' for ' + lang_full_name[lang]
- self.include_runtime = options.get('include-runtime') == 'yes'
### hmm. this is Makefile-specific
self.link_cmd = '$(LINK_%s_WRAPPER)' % lang_abbrev[lang].upper()
@@ -604,13 +788,16 @@ class TargetSWIG(TargetLib):
module_name = iname[:4] != 'svn_' and iname[:-2] or iname[4:-2]
lib_extension = self.gen_obj._extension_map['lib', 'target']
- if self.lang == "ruby":
+ if self.lang == "python":
+ lib_extension = self.gen_obj._extension_map['pyd', 'target']
+ lib_filename = '_' + module_name + lib_extension
+ elif self.lang == "ruby":
+ lib_extension = self.gen_obj._extension_map['so', 'target']
lib_filename = module_name + lib_extension
elif self.lang == "perl":
lib_filename = '_' + module_name.capitalize() + lib_extension
else:
- lib_extension = self.gen_obj._extension_map['pyd', 'target']
- lib_filename = '_' + module_name + lib_extension
+ lib_filename = module_name + lib_extension
self.name = self.lang + '_' + module_name
self.path = build_path_join(self.path, self.lang)
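A standalone sketch of the per-language bindings filename logic above; the extension values stand in for whatever gen_obj._extension_map supplies:

    # Placeholder extensions; the real values come from _extension_map.
    ext_map = {('lib', 'target'): '.so',
               ('pyd', 'target'): '.pyd',
               ('so', 'target'): '.so'}

    def swig_lib_filename(lang, module_name):
        lib_extension = ext_map['lib', 'target']
        if lang == 'python':
            lib_extension = ext_map['pyd', 'target']
            return '_' + module_name + lib_extension
        elif lang == 'ruby':
            lib_extension = ext_map['so', 'target']
            return module_name + lib_extension
        elif lang == 'perl':
            return '_' + module_name.capitalize() + lib_extension
        return module_name + lib_extension

    print(swig_lib_filename('python', 'core'))   # _core.pyd
    print(swig_lib_filename('perl', 'core'))     # _Core.so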
@@ -662,6 +849,12 @@ class TargetSWIGLib(TargetLib):
return [ self.target ]
return [ ]
+ def disable_shared(self):
+ "disables building shared libraries"
+
+ return # Explicit NO-OP
+
+
class TargetProject(Target):
def __init__(self, name, options, gen_obj):
Target.__init__(self, name, options, gen_obj)
@@ -914,10 +1107,13 @@ def _collect_paths(pats, path=None):
path (string), if specified, is a path that will be prepended to each
glob pattern before it is evaluated
- If path is none the return value is a list of filenames, otherwise
+ If path is None the return value is a list of filenames, otherwise
the return value is a list of 2-tuples. The first element in each tuple
is a matching filename and the second element is the portion of the
glob pattern which matched the file before its last forward slash (/)
+
+ If no files are found matching a pattern, then include the pattern itself
+ as a filename in the results.
"""
result = [ ]
for base_pat in pats.split():
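A standalone sketch of the documented fallback (a pattern that matches nothing is returned verbatim), ignoring the reldir bookkeeping of the real function:

    import glob

    def collect_paths(pats):
        # For each pattern: return the glob matches, or the pattern itself
        # when nothing on disk matches (as the docstring above describes).
        result = []
        for pat in pats.split():
            files = sorted(glob.glob(pat))
            result.extend(files if files else [pat])
        return result

    print(collect_paths('*.c no_such_file_12345.c'))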
@@ -1107,7 +1303,13 @@ class IncludeDependencyInfo:
direct_possibility_fname = os.path.normpath(os.path.join(
os.path.dirname(fname), include_param))
domain_fnames = self._domain.get(os.path.basename(include_param), [])
- if direct_possibility_fname in domain_fnames:
+ if os.sep.join(['libsvn_subr', 'error.c']) in fname \
+ and 'errorcode.inc' == include_param:
+ continue # generated by GeneratorBase.write_errno_table
+ if os.sep.join(['libsvn_subr', 'cmdline.c']) in fname \
+ and 'config_keys.inc' == include_param:
+ continue # generated by GeneratorBase.write_config_keys
+ elif direct_possibility_fname in domain_fnames:
self._upd_dep_hash(hdrs, direct_possibility_fname, type_code)
elif (len(domain_fnames) == 1
and (include_param.find(os.sep) == -1
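A standalone sketch of the exclusion this last hunk adds: #include lines that name the generated .inc files are skipped by the dependency scanner (the helper below is hypothetical):

    import os

    GENERATED_INCLUDES = {
        os.sep.join(['libsvn_subr', 'error.c']): 'errorcode.inc',
        os.sep.join(['libsvn_subr', 'cmdline.c']): 'config_keys.inc',
    }

    def is_generated_include(fname, include_param):
        # True when this #include names a file produced by gen_base.py itself.
        for tail, inc in GENERATED_INCLUDES.items():
            if tail in fname and include_param == inc:
                return True
        return False

    print(is_generated_include('subversion/libsvn_subr/error.c', 'errorcode.inc'))  # True
    print(is_generated_include('subversion/libsvn_subr/error.c', 'svn_pools.h'))    # False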