summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorEli Collins <elic@assurancetechnologies.com>2011-01-06 01:11:16 +0000
committerEli Collins <elic@assurancetechnologies.com>2011-01-06 01:11:16 +0000
commit0dd599bbb323387991cd8a3565ea87f36ff0892f (patch)
tree26705624c861c0725d1e8fd8555f84705775e1c2
downloadpasslib-0dd599bbb323387991cd8a3565ea87f36ff0892f.tar.gz
cloning bps to passlib trunk
-rw-r--r--.hgsvnexternals2
-rw-r--r--README6
-rw-r--r--bps.kpf14
-rwxr-xr-xbps/__init__.py61
-rwxr-xr-xbps/basic.py339
-rw-r--r--bps/cache.py370
-rw-r--r--bps/develop.py357
-rw-r--r--bps/error/__init__.py1
-rw-r--r--bps/error/types.py504
-rw-r--r--bps/error/utils.py140
-rw-r--r--bps/fs.py2843
-rw-r--r--bps/host/__init__.py521
-rw-r--r--bps/host/base.py631
-rw-r--r--bps/host/const.py9
-rw-r--r--bps/host/mailclient.py549
-rw-r--r--bps/host/posix.py531
-rw-r--r--bps/host/utils.py526
-rw-r--r--bps/host/windows.py928
-rw-r--r--bps/logs/__init__.py53
-rw-r--r--bps/logs/capture.py444
-rw-r--r--bps/logs/config.py1495
-rw-r--r--bps/logs/formatters.py432
-rw-r--r--bps/logs/handlers.py257
-rw-r--r--bps/logs/loggers.py256
-rw-r--r--bps/logs/proxy_logger.py84
-rw-r--r--bps/meta.py1190
-rw-r--r--bps/misc.py487
-rw-r--r--bps/numeric.py1192
-rw-r--r--bps/parsing/__init__.py1
-rw-r--r--bps/parsing/config.py178
-rw-r--r--bps/refs.py646
-rw-r--r--bps/rng.py218
-rw-r--r--bps/security/__init__.py1
-rw-r--r--bps/security/_bcrypt.py727
-rw-r--r--bps/security/_gpw_data.py1580
-rw-r--r--bps/security/_unix_crypt.py664
-rw-r--r--bps/security/policy.py1405
-rw-r--r--bps/security/pwgen.py453
-rw-r--r--bps/security/pwhash.py1693
-rw-r--r--bps/stream.py578
-rw-r--r--bps/tests/__init__.py5
-rw-r--r--bps/tests/_logs_parse_config_sample1.ini10
-rw-r--r--bps/tests/test_basic.py319
-rwxr-xr-xbps/tests/test_fs.py1321
-rw-r--r--bps/tests/test_host.py142
-rw-r--r--bps/tests/test_logs.py309
-rw-r--r--bps/tests/test_logs_apply_config.py190
-rw-r--r--bps/tests/test_logs_capture.py306
-rw-r--r--bps/tests/test_logs_parse_config.py537
-rw-r--r--bps/tests/test_meta.py678
-rw-r--r--bps/tests/test_misc.py179
-rw-r--r--bps/tests/test_numeric.py771
-rwxr-xr-xbps/tests/test_security_bcrypt.py210
-rw-r--r--bps/tests/test_security_policy.py1710
-rw-r--r--bps/tests/test_security_pwgen.py109
-rw-r--r--bps/tests/test_security_pwhash.py1220
-rw-r--r--bps/tests/test_stream.py33
-rw-r--r--bps/tests/test_text.py927
-rw-r--r--bps/tests/test_text_format.py1118
-rw-r--r--bps/tests/test_types.py278
-rw-r--r--bps/tests/test_unstable_ansi.py344
-rw-r--r--bps/tests/test_warndep.py480
-rw-r--r--bps/tests/utils.py384
-rw-r--r--bps/text/__init__.py1294
-rw-r--r--bps/text/_string_format.py742
-rw-r--r--bps/text/patch_format.py97
-rw-r--r--bps/types.py732
-rw-r--r--bps/undef.py59
-rw-r--r--bps/unstable/__init__.py1084
-rw-r--r--bps/unstable/ansi.py683
-rw-r--r--bps/unstable/bpsdoc/__init__.py8
-rw-r--r--bps/unstable/bpsdoc/ast/layout.html29
-rw-r--r--bps/unstable/bpsdoc/ast/static/ast-website.css733
-rw-r--r--bps/unstable/bpsdoc/ast/static/ast.css153
-rw-r--r--bps/unstable/bpsdoc/ast/static/bg_top.jpgbin0 -> 357 bytes
-rw-r--r--bps/unstable/bpsdoc/ast/theme.conf3
-rw-r--r--bps/unstable/bpsdoc/cloud/layout.html51
-rw-r--r--bps/unstable/bpsdoc/cloud/static/ast-website.css733
-rw-r--r--bps/unstable/bpsdoc/cloud/static/ast.css_t248
-rw-r--r--bps/unstable/bpsdoc/cloud/static/bg_top.jpgbin0 -> 357 bytes
-rw-r--r--bps/unstable/bpsdoc/cloud/static/header.pngbin0 -> 330 bytes
-rw-r--r--bps/unstable/bpsdoc/cloud/static/header.svg101
-rw-r--r--bps/unstable/bpsdoc/cloud/theme.conf35
-rw-r--r--bps/unstable/bpsdoc/index_styles.py69
-rw-r--r--bps/unstable/bpsdoc/make_helper.py149
-rw-r--r--bps/unstable/bpsdoc/nested_sections.py95
-rw-r--r--bps/unstable/bpsdoc/relbar_toc.py32
-rw-r--r--bps/unstable/softref.py847
-rw-r--r--bps/unstable/winconsole.py406
-rw-r--r--bps/warndep.py592
-rw-r--r--docs/_static/logo.icobin0 -> 1150 bytes
-rw-r--r--docs/_static/logo.pngbin0 -> 4646 bytes
-rw-r--r--docs/_static/logo.svg382
-rw-r--r--docs/_static/masthead.pngbin0 -> 7061 bytes
-rw-r--r--docs/_static/masthead.svg423
-rw-r--r--docs/conf.py216
-rw-r--r--docs/contents.rst35
-rw-r--r--docs/copyright.rst142
-rw-r--r--docs/history.rst70
-rw-r--r--docs/index.rst52
-rw-r--r--docs/install.rst53
-rw-r--r--docs/lib/bps.basic.rst35
-rw-r--r--docs/lib/bps.cache.rst26
-rw-r--r--docs/lib/bps.error.types.rst73
-rw-r--r--docs/lib/bps.error.utils.rst17
-rw-r--r--docs/lib/bps.fs.rst20
-rw-r--r--docs/lib/bps.fs/filepath.rst91
-rw-r--r--docs/lib/bps.fs/utils.rst47
-rw-r--r--docs/lib/bps.host.posix.rst23
-rw-r--r--docs/lib/bps.host.rst128
-rw-r--r--docs/lib/bps.host.utils.rst20
-rw-r--r--docs/lib/bps.host.windows.rst23
-rw-r--r--docs/lib/bps.logs-config.rst73
-rwxr-xr-xdocs/lib/bps.logs-config_format.rst283
-rw-r--r--docs/lib/bps.logs.rst34
-rwxr-xr-xdocs/lib/bps.meta.rst42
-rw-r--r--docs/lib/bps.misc.rst23
-rw-r--r--docs/lib/bps.numeric.rst55
-rw-r--r--docs/lib/bps.refs.rst22
-rw-r--r--docs/lib/bps.rng.rst71
-rw-r--r--docs/lib/bps.rst79
-rw-r--r--docs/lib/bps.security.policy.rst106
-rw-r--r--docs/lib/bps.security.pwgen.rst15
-rw-r--r--docs/lib/bps.security.pwhash.rst44
-rw-r--r--docs/lib/bps.security.pwhash/algorithms.rst49
-rw-r--r--docs/lib/bps.security.pwhash/contexts.rst35
-rw-r--r--docs/lib/bps.security.pwhash/implementation.rst17
-rw-r--r--docs/lib/bps.security.pwhash/quickstart.rst46
-rw-r--r--docs/lib/bps.security.pwhash/utils.rst19
-rw-r--r--docs/lib/bps.security.rst17
-rw-r--r--docs/lib/bps.stream.rst24
-rwxr-xr-xdocs/lib/bps.text.rst105
-rw-r--r--docs/lib/bps.types.rst40
-rw-r--r--docs/lib/bps.undef.rst37
-rw-r--r--docs/lib/bps.warndep.rst29
-rw-r--r--docs/make.py3
-rw-r--r--docs/overview.rst154
-rw-r--r--docs/roadmap.rst54
-rw-r--r--setup.cfg10
-rw-r--r--setup.py43
140 files changed, 46326 insertions, 0 deletions
diff --git a/.hgsvnexternals b/.hgsvnexternals
new file mode 100644
index 0000000..c8d8ea9
--- /dev/null
+++ b/.hgsvnexternals
@@ -0,0 +1,2 @@
+[docs]
+
diff --git a/README b/README
new file mode 100644
index 0000000..e5e109f
--- /dev/null
+++ b/README
@@ -0,0 +1,6 @@
+======================
+The BPS Python Library
+======================
+
+* For installation instructions, see "docs/install.rst"
+* For license & copyright information, see "docs/copyright.rst"
diff --git a/bps.kpf b/bps.kpf
new file mode 100644
index 0000000..f18d9f6
--- /dev/null
+++ b/bps.kpf
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Komodo Project File - DO NOT EDIT -->
+<project id="f57a343a-bef4-4b7e-9d57-2ce8862e06e0" kpf_version="4" name="bps.kpf">
+<preference-set idref="f57a343a-bef4-4b7e-9d57-2ce8862e06e0">
+ <string id="import_exclude_matches">*.*~;*.bak;*.tmp;CVS;.#*;*.pyo;*.pyc;.svn;*%*;tmp*.html;.DS_Store;*eric4project;*.e4p;*.e3p;*.egg-info;build;dist;_build</string>
+ <string id="import_include_matches"></string>
+ <boolean id="import_live">1</boolean>
+ <boolean id="import_recursive">1</boolean>
+ <string id="import_type">useFolders</string>
+ <string id="mappedPaths"></string>
+ <string relative="path" id="pythonExtraPaths"></string>
+ <string id="rubyExtraPaths"></string>
+</preference-set>
+</project>
diff --git a/bps/__init__.py b/bps/__init__.py
new file mode 100755
index 0000000..43b03c9
--- /dev/null
+++ b/bps/__init__.py
@@ -0,0 +1,61 @@
+"""BPS (version 4) -- Big Package of Stuff -- (c) Assurance Technologies LLC 2003-2009
+
+Requirements:
+ BPS requires at least Python 2.5,
+ but tries to be compatible with Python 2.6.
+ Python 3.0 compatibility has not yet been explored.
+
+ Some parts of BPS (namely :mod:`bps.host.windows`) relies
+ on the ``win32all`` library under windows.
+
+    Outside of that, there should be no external dependencies.
+
+To import:
+ pxhelpers.threads
+ apf.exception_hook
+"""
+
+#----- BEGIN VERSION STAMP -----
+__version__ = "4.8.1"
+#----- END VERSION STAMP -----
+
+#=========================================================
+#imports
+#=========================================================
+#import first for monkeypatching purposes
+import bps.logs
+
+#core
+from functools import partial
+from warnings import warn
+
+#pkg
+from bps.fs import filepath
+from bps.logs import log
+from bps.meta import abstractmethod
+from bps.types import BaseClass, Undef
+
+#local
+__all__ = [
+ #classes & class constructors
+ "BaseClass", "filepath",
+
+ #functions
+ "partial",
+
+ #decorators
+ "abstractmethod",
+
+ #constants
+ "Undef",
+
+ #exceptions
+
+ #logging
+ "log",
+ "warn",
+ ]
+
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/basic.py b/bps/basic.py
new file mode 100755
index 0000000..d74edef
--- /dev/null
+++ b/bps/basic.py
@@ -0,0 +1,339 @@
+"""bps.basic -- tools for manipulating basic python datatypes"""
+#=========================================================
+#imports
+#=========================================================
+#core
+from itertools import islice
+from sys import version_info as pyver
+#pkg
+from bps.error.types import ParamError
+#local
+__all__ = [
+ #dict utilities
+ 'zip_dict',
+ 'unzip_dict',
+ 'pop_from_dict',
+## 'extract_from_dict',
+ 'set_dict_defaults',
+
+ #sequence utilities
+ 'intersects', 'sameset',
+ 'unique',
+ 'iter_unique',
+ 'is_unique',
+ 'enum_slice',
+
+ #functional
+ ##'partial' - used to be provided until 2.5 added their implementation
+## 'revpartial',
+]
+
+#=========================================================
+#dictionary helpers
+#=========================================================
+
+def invert_dict(source, dups="error"):
+ """invert dictionary.
+
+ Given a dict mapping key -> value,
+ this returns a new dictionary mapping value -> key.
+
+ :arg source: the source dictionary to invert
+ :param dups:
+ Sets the policy when two keys map to the same value.
+ * By default this is ``"error"``, which raises a ValueError
+ * Set to "ignore", one key will be chosen (the last one returned by iteritems).
+
+ :raises ValueError: if the source dictionary maps two keys to the same value
+
+ Usage Example::
+
+ >>> from bps.basic import invert_dict
+ >>> invert_dict({1:2, 3:4, 5:6})
+ { 2:1, 4:3, 6:5 }
+ """
+ if dups == "error":
+ out = {}
+ for k, v in source.iteritems():
+ if v in out:
+ raise ValueError, "dictionary not invertible: value=%r key1=%r key2=%r" % (v, out[v], k)
+ out[v] = k
+ return out
+ else:
+ assert dups == "ignore"
+ return dict( (v, k) for k, v in source.iteritems())
+
+def update_dict_defaults(target, *args, **kwds):
+ """cross between dict.update and dict.setdefault, which updates only the keys which aren't already present.
+
+ Usage Examples::
+
+ >>> from bps.basic import update_dict_defaults
+ >>> a = dict(x=1,y=2)
+ >>> update_dict_defaults(a, x=100, z=3)
+ >>> a
+ { 'x': 1, 'y': 2, 'z': 3 }
+ >>> update_dict_defaults(a, { 'z': 100, 's': 20 })
+ >>> a
+ { 'x': 1, 'y': 2, 'z': 3, 's': 20 }
+ """
+ if args:
+ if len(args) > 1:
+ raise TypeError, "at most one positional argument is allowed"
+ source = args[0]
+ for k,v in source.iteritems():
+ if k not in target:
+ target[k] = v
+ if kwds:
+ for k,v in kwds.iteritems():
+ if k not in target:
+ target[k] = v
+
+set_dict_defaults = update_dict_defaults #XXX: deprecate this name?
+
+#---------------------------------------
+# (keys, values) <-> dict
+#---------------------------------------
+def zip_dict(keys, values):
+ "converts list of keys, list of values to dict"
+ return dict(zip(keys, values))
+
+def unzip_dict(data):
+ "converts dict to list of keys and list of values"
+ if data is None: #special case
+ return [],[]
+ else:
+ keys = []
+ values = []
+ for k,v in data.iteritems():
+ keys.append(k)
+ values.append(v)
+ return keys,values
+
+#---------------------------------------
+#extract one dict from another
+#---------------------------------------
+def pop_from_dict(source, keys, target=None):
+ """for all keys in <keys>, extract any from <source> dict,
+ and return them in new dictionary (or place in <target> dict)
+ """
+ if target is None:
+ target = {}
+ for k in keys:
+ if k in source:
+ target[k] = source.pop(k)
+ return target
+
+##def filter_dict(func, source, target=None):
+## """filter dictionary. ``func(k,v) -> bool``"""
+## if target is None:
+## target = {}
+## for k, v in source.iteritems():
+## if func(k, v):
+## target[k] = v
+## return target
+
+def prefix_from_dict(source, prefix, target=None):
+ """For all keys in *source* dict with the specified *prefix*,
+ strip the prefix, and copy the k/v pair to the *target* dict.
+
+ If target is specified, it will be used as the dictionary
+ that any matching k/v pairs are inserted into.
+ Otherwise, a new dictionary will be created as the target.
+
+ :Returns:
+ This always returns the target dict,
+ whether passed-in or created.
+
+ Usage Example::
+
+ >>> from bps.basic import strip_from_dict
+ >>> prefix_from_dict({"abc":1, "def": 2, "abxyz": 3}, "ab")
+ { "c": 1, "xyz": 3 }
+
+ """
+ if target is None:
+ target = {}
+ for key in source:
+ if key.startswith(prefix):
+ target[key[len(prefix):]] = source[key]
+ return target
+
+#works, but near useless probably
+##def extract_from_dict(source, keys, target=None):
+## """extract specified keys from dictionary.
+##
+## returns a new dictionary, unless target is specified.
+## if target is a dict, keys are placed in target.
+## if target is ``list`` or ``tuple``, the corresponding class
+## will be returned.
+## """
+## if target is list:
+## return [ source[k] for k in keys ]
+## elif target is tuple:
+## return tuple(source[k] for k in keys)
+## elif target is None:
+## return dict( (k,source[k]) for k in keys)
+## else:
+## for k in keys:
+## target[k] = source[k]
+## return target
+
+#=========================================================
+#set helpers
+#=========================================================
+
+#xxx: would enable this, but could use more intelligent return values
+##def intersection(list1, list2):
+## "returns list containing all elements shared by two sequences / iterables"
+## return list(set(list1).intersection(list2))
+##
+
+#TODO: write unittests
+if pyver < (2,6):
+ def intersects(list1, list2):
+ "returns True if two sequences / iterables have any elements in common"
+ #TODO: would like a more efficient way of doing this for large sets
+ return bool(set(list1).intersection(list2))
+else:
+ def intersects(list1, list2):
+ "returns True if two sequences / iterables have any elements in common"
+ return not set(list1).isdisjoint(list2)
+
+def sameset(list1, list2):
+ "returns True if the two sequences contain exactly the same elements, else False"
+ if not isinstance(list1, set):
+ list1 = set(list1)
+ if not isinstance(list2, set):
+ list2 = set(list2)
+ return list1 == list2
+
+#=========================================================
+#iteration & functional helpers
+#=========================================================
+
+#this works, but not used
+##def revpartial(func, *args, **kwds):
+## "like partial(), but args & kwds are appended to end"
+## #TODO: given this 'func', 'args' and 'kwds' attrs like functools.partial
+## return lambda *p, **n:\
+## func(*p + args, **dict(kw.items() + n.items()))
+
+def iter_unique(seq):
+ """iterate through sequence, yielding only unique values.
+ values will be returned in order of first occurrence.
+
+ Example Usage::
+ >>> from bps.basic import iter_unique
+ >>> for x in iter_unique([1,3,2,1,2,3]):
+ >>> print x
+ 1
+ 3
+ 2
+ """
+ seen = set()
+ cont = seen.__contains__
+ add = seen.add
+ for val in seq:
+ if not cont(val):
+ add(val)
+ yield val
+
+def unique(seq):
+ """return list containing only unique elements in sequence,
+ in order of first occurrence.
+
+ Example Usage::
+ >>> from bps.basic import unique
+ >>> unique([1,3,2,1,2,3])
+ [1,3,2]
+ """
+ return list(iter_unique(seq))
+
+def is_unique(seq):
+ "check if sequence/iterator contains only unique values; returns False after first duplicate is found"
+ if isinstance(seq, (set,frozenset)):
+ return True
+ #XXX: is there a faster way?
+ seen = set()
+ cont = seen.__contains__
+ add = seen.add
+ for elem in seq:
+ if cont(elem):
+ return False
+ add(elem)
+ return True
+
+def enum_slice(seq, *args):
+ """enumslice(iterable, [start,] stop [, step])
+
+ Combination of enumerate & islice which reports original index values.
+ Equivalent to ``islice(enumerate(seq), start, stop, step)``,
+ but without creation of intermediate sequence.
+
+ Usage::
+
+ >>> from bps.basic import enum_slice
+ >>> for idx, value in enum_slice("abcdef", 2, 5):
+ >>> print idx, value
+ 2 c
+ 3 d
+ 4 e
+ """
+ #NOTE: we calc start/stop/step ourselves,
+ #so we can handle negative indices (since islice doesn't).
+ #if islice did, this would be a much simpler function.
+
+ #handle simple case
+ ac = len(args)
+ if ac == 0:
+ for idx, value in enumerate(seq):
+ yield idx, value
+ return
+
+ #figure out params
+ elif ac == 1:
+ start = 0
+ stop, = args
+ step = 1
+ elif ac == 2:
+ start, stop = args
+ step = 1
+ elif ac == 3:
+ start, stop, step = args
+ else:
+ raise ParamError, "too many arguments"
+
+ #normalize inputs
+ if start is None:
+ start = 0
+ elif start < 0:
+ #FIXME: error if passed an iterator (works for lists/strings)
+ start += len(seq)
+ if stop is None:
+ pass
+ elif stop < 0:
+ #FIXME: error if passed an iterator (works for lists/strings)
+ stop += len(seq)
+ if step is None:
+ step = 1
+
+ #run
+ if step < 0:
+ #islice doesn't support negative ints.
+ #FIXME: error if passed an iterator (works for lists/strings)
+ offset = start
+ if stop is None:
+ stop = -1
+ while offset > stop:
+ yield offset, seq[offset]
+ offset += step
+ else:
+ offset = start
+ for value in islice(seq, start, stop, step):
+ yield offset, value
+ offset += step
+
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/cache.py b/bps/cache.py
new file mode 100644
index 0000000..875276b
--- /dev/null
+++ b/bps/cache.py
@@ -0,0 +1,370 @@
+"""bps.cache -- caching tools"""
+#===================================================
+#imports
+#===================================================
+#core
+import inspect
+from functools import update_wrapper
+import time
+from warnings import warn
+#needed imports
+#legacy imports
+from bps.undef import Undef
+from bps.meta import find_attribute, decorate_per_instance, instancemethod
+#XXX: bps3.misc.finalmethod?
+#TODO: bps3.misc.AbstractMethodError - here or in bps.exc?
+
+__all__ = [
+ #cached decorators
+ 'cached_function',
+ 'cached_method',
+
+ #stateful decorators
+ 'stateful_function',
+ 'stateful_method',
+ 'is_stateful',
+]
+
+#=========================================================
+#function caching decorator
+#=========================================================
+
+def cached_function(key=None, args=None, lifetime=None, tick=time.time):
+ """decorator that caches a function's output.
+
+ This decorator creates an dictionary which caches the return values
+ of the wrapped function, so that successive calls hit the cache
+ rather than calling the function itself. This decorator
+ supports numerous features, including time-limited caching,
+    and customization of how the cache key is calculated.
+
+ :param key:
+ This should be a function which takes the wrapper func's inputs,
+ and maps them to a hashable value to identify inputs for caching purposes.
+ If ``key(*args,**kwds)`` returns the ``NotImplemented`` singleton, caching will be bypassed.
+
+ :param args:
+ Alternately, instead of specifying a `key`, this option can be used
+ to specify the number of positional arguments expected, which will be formed into a tuple,
+    and used as the cache key. This option is mutually exclusive with *key*.
+
+ :param lifetime:
+ Amount of time (as measured by `tick` function)
+ before cached values should expire.
+ If lifetime is ``None`` (the default), cached values will never expire,
+ unless ``func.clear()`` is explicitly called by your application.
+
+ :param tick:
+ Function returning arbitrary objects for timestamping,
+ used by `lifetime`. By default, this uses ``time.time()``
+
+ The resulting decorated function object will have a some extra attributes:
+ ``func.key(*args,**kwds)``
+ Calling this with a set of the function's arguments
+ will return the key used to cache the result for those parameters.
+
+ ``func.clear(keys=None)``
+ Calling this will clear the internal cache of function results.
+ If *keys* is specified, only those cache keys will be cleared.
+
+ ``func.set(key,value)``
+ Allows writing to the function cache directly.
+
+ ``func.cache``
+ This is an exposed reference to the actual cache dictionary.
+ Please use this only if you *really* have to.
+
+ .. caution::
+ If your code does access the dictionary, be aware that the
+ ``lifetime`` option will change the organization of the dict from
+ ``key -> result`` to ``key -> (mtime,result)``.
+
+ A simple usage example::
+
+ >>> import time
+ >>> from bps.cache import cached_function
+ >>> #an example which has an expiring cache
+ >>> @cached_function(args=1, lifetime=2)
+ >>> def myfunc(value):
+ >>> print "myfunc called:", value
+ >>> return value*2
+ >>> #the first call won't be cached
+ >>> print "result:", myfunc(2)
+ myfunc called: 2
+ result: 4
+ >>> #but the next one will
+ >>> print "result:", myfunc(2)
+ result: 4
+ >>> #if we wait a bit and try again, the cache will expire
+ >>> time.sleep(2)
+ >>> print "result:", myfunc(2)
+ myfunc called: 2
+ result: 4
+ >>> #or we can manually flush the entire cache
+ >>> myfunc.clear()
+ >>> print "result:", myfunc(2)
+ myfunc called: 2
+ result: 4
+
+ .. seealso::
+ :func:`cached_method`
+ :func:`stateful_function`
+ """
+ if key is None:
+ if args is None:
+ warn("one of `key` or `args` will be required for cached_function() in the future, the bare version is deprecated", DeprecationWarning, stacklevel=3)
+ def key():
+ return None
+ elif args == 0:
+ def key():
+ return None
+ elif args == 1:
+ def key(value):
+ return value
+ else:
+ def key(*a):
+ if len(a) != args:
+ raise ValueError, "expected exactly %s arguments: %r" % (args, a)
+ return a
+ else:
+ assert args is None, "args and key() function are mutually exlusive"
+ assert callable(key), "key() function must be callable"
+ def builder(func):
+ #validate the function
+ if hasattr(func, "on_changed"):
+ warn("cached_function() is wrapping a function that was wrapped with stateful_function()... the opposite wrapping order is recommended for correct behavior", stacklevel=1)
+ #NOTE: why the warning?
+ # because stateful's changed() can call clear_cache(),
+ # but cache_func will hide any state changes which occur.
+ # so you want to decorator your function the other way around.
+
+ #init locals
+ cache = {}
+
+ #create wrapper...
+ if lifetime is None:
+ #...with no expiration
+ def wrapper(*args, **kwds):
+ value = key(*args, **kwds)
+ if value is NotImplemented:
+ return func(*args, **kwds)
+ elif value in cache:
+ return cache[value]
+ result = cache[value] = func(*args, **kwds)
+ return result
+ wrapper.set = cache.__setitem__ #for easy overriding of cache
+
+ else:
+ #...with predefined expiration
+ def wrapper(*args, **kwds):
+ value = key(*args, **kwds)
+ if value is NotImplemented:
+ return func(*args, **kwds)
+ now = tick()
+ if value in cache:
+ expires, result = cache[value]
+ if expires > now:
+ return result
+ result = func(*args, **kwds)
+ cache[value] = (now+lifetime, result)
+ return result
+
+ def set(key, value, expires=None):
+ if expires is None:
+ expires = tick() + lifetime
+ cache[key] = (expires, value)
+ wrapper.set = set #for easy overriding of cache
+ wrapper.tick = tick #in case it's useful
+
+ #fill in common attributes
+ def clear(keys=None):
+ if keys:
+ for key in keys:
+ if key in cache:
+ del cache[key]
+ else:
+ cache.clear()
+ wrapper.expire = clear #legacy ref, do not use
+ wrapper.clear = clear
+ wrapper.key = key #expose the key func
+ wrapper.cache = cache #for times when you really need direct cache access
+
+ #return wrapper
+ update_wrapper(wrapper, func)
+ return wrapper
+
+ return builder
+
+def cached_method(key=None, args=None, lifetime=None, tick=time.time):
+    """decorator that creates instance-level cached functions.
+
+ This a wrapper for :func:`cached_function`, which is designed
+ to wrap methods, not functions, by providing a per-instance
+ caching dictionary.
+
+ The options for this are the same as :func:`cached_function`.
+
+ .. note::
+
+ By default, the *self* argument will not be present in the arguments
+ passed to the key function. That can be fixed if needed,
+ but it simplifies the current internal implementation.
+ """
+ #TODO: we use "method" binding so the .clear() etc attributes are instance specific.
+ # so "function" binding can't be used.
+ # but we could expose a bind="function" to fake things,
+ # and artificially insert *self* into the key func's arguments.
+ builder = cached_function(key=key, args=args, lifetime=lifetime, tick=tick)
+ return decorate_per_instance(builder, bind="method")
+
+#=========================================================
+#stateful decorators
+#=========================================================
+
+def stateful_function(func=None):
+    """decorator which adds methods to a function, allowing callbacks
+    to be attached to detect when it changes its output.
+
+ This decorator is primarily useful for functions
+    which consistently return the same value when called
+    multiple times, but occasionally change
+ what the value is due to some internal event.
+ Examples of this include functions returning
+ filesystem listings, or in gui programming.
+
+ This decorator adds a simple callback / signalling system
+ by instrumenting the function object with the following methods:
+
+ ``func.changed()``
+ Your program should call this method after a resource has changed
+    which would alter what the function would return.
+ It will cause all registered callbacks to be fired.
+
+ ``func.on_change(callback, data=Undef, tag=None) -> tag``
+ This attached a callback to the function,
+ Callback are called in FIFO order when ``func.changed()`` is invoked.
+ *on_change* will return a unique tag object to identify the registration
+ of your callback, for use with ``func.forget_callbacks``.
+
+ *callback*
+ This should be a function with the prototype ``callback()``,
+ or ``callback(data)`` if the *data* parameter is set.
+ *data*
+ This is an optional value passed as a positional parameter
+ to your callback.
+ *tag*
+ Optional value specifying custom tag object to use.
+ If not specified, an anonymous object will be created.
+ This option allows you to gang a bunch of callbacks
+ together on one tag, for mass-removal.
+ No restrictions are placed on the nature of the object you provide.
+
+ ``func.forget_callbacks(*tags)``
+ Remove all callbacks attached to this function using any
+ of the specified tags. Any tags that aren't found will be silently ignored.
+
+ Usage example::
+
+ >>> from bps.cache import stateful_function
+ >>> #stateful functions typically have no arguments, but they can if they want
+ >>> @stateful_function
+ >>> def getpi():
+ >>> print "getpi called"
+ >>> return 3.14159
+ >>> getpi()
+ getpi called
+ 3.14159
+ >>> #say we want to attach a callback
+ >>> def hook():
+ >>> print "divide by cucumber error, please reboot universe"
+ >>> print "pi is now:", getpi()
+ >>> getpi.on_change(hook)
+ >>> #and say pi changes value for some reason, our hook will be called..
+ >>> getpi.changed()
+ divide by cucumber error, please reboot universe
+ pi is now: 3.14159
+
+ """
+ #
+ #just return prepared builder if function isn't present
+ #
+ if func is None:
+ return stateful_function
+
+ if isinstance(func, instancemethod):
+ #instance methods have read-only attributes,
+ #so we have to create a wrapper whose attributes we _can_ set
+ orig = func
+ def func(*args, **kwds):
+ return orig(*args, **kwds)
+ update_wrapper(func, orig)
+
+ #
+ #state
+ #
+ hooks = [] #list of (tag, hook, data) callbacks
+
+ #
+ #methods
+ #
+ def on_change(hook, data=Undef, tag=None):
+        "attach a callback to be invoked when function state changes"
+ if tag is None: tag = object()
+ entry = (tag, hook, data)
+ hooks.append(entry)
+ return tag
+ func.on_change = on_change
+
+ #clear_cache provides integration with cachemethod() above,
+ #but cachemethod MUST BE CALLED FIRST
+ clear_cache = getattr(func, "clear", None)
+
+ ##func.changing = False
+ def changed():
+ "signal that function's state has changed, triggering callbacks"
+ if clear_cache: clear_cache()
+ ##func.changing = True
+ ##try:
+ for tag, hook, data in hooks:
+ if data is Undef: hook()
+ else: hook(data)
+ ##finally:
+ ## func.changing = False
+ func.changed = changed
+
+ def forget_callbacks(*tags):
+        "remove specified callback hooks"
+ pos = 0
+ while pos < len(hooks):
+ if hooks[pos][0] in tags:
+ del hooks[pos]
+ else:
+ pos += 1
+ func.forget_callbacks = forget_callbacks
+
+ #return func
+ return func
+
+def stateful_method(func=None):
+ """Per-instance version of :func:`stateful_function`.
+
+ Unlike stateful_function, all callbacks / triggers will be unique per-instance,
+ rather than shared globally. See stateful_function for more details.
+ """
+ decorator = decorate_per_instance(stateful_function, bind="method")
+ if func:
+ return decorator(func)
+ else:
+ return decorator
+
+def is_stateful(func):
+    """returns ``True`` if function has been instrumented by @stateful_function or compatible protocol"""
+ return (
+ hasattr(func, "changed")
+ and hasattr(func, "on_change")
+ and hasattr(func, "forget_callbacks")
+ )
+
+#=========================================================
+#
+#=========================================================
diff --git a/bps/develop.py b/bps/develop.py
new file mode 100644
index 0000000..f9019cf
--- /dev/null
+++ b/bps/develop.py
@@ -0,0 +1,357 @@
+"""bps.develop - useful functions for debugging and developing code
+"""
+#=========================================================
+#imports
+#=========================================================
+#core
+import inspect
+import time
+import os, sys
+import re
+import code as code_mod
+#pkg
+from bps.fs import filepath
+#local
+__all__ = [
+ #console
+ "dbgcon",
+
+ #utils
+ "trap",
+ "timer",
+
+ #ide
+ "global_replace",
+ "purge_bytecode",
+
+ #tracing
+ "log_imports",
+]
+
+#=========================================================
+#debugging console
+#=========================================================
def _default_shell(local_ns, global_ns):
    """helper for dbgcon which runs the stdlib interactive python shell.

    :param local_ns: dict used as the console's local namespace (``None`` -> fresh dict)
    :param global_ns: dict used as the globals for evaluated code (``None`` -> fresh dict)

    Blocks until the user exits the console.
    """
    #prepare vars
    if global_ns is None:
        global_ns = {}
    if local_ns is None:
        local_ns = {}
    filename = "<console>"
    banner = "Dropping into Python"

    #try to load readline purely for its side effect
    #(line editing / history in the console); harmless if missing
    try:
        import readline
    except ImportError:
        pass

    #create console object
    console = code_mod.InteractiveConsole(local_ns, filename)

    #patch runcode so compiled code runs against BOTH namespaces
    #(InteractiveConsole.runcode only supports a single locals dict)
    def runcode(code):
        try:
            eval(code, global_ns, local_ns)
        except SystemExit:
            raise
        except:
            #bare except mirrors code.InteractiveInterpreter.runcode,
            #which reports every error via showtraceback()
            console.showtraceback()
        else:
            #py2-style softspace handling, copied from the stdlib implementation
            if code_mod.softspace(sys.stdout, 0):
                print
    console.runcode = runcode

    #run the console
    console.interact(banner)
+
+#disabled this for now... integration has some glitches
def _ipython_shell(local_ns, global_ns):
    """helper for dbgcon which runs IPython shell, or returns False if IPython not present.

    On the first failed import this rebinds the module-level name
    ``_ipython_shell`` to ``None``, so later dbgcon() calls skip straight
    to the builtin console (dbgcon tests ``_ipython_shell and ...``).
    """
    #check for IPython
    try:
        from IPython.Shell import IPShellEmbed
    except ImportError:
        global _ipython_shell
        _ipython_shell = None #disable in future
        return False

    #check for nested instance
    #NOTE: __IPYTHON__ is injected into builtins by a running IPython session
    try:
        __IPYTHON__
    except NameError:
        nested = 0
        args = ['']
    else:
        print "Running nested copies of IPython."
        print "The prompts for the nested copy have been modified"
        nested = 1
        # what the embedded instance will see as sys.argv:
        args = ['-pi1','In <\\#>: ','-pi2',' .\\D.: ',
                '-po','Out<\\#>: ','-nosep']

    # Now create an instance of the embeddable shell. The first argument is a
    # string with options exactly as you would type them if you were starting
    # IPython at the system command line. Any parameters you want to define for
    # configuration can thus be specified here.
    ipshell = IPShellEmbed(args,
                           banner = 'Dropping into IPython',
                           exit_msg = 'Leaving Interpreter, back to program.')
    ipshell(local_ns=local_ns, global_ns=global_ns)
    return True
+
def dbgcon(local_ns=None, global_ns=None, stacklevel=1):
    """opens up an embedded debugging console on stdin/stdout.
    by default, the accesses the local namespace of the calling function,
    but this can be altered via the various options.

    This function uses an embedded IPython shell if installed,
    else it falls back to the builtin python interpreter.

    .. todo::

        A env flag to disable IPython selection would be nice.
        A env flag to disable readline would be nice.

    :Parameters:
        stacklevel
            Choose what stacklevel the default namespaces should be pulled from.
            ``1`` (the default) uses the namespace of the immediate caller.
        local_ns
            Optionally overrides the local namespace that would be chosen via stacklevel.
        global_ns
            Optionally overrides the global namespace that would be chosen via stacklevel.
    """
    #NOTE(review): stray leftover one-line summary below is a no-op expression
    #statement (the real docstring is above); kept for byte-compatibility.
    "run interact using caller's frame for locals"
    #TODO: make this load globals correctly!
    print "\n", "-=" * 40
    extra_keys = set() #set of extra keys we added to local_ns
    orig_ns = {} #set of values we clobbered in local_ns
    #shadow_local records what it overwrites so the finally block can undo it.
    #NOTE: the closure reads the *rebound* local_ns below (frame.f_locals when
    #no dict was passed in) thanks to late binding.
    def shadow_local(key, value):
        if key in local_ns:
            orig_ns[key] = local_ns[key]
        else:
            extra_keys.add(key)
        local_ns[key] = value
    #py2-style: inspect.currentframe(n) skips n frames to reach the caller
    frame = inspect.currentframe(stacklevel)
    try:
        if local_ns is None:
            local_ns = frame.f_locals
        if global_ns is None:
            global_ns = frame.f_globals
        shadow_local("exit", sys.exit)
        #_ipython_shell rebinds itself to None once IPython is known missing
        if _ipython_shell and _ipython_shell(local_ns, global_ns):
            return
        _default_shell(local_ns, global_ns)
    finally:
        #drop the frame reference promptly to avoid a reference cycle
        del frame
        #restore anything shadow_local clobbered
        #NOTE(review): when local_ns came from frame.f_locals, writes to that
        #dict may not persist back into the frame anyway -- confirm intent.
        if local_ns:
            for key in orig_ns:
                local_ns[key] = orig_ns[key]
            for key in extra_keys:
                del local_ns[key]
        print "\n", "^=" * 40
+
+#=========================================================
+#other utility funcs
+#=========================================================
def trap(func, *args, **kwds):
    """development helper which traps and return errors.

    :param func:
        function to call
    :param *args:
        positional arguments for function
    :param **kwds:
        keyword arguments for function

    :returns:
        * ``(True,result)`` if function returns without error
        * ``(False,error)`` if function raises error

    .. note::
        only :exc:`Exception` subclasses are trapped; ``KeyboardInterrupt``,
        ``SystemExit`` and other bare ``BaseException`` types propagate.
    """
    try:
        return True, func(*args, **kwds)
    except Exception, err:
        return False, err
+
+
def dbgstack(depth=0, limit=None):
    """helper for pretty-printing the callstack.

    :param depth:
        number of caller frames to skip; also used as the starting index
        in the rendered output.
    :param limit:
        optional cap on the (absolute) frame index; iteration stops once
        the running index reaches it.
    :returns:
        string containing one ``(index, filename, funcname, lineno),`` line
        per frame, innermost first.
    """
    #NOTE: removed the original's dead ``idx = depth`` assignment (never read).
    out = ''
    #py2-style: inspect.currentframe(n) skips this frame plus ``depth`` callers
    frame = inspect.currentframe(1 + depth)
    while frame:
        out += "(%r, %r, %r, %r),\n" % (
            depth,
            frame.f_code.co_filename,
            frame.f_code.co_name,
            frame.f_lineno)
        frame = frame.f_back
        depth += 1
        if limit and depth >= limit:
            break
    return out
+
def timer(count, func, *args, **kwds):
    """helper func for timing a function call.

    :arg count: total number of times to invoke *func* (must be >= 1)
    :arg func: callable to time
    :param args: positional arguments passed to every call
    :param kwds: keyword arguments passed to every call
    :returns:
        tuple ``(average_seconds, result)`` where *result* is the return
        value of the first call.

    .. note::
        BUGFIX: the original invoked *func* ``count+1`` times whenever
        ``count > 1`` (one initial call plus ``xrange(count)`` loop
        iterations) while still dividing the elapsed time by *count*,
        skewing the reported average. Exactly *count* calls are made now.
    """
    start = time.time()
    #first call is timed AND provides the returned result
    result = func(*args, **kwds)
    #remaining count-1 calls (range works identically here on py2 and py3)
    for _ in range(count - 1):
        func(*args, **kwds)
    delta = time.time() - start
    return delta / float(count), result
+
+#=========================================================
+#global search and replace
+#=========================================================
+
def global_replace(root_path, match_re, replace_func, guard_func=None, file_filter=None):
    """This function implements a helpful global search-and-replace for an entire project tree.

    For simple uses, an IDE's global search and replace tool is usually better,
    the changes that need to be made are too extensive or complicated.
    This function allows quick scripts to be written which take care of
    complicated search-and-replace operations across an entire project.

    :arg root_path:
        root of path to perform search & replace within

    :param match_re:
        regular expression (if not compiled, will be compiled with re.M flag)
        any parts of any file in tree which match this regular expression
        will be passed to the replace_func for (potential) replacement.

    :param replace_func:
        function to handle analysis and replacement of any matching parts of a file.
        is called with one argument, the regexp match object.
        this function should then return the desired replacement string,
        or it should return None, in which case no replacement is performed.

    :param guard_func:
        optional function for checking file one last time before saving it...
        passed (path, input, output)... if it returns True, file is saved, else it isn't.

    :param file_filter:
        optional predicate called with each filename; defaults to
        accepting only ``*.py`` files.

    .. todo::
        Give an example for this function
    """
    if isinstance(match_re, (tuple, list)):
        #assume it's a (re_str, re_flags) pair
        match_re = re.compile(*match_re)
    elif isinstance(match_re, str):
        match_re = re.compile(match_re, re.M)
    if file_filter is None:
        def file_filter(name):
            return name.endswith(".py")
    #ctx[0] is tri-state: None = no file logged yet, True = header printed for
    #current file, False = a previous file had matches (print separator next).
    ctx = [None] #helper for logging
    for root, dirs, files in os.walk(root_path):
        #NOTE(review): substring test also skips any path merely *containing*
        #".svn"; pruning ``dirs`` in place would be more precise -- confirm.
        if '.svn' in root: continue #skip subversion dirs
        for name in files:
            if not file_filter(name):
                continue
            path = filepath(root, name)
            input = path.get()
            #replace any matches
            #NOTE: the wrapper's ``input`` is a *local* shadowing the file
            #contents above; it holds just the matched text.
            def replace_wrapper(match):
                output = replace_func(match)
                input = match.group()
                if output is None:
                    return input
                if input != output:
                    if not ctx[0]:
                        if ctx[0] is False: print "\n"
                        print "FILE: %r" % (path,)
                        ctx[0] = True
                    print " MATCH: %r" % (input,)
                    print " -->: %r" % (output,)
                return output
            output = match_re.sub(replace_wrapper, input)
            if ctx[0]: ctx[0] = False
            #save result only if it changes
            if output and output != input:
                if guard_func and not guard_func(path, output, input):
                    print " REJECTED"
                    continue
                path.set(output)
+
def purge_bytecode(root_path):
    """purge all pyc & pyo files from specified path.

    Walks *root_path* recursively, deleting every ``*.pyc`` / ``*.pyo``
    file it finds, and printing each path as it is removed.
    """
    for root, dirs, files in os.walk(root_path):
        #NOTE(review): substring checks also skip paths merely containing
        #".svn"/".hg"; pruning ``dirs`` in place would avoid descending
        #into vcs dirs at all -- confirm intent.
        if '.svn' in root: continue #skip subversion dirs
        if '.hg' in root: continue
        for name in files:
            if name[-4:] not in (".pyc", ".pyo"): continue
            path = filepath(root, name)
            print "PURGING ", path
            path.remove()
+
+#=========================================================
+#import tracker
+#=========================================================
def log_imports(logger="sys.imports"):
    """
    this code wraps the builtin __import__ function
    with some code to log a message about every import that occurs.

    :param logger:
        Name of the logger to send output to.
        If ``False``, output is written straight to stderr.

    :returns:
        the replacement ``__import__`` hook (already installed in builtins).
    """
    depth = [0] #state for tracking import depth
    #NOTE(review): subscripting __builtins__ assumes it is a dict, which is
    #only true in non-__main__ modules -- confirm this is never run as main.
    orig_import = __builtins__['__import__'] #old import hook
    import inspect,sys,os

    if logger:
        from logging import getLogger
        log = getLogger(logger)
        #_bps3_flag guards against re-entrant logging triggering imports
        log._bps3_flag = False
        def write(msg, *args):
            if log._bps3_flag:
                #why do this? because we get a recursive import otherwise :(
                return
            msg = (". " * depth[0]) + msg
            log._bps3_flag = True
            log.info(msg, *args)
            log._bps3_flag = False
    else:
        def write(msg, *args):
            if args: msg %= args
            sys.stderr.write(" " * depth[0])
            sys.stderr.write(">>> %s\n" % (msg,))
            sys.stderr.flush()

    def __import__(name, globals=None, locals=None, fromlist=None, level=-1):
        #derive a module-ish name for the *importing* file by stripping
        #the longest matching sys.path prefix
        fname = inspect.currentframe(1).f_code.co_filename
        for elem in sys.path:
            if fname[0:len(elem)] == elem:
                mname = fname[len(elem):]
                if mname[0] == "/":
                    mname = mname[1:]
                break
        else:
            mname = fname #inspect.getmodulename(fname)
        if fromlist is not None:
            #NOTE(review): slicing assumes fromlist reprs like a tuple
            #(trailing comma for len==1); a list fromlist would render oddly.
            fstr = repr(fromlist)
            if len(fromlist) == 1:
                fstr = fstr[1:-2]
            else:
                fstr = fstr[1:-1]
            write("[FOR %r IMPORT %r FROM %r]", mname, fstr, name)
        else:
            write("[FOR %r IMPORT %r]", mname, name)
        depth[0] += 1
        try:
            mod = orig_import(name, globals, locals, fromlist, level)
        finally:
            depth[0] -= 1
        return mod

    #make sure all modules from now on use this
    __builtins__['__import__'] = __import__
    return __import__
+
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/error/__init__.py b/bps/error/__init__.py
new file mode 100644
index 0000000..9d4e1ab
--- /dev/null
+++ b/bps/error/__init__.py
@@ -0,0 +1 @@
+"""bps.error -- error classes & utility functions"""
diff --git a/bps/error/types.py b/bps/error/types.py
new file mode 100644
index 0000000..a17ac84
--- /dev/null
+++ b/bps/error/types.py
@@ -0,0 +1,504 @@
+"""
+bps.error.types -- Exception classes & utilities
+"""
+#===================================================
+#imports
+#===================================================
+from warnings import warn
+from bps.undef import Undef
+import errno as _errno
+import os
+__all__ = [
+
+ #func errors
+ "ParamError",
+ "NormError",
+ "RangeError",
+
+ #command invariants
+ 'EnvTypeError',
+ 'DistTypeError',
+
+ #command arg errors
+ 'CommandError',
+ 'ParseError',
+ 'InputError',
+
+ #bps.fs:
+ 'MissingPathError',
+ 'PathExistsError',
+ 'ExpectedDirError',
+## 'ExpectedFileError',
+ 'DirNotEmptyError',
+ 'PathInUseError',
+
+ #bps.reference:
+ 'ProxyEmptyError',
+ 'ProxyNestError',
+
+ #bps.meta
+ 'AbstractMethodError',
+
+ #attribute errors
+ 'MissingAttributeError',
+ 'UnsetAttributeError',
+ 'ReadonlyAttributeError',
+ 'PermanentAttributeError',
+
+]
+
+#===================================================
+#function errors
+#===================================================
class ParamError(TypeError):
    """This error should be raised to indicate invalid parameters were passed to function.

    For example:
    * missing required arguments or keywords
    * mutually exclusive keywords specified
    * some other combination of the above

    It should *not* be used for:
    * incorrect type for a parameter (use :exc:`TypeError`).
    * incorrect value for a parameter (use :exc:`ValueError`).

    This is a subclass of :exc:`TypeError`, in order to be compatible
    with Python's normal behavior.
    """
+
class NormError(ValueError):
    """raised to indicate function was passed value that can't be normalized.

    This error is typically raised by normalization functions,
    whose job is to convert a value to its canonical form if it's in some domain
    (such as converting a date in string form to a date type).

    This is a helper for raising a more informative :exc:`ValueError`
    in the case where the provided value cannot be normalized / decoded / parsed.
    It is a subclass of :exc:`ValueError`.

    :arg msg:
        Optional traceback message.
        If not specified, a sensible default will be chosen
        based on the other provided values.
        The result (default or not) is available via ``err.msg`` or ``err.args[0]``.

    :param value:
        This is the most common keyword to use with this exception.
        It specifies the actual value which is the cause of the error.
        It is accessible via ``err.value``, and will be integrated
        into the default message.

    :param key:
        If the error is related to a particular key (such as a dictionary,
        object attribute, etc), it may optionally be specified using
        this keyword. Its value is accessible via ``err.key``,
        and will be integrated into the default message.

    :param pretty:
        Optional "pretty printed" message,
        suitable for display to the end user.
        By default, this is the same as *msg*,
        and is accessible via ``err.pretty`` or ``str(err)``.

    :param text:
        DEPRECATED alias for *msg*; issues a warning when used.
    """

    msg = None
    text = None #NOTE: this is a deprecated name for msg
    pretty = None
    key = None
    value = None #remains None when no value kwd was supplied

    def __init__(self, msg=None, pretty=None, key=None, value=Undef, text=None):
        self.key = key
        #only record the value when the caller actually supplied one
        if value is not Undef:
            self.value = value
        if text:
            warn("text kwd is deprecated, use msg kwd instead")
            msg = text
        if msg is None:
            msg = self._default_msg()
        self.msg = self.text = msg
        self.pretty = pretty or msg
        if value is not Undef:
            ValueError.__init__(self, msg, value)
        else:
            ValueError.__init__(self, msg)

    def _default_msg(self):
        "called to create default message"
        msg = "Invalid value"
        if self.key is not None:
            msg += " for key %r" % (self.key,)
        #BUGFIX: the original tested the bare name ``value`` here, which is
        #unbound in this method and raised NameError whenever the default
        #message was built. Test the instance attribute instead; it is left
        #at its class-level default of None when no value was supplied.
        if self.value is not None:
            msg += ": %r" % (self.value,)
        return msg

    def __str__(self):
        return self.pretty
+
class RangeError(NormError):
    """raised to indicate function was passed value that can't be normalized because it was out of range.

    This is a subclass of :exc:`NormError`, which offers a more appropriate
    error message for when the reason a value couldn't be normalized
    was because it was outside some supported range (eg: a date contains month 13).

    In addition to all the parameters :exc:`NormError` supports,
    this class adds two additional parameters:

    :param lower:
        Optional lower bound of range, will be integrated into default message.
    :param upper:
        Optional upper bound of range, will be integrated into default message.

    If possible, lower should be inclusive and upper should be exclusive,
    following the python slice protocol.
    """
    lower = None #lower bound of range, if provided
    upper = None #upper bound of range, if provided

    def __init__(self, msg=None, pretty=None, key=None, value=Undef, lower=None, upper=None, text=None):
        #record the bounds first, so NormError.__init__ can see them
        #when it builds the default message via _default_msg()
        self.lower = lower
        self.upper = upper
        NormError.__init__(self, msg=msg, pretty=pretty, key=key, value=value, text=text)

    def _default_msg(self):
        #append a bounds clause matching whichever limits were supplied
        msg = NormError._default_msg(self)
        bounded_below = self.lower is not None
        bounded_above = self.upper is not None
        if bounded_below and bounded_above:
            msg += ", must be within %r and %r" % (self.lower, self.upper)
        elif bounded_below:
            msg += ", must be at or above %r" % (self.lower,)
        elif bounded_above:
            msg += ", must be below %r" % (self.upper,)
        return msg
+
+#===================================================
+#command invariant errors
+#===================================================
class EnvTypeError(AssertionError):
    """raised by subclasses of :class:`bps.app.command.Command` when they encounter an unexpected env_type value.

    This is a rather common case when writing code
    which supports multiple env_types in a Command subclass.
    This error can be raised when a value is encountered
    that the code is not prepared for.

    If you are not using the Command class, you can ignore this exception.

    .. note::
        Since env_type values are generally internal to a program,
        this indicates a violation of an internal invariant,
        thus this subclasses :exc:`AssertionError`, not :exc:`ValueError`.
    """
    def __init__(self, value):
        #commands (or anything exposing .env_type) may be passed in directly
        value = getattr(value, "env_type", value)
        self.value = value
        AssertionError.__init__(self, "unexpected environment type: %r" % self.value)
+
class DistTypeError(AssertionError):
    """raised by subclasses of :class:`bps.app.command.Command` when they encounter an unexpected dist_type value.

    This is similar to :exc:`EnvTypeError`, see it for details.
    """
    def __init__(self, value):
        #commands (or anything exposing .dist_type) may be passed in directly
        value = getattr(value, "dist_type", value)
        self.value = value
        AssertionError.__init__(self, "unexpected distribution type: %r" % self.value)
+
+#===================================================
+#user input errors
+#===================================================
class CommandError(Exception):
    """base class for errors to be raised by code processing sys.argv input"""

    @classmethod
    def format(cls, template, *args, **kwds):
        #DEPRECATED constructor helper, retained for backward compatibility only
        warn("this method has been deprecated!", DeprecationWarning)
        from bps.text import render_format
        return cls(render_format(template, *args, **kwds))
+
class ParseError(CommandError):
    "syntax error when parsing command line arguments (unknown / contradictory options, etc)"
    #NOTE: this will cause Command to print out its usage

class InputError(CommandError):
    "semantic error when parsing command line arguments (file not found, etc)"

#===================================================
#bps.reference
#===================================================
class ProxyEmptyError(TypeError):
    """error raised when :class:`bps.refs.ProxyObject` is accessed without a proxy target"""

class ProxyNestError(AssertionError):
    """error raised when targets are removed from :class:`bps.refs.ProxyObject` in wrong order"""

#===================================================
#bps.meta
#===================================================
class AbstractMethodError(NotImplementedError):
    """error class raised by :func:`bps.meta.abstractmethod` decorator when
    an abstract method is invoked."""
    #default message keeps call sites to a bare ``raise AbstractMethodError()``
    def __init__(self, msg="this method must be implemented in a subclass"):
        NotImplementedError.__init__(self, msg)
+
+#===================================================
+#attribute error helpers
+#===================================================
class PrettyAttributeError(AttributeError):
    """common base class for the attribute-error helpers below.

    assembles a default message from whichever of *obj* and *attr* were
    provided, plus the class-level *verb* phrase supplied by subclasses.
    """
    msg = None  #final formatted message
    obj = None  #stores object for instance
    attr = None #stores attr for instance

    verb = None #verb to append to default message

    def __init__(self, obj=None, attr=None, msg=None):
        self.attr = attr
        self.obj = obj
        if msg is None:
            #build the template piecewise from the supplied parts,
            #then fill it in with a single formatting pass
            template = "%(obj)r: " if obj else ""
            if attr:
                template += "attribute %(attr)r %(verb)s"
            else:
                template += "attribute %(verb)s"
            msg = template % dict(obj=obj, attr=attr, verb=self.verb)
        self.msg = msg
        AttributeError.__init__(self, msg)
+
class MissingAttributeError(PrettyAttributeError):
    """helper for quickly raising an error when getattr fails.

    :param obj:
        Optionally provide reference to object code was trying to access.
        Will be integrated into default message.
        Stored in ``obj`` attribute of error.

    :param attr:
        Optionally provide name of attribute being read.
        Will be integrated into default message.
        Stored in ``attr`` attribute of error.

    :param msg:
        Override the default message.
        Can be retrieved via ``str(err)``

    Usage Example::

        >>> from bps.error.types import MissingAttributeError
        >>> # .. in code somewhere ...
        >>> raise MissingAttributeError(self,attr)
    """
    verb = "not found"

class ReadonlyAttributeError(PrettyAttributeError):
    "helper for raising error when setattr fails, used just like MissingAttributeError"
    verb = "is read-only"

class PermanentAttributeError(PrettyAttributeError):
    "helper for raising error when delattr fails, used just like MissingAttributeError"
    verb = "cannot be deleted"

class UnsetAttributeError(MissingAttributeError):
    """helper for raising error when descriptor managing an attribute wishes
    to indicate it has no defined value to return"""
    verb = "has no set value"
+
+#===================================================
+#filesystem errors - used by bps.fs.FilePath
+#===================================================
+
+#---------------------------------------------------
+#internal wrapper backends
+#---------------------------------------------------
+#NOTE: internal inheritance may change in the future,
+# the only guarantee is that all these errors will be OSError subclasses
+# (and also WindowsError subclasses, where appropriate).
+
+class _OSErrorHelper(OSError):
+ "helper used by errors which wrap a specific OSError errno"
+ errno = None
+ #NOTE: order of kwds is different from OSError, mainly to aid in creation of instances
+ # with more helpful messages
+ def __init__(self, strerror=None, filename=None, errno=None):
+## if isinstance(errno, str) and strerror is None:
+## errno, strerror = None, errno
+ if errno is None:
+ errno = self.errno
+ if strerror is None and errno:
+ strerror = os.strerror(errno)
+ OSError.__init__(self, errno, strerror, filename)
+
if os.name == "nt":
    assert WindowsError
    #NOTE: order of kwds is different from WindowsError, mainly to aid in
    # creation of instances with more helpful messages
    class _WindowsErrorHelper(WindowsError):
        "helper used by errors which wrap a specific WindowsError winerror"
        winerror = None #subclasses set this to the winerror they represent
        strerror = None #subclasses may provide a canned message
        #errno autocalculated

        def __init__(self, strerror=None, filename=None, winerror=None):
            if winerror is None:
                winerror = self.winerror
            if strerror is None:
                strerror = self.strerror
            if strerror is None:
                #BUGFIX: the original read the misspelled name ``streror`` and
                #the unbound name ``errno`` here, raising NameError whenever
                #this fallback path was hit. Use the class-level errno (set by
                #subclasses such as PathInUseError) when one is available.
                err_no = getattr(self, "errno", None)
                if err_no:
                    strerror = os.strerror(err_no)
            WindowsError.__init__(self, winerror, strerror, filename)

        def __str__(self):
            #include both winerror and errno codes for easier debugging
            out = "[WinError %r] " % self.winerror
            if self.errno:
                out += "[Errno %r] " % self.errno
            if self.strerror:
                out += self.strerror
            if self.filename:
                out += ": %r" % (self.filename,)
            return out

else:
    WindowsError = None
    _WindowsErrorHelper = _OSErrorHelper
    #under non-nt, the _WindowsErrorHelper errors shouldn't be raised,
    #but we define them anyway so any use-cases don't have to deal w/ them vanishing.
    #so, we use _OSErrorHelper to provide a base class.
+
+#---------------------------------------------------
+#filepath errors
+#---------------------------------------------------
+
class MissingPathError(_OSErrorHelper):
    """Indicates a filepath that was expected to exist could not be found.

    This is a wrapper for ``OSError(errno.ENOENT)`` raised by :class:`bps.fs.FilePath`.
    """
    errno = _errno.ENOENT

class PathExistsError(_OSErrorHelper):
    """Indicates a filepath exists on the filesystem when it was expected to be empty.

    This is a wrapper for ``OSError(errno.EEXIST)`` raised by :class:`bps.fs.FilePath`.
    """
    errno = _errno.EEXIST

class ExpectedDirError(_OSErrorHelper):
    """Indicates filepath was expected to be a directory, but was found to be another filetype.

    This is a wrapper for ``OSError(errno.ENOTDIR)`` raised by :class:`bps.fs.FilePath`.
    """
    errno = _errno.ENOTDIR

class DirNotEmptyError(_OSErrorHelper):
    """Indicated directory should have been empty, but wasn't (mainly caused by rmdir)

    This is a wrapper for ``OSError(errno.ENOTEMPTY)`` raised by :class:`bps.fs.FilePath`.
    """
    errno = _errno.ENOTEMPTY

##class ExpectedFileError(WrongPathTypeError):
##    errno = _errno.EISDIR

##PathPermissionError(_OSErrorHelper) - _errno.EACCES, should be parent of PathInUseError

class PathInUseError(_WindowsErrorHelper):
    """Indicates the filepath is currently locked by another process, and cannot be moved/opened/etc.

    This is a wrapper for ``WindowsError(32)`` raised by :class:`bps.fs.FilePath`.
    It will currently only be raised under Windows.
    """
    #errno provides the closest posix equivalent for the win32 sharing violation
    errno = _errno.EACCES
    winerror = 32
    strerror = "The process cannot access the file because it is being used by another process"

#------------------------------------------
#aliases based on errno id's
#------------------------------------------
ENOENT_Error = MissingPathError
EEXIST_Error = PathExistsError
##EISDIR_Error = ExpectedFileError
ENOTDIR_Error = ExpectedDirError
ENOTEMPTY_Error = DirNotEmptyError

#------------------------------------------
#adapt os errors to one of BPS's subclasses
#------------------------------------------
#NOTE: _win_err_map is only defined when WindowsError is available;
#translate_os_error() guards its accesses with the same check.
if WindowsError:
    _win_err_map = {
        32: PathInUseError,
    }

_os_err_map = { #dict mapping errno to oserror subclass
    _errno.ENOENT: ENOENT_Error,
    _errno.EEXIST: EEXIST_Error,
##    _errno.EISDIR: EISDIR_Error,
    _errno.ENOTDIR: ENOTDIR_Error,
    _errno.ENOTEMPTY: ENOTEMPTY_Error,
}
+
+def adapt_os_errors(func, *args, **kwds):
+ "wraps function call, trapping & adapting os errors into BPS errors"
+ try:
+ return func(*args, **kwds)
+ except OSError, err:
+ new_err = translate_os_error(err)
+ if new_err:
+ raise new_err
+ log.warning("unmanaged os error: %r", err)
+ raise
+
def translate_os_error(err):
    """adapt a plain os error into one of BPS's OSError subclasses.

    :arg err: the exception instance to translate.
    :returns:
        a new error instance (one of the _OSErrorHelper /
        _WindowsErrorHelper subclasses above), or ``None`` when *err* is
        already wrapped or has no registered BPS equivalent.
    """
    global _os_err_map, _win_err_map

    #for debugging/development
## if True:
##     from bps.error.utils import format_exception
##     from logging import getLogger
##     getLogger("bps.error.types").critical(format_exception(err))

    #check if we've already wrapped it, or if it's not an OSError
    if isinstance(err, (_OSErrorHelper, _WindowsErrorHelper)):
        return None

    #check for WindowsErrors (since WindowsError subclass of OSError)
    #NOTE: <py2.5, .errno contained the windows error value, not the errno value!
    elif WindowsError and isinstance(err, WindowsError) and err.winerror in _win_err_map:
        cls = _win_err_map[err.winerror]
        return cls(err.strerror, err.filename, err.winerror)

    #else it should be an OS Error
    elif isinstance(err, OSError) and err.errno in _os_err_map:
        #XXX: what if this _is_ a WindowsError? should we do something?
        cls = _os_err_map[err.errno]
        return cls(err.strerror, err.filename, err.errno)

    #don't handle the rest
    return None
+
+#===================================================
+#eof
+#===================================================
diff --git a/bps/error/utils.py b/bps/error/utils.py
new file mode 100644
index 0000000..c408aa0
--- /dev/null
+++ b/bps/error/utils.py
@@ -0,0 +1,140 @@
+"""bps.error.utils -- error utility functions"""
+#========================================================
+#imports
+#========================================================
+#core
+import sys
+import traceback
+from cStringIO import StringIO
+from bps.undef import Undef
+#local
__all__ = [
    #display
    #BUGFIX: the trailing comma below was missing, so python's implicit
    #string concatenation silently merged the two entries into the single
    #bogus export name 'format_exceptionget_sysexit_rc'.
    'format_exception',

    #helpers
    'get_sysexit_rc',
]
+#========================================================
+#format an exception
+#========================================================
+EXC_ATTRS = ("errno", "filename", "winerror") #common exc attributes to display if present
+EXC_HEADER = "@@@@@@@@@@@@@@@@@@@@_exception_@@@@@@@@@@@@@@@@@@@@@\n"
+EXC_DIVIDER = "----------------------------------------------------\n"
+EXC_FOOTER = "@@@@@@@@@@@@@@@@@@@@^^^^^^^^^^^@@@@@@@@@@@@@@@@@@@@@\n"
+
def format_exception(exc_info=True, limit=None): ##, depth=0):
    """An enhanced version of traceback.format_exception

    This function prints out the specified exception info,
    but tries to give additional information about the exception,
    which is frequently useful when debugging unknown errors from a log file.

    :param exc_info:
        The exc info tuple to format.
        If this is set to ``True`` (the default), :func:`sys.exc_info()`
        will be called to get the real exc info tuple.

        This can also be an error instance, in which case
        the error will be treated the same as the exc_info tuple ``(type(err),err,None)``.

    :param limit:
        Limit on how far from the original caller
        that the traceback should go.

    :returns:
        multi-line string framed by EXC_HEADER / EXC_FOOTER.
    """
##    :param depth:
##        If set, the first *depth* frames will be skipped.
##        This is useful for displaying tracebacks that occur
##        inside an interactive shell, so that the top frames
##        can be ignored.

    #get exc info
    if exc_info is True:
        exc_info = sys.exc_info()
    if exc_info:
        if isinstance(exc_info, BaseException):
            #a bare exception instance carries no traceback object
            exc_type, exc_value, exc_trace = type(exc_info), exc_info, None
        else:
            exc_type, exc_value, exc_trace = exc_info
    else:
        #nothing to format (eg sys.exc_info() returned all-None)
        return EXC_HEADER + " exc_info: None\n" + EXC_FOOTER

    #
    #prepare output buffer, write header
    #
    out = StringIO()
    write = out.write
    write(EXC_HEADER)

    #
    #write exc value info
    #
    #_safe_repr guards against objects whose __repr__ itself raises
    write(" exc_type: %s\n" % (_safe_repr(exc_type),))
    write(" exc_value: %s\n" % (_safe_repr(exc_value),))

    #show the err's args one by one
    if hasattr(exc_value, "args") and isinstance(exc_value.args,(tuple,list)):
        for i,arg in enumerate(exc_value.args):
            write(" args[%d]: %s\n" % (i,_safe_repr(arg)))
    #display well-known OSError-style attributes when present
    for attr in EXC_ATTRS:
        if hasattr(exc_value, attr):
            value = getattr(exc_value, attr)
            write("%13s: %s\n" % (attr, _safe_repr(value)))

    #
    #write traceback
    #
    write(EXC_DIVIDER)
## while exc_trace and depth > 0:
##     exc_trace = exc_trace.tb_next
##     depth -= 1
    if exc_trace:
        stack = traceback.extract_tb(exc_trace, limit=limit)
        if stack:
            write(" traceback:\n")
            traceback.print_list(stack, out)

    #
    #write the error text
    #
    write(EXC_DIVIDER)
    lines = traceback.format_exception_only(exc_type, exc_value)
    if lines: #should always be >0, usually ==1
        for line in lines:
            write(line)
        #should always end in \n

    #
    #write the footer
    #
    write(EXC_FOOTER)

    #
    #return
    #
    #drop traceback references promptly to break reference cycles
    del exc_type, exc_value, exc_trace
    return out.getvalue()
+
def _safe_repr(value):
    """repr() wrapper which never raises, even for hostile __repr__ methods."""
    try:
        result = repr(value)
    except Exception:
        #fall back to a placeholder naming the offending type
        result = '<unrepresentable %s object>' % type(value).__name__
    return result
+
+#===================================================
+#helpers
+#===================================================
def get_sysexit_rc(err):
    """get int return code from SystemExit instance.

    mirrors the interpreter's own exit-code rules: integer codes pass
    through, any other truthy code (eg an error message) maps to 1,
    and a falsy code (None, 0, '') maps to 0.
    """
    code = err.code
    if isinstance(code, int):
        return code
    return 1 if code else 0
+
+#========================================================
+#eof
+#========================================================
diff --git a/bps/fs.py b/bps/fs.py
new file mode 100644
index 0000000..667a333
--- /dev/null
+++ b/bps/fs.py
@@ -0,0 +1,2843 @@
+"""bps3.fs -- filesystem interaction -- (c) Assurance Technologies 2003-2009
+
+See bps documentation for information about this module.
+"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+from collections import deque
+import codecs
+##import mmap
+import stat
+import errno as _errno
+import os, sys, time
+from os.path import curdir as _CURDIR, pardir as _PARDIR, \
+ sep as _SEP #cause we access these A LOT
+import struct
+import sys
+import logging
+log = logging.getLogger(__name__)
+import shutil
+import threading
+from warnings import warn
+import hashlib
+#pkg
+from bps.meta import is_seq
+from bps.types import Undef
+from bps.refs import SoftValueDict
+from bps.warndep import deprecated_method, deprecated_function, relocated_function, deprecated_property
+from bps.error import types as errors
+#module
+
+__all__ = [
+ #filepath
+ "filepath",
+ "is_filepath",
+ "getcwd", "setcwd",
+
+ #deprecated
+ "getFile", "setFile",
+ "curpath",
+
+ #perms
+ 'parse_mode_mask', 'repr_mode_mask',
+ 'chmod',
+ 'setumask', 'getumask',
+
+ #shortcuts
+ 'is_shortcut', 'read_shortcut',
+
+ #misc
+ 'posix_to_local', 'local_to_posix',
+
+ #constants
+ 'os_has_symlinks',
+ 'os_has_shortcuts',
+
+]
+
+#some quick constants
+os_has_symlinks = hasattr(os, "symlink")
+os_has_shortcuts = (os.name == "nt") #note that bps can resolve shortcuts under any os
+
+#all known filetypes ('link' only returned by path.lfiletype, not path.filetype)
+ALL_FILETYPES = [ 'missing', 'link', 'file', 'dir', 'char', 'block', 'fifo', 'socket', 'unknown']
+
+#all filetypes we can set perms on
+PERM_FILESTYPES = ['file', 'dir', 'char', 'block', 'fifo', 'socket']
+
+#=========================================================
+#dry run support
+#=========================================================
+
+#TODO: implement dry run support via a global context manager,
+# such that a single flag in this module can disable all filepath operations.
+_wet_run = True
+
+#=========================================================
+#main functions
+#=========================================================
+_fp_cache = SoftValueDict(expire=300, flush=150) #cache of existing filepaths, keyed by str
+_fp_lock = threading.Lock() #lock for cache access
+
+def filepath(src, *parts):
+ """Return :class:`FilePath` instance representing a local file path.
+
+ Input can be any number of positional arguments,
+ made of a mix of strings and :class:`FilePath` instances.
+ They will all be joined together using the local path separator,
+ and then a :class:`FilePath` instance returned representing that path.
+
+ This function maintains an internal cache, so that successive calls
+ for the same path should result in the same object. Thus, this is
+ the preferred way to create :class:`FilePath` instances.
+
+ Usage::
+
+ >>> from bps3 import *
+ >>> #simple example
+ >>> path = filepath("/home")
+ >>> filepath("/home") is path #multiple calls should return same object
+ True
+ >>> path.isabs
+ True
+ >>> path.listdir()
+ [ 'joe', 'bob', 'sue', 'ftp', 'elic' ]
+ >>> #example with multiple arguments
+ >>> path = filepath(path, "elic", "dev")
+ >>> path
+ '/home/elic/dev'
+
+ .. note::
+ the ``None`` value is treated as a special case,
+ such that ``filepath(None)`` will return ``None`` unchanged.
+
+ .. seealso::
+ :func:`is_filepath`
+ :class:`FilePath`
+ """
+ #join all args together...
+ if parts:
+ src = os.path.join(src, *parts)
+
+ #just return unchanged None or any FilePath instances
+ if src is None or isinstance(src, FilePath):
+ return src
+
+ #coerce to string
+ #FIXME: treat unicode right!
+ if not isinstance(src, str):
+ src = str(src)
+
+ #get instance from cache, or create new one
+ global _fp_cache, _fp_lock
+ _fp_lock.acquire()
+ try:
+ cache = _fp_cache
+ if src in cache:
+ return cache[src]
+ cache.flush() #HACK: forget about old instances so memory doesn't get too big
+ obj = cache[src] = FilePath(src)
+ return obj
+ finally:
+ _fp_lock.release()
+
+#=========================================================
+#main class -- should only be called via filepath() function
+#=========================================================
+class FilePath(str):
+ """Instances of this class represent a path on the host filesystem.
+
+ This class wraps the :class:`str`, and instances of this class should
+ be usable everywhere a string would be. Without breaking compatibility
+ with the string object, this class provides a wide variety of methods
+ and attributes for interacting with the local filesystem.
+
+ Instances should not be created directly from this class,
+ but through the :func:`filepath` constructor function.
+ See that function for more usage examples.
+
+ Unless otherwise noted, attributes which one might expect to return
+ strings will always return :class:`FilePath` instances.
+
+ What follows is all the methods and attributes of this class,
+ grouped by category:
+
+ * :ref:`Path Properties <bps-fs-filepath-path-properties>`
+ * :ref:`Path Components <bps-fs-filepath-path-components>`
+ * :ref:`Derived Paths <bps-fs-filepath-derived-paths>`
+ * :ref:`Filesystem Properties <bps-fs-filepath-filesystem-properties>`
+ * :ref:`Filesystem Manipulation <bps-fs-filepath-filesystem-manipulation>`
+ * :ref:`Symlink Manipulation <bps-fs-filepath-symlink-manipulation>`
+ * :ref:`Directory Manipulation <bps-fs-filepath-directory-manipulation>`
+ * :ref:`File Manipulation <bps-fs-filepath-file-manipulation>`
+
+ ..
+ Also, most attributes can be accessed via a method of the same
+ name as the attribute, just with a "get" prefix, eg ``path.getdir()``
+ instead of ``path.dir``. For attributes which are writeable,
+ there should also be a corresponding ``path.setdir
+
+ Path Properties
+ ===============
+
+ .. attribute:: isnorm
+
+ ``True`` if path is already normalized, otherwise ``False``.
+ This is equivalent to ``path == path.normpath``, but faster.
+ See :func:`os.path.normpath` for details of what normalization entails.
+
+ .. attribute:: isabs
+
+ ``True`` if path is absolute, otherwise ``False`` for relative paths.
+ This wraps :func:`os.path.isabs`.
+
+ .. attribute:: isrel
+
+ ``True`` if path is relative (to current working directory),
+ otherwise ``False``. This will always be the opposite of ``path.isabs``.
+ This wraps :func:`os.path.isabs`.
+
+ .. attribute:: iscanon
+
+ ``True`` if path is the canonical path to a resource, otherwise ``False``.
+ See :attr:`canonpath` for details.
+
+ Path Components
+ ===============
+
+ Every filepath is composed of a number of parts, and the following
+ attributes allow quick access to the desired part of a path's anatomy.
+ A simple diagram of the parts of a path::
+
+ path = "/home/eric/test.png"
+ \________/x\______/
+ | dir | name | path.dir == "/home/eric"
+ | | | # x -- separator not included in dir *or* name
+ | | | path.name == "test.png"
+ | \__/\__/
+ | root ext path.root == "test"
+ | | path.ext == ".png"
+ \_____________/
+ base path.base == "/home/eric/test"
+
+ .. attribute:: dir
+
+ This returns all of the filepath up
+ to (but not including) the last path separator.
+ For example, ``filepath("/home/eric/test.png").dir`` would return ``'/home/eric'``.
+
+ .. attribute:: name
+
+ This returns all the filepath *after* the last path separator.
+ For example, ``filepath("/home/eric/test.png").dir`` would return ``'test.png'``.
+ It should always be true that ``filepath(dir_part,name_part)`` returns the original path.
+
+ .. attribute:: ext
+
+ This returns the filetype extension portion of the *name*.
+ For example, ``filepath("/home/eric/test.png").dir`` would return ``'.png'``.
+
+ .. attribute:: root
+
+ This returns the root part of the *name*, excluding the extension.
+ For example, ``filepath("/home/eric/test.png").root`` would return ``'test'``.
+ It should always be true that ``root_part + ext_part`` returns the original name attribute.
+
+ .. attribute:: base
+
+ This returns the *dir* plus the *root* of the filename.
+ In other words, it returns the full path, but without the file extension (if any).
+ For example, ``filepath("/home/eric/test.png").base`` would return ``'/home/eric/test'``.
+
+ Derived Paths
+ =============
+
+ The following paths can be derived from a given filepath,
+ via common attributes and methods:
+
+ .. autoattribute:: parentpath
+ .. autoattribute:: normpath
+ .. autoattribute:: abspath
+ .. autoattribute:: canonpath
+
+ .. automethod:: getabspath
+ .. automethod:: getrelpath
+ .. automethod:: expand
+ .. automethod:: samepath
+
+ Filesystem Properties
+ =====================
+ .. autoattribute:: exists
+ .. autoattribute:: isfile
+ .. autoattribute:: isdir
+ .. autoattribute:: islink
+ .. autoattribute:: ismissing
+ .. autoattribute:: ismount
+
+ .. autoattribute:: filetype
+ .. automethod:: getfiletype
+
+ .. autoattribute:: atime
+ .. autoattribute:: ctime
+ .. autoattribute:: mtime
+ .. autoattribute:: size
+ .. autoattribute:: linecount
+ .. autoattribute:: dircount
+
+ Filesystem Manipulation
+ =======================
+ .. automethod:: chdir
+ .. automethod:: touch
+
+ .. automethod:: remove
+ .. automethod:: discard
+ .. automethod:: clear
+
+ .. automethod:: copy_to
+ .. automethod:: move_to
+
+ Symlink Manipulation
+ ======================
+ .. autoattribute:: lexists
+ .. autoattribute:: ltarget
+ .. automethod:: mklink
+ .. seealso::
+ :attr:`islink`
+
+ Directory Manipulation
+ ======================
+ .. automethod:: listdir
+ .. automethod:: iterdir
+
+ .. automethod:: mkdir
+ .. automethod:: makedirs
+ .. automethod:: ensuredirs
+ .. automethod:: removedirs
+
+ File Manipulation
+ =================
+ .. automethod:: open
+ .. automethod:: get
+ .. automethod:: set
+
+ .. todo::
+
+ others part of object that aren't officially added yet:
+
+ .. autoattribute:: joinsep
+ .. autoattribute:: splitdir
+ .. autoattribute:: splitext
+
+ .. autoattribute:: md5
+
+ .. autoattribute:: mode
+ .. autoattribute:: modestr
+
+ """
+ #=========================================================
+ #init
+ #=========================================================
+
+ def __init__(self, path):
+ #init attrs
+ str.__init__(self)
+ self._path = path
+
+ #set invariant components
+ self._dir, self._name = os.path.split(self)
+ self._root = os.path.splitext(self._name)[0]
+ self._base, self._ext = os.path.splitext(self)
+ self._normpath = os.path.normpath(self)
+
+ #set invariant properties
+ #TODO: should lock these so they can't be changed publically
+ self.isnorm = (self == self._normpath)
+ self.isabs = os.path.isabs(self)
+ self.isrel = not self.isabs
+
+ def __str__(self):
+ return self._path
+
+ #=========================================================
+ #path properties
+ #=========================================================
+ #NOTE: all these are filled in by the __init__ method
+ #FIXME: these should really be readonly
+ _path = None
+ isnorm = None
+ isabs = None
+ isrel = None
+
+ #is path relative to cwd?
+ def getisrel(self): return self.isrel
+
+ #is path relative to host?
+ def getisabs(self): return self.isabs
+
+ #is path normalized?
+ def getisnorm(self): return self.isnorm
+
+ #is path canonical?
+ def getiscanon(self): return self == self.canonpath
+ iscanon = property(getiscanon)
+
+ #=========================================================
+ #posix/url <-> local separator convention
+ #=========================================================
+ #NOTE: these should only be used for relative paths,
+ #since abs paths may have drive prefix, etc, which won't be translated correctly
+ #commented out until needed
+## if os.path.sep == '/':
+## def posix_to_local(self):
+## return self
+## def local_to_posix(self): #not strictly a "path" anymore
+## return self
+## else:
+## def posix_to_local(self):
+## return filepath(self.replace("/", os.path.sep))
+## def local_to_posix(self): #not strictly a "path" anymore
+## return filepath(self.replace(os.path.sep,"/"))
+
+## if os.path.sep == '/':
+## def to_posix(self):
+## "Returns filepath as a posix-style string"
+## return self._path
+## else:
+## def to_posix(self):
+## "Returns filepath as a posix-style string"
+## return self._path.replace(os.path.sep, "/")
+
+ #=========================================================
+ #path components
+ #=========================================================
+
+ #directory component of path
+ def getdir(self): return filepath(self._dir)
+ dir = property(getdir)
+
+ #name of path inside parent directory
+ def getname(self): return filepath(self._name)
+ name = property(getname)
+
+ #file extension of path
+ def getext(self): return filepath(self._ext)
+ ext = property(getext)
+
+ #root of pathname -- ie, full path sans the file extension
+ def getbase(self): return filepath(self._base)
+ base = property(getbase)
+
+ #root of filename -- ie, basename sans the file extension
+ def getroot(self): return filepath(self._root)
+ root = property(getroot)
+
+ #=========================================================
+ #derived paths
+ #=========================================================
+
+ #parent directory of path
+ #XXX: should this be renamed to parpath / parentpath?
+ def getparentpath(self):
+ """Returns this path's normalized parent directory.
+
+ Unlike the *dir* attribute, which returns the literal
+ portion of the specified path, this attribute attempts
+ to always return a valid path indicating the parent directory,
+ even in cases where that information is not explicitly coded
+ into the path.
+ """
+ path = self._path
+ if not path:
+ return _CURDIR #special case the empty string
+ #just add PARDIR to end and normalize
+ return filepath(os.path.normpath(os.path.join(self._path, _PARDIR)))
+ parentpath = property(getparentpath)
+
+ def _getparent(self):
+ return self.getparentpath()
+ parent = deprecated_property(_getparent, new_name="parentpath")
+
+ #normalized for of path
+ def getnormpath(self):
+ """Returns a normalized version of the path.
+
+ Redundant parts will be consolidated. This function
+ should be idempotent. If the original path is relative,
+ the result should remain relative.
+
+ This is a wrapper for :func:`os.path.normpath`.
+ """
+ return filepath(self._normpath)
+ normpath = property(getnormpath)
+
+ #normalized for case
+ #XXX: should this be shown publically?
+ # it's not really used for much besides canonpath
+ if os.name == "nt":
+ def getnormcase(self):
+ return filepath(os.path.normcase(self))
+ else:
+ def getnormcase(self):
+ return self
+ normcase = property(getnormcase)
+
+ def expand(self, user=None, vars=None, shortcuts=None, symlinks=None, all=False):
+ """expand various substitutions found in filepath, returning the result.
+
+ :param user:
+ Set to ``True`` to expand user home directory refs (``~/``).
+ This option calls :func:`os.path.expanduser`.
+
+ :param vars:
+ Set to ``True`` to expand environmental variables.
+ This option calls :func:`os.path.expandvars`.
+
+ :param shortcuts:
+ Set to ``True`` to resolve windows shortcut (.lnk) files.
+ This options calls :func:`read_shortcut`.
+
+ :param symlinks:
+ Set to ``True`` to expand symlinks.
+ Note that currently this just calls ``self.abspath``,
+ whereas a proper implementation should probably
+ respect ``self.isrel`` when possible.
+
+ :param all:
+ If set to ``True``, all other options
+ which aren't explictly set to ``False``
+ will be enabled. This why the default
+ value for the above keywords is ``None``,
+ allowing for inclusive or exclusive masking.
+ """
+ if all:
+ if user is None:
+ user = True
+ if vars is None:
+ vars = True
+ if shortcuts is None:
+ shortcuts = True
+ if symlinks is None:
+ symlinks = True
+ path = self
+ if vars:
+ path = os.path.expandvars(path)
+ if user:
+ path = os.path.expanduser(path)
+ if shortcuts and os_has_shortcuts:
+ #XXX: shortcuts won't make sense unless under nt,
+ # but we might be running via cygwin or something.
+ # hmm.
+ path = self._expand_shortcuts(path)
+ if symlinks and os_has_symlinks:
+ #we could use realpath() since we're probably under posix,
+ #but realpath won't preserve contents, which we'd like
+ ##path=os.path.realpath(path)
+ path = self._expand_symlinks(path)
+ return filepath(path)
+
+ @staticmethod
+ def _expand_shortcuts(path):
+ "helper for expand()"
+ parts = splitsep(path)
+ path = parts.pop(0)
+ while True:
+ target = read_shortcut(path)
+ if target:
+ #XXX: can user/vars ever be inside target?
+ #insert target parts and restart hunt
+ parts = splitsep(target) + parts
+ path = parts.pop(0)
+ elif parts:
+ #not shortcut, move on to next part
+ path = os.path.join(path, parts.pop(0))
+ else:
+ #nothing more to check
+ return path
+
+ @staticmethod
+ def _expand_symlinks(path):
+ "helper for expand()"
+ parts = splitsep(path)
+ path = parts.pop(0)
+ while True:
+ if os.path.islink(path):
+ #insert target parts and restart hunt
+ parts = splitsep(os.readlink(path)) + parts
+ path = parts.pop(0)
+ elif parts:
+ #not shortcut, move on to next part
+ path = os.path.join(path, parts.pop(0))
+ else:
+ #nothing more to check
+ return path
+
+ def getabspath(self, start=None):
+ """This is the function form of :attr:`abspath`.
+
+ Without arguments, it behaves exactly like the *abspath* attribute.
+ However, it also accepts a single argument, *start*, which
+ specifies an alternate working directory to prepend
+ if the path is relative.
+ """
+ if self.isabs:
+ #even if technically absolute, it might not be normalized
+ if self.isnorm:
+ return self
+ else:
+ return self.normpath
+ elif start is None:
+ #NOTE: output changes relative to CWD
+ return filepath(os.path.abspath(self))
+ else:
+ #NOTE: we wrap in abspath in case base is relative
+ return filepath(os.path.abspath(os.path.join(start, self)))
+ abspath = property(getabspath, None, None,"""
+ This attribute returns a normalized path relative
+ to the root of the filesystem.""")
+
+ #XXX: should this do a hasattr(os.path,"realpath") test instead?
+ #path in host-relative form with all symlinks resolved
+ if os.name == "nt":
+ def getcanonpath(self):
+ "Return absolute & normalized path, but with symlinks unresolved"
+ #no 'realpath' available, so just do our best
+ #NOTE: have to normcase AFTER, in case abspath has mixed-case
+ return filepath(os.path.normcase(os.path.abspath(self)))
+ else:
+ def getcanonpath(self):
+ "Return absolute & normalized path, but with symlinks unresolved"
+ return filepath(os.path.realpath(self))
+ canonpath = property(getcanonpath, None, None, """Returns canonical form of path.
+
+ This attribute returns a normalized path, relative
+ to the root of the filesystem, and attempts to
+ normalize for case (if appropriate), resolve symlinks,
+ and in general return a single "canonical" path.
+
+ .. note::
+
+ If attempting to determine if two paths point
+ to the same resource, use :meth:`sameas` instead,
+ since that function can frequently perform such a check
+ in a more robust and efficient manner.
+
+ .. warning::
+
+ Internally this uses :func:`os.path.realpath` if available.
+ But on platforms where is missing (windows), BPS uses
+ a custom approximation, which may sometimes be fooled by
+ complex cases. Haven't seen it fail, but no guarantees.
+
+ """)
+
+ def getrelpath(self, start=None, strict=False):
+ """Return path relative to specified base (the inverse of abspath).
+
+ This function acts as the inverse of :func:`getabspath`.
+ It returns the path as it would be relative to given directory
+ (which defaults to the current working directory).
+
+ :param start:
+ Directory that result should be relative to.
+ If not specified, the current working directory is used.
+
+ :param strict:
+ If ``True``, *self* must be a subpath of the *start* directory,
+ not rooted in some other directory. If it is not, an error will be raised.
+ When this option is enabled, ``".."`` will never be used
+ when building a relative path.
+ """
+ #TODO: under >=py2.6, we should use os.path.relpath()
+ assert len(os.path.sep) == 1
+ if start is None:
+ base = curpath()
+ else:
+ base = filepath(start)
+ if base[-1] == os.path.sep:
+ base = filepath(base[1:])
+ #accelarate some common cases, before the heavy lifter
+ if self == base:
+ #assume self is a dir, since base is a dir
+ return filepath(_CURDIR)
+ if self.startswith(base):
+ offset = len(base)
+ suffix = self[offset:]
+ if suffix[0] == os.path.sep:
+ return filepath(suffix[1:])
+ #XXX: the algorithm this uses needs more testing,
+ # there may be glitches in it
+ sp = splitsep(self.canonpath)
+ bp = splitsep(base.canonpath)
+ common = []
+ while sp and bp:
+ if sp[0] == bp[0]:
+ common.append(sp.pop(0))
+ bp.pop(0)
+ else:
+ break
+ #NOTE: 'common' should now contain largest common ancestor of the two
+ if bp:
+ if strict:
+ raise ValueError, "path %r not child of base path %r" % (self, base)
+ if os.name == "nt":
+ if not common:
+ #under windows, they may have different drive letters,
+ #at which point there's no relative path...
+ #guess we can just fall back on an absolute path
+ log.warning("no common ancestor between paths: %r %r", self, base)
+ return self
+ #else there should be at least a drive letter in common
+ assert common[0].endswith(":")
+ else:
+ #under posix, common should always be at least '/'
+ assert common and common[0] == '/'
+ #parts of bp & sp were left over, sp is not a child of base,
+ #so we have to add '..' to move back to closest common ancestor
+ sp = [ _PARDIR ] * len(bp) + sp
+ return filepath(os.path.join(*sp))
+ elif sp:
+ #only sp was left over, that part should be relative to basepath
+ return filepath(os.path.join(*sp))
+ else:
+ #paths were the same
+ #NOTE: since base is assumed to be a dir,
+ # and they compared the same, we assume self is a dir as well.
+ return filepath(_CURDIR)
+
+## def get_common_path(self, other):
+## "return the large common path shared with other"
+
+ def contained_in_path(self, other, strict=False):
+ """Check if the path *self* is contained within the path *other*.
+
+ :param other:
+ Candidate parent path.
+
+ :param strict:
+ If ``True``, strict containment is assumed,
+ and the case of self being the same path as other returns ``False``.
+ By default, loose containment is assumed,
+ and this function will report a path as containing itself.
+
+ :returns:
+ Returns ``True`` if the path *self* is a file or directory
+ contained with the directory structure of the path *other*.
+
+
+ This is equivalent to ``shutil.dstinsrc(other,self)``
+ """
+ cur = self.canonpath._path
+ if not cur.endswith(_SEP):
+ cur += _SEP
+ other = filepath(other).canonpath._path
+ if not other.endswith(_SEP):
+ other += _SEP
+ if other == cur:
+ return not strict
+ return cur.startswith(other)
+
+ #XXX: should this be renamed same_as_path() to match contained_in_path()?
+ if os.name == "nt":
+ def samepath(self, other):
+ if other is None:
+ return False
+ return self.canonpath == filepath(other).canonpath
+ else:
+ def samepath(self, other):
+ if other is None:
+ return False
+ other = filepath(other)
+ if self.exists and other.exists:
+ return os.path.samefile(self, other)
+ else: #samefile doesn't work if paths don't exist
+ #NOTE: this hopes that realpath() will resolve what symlinks it can
+ warn("path.samepath may not be reliable if paths don't exist", RuntimeWarning)
+ return self.canonpath == other.canonpath
+ samepath.__doc__ = """compare if two paths refer to the same resource (ie, same canonical path).
+
+ This function performs a similar role to :func:`os.samefile`,
+ and is also similar to comparing ``self.canonpath == other.canonpath``,
+ but attempts this function to work around any cases where those two options
+ would not give correct results (for example, samefile
+ cannot handle non-existent paths).
+ """
+
+ #=========================================================
+ #split path into various pairs
+ #=========================================================
+ #NOTE: this aren't officially listing in documentation yet, still deciding about them
+
+ #would have named this "split", but that's reserved for str.split()
+ #XXX: would splitsep be a better name?
+ def splitdir(self, full=False):
+ if full:
+ #they want a list of ALL the parts
+ return [ filepath(elem) for elem in splitsep(self) ]
+ else:
+ #they just want topmost name split off
+ return self.dir, self.name
+
+## @deprecated_method("splitsep")
+## def splitdir(self, *a, **k):
+## return self.splitsep(*a, **k)
+
+ def splitext(self): return self.base, self.ext
+
+ if os.name == "nt":
+ def splitdrive(self):
+ drive, tail = os.path.splitdrive(self)
+ return filepath(drive), filepath(tail)
+ else:
+ def splitdrive(self): return filepath(""), self
+
+ if os.name == "nt":
+ def splitunc(self):
+ unc, tail = os.path.splitunc(self)
+ return filepath(unc), filepath(tail)
+ else:
+ def splitunc(self): return filepath(""), self
+
+
+ #=========================================================
+ #path tree traversal
+ #=========================================================
+ #path joining via separator
+
+ #would have named this "join", but that's reserved for str.join()
+ def joinsep(self, *paths):
+ """Returns a new path made of this path joined with the additional *paths*
+
+ This is the method called by the division operator.
+ """
+ if not paths:
+ return self
+ return filepath(os.path.join(self, *paths))
+
+ def joinfmt(self, txt, *args):
+ if isinstance(txt, (list,tuple)):
+ txt = os.path.join(*txt)
+ return filepath(os.path.join(self, txt % args))
+ #TODO: should this be deprecated?
+ #TODO: should __mod__ be implemented?
+
+ #divison is the same as a joinsep
+ def __div__(self, other):
+ if isinstance(other, (list,tuple)):
+ if len(other) == 0:
+ return self
+ return filepath(os.path.join(self, *other))
+ else:
+ return filepath(os.path.join(self, other))
+
+ __truediv__ = __div__
+
+ #addition is the same as a norm str add
+ def __add__(self, other):
+ if other is None:
+ return self
+ elif isinstance(other, PathType):
+ return filepath(self._path + other._path)
+ elif isinstance(other, str):
+ return filepath(self._path + other)
+ else:
+ return str.__add__(self, other)
+
+ #addition is the same as a norm str add
+ def __radd__(self, other):
+ if other is None:
+ return self
+ elif isinstance(other, PathType):
+ return filepath(other._path + self._path)
+ elif isinstance(other, str):
+ return filepath(other + self._path)
+ else:
+ return str.__radd__(self, other)
+
+## def getischdir(self):
+## "does this point to the current working directory?"
+## return self.samepath(os.getcwd())
+## ischdir = property(getischdir)
+
+
+ #=========================================================
+ #filesystem properties
+ #=========================================================
+
+ def getexists(self):
+ "return ``True`` if path exists"
+ return os.path.exists(self)
+ exists = property(getexists)
+
+ def getismissing(self):
+ "return ``True`` if path does not exist, otherwise ``False``"
+ return not os.path.exists(self)
+ ismissing = property(getismissing)
+
+ ##def getisbroken(self): return self.ismissing and self.islink
+
+ def getisfile(self):
+ "return ``True`` if path is a file, otherwise ``False``"
+ return os.path.isfile(self)
+ isfile = property(getisfile)
+
+ def getisdir(self):
+ "return ``True`` if path is a directory, otherwise ``False``"
+ return os.path.isdir(self)
+ isdir = property(getisdir)
+
+ def getislink(self):
+ "return ``True`` if path is a symbolic link, otherwise ``False``"
+ return os.path.islink(self)
+ islink = property(getislink)
+
+ #is path a mountpoint?
+ def getismount(self):
+ """``True`` if path is a mountpoint, otherwise ``False``.
+ This wraps :func:`os.path.ismount`.
+ """
+ return os.path.ismount(self)
+ ismount = property(getismount)
+
+ def getfiletype(self, followlinks=True):
+ """return a string identifying the type of the file.
+
+ :param followlinks:
+ If ``True`` (the default), symlinks will be dereferenced,
+ and the filetype of their target will be reported.
+ If ``False``, symlinks will be reported as links,
+ and not dereferenced.
+
+ The possible values that may be returned:
+
+ ============ =====================================================
+ Value Meaning
+ ------------ -----------------------------------------------------
+ link [Only if ``followlinks=False``] Path is a symlink (whether broken or not).
+
+ missing Path doesn't exist. (Or ``followlinks=True`` and path is a broken symlink).
+
+ file Path is a regular file.
+
+ dir Path is a directory.
+
+ char [POSIX only] Path is a character device.
+
+ block [POSIX only] Path is block device.
+
+ fifo [POSIX only] Path is a FIFO device (aka named pipe).
+
+ socket [POSIX only] Path is a socket file.
+
+ unknown Can't recognize type of file this path points to.
+ This should generally never happen, probably means
+ BPS lacks tests for some type of file on your OS.
+ ============ =====================================================
+ """
+ if not followlinks and self.islink:
+ return "link"
+ #NOTE: for efficiency, this bypassing os.path.isxxx() tests and reads st_mode directly
+ try:
+ mode = os.stat(self).st_mode
+ except OSError, err:
+ if err.errno == 2: #no such file/dir
+ return "missing"
+ raise
+ if stat.S_ISDIR(mode):
+ return "dir"
+ elif stat.S_ISREG(mode):
+ return "file"
+ elif stat.S_ISCHR(mode): #posix character device (usually found in /dev)
+ return "char"
+ elif stat.S_ISBLK(mode): #posix block device (usually found in /dev)
+ return "block"
+ elif stat.S_ISFIFO(mode): #posix fifo device (named pipe)
+ return "fifo"
+ elif stat.S_ISSOCK(mode): #unix socket
+ return "socket"
+ else:
+ #FIXME: this is probably a sign that the code missed testing for something,
+ # any occurrences should be remedied...
+ log.critical("unknown filetype encountered: path=%r st_mode=%r", self, mode)
+ return "unknown"
+
+ filetype = property(getfiletype, None, None,
+ """Indicates the type of resource located at path (file, dir, etc),
+ after symlinks have be dereferenced. (See :meth:`getfiletype` for possible values)""")
+
+ def getlfiletype(self):
+ return self.getfiletype(followlinks=False)
+ lfiletype = property(getlfiletype, None, None,
+ """Indicates the type of resource located at path (file, dir, etc),
+ does *not* dereference symbolic links.""")
+
+ #creation time
+ def getctime(self):
+ "returns the time when file was created"
+ return errors.adapt_os_errors(os.path.getctime, self)
+ ctime = property(getctime)
+
+ #last modify time -- not used for nt dirs
+ def getmtime(self):
+ "returns the time when file was last modified"
+ return errors.adapt_os_errors(os.path.getmtime, self)
+ def setmtime(self, time):
+ return os.utime(self,(self.atime, time))
+ mtime = property(getmtime, setmtime, None,
+ "returns time when file was last modified, can be written to override")
+
+ #last accessed time
+ def getatime(self):
+ "returns the time when file was last accessed"
+ return errors.adapt_os_errors(os.path.getatime, self)
+ def setatime(self, time): return os.utime(self,(time, self.mtime))
+ atime = property(getatime, setatime, None,
+ "returns the time when file was last accessed, can be written to override")
+
+ #get size in bytes (only for applies to files)
+ def getsize(self):
+ "returns the size of the file located at path, in bytes"
+ #FIXME: if self.filetype == "block", getsize reports 0, we have to open/seek(0,2)/tell/close to get real size
+ return errors.adapt_os_errors(os.path.getsize, self)
+ size = property(getsize)
+
+ #=========================================================
+ #filesystem manipulation
+ #=========================================================
+
+ #XXX: rename to setcwd() or something more explicit?
+ def chdir(self):
+ "set this path as the current working directory"
+ os.chdir(self)
+
def touch(self, value=None):
    """update atime and mtime, equivalent to the unix ``touch`` command.

    :param value:
        * ``None`` (the default) -- atime & mtime are set to the current time.
        * a single number -- atime & mtime are both set to that value.
        * a pair of numbers -- interpreted as ``(atime, mtime)``.

    To set atime or mtime individually, write to the corresponding
    filepath attribute instead.
    """
    if isinstance(value, (int, float)):
        times = (value, value)
    else:
        #value is either None (current time) or an (atime, mtime) pair
        times = value
    os.utime(self, times)
+
+ #XXX: naming this 'truncate' would fit unix better, but 'clear' fits python better
+ def clear(self):
+ """empties directories, truncates files, raises error if missing or not file/dir.
+
+ Unlike :meth:`remove`, this will leave directory / file which it was called on,
+ it will only delete the contents.
+
+ .. warning::
+ This will recursively delete all of a directory's contents, no questions asked.
+ So be warned, it will do what you told it to do.
+ """
+ if self.isdir:
+ #purge everything from directory, but leave dir itself
+ for root, dirs, files in os.walk(self, topdown=False):
+ for name in files:
+ os.remove(os.path.join(root, name))
+ for name in dirs:
+ os.rmdir(os.path.join(root, name))
+ elif self.isfile:
+ #open file and wipe it
+ fh = self.open("wb")
+ fh.close()
+ elif self.ismissing:
+ raise errors.MissingPathError(filename=self)
+ else:
+ raise NotImplementedError, "filetype not supported: %r" % (self.filetype,)
+
def remove(self, recursive=True, parents=False, ensure=False):
    """remove file or directory if it exists, raise error if missing.

    if passed a directory, contents will be recursively removed first.

    if path is missing, ``OSError(2,"No such file or directory")`` will be raised.

    :param recursive:
        If set to ``False``, this will stop being recursive,
        and act just like :func:`os.remove` or :func:`os.rmdir`.

    :param parents:
        If set to ``True``, removes any intermediate directories
        up to and including highest directory in path,
        or highest non-empty directory, whichever comes first.

    :param ensure:
        By default, :exc:`bps.error.types.MissingPathError` will be raised
        if the path does not exist. However, if ensure is set to ``True``,
        this function will silently return without errors.

    :returns: ``True`` if something was removed, ``False`` if ``ensure``
        was set and the path was already missing.

    .. warning::
        This will recursively delete the directory and all it's contents,
        no questions asked. So be warned, it will do what you told it to do.

    Filesystem calls this can replace:

    * ``os.remove()``, ``os.unlink()``, ``os.rmdir()`` can be replaced with ``path.remove(recursive=False)``.
    * ``shutil.rmtree()`` can be replaced with ``path.remove()``.
    * ``os.removedirs()`` can be replaced with ``path.remove(recursive=False, parents=True)``.

    """
    #TODO: could support onerror="raise", "ignore", callable() around each remove all
    try:
        if ensure and not self.lexists:
            #path already gone: still honor parents= before reporting no-op
            if parents:
                self._remove_parents()
            return False
        if self.islink or not self.isdir:
            #symlinks are removed as links, never followed into
            os.remove(self)
        else:
            if recursive:
                #bottom-up walk so directories are empty by the time rmdir sees them
                for root, dirs, files in os.walk(self, topdown=False):
                    for name in files:
                        os.remove(os.path.join(root, name))
                    for name in dirs:
                        os.rmdir(os.path.join(root, name))
            os.rmdir(self)
        if parents:
            self._remove_parents()
        return True
    except OSError, err:
        #translate raw os errors into the richer bps error hierarchy where possible
        new_err = errors.translate_os_error(err)
        if new_err:
            raise new_err
        log.warning("unmanaged os error: %r", err)
        raise
+
def _remove_parents(self):
    "helper to remove empty parent directories, stopping at the first non-empty (or missing) one"
    path = self.dir
    while path:
        if path.exists:
            try:
                os.rmdir(path)
            except OSError, err:
                #a non-empty parent ends the climb; anything else is a real error
                if err.errno == _errno.ENOTEMPTY:
                    break
                raise
        path = path.dir
+
def discard(self, recursive=True, parents=False):
    "call :meth:`remove` with ``ensure=True``: returns ``True`` if path existed and was removed, ``False`` if it was already missing"
    return self.remove(recursive=recursive, parents=parents, ensure=True)
+
+ #=========================================================
+ #symlink manipulation
+ #=========================================================
def getlexists(self):
    #NOTE: original docstring claimed symlinks were resolved; os.path.lexists
    # is precisely the variant that does NOT resolve them.
    "return ``True`` if path exists, *without* resolving symlinks (a broken symlink counts as existing)"
    return os.path.lexists(self)
lexists = property(getlexists)
+
def mklink(self, target, force=False, relative=None):
    """create a symbolic link at *self*, which points to *target*.

    :param target: path the new symlink will point to
    :param force: if ``True``, any existing path at *self* is removed first
    :param relative: if true, *target* is rewritten relative to *self*'s parent directory

    ``os.symlink(src,dst)`` can be replaced by ``dst.mklink(src)``
    """
    #TODO: could add a "hard=True" option for making a hard link
    if force and self.lexists:
        self.remove()
    if relative:
        target = filepath(target).getrelpath(self.parentpath)
    return os.symlink(target, self)
+
def getltarget(self):
    #implicitly returns None for non-symlinks
    if self.islink:
        return os.readlink(self)
def setltarget(self, target):
    if self.islink: #this lets ltarget behave like a writable attr
        self.remove()
    self.mklink(target)
ltarget = property(getltarget, setltarget, None,
    """Returns target if path is a symlink, else ``None``.

    If written to, creates symlink at path, pointing to target.
    """)
+
+ #=========================================================
+ #directory manipulation
+ #=========================================================
def listdir(self, full=False, hidden=True):
    """return a list of the directory's contents.

    By default the entries are bare names, without the directory
    path prepended.

    :param full: if ``True``, each entry has the directory path prepended.
    :param hidden: if ``False``, entries starting with ``.`` are filtered out.

    Example usage::

        >>> filepath("/home").listdir()
        [ 'joe', 'sue', 'ftp', 'elic' ]
        >>> filepath("/home").listdir(full=True)
        [ '/home/joe', '/home/sue', '/home/ftp', '/home/elic' ]

    .. note::
        For large directories, :meth:`iterdir` avoids building the
        whole list; :attr:`dircount` gives a cheap entry count.
    """
    entries = os.listdir(self)
    if not hidden:
        entries = [name for name in entries if not name.startswith(".")]
    if full:
        return [filepath(os.path.join(self, name)) for name in entries]
    return [filepath(name) for name in entries]
+
def iterdir(self, full=False, hidden=True):
    """return an iterator over the directory's contents.

    Same interface as :meth:`listdir`, but entries are yielded lazily,
    which is preferable for very large directories.

    :param full: if ``True``, each entry has the directory path prepended.
    :param hidden: if ``False``, entries starting with ``.`` are skipped.

    .. note::
        The underlying iteration backend is chosen per-platform
        (libc opendir via ctypes, win32file, or an os.listdir fallback);
        check ``bps3.fs.iterdir_version`` to see which one is active.
    """
    #call the module-level iterdir() backend eagerly, so a bad path
    #raises here rather than on first iteration
    source = iterdir(self)
    if not hidden:
        source = (name for name in source if not name.startswith("."))
    if full:
        return (filepath(os.path.join(self, name)) for name in source)
    return (filepath(name) for name in source)
+
def mkdir(self, mode=None, parents=False, ensure=False, force=False):
    """create a directory at this path.

    :param mode: permission mode for the new directory (defaults to ``0777``)
    :param parents: if ``True``, create intermediate directories as needed
    :param ensure: if ``True``, silently return ``False`` when the directory already exists
    :param force: if ``True``, remove any existing non-directory at this path first
    :returns: ``True`` if a directory was created, ``False`` if it already existed
    :raises bps.error.types.PathExistsError:
        if the path exists, is not a directory, and ``force`` is not set
    """
    if (force or ensure) and self.isdir:
        #TODO: set mode iff it's not None
        return False
    if self.lexists:
        if force:
            self.remove()
        else:
            raise errors.PathExistsError(strerror="Make Dir: target path already exists (found %s)" % self.lfiletype, filename=self)
    if mode is None:
        mode = 0777
    #TODO: add support for symbolic modes (see bps3.host.utils)
    if parents:
        os.makedirs(self, mode)
    else:
        os.mkdir(self, mode)
    return True
+
#XXX: not sure about deprecating these two,
# they're so commonly used in their own right
##    @deprecated_method("self.mkdir(parents=True)")
def makedirs(self, mode=0777):
    "create directory, including any missing parents (shortcut for ``mkdir(mode=mode, parents=True)``)"
    self.mkdir(mode=mode, parents=True)
+
##    @deprecated_method("self.mkdir(parents=True, ensure=True)")
def ensuredirs(self):
    "ensure full directory path is present, creating it if needed (shortcut for ``mkdir(parents=True, ensure=True)``)"
    return self.mkdir(parents=True, ensure=True)
+
def getdircount(self):
    "return number of entries in directory"
    if self.ismissing:
        raise errors.MissingPathError(filename=self)
    elif not self.isdir:
        raise errors.ExpectedDirError(filename=self)
    return countdir(self) #this is part of iterdir backend system
dircount = property(getdircount, None, None, """
    Returns the number of files in the immediate directory.

    This is the equivalent of ``len(self.listdir())``,
    except performed much more efficiently where possible.
    """)
+
def walk(self, relroot=False, topdown=True, followlinks=False, onerror=None): ##, dirfilter=None, filefilter=None, pathfilter=None):
    """wrapper for os.walk, yielding ``(root, dirs, files)`` with *root* as a filepath.

    :param relroot: if true, yielded roots are made relative to this path
    :param topdown: passed through to :func:`os.walk`
    :param followlinks: passed through to :func:`os.walk` (requires python >= 2.6)
    :param onerror: passed through to :func:`os.walk`
    :raises bps.error.types.ExpectedDirError: if this path is not a directory
    """
    #TODO: document relroot
    if not self.isdir:
        raise errors.ExpectedDirError("can't walk over a %s" % self.filetype, filename=self)
    if relroot:
        #precompute prefix to strip from each yielded root
        root_prefix = self._path
        if not root_prefix.endswith(_SEP):
            root_prefix += _SEP
        root_prefix_len = len(root_prefix)
    if sys.version_info < (2, 6):
        #os.walk grew the followlinks kwd in 2.6
        if followlinks:
            raise NotImplementedError, "os.walk for python < 2.6 doesn't support followlinks, and BPS doesn't have a custom implementation (yet)"
        walker = os.walk(self, topdown=topdown, onerror=onerror)
    else:
        walker = os.walk(self, topdown=topdown, followlinks=followlinks, onerror=onerror)
    for root, dirs, files in walker:
        if relroot:
            if root == self._path:
                root = _CURDIR
            else:
                assert root.startswith(root_prefix)
                root = root[root_prefix_len:]
        root = filepath(root)
        yield root, dirs, files
+
+## def itertree(self, followlinks=False, dirfilter=None, filefilter=None, pathfilter=None):
+## """walk over tree rooted at this path, yielding every entry in top-down manner.
+##
+## .. todo::
+##
+## * document this function's behavior, usage, comparison to os.walk
+## * enhance filter options to accept globs, regexps?
+## * a non-iterative version?
+## * bottom-up mode?
+## * onerror support?
+## """
+## #XXX: would like to expose this publically, but is design sound?
+## #compared to os.walk approach...
+## # pro: can handle large dirs iteratively, better for scanning
+## # pro: can be aborted faster
+## # con: doesn't present useful dir/file lists to user, or root directory
+## # fix: could have "relative" mode where child paths are returned relative to base.
+## if not self.isdir:
+## raise errors.ExpectedDirError("must walk over a directory, not a %s" % self.filetype, filename=self)
+#### if followlinks or dirfilter or filefilter or pathfilter:
+## stack = deque([self])
+## while stack:
+## path = stack.popleft()
+## for child in path.iterdir(full=True):
+## if filefilter and not filefilter(child.name):
+## continue
+## if pathfilter and not pathfilter(child):
+## continue
+## yield child
+## if child.isdir and (followlinks or not child.islink):
+## if dirfilter and not dirfilter(child):
+## continue
+## stack.append(child)
+#### else:
+#### #accel the common case of no filters and not followlinks
+#### stack = deque([self])
+#### while stack:
+#### path = stack.popleft()
+#### for child in path.iterdir(full=True):
+#### yield child
+#### if child.isdir and not child.islink:
+#### stack.append(child)
+
+ #=========================================================
+ #reading/writing files
+ #=========================================================
+ #2009-02-24: 'form' option was removed from open(), it was probably NEVER used
+
def open(self, mode="rb", encoding=None):
    """wrapper for the builtin file() constructor, with optional encoding support.

    :param mode: file mode, as accepted by the builtin ``file()``
    :param encoding: if given, the file is opened via :func:`codecs.open` instead
    :returns: an open file-like object
    """
    #XXX: should add 'errors' kwd to pass to codecs.open
    #XXX: should add 'buffering' kwd to pass to both open calls.
    if encoding:
        return codecs.open(self._path, mode, encoding)
    else:
        return file(self._path, mode)
+
+## def mmap(self, mode="rb"):
+## #NOTE: we could allow rwb / wb+ as well
+## if mode not in ["rb", "wb"]:
+## raise ValueError, "mode not supported: %r" % (mode,)
+## fh = self.open(mode)
+## try:
+## if mode == 'rb':
+## access = mmap.ACCESS_READ
+## elif mode == 'wb':
+## access = mmap.ACCESS_READ | mmap.ACCESS_WRITE
+## return mmap.mmap(fh.fileno(), 0, access=access)
+## finally:
+## fh.close()
+
def get(self, default=Undef, encoding=None, text=False):
    """Helper to quickly get contents of a file as a single string.

    :param default:
        optional default value if file does not exist.
        if not set, missing files will cause an IO Error.

    :param text:
        Set to ``True`` to indicate file should be decoded as text,
        using universal newline support. Otherwise, binary mode is used.

    :param encoding:
        Optionally decode contents using specified codec.
        Automatically enables ``text=True``.
    """
    #XXX: what encoding should be used if text=True and encoding=None?
#    If path is a directory, a ``\n`` separated list of the directory
#    contents will be returned.
##    if self.isdir:
##        return "\n".join(self.listdir())

    #TODO: rewrite to use open's encoding kwd (make sure newline works right)
    if encoding:
        text = True
    if text:
        fh = self.open("rU")
    else:
        fh = self.open("rb")
    try:
        try:
            content = fh.read()
        except IOError:
            #only swallow read errors when caller provided a fallback
            if default is Undef:
                raise
            else:
                log.warning("supressed error while reading file: %r", self, exc_info=True)
                return default
    finally:
        fh.close()
    if encoding:
        #decode after the full read, once the handle is closed
        content = content.decode(encoding)
    return content
+
def getmd5(self):
    "return md5 hex digest of the file's contents"
    return hashlib.md5(self.get()).hexdigest()
md5 = property(getmd5)
+
+## def getsha1(self):
+## "return sha1 hex digest of file"
+## return hashlib.sha1(self.get()).hexdigest()
+## sha1 = property(getsha1)
+
+## def getsha256(self):
+## "return sha256 hex digest of file"
+## return hashlib.sha256(self.get()).hexdigest()
+## sha256 = property(getsha256)
+
def set(self, value, encoding=None):
    """quickly set the contents of a file from a string.

    :param value: string to write to file (``None`` is treated as an empty string)
    :param encoding: optional codec used to encode *value* before writing

    The file is always written in binary mode: when *encoding* is given,
    *value* is encoded exactly once and the resulting bytes are written out.
    """
    #FIXME: if value is None, do we write empty file, remove it, or error?
    if value is None:
        value = ''
    #TODO: rewrite to use open's encoding kwd
    if encoding:
        #BUGFIX: previously this encoded value AND opened the stream via
        # codecs.open(..., "w", encoding), which re-encoded the already-encoded
        # bytes -- raising UnicodeDecodeError for any non-ascii content.
        # Encode once, then write the raw bytes in binary mode.
        value = value.encode(encoding)
    fh = self.open("wb")
    try:
        return fh.write(value)
    finally:
        fh.close()
+
+ #NOTE: this is experimental, may remove it in future
+ def getlinecount(self, newline=None):
+ "return number of lines in file"
+ if self.isdir:
+ warn("you should use .dircount for directories, using .linecount for directories is deprecated!", DeprecationWarning, stacklevel=2)
+ return self.dircount
+ if newline is None:
+ #count all possible newlines
+ #XXX: is there a more efficient way to do this?
+ try:
+ fh = self.open("rU")
+ except IOError, err:
+ if err.errno == _errno.ENOENT:
+ raise errors.MissingPathError(_errno.ENOENT, filename=self)
+ raise
+ try:
+ count = 0
+ for row in fh:
+ count += 1
+ return count
+ finally:
+ fh.close()
+ else:
+ #count just the newline type specified
+ assert newline in ("\n", "\r", "\r\n")
+ #XXX: is there a more efficient way to do this?
+ return self.get().count(newline)
+
+ linecount = property(getlinecount, None, None, """
+ Returns the number of lines in the file.
+
+ This function the file was to be opened in text mode,
+ with universal newlines enabled.
+ """)
+
+ #=========================================================
+ #shell utils
+ #=========================================================
+
def _norm_cm_target(self, prefix, target, mode, force):
    """helper used by copy_to / move_to: validate inputs and resolve *target*
    down to an "exact" destination path.

    :param prefix: operation name ("Copy" or "Move"), used in error messages
    :param target: raw destination path supplied by the caller
    :param mode: one of "exact", "child", "smart" (see copy_to docs)
    :param force: if true, an existing destination is tolerated here
    :returns: the normalized destination as a filepath
    """

    #validate mode
    if mode not in ("exact", "child", "smart"):
        raise ValueError, "unknown %s_to mode: %r" % (prefix.lower(), mode,)

    #check source is present (include broken links)
    if not self.lexists:
        raise errors.MissingPathError(strerror=prefix + " File: Source path not found", filename=self)
    target = filepath(target)

    #detect if we're copying/moving source INTO target dir
    if mode == "child" or mode == "smart":
        if target.ismissing:
            if mode == "child":
                raise errors.MissingPathError(strerror=prefix + " File: Target directory not found", filename=target)
            else:
                assert mode == "smart"
                mode = "exact"
        elif target.isdir:
            #existing dir: place source inside it under its own name
            target = target / self.name
            mode = "exact"
        elif mode == "smart":
            mode = "exact"
        else:
            raise errors.ExpectedDirError(strerror=prefix + " File: Target path is not a directory (found %s)" % target.filetype, filename=target)

    #we should now be in "exact" mode, check that target is missing, but parent dir exists
    assert mode == "exact"
    if target.lexists and not force:
        raise errors.PathExistsError(strerror=prefix + " File: Target path already exists (found %s)" % target.lfiletype, filename=target)
    pt = target.parentpath.filetype
    if pt == "missing":
        #XXX: could have flag to create parents if needed
        raise errors.MissingPathError(strerror=prefix + " File: Target path's parent directory not found", filename=target)
    elif pt != "dir":
        raise errors.ExpectedDirError(strerror=prefix + " File: Target path's parent not a directory (found %s)" % pt, filename=target)

    return target
+
def _copy_helper(self, target, preserve, followlinks, root=None, force=False):
    """helper for copy_to / move_to which copies self -> target directly.

    Recurses into directories; *root* (if set) marks the subdirectory of the
    source that the copy is landing in, so it can be skipped to avoid
    copying the destination into itself.
    """
##    log.debug("copy helper: %r => %r", self, target)
    if self.islink:
        if not followlinks:
            #reproduce the link itself rather than its contents
            target.mklink(self.ltarget, force=force)
            return
    elif self.ismissing:
        log.warning("not copying broken link: %r => %r", self, target)
        return #we were probably called recursively, just skip this broken link
    if self.isdir:
        target.mkdir(ensure=True, force=force)
        for name in self.iterdir():
            if root and target.samepath(root): #in case we're copying into source directory
                continue
            (self/name)._copy_helper(target/name, preserve, followlinks, root, force)
    elif force:
        try:
            shutil.copyfile(self, target)
        except IOError, err:
            #e.g. unwritable existing target: remove it and retry once
            log.debug("got IOError copying file, removing target first (self=%r target=%r)", self, target, exc_info=True)
            target.remove()
            shutil.copyfile(self, target)
    else:
        if target.lexists: #shouldn't really get here if force=False
            raise errors.PathExistsError(strerror="Target path already exists (a %s)" % target.lfiletype, filename=target)
        shutil.copyfile(self, target)
    if preserve:
        if preserve == "mode":
            shutil.copymode(self, target)
        else:
            assert preserve == "all"
            shutil.copystat(self, target)
+
+ #TODO: update=True kwd to cause cp --update mode,
+ # copies when dest is missing or older mtime
+ def copy_to(self, target, mode="exact", preserve=None, followlinks=False, force=False):
+ """Copy file/dir to a different location.
+
+ This function wraps the :mod:`shutil` copy methods (copyfile, copy, etc),
+ and attempts to unify their behavior under one function.
+
+ :arg self: source path to copy from
+
+ :arg target: target path we're copying to (affected by copy mode, above)
+
+ :param mode:
+ The mode controls how *self* and *target* are interpreted when performing the copy operation.
+ The following values are accepted:
+
+ =============== ====================================================
+ mode description
+ --------------- ----------------------------------------------------
+ ``"exact"`` By default, a copy of *self* will be made
+ located exactly at the path named *target*,
+ not as a child of *target* or any other heuristic
+ method. (Directories will be copied
+ recursively).
+
+ ``"child"`` In this case, target must be an (existing)
+ directory, and self will be copied to
+ ``target / self.name``.
+ (Directories will be copied recursively).
+
+ ``"smart"`` This enables a heuristic algorithm which attempts to
+ "do the right thing" based on whether self and
+ target exist, and what type of path they point to.
+ The exact behavior of this mode is detailed in the
+ next table.
+ =============== ====================================================
+
+ Under smart mode, the decision about what should be copied will be
+ made according to the filetype of *self* and *target*,
+ as listed in the following table:
+
+ ============= =============== ========================================
+ self.filetype target.filetype resulting action
+ ------------- --------------- ----------------------------------------
+ dir missing target is created as dir,
+ and contents are copied from self
+ into target.
+
+ dir file :exc:`bps.error.types.PathExistsError`
+ raised.
+
+ dir dir contents of self are copied into target.
+
+ file missing target is created as file inside
+ the directory ``target.parentpath``.
+
+ file file :exc:`bps.error.types.PathExistsError`
+ raised.
+
+ file dir file named ``target/self.name``
+ is created.
+ ============= =============== ========================================
+
+ :param preserve:
+ * If set to ``None`` (the default), no metadata is preserved.
+ * If set to ``"mode"``, only the permission mode will be preserved.
+ * If set to ``True``, the file timestamps, ownership, and mode
+ will be preserved.
+ * If set to ``"all"``, all possible metadata will be preserved.
+ This is currently the same as ``True``, but may include
+ other metadata in the future. (the goal is to eventually
+ match the unix cp command).
+
+ :type preserve: bool or str
+
+ :param followlinks:
+ Boolean flag controlling whether symlinks should be deferenced.
+ If ``False`` (the default), symlinks will be copied directly.
+ If ``True``, symlinks will be dereferenced, and their contents copied.
+
+ :param force:
+ If set to ``True``, and the target exists, it will be removed if it can't be opened.
+ This operation is not atomic (except when the target is a file).
+ By default (``False``), a :exc:`bps.error.types.PathExistsError` will usually be raised
+ if the target already exists.
+ """
+ #TODO: support more of the unix cp command's options, such as the backup behaviors.
+ #TODO: support more of cp command's "preserve" options.
+ if preserve is True: #equiv to cp -p ... mode,ownership,timestamps
+ preserve = "all" #whereas 'all' may one day encompass some other things
+ ##elif preserve is False or preserve == '':
+ ## preserve = None
+ if preserve not in (None, "all", "mode"):
+ raise ValueError, "unknown preserve value: %r" % (preserve,)
+ target = filepath(target)
+
+ #check copy semantics
+ if self.samepath(target):
+ raise ValueError, "Copy File: cannot copy path %r to self %r" % (self, target)
+ elif target.contained_in_path(self):
+ root = target
+ else:
+ root = None
+
+ #normalize target & validate inputs based on options
+ target = self._norm_cm_target("Copy", target, mode, force)
+
+ #check copy semantics again (in case mode caused target to change)
+ if self.samepath(target):
+ raise ValueError, "Copy File: cannot copy path %r to self %r" % (self, target)
+
+ target.parentpath.mkdir(parents=True, ensure=True)
+ self._copy_helper(target, preserve=preserve, followlinks=followlinks, root=root, force=force)
+
def move_to(self, target, mode="exact", force=False):
    """Move file/dir to a different location.

    This function wraps the :mod:`shutil` move method,
    and attempts to provide an interface similar to :meth:`copy_to`.

    :param self: source path to move from

    :param target: target path we're moving to (affected by move mode, above)

    :type mode: str
    :param mode:
        The mode controls how *self* and *target* are interpreted when
        performing the move operation.
        The following values are accepted:

        =============== ====================================================
        mode            description
        --------------- ----------------------------------------------------
        ``"exact"``     By default, *self* will be moved to exactly
                        the path named *target*, not as a child of *target*
                        or any other heuristic method.

        ``"child"``     In this case, target must be an (existing)
                        directory, and self will be moved to
                        ``target / self.name``.

        ``"smart"``     This enables a heuristic algorithm which attempts to
                        "do the right thing" based on whether self and
                        target exist, and what type of path they point to.
                        The exact behavior of this mode is detailed in the
                        next table.
        =============== ====================================================

        Under smart mode, the decision about where self should be moved will
        be made according to the path type of *self* and *target*,
        as listed in the following table:

        ============= =============== ========================================
        self.filetype target.filetype resulting action
        ------------- --------------- ----------------------------------------
        dir           missing         self is moved to a path exactly matching
                                      the target.

        dir           file            :exc:`bps.error.types.PathExistsError`
                                      is raised (see *force*)

        dir           dir             dir named ``target/self.name`` is
                                      created.

        file          missing         self is moved to a path exactly matching
                                      the target.

        file          file            :exc:`bps.error.types.PathExistsError`
                                      is raised (see *force*)

        file          dir             file named ``target/self.name``
                                      is created.
        ============= =============== ========================================

    :param force:
        If set to ``True``, and the target exists, it will be removed first.
        This operation is not atomic (except under unix, when the target is a file).
        By default (``False``), a :exc:`bps.error.types.PathExistsError` will usually be raised
        if the target already exists.

    Filesystem calls this can replace:

    * ``os.rename(src,dst)`` can be approximated with ``src.move_to(dst)``,
      except that this version is willing to move across filesystems,
      and doesn't have varying semantics across OSes.

    * ``shutil.move(src,dst)`` is equivalent to ``src.move_to(dst, mode="smart")``.
    """
    #TODO: what about group file moves?

    #catch this early
    if self.samepath(target):
        raise ValueError, "Move File: cannot move directory %r to itself %r" % (self, target)

    #normalize target & validate inputs based on options
    target = self._norm_cm_target("Move", target, mode, force)

    #check directory movement semantics
    if self.isdir and target.contained_in_path(self):
        raise ValueError, "Move File: cannot move directory %r into itself %r" % (self, target)

    #try using os.rename
    if target.lexists and not (target.isfile and os.name == "posix"):
        #unix rename allows target to be a file,
        #and takes care of clobbering it for us.
        target.remove()
    try:
        os.rename(self, target)
        return
    except OSError:
        #probably an error renaming across filesystems, but we could check
        log.debug("move_to(): os.rename returned error, using fallback (self=%r target=%r)", self, target, exc_info=True)
        pass

    #else fall back to software implementation using shutil
    target.discard()
    self._copy_helper(target, preserve="all", followlinks=False)
    self.remove()
+
+ #=========================================================
+ #mode / ownership
+ #=========================================================
def getmode(self):
    "get permission mode for file, as integer bitmask"
    #NOTE: we & PERM_BITMASK to strip off the filetype part of 'mode'
    st = errors.adapt_os_errors(os.stat, self)
    return st.st_mode & PERM_BITMASK
+
def getmodestr(self):
    "get permission mode for file, rendered to symbolic string"
    return repr_mode_mask(self.getmode())
+
def setmode(self, value):
    "set permission mode for file/dir; accepts whatever parse_mode_mask() does (int bitmask, presumably symbolic strings too)"
    mode = parse_mode_mask(value)
    errors.adapt_os_errors(self._apply_mode_mask, mode)

#both properties share setmode as their setter; parse_mode_mask normalizes the input
mode = property(getmode, setmode)
modestr = property(getmodestr, setmode)
+
def _apply_mode_mask(self, mode):
    """helper for chmod function, and setmode method.

    *mode* is a ``(bits, preserve)`` pair: *preserve* selects which of the
    file's current permission bits to keep, OR'd into *bits* before chmod.
    """
    bits, preserve = mode
    if preserve:
        if preserve == PERM_BITMASK:
            #preserving everything: nothing to change
            return
        bits |= os.stat(self).st_mode & preserve
    os.chmod(self, bits)
+
#=========================================================
#deprecated methods, scheduled for removal 2010-04-01
#=========================================================
#NOTE: 'x' is a temporary class-body variable shared by the decorators below,
# and deleted at the end of the section.
x = "2010-04-01"

@deprecated_method("self.walk()", removal=x)
def walktree(self, *a, **k):
    return self.walk(*a, **k)

@deprecated_method("getabspath", removal=x)
def geteffpath(self):
    "Return absolute & normalized path, but with symlinks unresolved"
    #NOTE: this used to ensure even absolute paths were normalized,
    # but now .abspath takes care of that.
    if self.isabs:
        return self.normpath
    else:
        #changes relative to get_cwd()
        return filepath(os.path.normpath(os.path.abspath(self)))
effpath = deprecated_property(geteffpath, new_name="abspath", removal=x)

@deprecated_method("path.getfiletype(dereference=False)", removal=x)
def getftype(self, follow=False):
    return self.getfiletype(symlinks=not follow)
ftype = property(getftype)

#renamed to discard(), to mimic python's set type
@deprecated_method("path.discard()", removal=x)
def remove_if_exists(self):
    return self.discard()

@deprecated_method("path.remove()", removal=x)
def rmfile(self, *args, **kwds): return os.remove(self, *args, **kwds)

@deprecated_method("path.remove(recursive=False)", removal=x)
def rmdir(self): return os.rmdir(self)

@deprecated_method("path.remove(recursive=False, parents=True)", removal=x)
def removedirs(self):
##        return os.removedirs(self)
    return self.remove(recursive=False, parents=True)

@deprecated_method("path.move_to(target, mode='smart')", removal=x)
def move(self, dst):
    return self.move_to(dst, mode="smart")

#NOTE: might not want to ever remove this, just so users don't get surprised by no rename()
@deprecated_method("move_to(target)", removal=x)
def rename(self, dst):
    return self.move_to(dst)

symlink = mklink #deprecate this name?

del x
+ #=========================================================
+ #eoc
+ #=========================================================
+
PathType = FilePath #deprecated alias for the FilePath class, scheduled for removal 2009-8-8
+
+#=========================================================
+#other functions
+#=========================================================
def is_filepath(path):
    """test if an object is a FilePath or compatible.

    This is preferred over isinstance (at least until python 3.0's
    abstract base classes) because it tests for a protocol,
    not inheritance.

    Example usage::

        >>> from bps.fs import filepath, is_filepath
        >>> path = filepath("/home")
        >>> is_filepath(path)
        True
        >>> is_filepath("/home")
        False

    .. todo::

        This doesn't test for the entire interface,
        just a couple of attributes that are likely
        indicators.
    """
    #duck-type check: a couple of attributes unlikely to appear on plain strings
    required = ("ext", "isabs")
    return all(hasattr(path, attr) for attr in required)
+
isfilepath = relocated_function("isfilepath", is_filepath) #XXX: which of these should we deprecate?
+
+#XXX: rename to getcwd() / setcwd()?
+@deprecated_function(removal="2009-10-01")
+def curpath(path=Undef):
+ "return current directory as filepath"
+ if path is not Undef:
+ os.chdir(path)
+ return filepath(os.getcwd())
+
def getcwd():
    "return current directory as filepath object (wraps os.getcwd)"
    return filepath(os.getcwd())
+
def setcwd(path):
    """set the current working directory (alias for os.chdir, for symmetry with getcwd).

    :param path: directory to change into
    :raises bps.error.types.MissingPathError: if the path does not exist
    :raises bps.error.types.ExpectedDirError: if the path exists but is not a directory
    """
    path = filepath(path)
    if path.ismissing:
        #BUGFIX: keyword was misspelled 'sterror', so the message never reached
        # the error's strerror attribute (and likely raised a TypeError instead).
        raise errors.MissingPathError(strerror="directory does not exist", filename=path)
    if not path.isdir:
        raise errors.ExpectedDirError(strerror="path is a %s, not a directory" % path.filetype, filename=path)
    os.chdir(path)
+
if os.path.sep == '/':
    #posix: local separator already matches, conversions are cheap casts
    def posix_to_local(path):
        ""
        return filepath(path)
    def local_to_posix(path):
        return str(path)
else:
    #non-posix (e.g. windows): swap separators; None passes through untouched
    #NOTE(review): the posix branch above does not special-case None,
    # and this branch returns a plain string rather than a FilePath --
    # confirm callers tolerate both asymmetries.
    def posix_to_local(path):
        if path is None: return path
        return path.replace('/', os.path.sep)
    def local_to_posix(path):
        if path is None: return path
        return path.replace(os.path.sep, '/')
posix_to_local.__doc__ = """Convert a relative path using posix separators (``/``) to local separators.

This function is merely a quick helper to allow strings stored in configuration files
to be stored using the posix separator, but quickly localized. It takes in a string,
and returns a :class:`FilePath` instance.
"""

local_to_posix.__doc__ = """Convert a relative local path to one using posix separators (``/``).

This function is merely a quick helper to allow local filepaths to be converted
to use posix separator, such as when storing in a portable config file.
It takes in a string or :class:`FilePath` instance, and returns a string.
"""
+
def splitsep(path):
    """break *path* apart into its component parts.

    This acts like a repeated :func:`os.path.split` call,
    returning a list of all elements in the path,
    split by any separators present (the final unsplittable
    head -- e.g. the root, or an empty string for a relative
    path -- is included as the first element).

    Windows Example::

        >> splitsep(r"c:\Documents and Settings\Administrator\Desktop")
        [ 'c:\', 'Documents and Settings', 'Administrator', 'Desktop' ]

    .. note::

        Since the general use of this is to examine the individual pieces
        of a path, and not typically to immediately use them as a relative path
        on the filesystem, this function returns a list of strings,
        *not* :class:`FilePath` instances.

    """
    parts = []
    remaining = path
    while True:
        remaining, tail = os.path.split(remaining)
        if not tail:
            #nothing more can be split off -- record the final head and stop
            parts.append(remaining)
            break
        parts.append(tail)
    parts.reverse()
    return parts
+
@deprecated_function("use filepath(path).get()", removal="2010-04-01")
def getFile(path, default=Undef):
    "get contents of file as string (deprecated wrapper for the filepath method)"
    return filepath(path).get(default=default)
+
@deprecated_function("use filepath(path).set(value)", removal="2010-04-01")
def setFile(path, value):
    "set contents of file from string (deprecated wrapper for the filepath method)"
    return filepath(path).set(value)
+
+#=========================================================
+#iterdir
+#=========================================================
#python doesn't have a native iterdir, so we try our best to provide one

#first, the fallback and common docstring
def iterdir(path):
    #portable fallback: materializes the whole listing, then iterates it
    return iter(os.listdir(path))
def countdir(path):
    #portable fallback: materializes the whole listing just to count it
    return len(os.listdir(path))
iterdir_version = "os.listdir" #records which backend implementation is active
+
if os.name == "nt":
    #see if we can use the pywin32 backend
    try:
        import win32file
    except ImportError:
        pass
    else:
        import pywintypes
        def iterdir(path):
            #win32 backend: iterates the directory without building a full list
            try:
                for entry in win32file.FindFilesIterator(os.path.join(path, "*")):
                    #NOTE: entry contains lots of useful stuff... ctimes, mtimes, ???
                    name = entry[8] #filename field of the find-data tuple
                    if name not in (".", ".."):
                        yield name
            except pywintypes.error, err:
                if err.args and err.args[0] == 3:
                    #(3, 'FindFirstFileW', 'The system cannot find the path specified.')
                    raise errors.MissingPathError(filename=path)
                raise

        def countdir(path):
            #xxx: is there a better way to do this?
            #start at -2 to cancel out the "." and ".." entries the iterator reports
            #NOTE(review): assumes those two entries are always present; confirm
            # for drive roots, where the assert below could otherwise fire
            c = -2
            try:
                for entry in win32file.FindFilesIterator(os.path.join(path, "*")):
                    c += 1
                assert c>=0
                return c
            except pywintypes.error, err:
                if err.args and err.args[0] == 3:
                    #(3, 'FindFirstFileW', 'The system cannot find the path specified.')
                    raise errors.MissingPathError(filename=path)
                raise
        iterdir_version = "win32file.FindFilesIterator"
+
+#NOTE: commented this out until it can get more testing...
+#XXX: could have this enable-able via a env var flag, and run a quick test of bps directory.
+# if we get the shape of 'struct dirent' wrong for a host, this won't work at all.
+##elif os.name == "posix":
+## #use ctypes to access posix's opendir suite
+##
+## #TODO: research how much dirent structure varies
+## # across platforms. so we're gonna be paranoid,
+## # and only support linux2 / 32 and 64bit,
+## # and hope that's enough
+##
+## dirent = libc = None
+## if sys.platform == "linux2":
+## from ctypes import *
+## #derived from /usr/include/bits/dirent.h
+## class dirent(Structure):
+## _fields_ = [
+## ('d_ino', c_ulong),
+## ('d_off', c_ulong),
+## ('d_reclen', c_ushort),
+## ('d_type', c_char),
+## ('d_name', c_char * 256),
+## ]
+## try:
+## libc = cdll.LoadLibrary("libc.so.6")
+## except OSError:
+## pass
+##
+## if dirent and libc:
+## dir_p = c_void_p #pointer to a dir handle
+## dirent_p = POINTER(dirent) #pointer to struct dirent
+##
+## opendir = libc.opendir
+## opendir.argtypes = [ c_char_p ]
+## opendir.restype = dir_p
+##
+## readdir = libc.readdir
+## readdir.argtypes = [ dir_p ]
+## readdir.restype = dirent_p
+##
+## closedir = libc.closedir
+## closedir.argtypes = [ dir_p ]
+## closedir.restype = c_int
+##
+## def iterdir(path):
+## dh = opendir(path)
+## if dh is None:
+## raise IOError, "couldn't open dir"
+## try:
+## while True:
+## entry = readdir(dh)
+## if not entry:
+## return
+## name = entry.contents.d_name
+## if name not in (".", ".."):
+## yield name
+## finally:
+## closedir(dh)
+## def countdir(path):
+## dh = opendir(path)
+## if dh is None:
+## raise IOError, "couldn't open dir"
+## try:
+## c = -2 #to account for "." and ".."
+## while True:
+## entry = readdir(dh)
+## if not entry:
+## assert c >= 0
+## return c
+## c += 1
+## finally:
+## closedir(dh)
+## iterdir_version = "libc.readdir"
+
#attach the shared docstrings to whichever backend implementation won above
iterdir.__doc__ = """iterate over a directory.

    This iterates over a directory, returning the raw strings
    contained in the directory. See :meth:`FilePath.iterdir`
    for a more fully-featured function.

    Since python has no native iterdir, BPS tries to use
    various alternate means to implement this function efficiently,
    falling back on wrapping os.listdir().
    """
countdir.__doc__ = """return directory count as efficiently as possible"""
+
+#=========================================================
+#windows shortcut handling
+#=========================================================
#first 20 bytes of every shortcut file: header size (0x4C) followed by the shell-link GUID
LNK_MAGIC = "\x4C\0\0\0\x01\x14\x02\x00\x00\x00\x00\x00\xc0\x00\x00\x00\x00\x00\x00\x46"
#NOTE: this code also assumes all windows shortcuts will end in .lnk
+
def is_shortcut(path):
    """check if file is a windows shortcut.

    :arg path: path of the file to inspect.
    :returns:
        ``True`` if the file has a ``.lnk`` extension and begins with
        the shortcut header magic, else ``False``.
    """
    if not path.lower().endswith(".lnk"):
        return False
    #use open() rather than the deprecated py2-only file() builtin
    with open(path, "rb") as fh:
        return fh.read(20) == LNK_MAGIC
    #XXX: could do more validation if magic fits
+
def read_shortcut(path):
    """return target of windows shortcut, or None if not a shortcut.

    :arg path: path of the ``.lnk`` file to parse.
    :returns:
        The (local or network) path the shortcut points at,
        or ``None`` if the file isn't a shortcut pointing to
        a file or directory.
    :raises NotImplementedError:
        if the FileLocationInfo flags are unrecognized.
    """
    if not path.lower().endswith(".lnk"):
        return None
    #use open() rather than the deprecated py2-only file() builtin
    with open(path, "rb") as fh:
        if fh.read(20) != LNK_MAGIC:
            #wrong hlen or guid
            return None
        fh.seek(0)
        data = fh.read()
    def read_long(idx):
        #BUGFIX: the .lnk format stores little-endian fixed-width integers;
        #the explicit "<L" avoids native-size unpacking, which would demand
        #8 bytes on most 64-bit platforms and raise struct.error
        return struct.unpack("<L", data[idx:idx+4])[0]
    def read_short(idx):
        return struct.unpack("<H", data[idx:idx+2])[0]
    flags = read_long(0x14)
    if not (flags & 2):
        #doesn't point to a file or directory!
        #just going to pretend this isn't a shortcut
        return None
    offset = 76 #size of the fixed header
    if flags & 1:
        #skip id list
        offset += 2+read_short(offset)
    #offset now marks beginning of FileLocationInfo
    tflags = read_long(offset+0x8)
    if tflags & 1:
        #local path: base path string at the recorded offset
        bp = offset+read_long(offset+0x10)
        end = data.index("\x00", bp)
        root = data[bp:end]
    elif tflags & 2:
        #network path
        bp = offset+0x14+read_long(offset+0x14)
        end = data.index("\x00", bp)
        root = data[bp:end]
    else:
        raise NotImplementedError("unexpected FileLocationInfo flags: %r" % tflags)
    rp = offset+read_long(offset+0x18)
    end = data.index('\x00', rp)
    tail = data[rp:end]
    if tail:
        root += "\\" + tail
    #NOTE: this ignores any arguments added to the shortcut
    return root
+
+##def parse_shortcut(path):
+## "hacked code used to learn basics of shortcut file, might be useful for expanding in future"
+## path = filepath(path)
+## d = path.get()
+##
+## #header
+## clen = 4+16+4+4+8*3+4+4+4+4+4+4
+## assert clen == 76
+## out = struct.unpack("L16sLL8s8s8sLLLLLL",d[:clen])
+## d = d[clen:]
+##
+## c1, guid, flags, fattrs, t1, t2, t3, flen, icon, shownd, hotkey, c2, c3 = out
+## assert c1 == 76 and c2 == 0 and c3 == 0
+## assert guid == '\x01\x14\x02\x00\x00\x00\x00\x00\xc0\x00\x00\x00\x00\x00\x00F'
+## opts = {}
+##
+## #flags
+## source = flags
+## flags = set()
+## if source & (1<<0):
+## flags.add("has_id_list")
+## if source & (1<<1):
+## flags.add("target_fd")
+## if source & (1<<2):
+## flags.add("has_desc")
+## if source & (1<<3):
+## flags.add("has_relpath")
+## if source & (1<<4):
+## flags.add("has_wd")
+## if source & (1<<5):
+## flags.add("has_args")
+## if source & (1<<6):
+## flags.add("has_icon")
+## if source & (1<<7):
+## flags.add("has_unicode")
+## #1<<8 means what?
+## m = (1<<8)-1
+## if source > m:
+## flags.add((source|m)^m)
+##
+## #fattrs
+## source = fattrs
+## fattrs = set()
+## if source & (1<<4):
+## fattrs.add("is_dir")
+## source -= 1<<4
+## if source:
+## fattrs.add(source)
+##
+## #t1,t2,t3 - ctime,mtime,atime
+##
+## #id list
+## if 'has_id_list' in flags:
+## clen, = struct.unpack("H", d[:2])
+## assert clen >= 2 and clen <= len(d)
+## source = d[2:2+clen]
+## print repr(source)
+## d = d[2+clen:]
+##
+## opts['id_list'] = out = []
+## while source:
+## assert len(source) >= 2
+## clen, = struct.unpack("H", source[:2])
+## if clen == 0:
+## source = source[2:]
+## break
+## assert clen >= 2 and len(source) >= clen
+## out.append(source[2:clen])
+## print repr(out[-1])
+## source = source[clen:]
+## assert not source
+## assert clen == 0
+##
+## #target info
+## if 'target_fd' in flags:
+## clen, = struct.unpack("L", d[:4])
+## assert clen >= 4 and clen <= len(d)
+## source = d[:clen]
+## d = d[clen:]
+## ##opts['target_fd'] = source
+## first_offset, tflags, o_lvm, o_bp, o_nvi, o_pth = struct.unpack("6L", source[4:4*7])
+## assert first_offset == 0x1C, "strange first offset"
+## x = tflags
+## tflags = set()
+## if x & 1:
+## tflags.add("local") #o_lvm, o_bp are valid
+## x-=1
+## if x & 2:
+## tflags.add("network") #o_nvi is valid
+## x-=2
+## if x:
+## raise ValueError, "unknown tflag: %r" % (x,)
+## if 'local' in tflags:
+#### #read the lvm
+#### clen, = struct.unpack("H", source[o_bp:o_bp+2])
+#### #NOTE: skipping 2-16, which contains some volume info
+#### #16..clen contains the actual name
+#### volume_name = source[o_bp+16:o_bp+clen]
+#### assert root_name.endswith("\x00")
+#### volumn_name = volume_name[:-1]
+##
+## #read the bp
+## end = source.index("\x00", o_bp)
+## root_path = source[o_bp:end]
+## elif 'network' in x:
+## raise NotImplementedError
+## else:
+## raise ValueError, "missing local & network"
+## end = source.index('\x00', o_pth)
+## tail_path = source[o_pth:end]
+## opts['target'] = dict(
+## source=source,
+## tflags=sorted(tflags),
+## o_lvm=o_lvm,
+## o_bp=o_bp,
+## o_nvi=o_nvi,
+## o_pth=o_pth,
+## root_path=root_path,
+## tail_path=tail_path,
+## )
+##
+## x = dict(
+## flags=sorted(flags),
+## fattrs=sorted(fattrs),
+## t1=t1, t2=t2, t3=t3,
+## flen=flen,
+## icon=icon,
+## shownd=shownd,
+## hotkey=hotkey,
+## tail=d,
+## )
+## x.update(opts)
+## return x
+
+#=========================================================
+#permission mode parsing
+#=========================================================
+
#constants
PERM_SCOPES = "ugoa" #scope characters accepted in symbolic modes
PERM_FLAGS = "rwxst" #flag characters accepted in symbolic modes
PERM_OPS = "+-=" #operator characters accepted in symbolic modes

#NOTE: this _should_ be all the bits specified by stat module.
PERM_BITMASK = 07777

#mapping of scope name -> bits involved in that scope
PERM_SCOPE_BITS = dict(
    u=stat.S_IRWXU | stat.S_ISUID,
    g=stat.S_IRWXG | stat.S_ISGID,
    o=stat.S_IRWXO | stat.S_ISVTX,
    a=PERM_BITMASK,

    o_rwx=stat.S_IRWXO, #"other" scope without the sticky bit (internal use)
    implicit=PERM_BITMASK ^ (stat.S_IWGRP|stat.S_IWOTH), #special "implicit" scope, same as "a" but w/o write for g & o
    )
#mapping of flag name -> bits involved with the flag
PERM_FLAG_BITS = dict(
    r=stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH,
    w=stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH,
    x=stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH,
    s=stat.S_ISUID | stat.S_ISGID,
    t=stat.S_ISVTX,
    )
+
def is_mode_mask(value, valid=True):
    "check if object looks like mode,mask tuple, optionally validating range"
    #structural check first: a two-element list/tuple of integers
    if not isinstance(value, (list, tuple)) or len(value) != 2:
        return False
    mode, mask = value
    if not isinstance(mode, int) or not isinstance(mask, int):
        return False
    if not valid:
        return True
    #range check: both within the permission bits, and non-overlapping --
    #'bits' that are set shouldn't be allowed in 'mask'
    return (0 <= mode <= PERM_BITMASK and
            0 <= mask <= PERM_BITMASK and
            not (mode & mask))
+
def is_mode_int(value, valid=True):
    "check if object looks like mode integer, optionally validating range"
    #NOTE: py2 'long' is accepted alongside int
    return isinstance(value, (int, long)) and ((not valid) or (0 <= value <= PERM_BITMASK))
+
+#XXX: cache this for speed?
def parse_mode_mask(value):
    """parse unix-chmod-style symbolic mode string, returning ``(mode_bits,preserve_mask)``.

    :arg value:
        The mode mask to parse. Can be any of the following:

        * string containing octal mode, left-padding with zeros till it's four
          chars long (ex: ``"77"``, ``"0644"``), will be parsed into bits with
          mask of 0.
        * string containing symbolic mode operations to perform (ex: ``"u+r,g+w,-x"``),
          which will be parsed into appropriate bits and mask.

        * integer mode, returned as ``(mode, 0)``.
        * mode and mask tuple, will be returned unchanged.
        * ``None``, treated as "preserve everything".

    :raises ValueError:
        If any of the input string are improperly formatted,
        or input integers are out of range.

    :returns:
        This parses the various mode symbolic strings recognized by unix chmod,
        and returns a tuple ``(mode_bits, preserve_mask)``, where mode_bits is
        the mode bits that should be set in the new mode, and preserve_mask
        is the bits which should be kept from the path's current mode (if this is 0,
        the previous mode can be ignored).

    Symbolic mode format
    --------------------
    The symbolic mode format is a string describing a set of operations
    for setting and clearing a file's mode bits. For example:

    * ``"u+r,g-x,o="`` would add 'read' permission for the owning user,
      remove executable permission for the owning group, and remove
      all permissions for anyone else.

    The syntax of the symbolic mode format is as follows:

    .. productionlist::
        mode: `group` [ "," `group` ]* [ "," ]
        group: `scope`* `operator` `flag`*
        scope: "u" | "g" | "o" | "a"
        operator: "+" | "-" | "="
        flag: "r" | "w" | "x" | "s" | "t"

    .. note::
        The format this function accepts attempts to be compatible with the
        unix chmod command's format, with the exception that (for simplicity)
        this does _not_ support chmod's "g=u" style mode strings.
    """
    #get the easy inputs out of the way
    if value is None: #simulate "preserve all"
        return (0, PERM_BITMASK)
    if isinstance(value, int):
        if not is_mode_int(value):
            raise ValueError("invalid mode integer: %r" % value)
        return (value, 0)
    elif is_mode_mask(value, valid=False):
        if not is_mode_mask(value):
            raise ValueError("invalid mode,mask tuple: %r" % (value,))
        return value
    elif not isinstance(value, str):
        raise TypeError("unexpected type for mode mask: %r" % (value,))
    #check for octal mode
    if value.isdigit():
        try:
            bits = int(value, 8)
        except ValueError:
            pass
        else:
            if 0 <= bits <= PERM_BITMASK:
                #BUGFIX: previously returned the bare integer here, breaking the
                #documented (bits, mask) contract -- callers such as repr_mode_mask
                #unpack the result into two values
                return bits, 0
        raise ValueError("invalid mode: %r" % value)
    #time for state machine to parse symbolic mode
    SEPS = ","
    state = 0 #current state of state machine
    # 0 - not in group, expecting start of group (scope,op) or whitespace
    # 1 - saw scope, waiting for op
    # 2 - saw operator (+,-,=), waiting for flags, op, or end of group
    scope = 0 #bits enabled for group by state=1
    op = None #operator (+,-,=) used by state=2
    bits = 0 #bits we're setting to 1
    used = 0 #bits we're flipping one way or the other.
    for c in value:
        if state == 0:
            #expecting scope or operator
            if c in SEPS:
                continue
            if c in PERM_SCOPES:
                state = 1
                scope = PERM_SCOPE_BITS[c]
                continue
            if c in PERM_OPS:
                #no explicit scope: fall back to the special "implicit" scope
                state = 2
                op = c
                scope = PERM_SCOPE_BITS['implicit']
                if op == "=": #clear bits for = op
                    bits = (bits|scope) ^ scope
                    used |= scope
                continue
            raise ValueError("invalid mode string: %r" % (value,))
        elif state == 1:
            #expecting more scope or operator
            if c in PERM_SCOPES:
                scope |= PERM_SCOPE_BITS[c]
                continue
            if c in PERM_OPS:
                state = 2
                op = c
                if op == "=": #clear bits for = op
                    bits = (bits|scope) ^ scope
                    used |= scope
                continue
            raise ValueError("invalid mode string: %r" % (value,))
        else:
            assert state == 2
            #expecting end-of-group, new op, or flag
            if c in SEPS:
                state = 0
                continue
            if c in PERM_OPS:
                op = c
                if op == "=": #clear bits for = op
                    bits = (bits|scope) ^ scope
                    used |= scope
                continue
            if c in PERM_FLAGS:
                v = PERM_FLAG_BITS[c] & scope
                bits |= v
                used |= v
                if op == "-":
                    #minus op: mark bits as used but cleared
                    bits ^= v
                continue
            raise ValueError("invalid mode string: %r" % (value,))
    if state == 1:
        #trailing scope with no operator (e.g. "u") is malformed
        raise ValueError("invalid mode string: %r" % (value,))
    return bits, (PERM_BITMASK ^ used)
+
def repr_mode_mask(value, octal='never'):
    """represent mode mask as symbolic string.

    :arg value:
        * mode integer
        * (mode bits, preserve bits) tuple
        * symbolic string (normalized via :func:`parse_mode_mask`)

    :param octal:
        Controls when octal format will be output instead
        of symbolic output.

        * 'never' - always use symbolic format
        * 'always' - always use octal format, raising error if mask can't be represented
        * 'prefer' - use octal when possible, falling back to symbolic.

    :returns:
        mode as symbolic string, such as is accepted by unix chmod
        as well as :func:`parse_mode_mask`.
    """
    #parse into (bits,used)
    if isinstance(value, int):
        if not is_mode_int(value):
            raise ValueError("invalid mode integer: %r" % value)
        bits, used = value, PERM_BITMASK
    elif is_mode_mask(value, valid=False):
        if not is_mode_mask(value):
            raise ValueError("invalid mode,mask tuple: %r" % (value,))
        bits, preserved = value
        used = PERM_BITMASK ^ preserved
    elif isinstance(value, str):
        #normalize any string if passed in
        bits, preserved = parse_mode_mask(value)
        used = PERM_BITMASK ^ preserved
    else:
        raise TypeError, "unexpected type for mode mask: %r" % (value,)
    #try to render as octal
    if octal != "never":
        if used == PERM_BITMASK:
            return "%04o" % bits
        if octal == "always":
            raise ValueError, "can't represent mask as octal string: %r" % (value,)
    #
    #render as symbolic string
    #

    #XXX: this could probably be done much faster, simpler,
    # and with more compact output. but this function isn't _that_ important

    #render each section
    def render_scope(s):
        #render the op+flags text for a single scope name, "" if scope untouched
        scope_mask = PERM_SCOPE_BITS[s]
        scope_used = scope_mask & used
        if scope_used == 0:
            return ""
        scope_bits = bits & scope_used
        if scope_bits == 0:
            if scope_used == scope_mask:
                #whole scope cleared: bare "=" says it all
                return "="
            #render minus op
            out = '-'
            for flag in PERM_FLAGS:
                flag_mask = PERM_FLAG_BITS[flag]
                if flag_mask & scope_used and not flag_mask & scope_bits:
                    out += flag
            return out
        elif scope_used == scope_mask:
            #render eq op
            out = "="
            for flag in PERM_FLAGS:
                flag_mask = PERM_FLAG_BITS[flag]
                if flag_mask & scope_bits:
                    assert flag_mask & scope_used
                    out += flag
            return out
        elif scope_bits == scope_used:
            #render plus op
            out = "+"
            for flag in PERM_FLAGS:
                flag_mask = PERM_FLAG_BITS[flag]
                if flag_mask & scope_bits:
                    assert flag_mask & scope_used
                    out += flag
            return out
        else:
            #render plus op and minus op
            outp = "+"
            outm = "-"
            for flag in PERM_FLAGS:
                flag_mask = PERM_FLAG_BITS[flag]
                if flag_mask & scope_used:
                    if flag_mask & scope_bits:
                        outp += flag
                    else:
                        outm += flag
            return outp + outm

    #NOTE: the local name 'os' below shadows the os module for the rest
    # of this function (the module isn't needed past this point)
    us, ut = "u", render_scope("u")
    gs, gt = "g", render_scope("g")
    os, ot = "o", render_scope("o_rwx")

    #combine like scopes
    if ut and gt == ut:
        gs = us+gs
        ut = ""
    if ut and ot == ut:
        os = us+os
        ut = ""
    elif gt and ot == gt:
        os = gs+os
        gt = ""
    if os == "ugo":
        assert not ut and not gt
        os = "a"

    #now add stick bit
    st = ""
    if used & stat.S_ISVTX:
        s = bits & stat.S_ISVTX
        if s:
            #sticky bit is being set: splice "t" into the "other" group
            if not ot or ot.startswith("-"):
                st = "+t"
            elif '-' in ot:
                assert ot.startswith("+")
                idx = ot.index("-")
                assert idx > 1
                ot = ot[:idx] + "t" + ot[idx:]
            else:
                assert ot.startswith("=") or ot.startswith("+")
                ot += "t"
        else:
            #sticky bit is being cleared
            if '-' in ot:
                ot += "t"
            elif ot.startswith("="):
                pass
            else:
                st = "-t"

    #create output string
    if ut:
        out = us + ut
    else:
        out = ""
    if gt:
        if out:
            out += ","
        out += gs + gt
    if ot:
        if out:
            out += ","
        out += os + ot
    if st:
        if out:
            out += ","
        out += st
    return out
+
def _is_empty_mode_mask(mask):
    "check if mask leaves original mode unchanged"
    #a mask whose preserve half covers every permission bit is a no-op
    preserve = mask[1]
    return preserve == PERM_BITMASK
+
+def _concat_mode_mask(left, right):
+ "concatenate two mode masks together"
+ left_bits, left_preserve = left
+ right_bits, right_preserve = right
+ out_bits = (left_bits & right_preserve) | right_bits
+ out_preserve = left_preserve & right_preserve
+ return out_bits, out_preserve
+
+##def _compile_mode_func(source):
+## "given mode value provided to chmod, return function which sets mode for path"
+## if isinstance(source, dict):
+## allmode = parse_mode_mask(source.get("all"))
+## target = {}
+## if _is_empty_mode_mask(allmode):
+## for k in PERM_FILETYPES:
+## if k in source:
+## target[k] = parse_mode_mask(source[k])
+## else:
+## for k in PERM_FILETYPES:
+## if k in source:
+## target[k] = _concat_mode_mask(allmode, parse_mode_mask(source[k]))
+## def setmode(path):
+## value = target.get(path.filetype)
+## if value:
+## if value[1]:
+## os.chmod(path, value[0]|(os.stat(path).st_mode & value[1]))
+## else:
+## os.chmod(path, value[0])
+## elif callable(source):
+## def setmode(path):
+## value = source(path)
+## if value:
+## bits, mask = parse_mode_mask(value)
+## if mask:
+## os.chmod(path, bits|(os.stat(path).st_mode & mask))
+## else:
+## os.chmod(path, bits)
+## else:
+## bits, mask = parse_mode_mask(mode)
+## if mask:
+## def setmode(path):
+## os.chmod(path, bits|(os.stat(path).st_mode & mask))
+## else:
+## def setmode(path):
+## os.chmod(path, bits)
+## return setmode
+
def chmod(targets, mode=None, dirmode=None, filemode=None, recursive=False, followlinks=False):
    """set file permissions, using a syntax that's mostly compatible with GNU chmod.

    :arg targets:
        This may be either a single path to update the mode for,
        or a sequence of paths. The paths may be either a string or filepath object,
        and they may be absolute, or relative to the cwd.

    :arg mode:
        [optional]
        The mode to apply to all targets.
        This can be an integer, symbolic mode string,
        or anything accepted by :func:`parse_mode_mask`.

    :param dirmode:
        [optional]
        The mode to apply to directories only.
        (Applied after primary *mode*).
        This can be an integer, symbolic mode string,
        or anything accepted by :func:`parse_mode_mask`.

    :param filemode:
        [optional]
        The mode to apply to files only.
        (Applied after primary *mode*).
        This can be an integer, symbolic mode string,
        or anything accepted by :func:`parse_mode_mask`.

    :param recursive:
        If ``True``, any targets which are directories
        will be traversed top-down, and all the above
        permission policies will be applied to their contents as well.

    :param followlinks:
        By default, links will not be followed when recursively
        traversing a target. Set this to ``True`` to follow links.
    """
    #fold the generic mode into the dir- and file-specific masks up front
    mode = parse_mode_mask(mode)
    dirmode = _concat_mode_mask(mode, parse_mode_mask(dirmode))
    filemode = _concat_mode_mask(mode, parse_mode_mask(filemode))
    if _is_empty_mode_mask(dirmode) and _is_empty_mode_mask(filemode):
        #nothing would change -- skip all filesystem work
        return
    if is_seq(targets):
        targets = (filepath(path).abspath for path in targets)
    else:
        targets = [ filepath(targets).abspath ]
    for target in targets:
        if target.isdir:
            target._apply_mode_mask(dirmode)
            if recursive:
                #NOTE: 'target' should be first root returned by walk,
                # but double setting mode shouldn't hurt, right?
                for root, dirnames, filenames in target.walk(followlinks=followlinks):
                    root._apply_mode_mask(dirmode)
                    for name in filenames:
                        (root/name)._apply_mode_mask(filemode)
        else:
            target._apply_mode_mask(filemode)
+
def setumask(mode, format="int"):
    """set/modify current umask.

    :arg mode:
        New mode to use as umask, or modify existing umask.
        Can be a int, or any string accepted by :func:`parse_mode_mask`.
    :param format:
        Format old mask is reported in (see :func:`getumask`).

    :returns:
        the previous umask.

    This is just a wrapper for :func:`os.umask`,
    except that it accepts symbolic mode masks
    in the format handled by :func:`parse_mode_mask`.
    """
    assert format in ("sym", "int")
    bits, preserve = parse_mode_mask(mode)
    old = os.umask(bits)
    if preserve:
        #re-apply, merging in the preserved portion of the previous umask
        #(BUGFIX: variable was misspelled 'prevserve', raising NameError here)
        os.umask(bits | (old & preserve))
    if format == 'sym':
        return repr_mode_mask(old)
    else:
        return old
+
def getumask(format="int"):
    """read current umask without changing it.

    :param format:
        Format that umask should be reported in.
        "int" (the default) returns the integer mask.
        "sym" returns the symbolic mask.

    .. warning::
        This is not currently an atomic operation!
    """
    assert format in ("sym", "int")
    #XXX: _wish_ this was atomic, or that we could read umask easily
    #read-by-write: temporarily install a dummy umask, then restore the original
    old = os.umask(0022)
    os.umask(old)
    if format == "sym":
        return repr_mode_mask(old)
    else:
        return old
+
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/host/__init__.py b/bps/host/__init__.py
new file mode 100644
index 0000000..f0cf0e2
--- /dev/null
+++ b/bps/host/__init__.py
@@ -0,0 +1,521 @@
+"""bps.host -- Uniform access to host resources"""
+#=========================================================
+#imports
+#=========================================================
+#core
+from logging import getLogger; log = getLogger(__name__)
+import os.path
+import subprocess
+import sys
+from warnings import warn
+#pkg
+from bps.fs import filepath, posix_to_local
+from bps.types import BaseClass
+from bps.warndep import relocated_function, deprecated_function
+#module
+from bps.host.base import UserProfile, EnvPaths, ProgPaths
+from bps.host.const import DESKTOPS, ACTIONS
+#local
+__all__ = [
+ #process management
+ 'get_pid', 'has_pid', 'terminate_pid', 'kill_pid',
+
+ #desktop interaction
+ "get_desktop_name", "desktop_open", "desktop_compose_email",
+
+ #shell interaction
+ "find_exe",
+
+ #application resources
+ ]
+
+#=========================================================
+#choose backend class, create backend singleton
+#=========================================================
+_backend = None
+def _set_backend(cls):
+ global _backend, exe_exts
+ _backend = cls.create()
+ exe_exts = _backend.exe_exts
+
+#=========================================================
+#process management
+#=========================================================
+get_pid = os.getpid #just for symmetry
+
+def has_pid(pid):
+ """Check if the specified process *pid* exists: returns ``True`` if found, ``False`` it not"""
+ if pid is None:
+ raise ValueError, "no pid specified"
+ return _backend.has_pid(pid)
+ #TODO: would like to detect process status (running, waiting, zombie, etc) as return richer info on request
+
+def term_pid(pid, retry=None, kill=30, timeout=60):
+ """Send a signal to process *pid* to shut itself down and exit cleanly.
+
+ :Parameters:
+ pid
+ The id of the process to be terminated.
+
+ retry
+ If not set, only one termination signal will be sent.
+
+ If set to a positive number, a new termination signal
+ will be sent every *retry* seconds, unless the process exits first.
+
+ kill
+ If set to a positive number, :func:`kill_pid` will be used
+ to send a kill signal if the process hasn't exited within
+ *kill* seconds of when the first termination signal was sent.
+
+ If set to ``None``, :func:`kill_pid` will never be called.
+
+ timeout
+ If set to a positive number, the attempt to terminate or kill the
+ process will timeout after *timeout* seconds, and this function
+ will give up and return ``False``.
+
+ If set to None, this function will wait forever or until the process exits.
+
+ This is the preferred way to end a process,
+ as :func:`kill_pid` doesn't give the process
+ a chance to shut down cleanly.
+
+ Under ``posix``, this uses the SIGTERM signal.
+ Under windows, a more complicated system is used,
+ involving ``ExitProcess`` and ``WM_CLOSE``.
+ """
+ if pid is None:
+ raise ValueError, "no pid specified"
+ if retry is not None and retry < 0:
+ raise ValueError, "timeout must be None, or number > 0: %r" % (retry,)
+ if kill is not None and kill < 0:
+ raise ValueError, "timeout must be None, or number > 0: %r" % (kill,)
+ if timeout is not None and timeout < 0:
+ raise ValueError, "timeout must be None, or number > 0: %r" % (timeout,)
+ if timeout and kill and timeout <= kill:
+ raise ValueError, "timeout threshold must be > kill threshold: k=%r t=%r" % (kill, timeout)
+ t = kill or timeout
+ if t and retry and retry >= t:
+ log.warning("terminate_pid(): retry value larger than timeout/kill threshold"
+ ", will never fire: r=%r kt=%r", retry, t)
+ retry = None
+ return _backend.terminate_pid(pid, retry, kill, timeout)
+
+terminate_pid = term_pid #alias
+
+def kill_pid(pid, retry=None, timeout=30):
+ """Send a signal to process *pid* to shut itself down **immediately**, without cleaning up first.
+
+ :Parameters:
+ pid
+ The id of the process to be terminated.
+
+ retry
+ If not set, only one kill signal will be sent.
+
+ If set to a positive number, a new kill signal
+ will be sent every *retry* seconds, unless the process exits first.
+
+ timeout
+ If set to a positive number, the attempt to kill the
+ process will timeout after *timeout* seconds, and this function
+ will give up and return ``False``.
+
+ If set to ``None``, this function will wait forever or until the process exits.
+
+ This method of killing the process is more reliable,
+ since the process cannot stop it from happening,
+ but not as clean, since the process cannot shut down first.
+ If you must, it is recommended to use :func:`term_pid` with the
+ ``kill`` option set, so that the program gets a chance to exit cleanly first.
+
+ Under ``posix``, this uses the SIGKILL signal.
+ Under windows, this calls ``TerminateProc()``.
+ """
+ if pid is None:
+ raise ValueError, "no pid specified"
+ if retry is not None and retry < 0:
+ raise ValueError, "timeout must be None, or number > 0: %r" % (retry,)
+ if timeout is not None and timeout < 0:
+ raise ValueError, "timeout must be None, or number > 0: %r" % (timeout,)
+ if timeout and retry and retry >= t:
+ log.warning("terminate_pid(): retry value larger than timeout threshold"
+ ", will never fire: r=%r t=%r", retry, timeout)
+ retry = None
+ return _backend.kill_pid(pid, retry, timeout)
+
+#reload_pid ? ala SIGHUP? is there any remotely equiv thing under windows?
+
+#=========================================================
+#shell interaction
+#=========================================================
+exe_exts = None #filled in by _set_backend()
+
+def find_exe(name, extra_paths=None, paths=None):
+ """
+ Returns path to file which would have been executed if the command *name* was run in a shell,
+ by locating it in host's command search path. Returned path will be absolute.
+ If no command of that name can be found, returns ``None``.
+
+ :Parameters:
+ name
+ The name of the command to search for, *without* any executable suffix
+ added (e.g. ``.exe`` under windows). All known :attr:`exe_exts` will
+ be checked in turn, for every directory in the command search path.
+ extra_paths
+ Optionally, a list of custom paths to be checked if the command
+ can't be found in the host's command search path. This will
+ be appended to the default search path.
+ paths
+ Optionally, a list of paths which will be used *in place of* the default
+ executable search path.
+
+ The environmental variable ``PATH`` is used as the command search path
+ for both windows and posix. This command is the equivalent of the bash ``where`` command.
+
+ For example, under a windows system with python installed::
+
+ >>> #try finding notepad under windows
+ >>> host.find_exe("notepad")
+ 'c:\\windows\\notepad.exe'
+
+ >>> #try finding something not in standard search path
+ >>> host.find_exe("myscript")
+ None
+
+ >>> #try finding something with help of a non-standard extra path
+ >>> host.find_exe("myscript", extra_paths=["c:\\Program Files\\My App"])
+ 'c:\\Program Files\\My App\\myscript.bat'
+ """
+ if isinstance(extra_paths, (str, unicode)):
+ extra_paths = extra_paths.split(os.path.pathsep)
+ if isinstance(paths, (str, unicode)):
+ paths = paths.split(os.path.pathsep)
+ return _backend.find_exe(name, extra_paths, paths)
+
+#TODO: would a find_lib be useful?
+
+#=========================================================
+#desktop interaction
+#=========================================================
+def get_desktop_name():
+ """
+ Return name of desktop environment currently in use.
+ Will be one of ``windows``, ``osx``, ``kde``, ``gnome``, ``xfce``, or None.
+ """
+ value = _backend.get_desktop_name()
+ assert value in DESKTOPS
+ return value
+
+def desktop_open(path, action=None, mimetype=None):
+ """
+ Attempt to open file for user using program chosen by host.
+ this attempts to provide os.startfile-like behavior on other oses.
+
+ :Parameters:
+ path
+ Path to file that should be opened.
+ If missing, an error is raised.
+
+ action
+ Specifies the *action* that should be taken to file.
+ Valid values are listed in the table below.
+
+ mimetype
+ Optionally, a mime type may be specified,
+ which may act as a hint to the desktop,
+ if it can to use it.
+
+ The following actions are generally available,
+ but not supported under all environments:
+
+ =========== ============================================================================
+
+ Action Description
+
+ ----------- ----------------------------------------------------------------------------
+
+ ``open`` Desktop will open file in viewer/editor, chosen at it's discretion.
+ This action is the default, and the one that will be used
+ as a fallback if the specified action isn't supported by the desktop.
+ This action should be supported for all desktops detectable.
+
+ ``view`` Desktop will open file in a viewer if possible, else fall back to an editor.
+ Currently, most desktops will treat this the same as ``open``.
+
+ ``edit`` Desktop will open file in editor if possible, else fall back to a viewer.
+ Currently, most desktops will treat this the same as ``open``.
+
+ ``print`` Desktop will open a print dialog directly if possible, else fall back to a
+ viewer. Currently, this won't work for ANY desktops, and will be treated
+ the same as ``open``.
+
+ ``exec`` Desktop will execute the file using a registered assistant.
+ While this is supported by most desktops, for files where this doesn't make
+ sense, the default is usually to treat it like ``open``.
+
+ ``browse`` Desktop should open a file browser to this path (usually a dir).
+ This is supported by few desktops.
+
+ =========== ============================================================================
+
+ ..
+ TODO: document exactly which desktops support which actions
+ """
+ orig = path
+ path = filepath(orig).abspath
+ if not path.exists:
+ raise ValueError, "path not found: %r" % (orig,)
+ if not action:
+ action = "open"
+ if action not in ACTIONS:
+ raise ValueError, "unknown action: %r" % (action, )
+ return _backend.desktop_open(path, action, mimetype)
+
+import mailclient
+#inject find_exe into mailclient (presumably so it need not import bps.host
+#itself -- verify the intent against mailclient's module docs)
+mailclient.find_exe = find_exe
+def desktop_compose_email(*args, **kwds):
+    """tell currently configured email client to open a new "compose email" window,
+    with the specified fields automatically filled in.
+
+    :Parameters:
+        to
+            list of email addrs, or string containing semicolon separated email addrs.
+        cc
+            same format as 'to', but for 'cc' field
+        bcc
+            same format as 'to', but for 'bcc' field
+        subject
+            optional subject text
+        body
+            optional body text (for now, should be text/plain)
+        attachments
+            not implemented: would like to support list of filepaths,
+            as well as dict mapping names => buffers (or filepaths)
+
+    .. note::
+
+        This is merely an alias for :func:`bps.host.mailclient.compose_email`,
+        see that function and it's module for additional features,
+        such as the ability to examine the detected email clients,
+        and setting the preferred email client.
+
+    .. note::
+
+        The mailclient module current supports Thunderbird and Outlook,
+        but uses a driver system which should allow for easy registration
+        of new client drivers, whether internal or external.
+    """
+    #pure pass-through wrapper; all argument handling lives in mailclient
+    return mailclient.compose_email(*args, **kwds)
+
+#=========================================================
+#resource discovery
+#=========================================================
+def get_env_path(path):
+ """Locates various environment-defined paths in an OS-agnostic fashion.
+
+ *path* should specify which one of the pre-defined host resource paths
+ should be returned, for example, ``home_dir`` will return the current
+ user's home directory. For a full list of the predefined path names available,
+ see the documentation for :class:`EnvPaths`.
+
+ These paths are derived from ``os.environ`` and OS-specific conventions.
+ The special path ``all_paths`` will return the :class:`EnvPaths` instance itself,
+ for easier access to multiple paths.
+
+ Example usage::
+ >>> from bps import host
+
+ >>> #locate home directory
+ >>> host.get_env_path("home_dir")
+ 'c:\\Documents and Settings\\James'
+
+ >>> #locate user's desktop
+ >>> host.get_env_path("desktop_dir")
+ 'c:\\Documents and Settings\\James\\Desktop'
+
+ >>> #get env path object to examine later
+ >>> ep = host.get_env_path("all_paths")
+ >>> print ep.home_dir
+ 'c:\\Documents and Settings\\James'
+ >>> print ep.docs_dir
+ 'c:\\Documents and Settings\\James\\My Documents'
+
+ .. seealso:: :class:`EnvPaths`
+ """
+ ep = _backend.get_env_paths()
+ if path == "all_paths":
+ return ep
+ else:
+ return getattr(ep, path)
+
+# get_env_info() - have this return EnvInfo (rename from paths)
+# instead of all_paths option
+
+def find_user(login=None, uid=None, missing="ignore"):
+ """Given either a *login* or a posix *uid*,
+ returns a :class:`UserInfo` object for the specified user,
+ or None if no match was found.
+
+ If *login* begins with a "#", as in ``#101``,
+ this will be treated as an encoded uid, ala apache's ``User`` directive.
+ """
+ assert missing in ["ignore", "error"]
+ if login and uid:
+ raise ValueError, "can't specified login & uid at the same time"
+ if login and login.startswith("#"):
+ uid = int(login[1:])
+ if login:
+ return _backend.user_by_login(login, missing)
+ elif uid:
+ return _backend.user_by_uid(uid, missing)
+ elif missing == "error":
+ raise ValueError, "must specify one of login or uid"
+ else:
+ return None
+
+def get_app_path(name, path):
+ """Returns the default resource paths for given application.
+
+ Given the *name* of your application, this returns a :class:`ProgPaths` instance
+ populated with the default resource paths the application should use,
+ per local OS conventions.
+
+ The returned :class:`ProgPaths` will be fully populated with paths relevant
+ to an application being run by a user (as opposed to :func:`get_service_path`).
+ This paths are only a recommendation based on local host conventions.
+
+ For OSes such as posix, you may wish to do something completely different.
+ """
+ #XXX: support desktop-environment specific paths (eg, kde vs gnome locations?)
+ pp = _backend.get_app_paths(name)
+ if path == "all_paths":
+ return pp
+ else:
+ return getattr(pp, path)
+
+# get_app_info() - have this return AppInfo (rename from paths)
+# instead of all_paths option
+
+def get_service_path(name, path, login=None, home=None):
+ """Returns the default resource paths for given service.
+
+ Given the *name* of your service, this returns a :class:`ProgPaths` instance
+ populated with the default resource paths the service should use,
+ per local OS conventions.
+
+ The returned :class:`ProgPaths` will be fully populated with paths relevant
+ to a service being launched by the operating system (as opposed to :func:`get_app_path`).
+ This paths are only a recommendation based on local host conventions.
+
+ If *login* is specified, it is assumed to be the name of a login account
+ assigned to the service itself, and the service's paths will be subdirs
+ of that account's home directory.
+
+ If *home* is specified, it will act like the home directory of a user
+ assigned via *login*.
+
+ For OSes such as posix, you may wish to do something completely different.
+
+ .. warning::
+
+ This function is relatively new, and has been tested
+ under few real-world use cases, so the current behavior may
+ leave much to be desired, and thus it may be tweaked in the future.
+ """
+ pp = _backend.get_service_paths(name, login, home)
+ if path == "all_paths":
+ return pp
+ else:
+ return getattr(pp, path)
+
+# get_service_info() - have this return ServiceInfo (rename from paths)
+# instead of all_paths option
+
+#=========================================================
+#load correct backend implementation
+#=========================================================
+#NOTE: a singleton object instance was chosen over the
+# "from xxx import *" solution that the os module uses
+# for the reason that inheritance w/ a single scope
+# is simply not available w/ the module based solution,
+# and inheritance greatly increased the common code's utility.
+if os.name == "nt" or os.name == "ce":
+    #NOTE: we might want to check for wine here, at least so user info would be correct
+    #NOTE: combining ce & nt here, but that's just a guess
+    # that it'll behave the same for our purposes
+    from bps.host.windows import WindowsBackend as _Backend
+elif os.name == "posix":
+    #NOTE: we might want to check for wine here, at least so user info would be correct
+    #NOTE: we might want to check for cygwin/mingw here.
+    from bps.host.posix import PosixBackend as _Backend
+else:
+    #TODO: would really like to support more, just don't have access to them.
+    # esp would like Mac OS X support!
+    raise ImportError, "no OS specific bps.host module found: %r" % (os.name,)
+#instantiate the chosen backend and publish module-level state (exe_exts etc)
+_set_backend(_Backend)
+
+#=========================================================
+#deprecated functions, to be removed in the future
+#=========================================================
+def _grp(key):
+ """return a specified resource path.
+ keys that are always present... and EnvironmentError will be raised if they can't be configured:
+ home - location of user's home directory
+ state - base path for storing application configuration (see getStatePath)
+ start - preferred starting path for file browsing
+ keys that may be present (will return None if missing)...
+ docs - location of user's documents directory
+ on windows, this will usually be the user's "My Documents" directory.
+ desktop - location of user's desktop directory
+ """
+ if key in ("home", "state", "start", "docs", "desktop"):
+ return get_env_path(key + "_dir")
+ raise ValueError, "unknown key: %r" % (key,)
+getResourcePath = relocated_function("getResourcePath", _grp, "get_env_path()")
+get_resource_path = relocated_function("get_resource_path", _grp, "get_env_path()")
+
+def _gsp(CfgName):
+ """
+ given an application prefix,
+ returns a place to store user-specific application state.
+ .. note::
+ if you want to specify subdirectories, such as
+ mycompany / myapp, use forward slashes only,
+ they will be translated.
+ """
+ if '/' in CfgName:
+ if os.path.sep != '/':
+ CfgName = CfgName.replace("/", os.path.sep)
+ CfgName, tail = os.path.split(CfgName)
+ else:
+ tail = None
+ path = get_app_path(CfgName, "state_dir")
+ if tail:
+ path /= tail
+ return path
+getStatePath = relocated_function("getStatePath", _gsp, "get_app_path('state_dir')/xxx or get_service_path('state_dir')/xxx")
+get_state_path = relocated_function("get_state_path", _gsp, "get_app_path('state_dir') or get_service_path('state_dir')")
+
+desktop_name = relocated_function("desktop_name", get_desktop_name)
+
+#deprecated proxy for accessing host info
+class _BackendProxy(object):
+ def __getattr__(self, attr):
+ warn("bps.host.Host.%s() is deprecated, use bps.host.%s() instead" % (attr, attr), DeprecationWarning)
+ if attr == "find_exe":
+ return find_exe
+ #NOTE: no other attrs were ever used.
+ raise AttributeError, "Host.%s not supported" % attr
+Host = _BackendProxy() #helper which proxies currently backend, useful for importing
+
+@deprecated_function("bps.host")
+def get_backend():
+    """return the single backend instance created for this host
+    (the backend is instantiated once, at module import time)"""
+    return _backend
+
+#=========================================================
+#EOC
+#=========================================================
diff --git a/bps/host/base.py b/bps/host/base.py
new file mode 100644
index 0000000..47560ff
--- /dev/null
+++ b/bps/host/base.py
@@ -0,0 +1,631 @@
+"""bps.host.base -- template for all bps.host implementations.
+
+This module provides the template for all bps.host implementations,
+specifying abstract / default implementations for all the functions
+they should provide, as well as internal helper methods.
+
+TODO: under unix, if we're run as root or as user w/o home dir, should probably store state in /var
+TODO: find_exe needs to deal w/ CWD
+"""
+#=========================================================
+#imports
+#=========================================================
+#core
+from logging import getLogger
+import os
+import subprocess
+import sys
+from warnings import warn
+import time
+#pkg
+from bps.types import BaseClass
+from bps.fs import filepath, posix_to_local
+from bps.host.const import DESKTOPS
+from bps.meta import abstractmethod
+from bps.warndep import deprecated_method
+#local
+log = getLogger(__name__)
+
+#=========================================================
+#primary interface
+#=========================================================
+class BackendInterface(BaseClass):
+    """this is minimum interface that all backend implementations must adhere to.
+    the names correspond to the ``bps3.host`` functions, and are documented there.
+    """
+    #=========================================================
+    #creation
+    #=========================================================
+    @classmethod
+    def create(cls):
+        "create new backend handler"
+        #NOTE: this is provided so backend classes can return
+        #an instance of another backend class if they wish
+        #(such as PosixBackend detecting and returning a CygywinBackend)
+        return cls()
+
+    #=========================================================
+    #process management
+    #=========================================================
+    def get_pid(self):
+        "wrapper for os.getpid, for symmetry"
+        return os.getpid()
+
+    @abstractmethod
+    def terminate_pid(self, pid, retry, kill, timeout):
+        """send termination signal to *pid*,
+        repeating every *retry* seconds if retry defined,
+        handing off to kill_pid after *kill* seconds if defined,
+        and giving up returning False after *timeout* seconds if timeout defined.
+        """
+
+    @abstractmethod
+    def kill_pid(self, pid, retry, timeout):
+        """send hard-kill signal to *pid*,
+        repeating every *retry* seconds if retry defined,
+        and giving up returning False after *timeout* seconds if timeout defined
+        """
+
+    @abstractmethod
+    def has_pid(self, pid):
+        "Return True if the process id *pid* exists, False if it doesn't."
+        #TODO: would like to detect process status (running, waiting, zombie, etc)
+        # as return richer info on request
+
+    #=========================================================
+    #shell interaction
+    #=========================================================
+    exe_exts = None #list of exe extensions used by host
+
+    @abstractmethod
+    def find_exe(self, name, extra_paths, paths=None):
+        "find exe by name in PATH, or return None"
+
+    #=========================================================
+    #desktop interaction
+    #=========================================================
+    #NOTE(review): unlike the methods above, the two below lack @abstractmethod,
+    # so the inherited defaults silently return None -- confirm this is intentional.
+    def get_desktop_name(self):
+        "return name of desktop environment, one of the DESKTOP_TYPE strings"
+
+    def desktop_open(self, path, action, mimetype):
+        "attempt to open file using specified action via desktop environment"
+
+    #=========================================================
+    #resource discovery
+    #=========================================================
+    @abstractmethod
+    def user_by_login(self, login):
+        "return UserProfile for user w/ matching login"
+
+    @abstractmethod
+    def user_by_uid(self, uid):
+        "return UserProfile for user w/ matching uid"
+
+    @abstractmethod
+    def get_env_paths(self):
+        "return EnvPaths built from current environment"
+
+    @abstractmethod
+    def get_app_paths(self, name):
+        "return ProgPaths for application"
+
+    @abstractmethod
+    def get_service_paths(self, name, login, home):
+        "return ProgPaths for service"
+
+    #=========================================================
+    #EOC
+    #=========================================================
+
+#=========================================================
+#helper classes
+#=========================================================
+class _Info(BaseClass):
+ def __init__(self, **kwds):
+ for k, v in kwds.iteritems():
+ if k.endswith("_file") or k.endswith("_dir") or k.endswith("_path"):
+ v = filepath(v)
+ setattr(self, k, v)
+
+class UserProfile(_Info):
+    """This class represents all the information about a given user account,
+    as returned by :func:`find_user`.
+
+    All :class:`UserProfile<>` instances will have the following attributes:
+
+    .. attribute:: login
+
+        The login name of the user.
+
+    .. attribute:: name
+
+        The display name of the user, as a string.
+
+    .. attribute:: home_dir
+
+        Path to the user's home directory.
+        Should always be defined & exist.
+
+    .. attribute:: desktop_dir
+
+        Path to the user's desktop. Will be defined IFF it exists.
+
+    .. attribute:: docs_dir
+
+        Path to user's documents directory. Will be defined IFF it exists.
+
+        .. NOTE::
+            The logic of this directory's selection is currently a little hackneyed.
+
+    .. attribute:: start_dir
+
+        Chosen from one of the above, this should always be a good directory
+        to open a file browser into.
+
+    .. attribute:: state_dir
+
+        Directory applications should use to store persistent application state.
+        This uses ``APPDATA`` under windows, and ``~/.config`` under posix.
+
+    These attributes will only be defined under a ``posix`` environment,
+    they will be set to ``None`` for all others:
+
+    .. attribute:: uid
+
+        Integer uid assigned to account.
+
+    .. attribute:: gid
+
+        Integer gid assigned to account's primary group.
+
+    .. attribute:: shell_file
+
+        Path to user's default shell.
+    """
+    #=========================================================
+    #os independant
+    #=========================================================
+
+    #-----------------------------------------------
+    #user stats
+    #-----------------------------------------------
+    name = None #display name of user
+    login = None #login name of user
+
+    #-----------------------------------------------
+    #resource paths
+    #-----------------------------------------------
+    home_dir = None #path to home directory (should always be defined & exist)
+    desktop_dir = None #path to desktop directory (should always be defined IF exists)
+    docs_dir = None #path to user's documents (should always be defined IF exists)
+
+    #path filebrowsers should start in (should always be defined)
+    def _get_start_dir(self):
+        #prefer the desktop when present, else fall back to home
+        return self.desktop_dir or self.home_dir
+    start_dir = property(_get_start_dir)
+
+    #-----------------------------------------------
+    #app info helpers
+    #-----------------------------------------------
+    #XXX: could list some desktop stuff here
+    state_dir = None #path where apps should store config (AppData under win32, .config under posix)
+
+    #=========================================================
+    #posix
+    #=========================================================
+    uid = None #uid of user
+    gid = None #gid of user's primary group
+
+    shell_file = None #path to user's default shell
+
+    #=========================================================
+    #win32
+    #=========================================================
+
+    #=========================================================
+    #EOC
+    #=========================================================
+
+class EnvPaths(_Info):
+    """This class represents all the information about the current environment's
+    resource paths, as returned by :func:`get_env_paths`. It should not be instantiated directly.
+    Any values not defined under the current environment will be set to `None`.
+
+    All :class:`EnvPaths<>` instances will have the following attributes:
+
+    .. attribute:: login
+
+        The login name of the user account we were run from.
+        May not always be defined.
+
+    .. attribute:: home_dir
+
+        Path to the user's home directory.
+        Will always be present, unless your script is being run from ``/etc/init.d``.
+
+    .. attribute:: desktop_dir
+
+        Path to the user's desktop.
+        Will be defined if and only if it exists.
+
+    .. attribute:: docs_dir
+
+        Path to user's documents directory.
+        Will be defined if and only if it exists.
+
+        .. warning::
+            The logic of this directory's selection is currently a little hackneyed
+            under posix.
+
+    .. attribute:: start_dir
+
+        Chosen from one of the above, this should always be a good directory
+        to open a file browser into.
+
+    .. attribute:: state_dir
+
+        Directory applications should use to store configuration.
+        This uses ``%APPDATA%`` under windows, and ``~/.config`` under posix.
+
+    The following attributes will only be defined for posix,
+    they will be set to ``None`` for all other OSes:
+
+    .. attribute:: shell_file
+
+        Path to the shell we were run under.
+
+    .. note::
+
+        This class contains a subset of same attributes as :class:`UserProfile`,
+        but the contents of this class are derived from ``os.environ``, whereas
+        the contents of that class are derived from the host's user account database.
+        Thus, while they will frequently be in agreement, this is not a guarantee.
+    """
+    #=========================================================
+    #os independant
+    #=========================================================
+
+    #-----------------------------------------------
+    #user stats
+    #-----------------------------------------------
+    login = None #login name of user
+
+    #-----------------------------------------------
+    #resource paths
+    #-----------------------------------------------
+    home_dir = None #path to home directory (should always be defined & exist)
+    desktop_dir = None #path to desktop directory (should always be defined IF exists)
+    docs_dir = None #path to user's documents (should always be defined IF exists)
+
+    #path filebrowsers should start in (should always be defined)
+    def _get_start_dir(self):
+        #prefer the desktop when present, else fall back to home
+        return self.desktop_dir or self.home_dir
+    start_dir = property(_get_start_dir)
+
+    #-----------------------------------------------
+    #app info helpers
+    #-----------------------------------------------
+    #XXX: could list some desktop stuff here
+    state_dir = None #path where apps should store config (AppData under win32, .config under posix)
+
+    #=========================================================
+    #posix
+    #=========================================================
+    shell_file = None #path to user's default shell
+
+    #=========================================================
+    #win32
+    #=========================================================
+
+    #=========================================================
+    #EOC
+    #=========================================================
+
+class ProgPaths(_Info):
+    """This class is used to hold the results of a :func:`get_app_paths` call.
+    See that function for more details.
+
+    Each :class:`ProgPaths<>` object contains the following attributes:
+
+    .. attribute:: name
+
+        The name of the application, as passed into :func:`get_app_paths`,
+        after being normalized for host naming conventions.
+
+    .. attribute:: state_dir
+
+        The application may use this directory to store any persistent data
+        which should be kept between invocations of the application,
+        and should survive system reboots, etc.
+
+        Under windows, this will point to the ``%APPDATA%/{name}`` directory,
+        and under posix, this will point to ``~/.config/{name}``
+
+        .. note::
+            For services, this will generally be a read-only directory, eg ``/etc``
+
+    .. attribute:: run_dir
+
+        The application may use this directory to store any data
+        which does not need to persist past the current invocation of the program.
+        Normally, this is set to the same value as ``state_dir``.
+
+    .. attribute:: cache_dir
+
+        Directory to stored cached data. Usually defaults to ``{state_dir}/cache``.
+
+    .. attribute:: lock_file
+
+        Recommended location for application's lock file.
+        Usually defaults to ``{run_dir}/{name}.lock``
+
+    For profiles generated by :func:`get_service_paths`, the following
+    attributes will also be set:
+
+    .. attribute:: config_dir
+
+        This should point to (usually read-only) default configuration
+        for the service. Eg, this is ``/etc/{name}`` under posix.
+
+    .. attribute:: log_file
+
+        Suggested log file for service.
+    """
+    name = None #name of application (used to fill in some paths)
+    first_name = None #NOTE(review): purpose not evident from this module -- verify against backends
+
+    state_dir = None #path to persistent state directory
+    run_dir = None #path to run-time state directory
+    cache_dir = None #path to cache directory (usually state_dir / cache)
+    lock_file = None #path to lock file (usually run_dir / pid.lock)
+
+    config_dir = None #path to config directory
+    log_file = None #path to log file
+
+#=========================================================
+#base backend class
+#=========================================================
class BaseBackend(BackendInterface):
    "base backend class which provides helpers needed by most implementations"
    #=========================================================
    #class attrs
    #=========================================================
    pid_check_refresh = .1 #delay (in seconds) between polls in terminate/kill_pid loop

    #=========================================================
    #instance attrs
    #=========================================================

    #desktop interaction attrs
    desktop_loaded = False #set to True when desktop discovery has been run
    desktop_name = None #desktop name when loaded (must be a member of DESKTOPS)

    #resource discovery attrs
    resources_loaded = False #set to True once load_resources() has been run
    env = None #EnvProfile filled out by load_resources

    #=========================================================
    #creation
    #=========================================================

    def __init__(self, **kwds):
        self.__super.__init__(**kwds)
        #normalize exe_exts to a tuple so find_exe can iterate it safely
        if not isinstance(self.exe_exts, tuple):
            self.exe_exts = tuple(self.exe_exts)

    #=========================================================
    #process management
    #=========================================================
    def terminate_pid(self, pid, retry, kill, timeout):
        """wraps _terminate_pid() and provides the complex behavior host.terminate_pid requires

        :arg pid: process id to signal
        :arg retry: seconds between repeated terminate attempts (falsy disables retries)
        :arg kill: seconds after which to escalate to kill_pid() (falsy disables escalation)
        :arg timeout: total seconds to wait before giving up (falsy disables timeout)
        :returns: True if the process went away, False if we gave up (timeout)
        """
        if not self._terminate_pid(pid):
            return True #pid already gone, nothing to wait for
        now = time.time()
        retry_after = now + retry if retry else None
        kill_after = now + kill if kill else None
        #NOTE: timeout only enforced here when larger than the kill window;
        #otherwise the kill_pid() escalation below manages the remaining time
        timeout_after = now + timeout if timeout and timeout > kill else None
        delay = self.pid_check_refresh
        while True:
            time.sleep(delay)
            if not self.has_pid(pid):
                return True
            now = time.time()
            if retry_after and retry_after <= now:
                if not self._terminate_pid(pid):
                    return True
                retry_after = now + retry
            if kill_after and kill_after <= now:
                #NOTE: we decrease timeout since it measures TOTAL amount of time spent signalling.
                if timeout:
                    if timeout > kill:
                        timeout -= kill
                    else:
                        log.warning("terminate_pid(): timeout window less than kill window: %r vs %r", timeout, kill)
                        timeout = 30
                return self.kill_pid(pid, retry, timeout)
            if timeout_after and timeout_after <= now:
                return False

    def kill_pid(self, pid, retry, timeout):
        """wraps _kill_pid() and provides the complex behavior host.kill_pid requires

        :arg pid: process id to signal
        :arg retry: seconds between repeated kill attempts (falsy disables retries)
        :arg timeout: total seconds to wait before giving up (falsy disables timeout)
        :returns: True if the process went away, False if we gave up (timeout)
        """
        if not self._kill_pid(pid):
            return True #pid already gone
        now = time.time()
        retry_after = now + retry if retry else None
        timeout_after = now + timeout if timeout else None
        delay = self.pid_check_refresh
        while True:
            time.sleep(delay)
            if not self.has_pid(pid):
                return True
            now = time.time()
            if retry_after and retry_after <= now:
                if not self._kill_pid(pid):
                    return True
                retry_after = now + retry
            if timeout_after and timeout_after <= now:
                return False

    #---------------------------------------------------------
    #subclass interface
    #---------------------------------------------------------
    @abstractmethod
    def _terminate_pid(self, pid):
        "helper to terminate specified process"
        #returns True if signal sent, False if pid not found (ala has_pid)

    @abstractmethod
    def _kill_pid(self, pid):
        "helper to kill specified process"
        #returns True if signal sent, False if pid not found (ala has_pid)

    #=========================================================
    #shell interaction
    #=========================================================
    def find_exe(self, name, extra_paths=None, paths=None):
        """scan host os's exe path for binary with specified name.

        :arg name: bare executable name (exe_exts are appended when probing)
        :arg extra_paths: optional additional directories to search
        :arg paths: optional override for the search path list
        :returns: filepath of first match, or None if not found
        """
        #NOTE: for most OS's, this won't need to be overridden.
        if paths is None:
            paths = self.get_exe_paths()
        elif extra_paths:
            paths = list(paths) #don't let extra paths modify original
        if extra_paths:
            paths.extend(extra_paths)
        #FIXME: should CWD be included?
        for prefix in paths:
            #XXX: will expandvars work right under nt?
            prefix = os.path.expanduser(os.path.expandvars(prefix))
            for ext in self.exe_exts:
                path = os.path.join(prefix, "%s%s" % (name, ext))
                if os.path.exists(path):
                    return filepath(path)
        return None

    def get_exe_paths(self):
        "[find_exe helper] return list of paths to check"
        #XXX: this strips null directories out... should they be treated as '.', or ignored?
        return [ path for path in os.environ["PATH"].split(os.path.pathsep) if path ]

    #=========================================================
    #desktop interaction
    #=========================================================

    #---------------------------------------------------------
    #desktop discovery helpers
    #---------------------------------------------------------
    def init_desktop(self):
        "delays desktop discovery if needed"
        if not self.desktop_loaded:
            self.load_desktop()
            assert self.desktop_name in DESKTOPS
            self.desktop_loaded = True

    def load_desktop(self):
        #subclasses may override to fill in additional desktop state
        self.desktop_name = self.detect_desktop()

    def detect_desktop(self):
        "helper to detect what type of desktop environment is in use, returning id string or None"
        get = os.environ.get
        has = os.environ.__contains__

        #check for windows desktop
        if os.name == "nt" or os.name == "ce":
            return "windows"

        #check for osx
        #NOTE(review): os.name is never "osx" in CPython (darwin reports "posix"),
        #so this branch looks unreachable -- confirm intent
        if os.name == "osx":
            return "osx"

        #check for X11 environments
        if os.environ.get("DISPLAY"):
            ds = get("DESKTOP_SESSION")

            #check for kde
            #XXX: we could check for kde4 before windows/macosx
            if ds == "kde" or has('KDE_SESSION_UID') or has('KDE_FULL_SESSION'):
                #XXX: distinguish kde3 / kde4?
                return "kde"

            #check for gnome
            if ds == "gnome" or has('GNOME_SESSION_ID'):
                return "gnome"

            #check for xfce - probe the root window property via xprop
            if self.find_exe("xprop"):
                #XXX: is there a easier way to detect xfce?
                p = subprocess.Popen(['xprop', '-root', '_DT_SAVE_MODE'], stdout=subprocess.PIPE)
                p.wait()
                v = p.stdout.read().strip()
                if v == '_DT_SAVE_MODE(STRING) = "xfce4"':
                    return "xfce"

        #give up
        return None

    #---------------------------------------------------------
    #interface helpers
    #---------------------------------------------------------
    def get_desktop_name(self):
        #subclasses shouldn't need to override this (tweak detect_desktop instead)
        self.init_desktop()
        return self.desktop_name

    def desktop_open(self, path, action, mimetype):
        #subclasses will want to override this, thus is just a stub
        #they will also want to call self.init_desktop()
        return self.stub_open(path, action, mimetype)

    def stub_open(self, path, action, mimetype):
        "stub for when no opener is available"
        warn("stub desktop open(): file not opened: path=%r action=%r mimetype=%r" %
             (path, action, mimetype))
        return None

    #=========================================================
    #resource discovery
    #=========================================================
    #NOTE: this code assumed env profile is built up once, and used by other bits

    def init_resources(self):
        #lazy one-time initialization of self.env
        if not self.resources_loaded:
            self.load_resources()
            self.resources_loaded = True

    def load_resources(self):
        #subclasses may override to fill in a richer EnvPaths
        self.env = EnvPaths()

    def get_env_paths(self):
        "return EnvPaths instance"
        self.init_resources()
        return self.env

    def norm_prog_name(self, name):
        """helper for normalized program names.

        returns ``(name, tail)``, where *name* has spaces replaced by dashes
        (and '/' separators converted to local path form), and *tail* is the
        final path component of the normalized name.
        """
        name = name.replace(" ", "-")
        if '/' in name:
            tail = name.rsplit("/", 1)[1]
            name = posix_to_local(name)
        else:
            tail = name
        return name, tail

    def get_app_paths(self, name):
        "standard app path creator, works for windows and posix"
        env = self.get_env_paths()
        name, tail = self.norm_prog_name(name)
        #state and run dirs share a single per-app directory by default
        state_dir = run_dir = env.state_dir / name
        return ProgPaths(
            name=name, first_name=tail,
            state_dir=state_dir,
            run_dir=run_dir,
            cache_dir=state_dir / "cache",
            lock_file=run_dir / (tail + ".pid"),
            )

    #=========================================================
    #EOC
    #=========================================================
+
+#=========================================================
+#EOC
+#=========================================================
diff --git a/bps/host/const.py b/bps/host/const.py
new file mode 100644
index 0000000..b3891df
--- /dev/null
+++ b/bps/host/const.py
@@ -0,0 +1,9 @@
"""host resource constants"""
#=========================================================
#constants
#=========================================================
ACTIONS = [ "open", "view", "edit", "print", "exec", "browse" ] #: known actions for s_file
DESKTOPS = [ None, "windows", "kde", "gnome", "xfce", "osx" ] #: known desktop environments (None = not detected)
#=========================================================
#EOF
#=========================================================
diff --git a/bps/host/mailclient.py b/bps/host/mailclient.py
new file mode 100644
index 0000000..17997c6
--- /dev/null
+++ b/bps/host/mailclient.py
@@ -0,0 +1,549 @@
+"""
+Email Client Interface - (c) 2005-2008 Assurance Technologies, LLC
+"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+import os
+import re
+import subprocess
+#site
+from bps import *
+from bps.stream import get_input_type
+#lib
+#pkg
+find_exe = None #filled in by bps.host.__init__ to prevent cyclic loop
+#local
+__all__ = [
+ #frontends
+ 'compose_email',
+
+ #choosing
+ 'list_drivers', 'set_default_driver',
+
+ #lookup
+ 'get_driver', 'has_driver', 'unload_drivers',
+
+]
+
+#=========================================================
+#frontends
+#=========================================================
def compose_email(*args, **kwds):
    """open mail client 'compose' window.

    Raises EnvironmentError if something goes wrong talking to the client.
    All arguments are forwarded to the chosen driver's ``compose_email``;
    the optional *client* keyword selects which driver to use.
    """
    #XXX: rename "client" kwd to "driver_name" ? do same for get_driver()?
    client = kwds.pop("client", "any")
    driver = get_driver(client, missing="error")
    return driver.compose_email(*args, **kwds)
+
+#=========================================================
+#registry
+#=========================================================
_default = "any" #name of explicitly chosen default driver, "any" (auto-pick), or None (disabled)
_last_default = None #if default is "any", this caches name of the last detected driver
_names = [] #list of driver names, in the order they were registered
_classes = {} #map of driver name -> driver class
_instances = {} #map of driver name -> driver instance | None (known but not present)
+
def get_driver(name="any", missing="ignore"):
    """return named/default client or None if not found.

    *name* specifies the name of the client to load.
    If the named client is known, and can be located & contacted,
    the driver object for that client will be returned.
    If the client name is not known, or the specified client cannot be detected
    on the current system, ``None`` is returned.

    If no name is specified, the default client is chosen.
    If no suitable default can be found, ``None`` is returned.

    If *missing* is set to ``error`` instead of ``ignore``,
    a :exc:`KeyError` will be raised in any of the above cases
    instead of returning ``None``.
    """
    assert missing in ("ignore", "error")
    global _default, _last_default
    if name is None:
        warn("name=None passed in, please use name='any' instead", stacklevel=2)
        name = "any"

    #check if driver was explicitly named
    if name != "any":
        driver = _load_driver(name)
        if driver:
            return driver
        if missing == "ignore":
            return None
        elif driver is False: #_load_driver signals "unknown name" with False
            raise KeyError, "email client %r not supported" % name
        else: #driver is None -- known client, but not present on this host
            raise KeyError, "email client %r not present" % name
    #else use default driver

    #check if default was disabled
    if _default is None:
        if missing == "ignore":
            return None
        else:
            raise KeyError, "default email client disabled"

    #try explicitly named default before picking first available
    if _default != "any":
        driver = _load_driver(_default)
        if driver:
            return driver
        log.warning("default email client not present: %r", _default)

    #check for cached value
    if _last_default:
        driver = _load_driver(_last_default)
        if driver:
            return driver

    #pick the first one we find, in registration order
    for name in _names:
        driver = _load_driver(name)
        if driver:
            #remember for next time
            _last_default = driver.name
            return driver

    #give up
    if missing == "ignore":
        return None
    else:
        raise KeyError, "no known email clients are present"
+
def has_driver(name="any"):
    "check if driver for client is available & loadable"
    #get_driver() returns either a (truthy) driver instance or None here
    return get_driver(name) is not None
+
+##def list_clients(known=False):
+## "lists names of known clients (may not all be present on system)"
+## if known:
+## return list(_names)
+## else:
+## return [ driver.name for driver in list_drivers() ]
+
def list_drivers():
    """return list of drivers for all known clients which can be detected on host"""
    #probe every registered name, keeping only the drivers that loaded
    candidates = (_load_driver(name) for name in _names)
    return [driver for driver in candidates if driver]
+
+#TODO: update interface to allow app to set list of preferred clients,
+# or get by list of preferred clients, instead of just a single one.
+
def set_default_driver(name):
    """explicitly choose which driver to use as the default.

    You may pass in the name of a driver, the driver object itself,
    ``"any"`` to allow the default to be automatically chosen,
    or ``None`` to explicitly choose that there should be no default.
    """
    #NOTE: the original had a second, dead docstring statement here
    #("set the default client to use") -- removed, the real docstring covers it.
    global _default
    if name == "any" or name is None:
        _default = name
        return
    if hasattr(name, "name"): #it's a driver class/instance
        name = name.name
    else:
        assert isinstance(name, str)
    #NOTE: deliberately not validated against the registry here,
    #so a default may be set before its driver is registered/detected
    _default = name
+
def register_driver(cls):
    "register a new driver class in the module registry (cls.name is the key)"
    name = cls.name
    if not name:
        raise RuntimeError, "no name specified"
    if not getattr(cls, "title", None):
        cls.title = name #fall back to using the name as the display title
    global _names, _classes
    _names.append(name)
    _classes[name] = cls
+
def _load_driver(name):
    """internal helper to load driver given name.
    returns driver if successful,
    None if driver known but not present,
    and False if driver is not known.
    """
    global _instances, _classes
    if name in _instances: #driver is loaded / was disabled (None)
        return _instances[name]
    elif name in _classes: #try to load driver
        cls = _classes[name]
        try:
            driver = cls()
        except EnvironmentError, err:
            #driver __init__ raises EnvironmentError when client isn't present
            log.info("%r driver not present: %r", name, err)
            driver = None
        else:
            log.debug("loaded %r driver", name)
            assert driver, "loaded drivers must be boolean True" #just to be nice
        #cache the result (even None) so detection only runs once per driver
        _instances[name] = driver
        return driver
    else:
        #unknown driver name
        return False
+
def unload_drivers():
    "flushes any loaded drivers, causing them to be redetected next time they are needed"
    global _last_default
    #snapshot current drivers before wiping the cache
    loaded = _instances.values()
    _last_default = None
    _instances.clear()
    #give each driver a chance to release resources
    for entry in loaded:
        if hasattr(entry, "close"):
            entry.close()
+
+#=========================================================
+#
+#=========================================================
class BaseDriver(BaseClass):
    "base interface to mail clients"
    name = None #name to refer to driver by (registry key)
    title = None #display name of driver

    #NOTE: driver's init method should do host-env detection,
    #and raise EnvironmentError if driver can't run.

    def compose_email(self, to=None, cc=None, bcc=None, subject=None, body=None, attachments=None, invalid="error"):
        """tell client to open a new compose-email window.

        :Parameters:
            to
                list of email addrs, or string containing semicolon separated email addrs.
            cc
                same format as 'to', but for 'cc' field
            bcc
                same format as 'to', but for 'bcc' field
            subject
                optional subject text
            body
                optional body text (for now, should be text/plain)
            attachments
                not implemented: would like to support list of filepaths,
                as well as dict mapping names => buffers (or filepaths)
            invalid
                policy for invalid email addrs: "error", "ignore", "keep", callable
        """
        raise NotImplementedError
+
+#=========================================================
+#thunderbird interface
+#=========================================================
class ThunderbirdDriver(BaseDriver):
    "driver which composes email via the thunderbird command-line interface"
    name = "thunderbird"
    title = "Mozilla Thunderbird"

    path = None #path to thunderbird exe

    def __init__(self):
        self.__super.__init__()
        self.path = find_exe("thunderbird")
        if self.path is None:
            log.info("thunderbird exe can't be found")
            raise EnvironmentError, "thunderbird exe not found"
        log.info("thunderbird exe detected in path: %r", self.path)

    def compose_email(self, to=None, cc=None, bcc=None, subject=None, body=None, attachments=None, invalid="error"):
        "open a thunderbird compose window (see BaseDriver.compose_email for parameters)"
        to = norm_addrs(to, invalid=invalid)
        cc = norm_addrs(cc, invalid=invalid)
        bcc = norm_addrs(bcc, invalid=invalid)
        attachments = norm_attachments(attachments)

        #NOTE: thunderbird 1.5 has a bug where it can't parse these fields,
        #have to use the mailto:// url argument format instead.

        #TODO: check if "'" or other values are present, and deal with them.
        out = []
        if to:
            out.append("to='%s'" % ",".join(to))
        if cc:
            out.append("cc='%s'" % ",".join(cc))
        if bcc:
            out.append("bcc='%s'" % ",".join(bcc))
        if subject:
            out.append("subject='%s'" % subject)
        if body:
            out.append("body='%s'" % body)
        if attachments:
            #NOTE: 'name' attribute isn't supported.
            #we _could_ copy everything to properly named temp files, but ICH.
            #TODO: should urlencode path
            parts = ",".join(
                "file://%s" % path
                for name, path in attachments
                )
            out.append("attachment='%s'" % parts)
        opts = ",".join(out)

        log.debug("calling thunderbird to compose email: %r", opts)
        proc = subprocess.Popen([self.path, "-compose", opts])
        #NOTE: if thunderbird is already running, proc will exit w/ rc=0 immediately
        #else, will wait till user closes compose window, then exit w/ rc=0.
        #since the semantics of this call are NOWAIT, we just ignore 'proc'
        return True
register_driver(ThunderbirdDriver)
+
+
+#=========================================================
+#Google Apps "desktop client" interface
+# e.g.
+# %PROGFILES%\Google\Google Apps\googleapps.exe --domain=caapdocs.com --mail.google.com
+#=========================================================
class GoogleAppsDriver(BaseDriver):
    "driver which composes email via the Google Apps desktop client exe"
    name = "googleapps"
    title = "Google Apps Email"

    path = None #path to googleapps.exe

    def __init__(self):
        self.__super.__init__()
        #additional windows paths to search for exe
        extra_paths = ['%PROGRAMFILES%\\Google\\Google Apps\\',] if os.name == 'nt' else []
        self.path = find_exe("googleapps", extra_paths=extra_paths)
        if self.path is None:
            log.info("GoogleApps exe can't be found")
            raise EnvironmentError, "GoogleApps exe not found"
        log.info("GoogleApps exe detected in path: %r", self.path)

    def compose_email(self, to=None, cc=None, bcc=None, subject=None, body=None, attachments=None, invalid="error"):
        "open a Google Apps compose window (see BaseDriver.compose_email for parameters)"
        to = norm_addrs(to, invalid=invalid)
        cc = norm_addrs(cc, invalid=invalid)
        bcc = norm_addrs(bcc, invalid=invalid)
        attachments = norm_attachments(attachments)

        #NOTE(review): this method was copied from ThunderbirdDriver, which
        #uses this option-string format to work around a thunderbird 1.5 bug --
        #confirm googleapps.exe actually accepts a thunderbird-style "-compose" argument.

        #TODO: check if "'" or other values are present, and deal with them.
        out = []
        if to:
            out.append("to='%s'" % ",".join(to))
        if cc:
            out.append("cc='%s'" % ",".join(cc))
        if bcc:
            out.append("bcc='%s'" % ",".join(bcc))
        if subject:
            out.append("subject='%s'" % subject)
        if body:
            out.append("body='%s'" % body)
        if attachments:
            #NOTE: 'name' attribute isn't supported.
            #we _could_ copy everything to properly named temp files, but ICH.
            #TODO: should urlencode path
            parts = ",".join(
                "file://%s" % path
                for name, path in attachments
                )
            out.append("attachment='%s'" % parts)
        opts = ",".join(out)

        log.debug("calling GoogleApps to compose email: %r", opts)
        proc = subprocess.Popen([self.path, "-compose", opts])
        #NOTE(review): comment inherited from ThunderbirdDriver -- presumably the
        #client exits immediately if already running; confirm for googleapps.exe.
        #since the semantics of this call are NOWAIT, we just ignore 'proc'
        return True
register_driver(GoogleAppsDriver)
+
+#=========================================================
+#outlook interface
+#=========================================================
+if os.name == "nt":
+ #-----------------------------------------------
+ #outlook imports
+ #-----------------------------------------------
+ from bps.host.windows import detect_outlook ##, detect_outlook_express
+ try:
+ import win32com
+ from pywintypes import com_error
+ except ImportError:
+ win32com = None
+
+ #-----------------------------------------------
+ #OUTLOOK CONSTANTS
+ #-----------------------------------------------
+ #OlItemTypes
+ OL_MAILITEM = 0
+
+ #OlMailRecipientType
+ ##OL_TO = 1
+ ##OL_CC = 2
+ ##OL_BCC = 3
+
+ #OlAttachmentTypes
+ OL_BYVALUE = 1
+
+ #-----------------------------------------------
+ #outlook driver
+ #-----------------------------------------------
+ class OutlookDriver(BaseDriver):
+ name = "outlook"
+ title = "Microsoft Outlook"
+
+ com = None #outlook com reference
+
+ def __init__(self):
+ self.__super.__init__()
+ if not win32com:
+ raise EnvironmentError, "win32com module required for Outlook integration"
+ self.outlook = detect_outlook()
+ if not self.outlook:
+ #FIXME: should raise env error, but want to make sure we're detecting right first
+ log.critical("MS Outlook not installed!")
+ else:
+ log.debug("MS Outlook detected in registry: %r", self.outlook)
+## self.title += " (%s)" % self.outlook['vstr']
+
+ def compose_email(self, to=None, cc=None, bcc=None, subject=None, body=None, attachments=None, invalid="error"):
+ to = norm_addrs(to, invalid=invalid)
+ cc = norm_addrs(cc, invalid=invalid)
+ bcc = norm_addrs(bcc, invalid=invalid)
+ attachments = norm_attachments(attachments)
+ app = email = None
+ try:
+ app = win32com.client.DispatchEx("Outlook.Application")
+ email = app.CreateItem(OL_MAILITEM)
+ if to:
+ email.To = "; ".join(to)
+ if cc: #NOTE: haven't tested this com attr
+ email.Cc = "; ".join(cc)
+ if bcc: #NOTE: haven't tested this com attr
+ email.Bcc = "; ".join(bcc)
+ email.Subject = subject or ''
+ if body:
+ raise NotImplementedError, "body not implemented"
+ for name, path in attachments:
+ email.Attachments.Add(path, OL_BYVALUE, 1, name)
+ email.Display()
+ except com_error, err:
+ #XXX: close the email? go ahead and display it?
+ log.critical("unexpected com error from outlook: %r", err, exc_info=True)
+ raise EnvironmentError, "an error occurred while opening Outlook"
+ return True
+
+ register_driver(OutlookDriver)
+
+#=========================================================
+#util functions
+#=========================================================
+_re_email_title = re.compile("""
+ ^
+ \s* (.+?) \s*
+ <
+ \s* (.+?) \s*
+ > \s*
+ $
+ """, re.X)
+
+_re_email_addr = re.compile("""
+ ^
+ \s* (.+?) @ (.+?) \s*
+ $
+ """, re.X)
+
def norm_addrs(value, invalid="error"):
    """
    parses input argument for compose's to/cc/bcc

    input value can be a string of addrs separated by "," or ";"
    or a list of addrs.

    addrs can be "a@b.c" or "Name <a@b.c>"
    """
    if not value:
        return []
    if isinstance(value, str):
        #split on commas, falling back to semicolons when no comma present
        parts = value.split(",")
        if len(parts) == 1:
            parts = parts[0].split(";")
        value = parts
    normed = [norm_addr(entry, invalid=invalid) for entry in value]
    #strip out empty/dropped addrs
    return [entry for entry in normed if entry]
+
def norm_addr(value, invalid="error"):
    """
    norm a single email addr
    addrs can be "a@b.c" or "Name <a@b.c>"

    *invalid* controls handling of unparseable addrs:
    "error" raises ValueError, "ignore" drops the addr (returns None),
    "keep" returns it unchanged; a callable is used as the handler directly.
    """
    #normalize the *invalid* policy into a callable handler
    if not callable(invalid):
        if invalid == "error":
            def invalid(value):
                raise ValueError, "not an email address: %r" % value
        elif invalid == "ignore":
            def invalid(value):
                return None
        else:
            assert invalid == "keep"
            def invalid(value):
                return value
    orig = value
    #split off the optional "Name <addr>" wrapper
    m = _re_email_title.match(value)
    if m:
        name, value = m.group(1, 2)
    else:
        name = None
    #validate the bare addr portion
    m = _re_email_addr.match(value)
    if m:
        local, domain = m.group(1, 2)
        addr = "%s@%s" % (local, domain)
    else:
        return invalid(orig)
    if name:
        return "%s <%s>" % (name, addr)
    else:
        return addr
+
def norm_attachments(value):
    """
    normalize attachment input.
    can be list or dict.

    returns list of (name,filepath) pairs
    """
    #TODO: have this check the filepaths exist,
    #and issue a warning (dropping the path) if they don't
    if not value:
        return []
    if isinstance(value, (list, tuple)):
        #sequence of sources -- names derived per-entry by norm_attachment
        return [
            norm_attachment(None, source)
            for source in value
            ]
    else:
        #assume a dict mapping name -> source
        return [
            norm_attachment(name, source)
            for name, source in value.iteritems()
            ]
+
def norm_attachment(name, source):
    "[norm_attachments helper] resolve a single (name, source) entry to a (name, filepath) pair"
    if isinstance(source, tuple):
        #source may itself be a (name, source) pair; explicit name wins
        alt_name, source = source
        if name is None:
            name = alt_name
    stype = get_input_type(source)
    if stype == "raw" or stype == "stream":
        raise NotImplementedError, "need to store string/buffer in temp file"
    else:
        assert stype == "path"
        path = filepath(source).abspath
        if name is None:
            name = path.name #default to the file's basename
    return name, path
+
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/host/posix.py b/bps/host/posix.py
new file mode 100644
index 0000000..09968fe
--- /dev/null
+++ b/bps/host/posix.py
@@ -0,0 +1,531 @@
+"""
+bps.host - functions for discovering host resources.
+
+TODO: under unix, if we're run as root or as user w/o home dir, should probably store state in /var
+TODO: find_exe needs to deal w/ CWD
+"""
+#=========================================================
+#imports
+#=========================================================
+#core
+from logging import getLogger
+import os
+import subprocess
+import sys
+from warnings import warn
+import errno
+import signal as sigmod
+import time
+import re
+import stat
+#pkg
+from bps.meta import is_seq
+from bps.types import Undef, BaseClass
+from bps.fs import filepath, posix_to_local
+from bps.host.base import BaseBackend, UserProfile, ProgPaths
+#local
+log = getLogger(__name__)
+#=========================================================
+#posix
+#=========================================================
+class PosixBackend(BaseBackend):
+ #=========================================================
+ #options
+ #=========================================================
+ prefer_xdg = False #: if true, xdg will be used over kde/gnome
+ kde_prefer_exec = True #: if true, kfmclient's "exec" mode will be used instead of "openURL"
+ #this gives better behavior, but the safety/semantics seem to be under debate
+
+ #=========================================================
+ #instance attrs
+ #=========================================================
+ _default_open = None #preferred desktop opener, set by load_desktop()
+
+ #=========================================================
+ #process management
+ #=========================================================
+ def _terminate_pid(self, pid):
+ return _send_signal(pid, sigmod.SIGTERM)
+
+ def _kill_pid(self, pid):
+ return _send_signal(pid, sigmod.SIGKILL)
+
+ def has_pid(self, pid):
+ "check if process exists - true if yes, false if no"
+ try:
+ os.kill(pid, 0)
+ except os.error, detail:
+ #FIXME: this may raise errno.EPERM if we don't have perm to signal proc.
+ #might be better to check for "/proc" first
+ if detail.errno == errno.ESRCH: #no such process
+ return False
+ else:
+ raise
+ return True
+
+ #=========================================================
+ #shell interaction
+ #=========================================================
+ exe_exts = ("",)
+
+ #=========================================================
+ #desktop interaction
+ #=========================================================
+ def load_desktop(self):
+ self.__super.load_desktop()
+ self._default_open = self.choose_default_opener()
+
+ def choose_default_opener(self):
+ if self.prefer_xdg and self.has_xdg_open():
+ return self.xdg_open
+ if self.desktop_name == "kde" and self.has_kde_open():
+ return self.kde_open
+ if self.desktop_name == "gnome" and self.has_gnome_open():
+ return self.gnome_open
+ if not self.prefer_xdg and self.has_xdg_open():
+ return self.xdg_open
+ return self.stub_open
+
+ def desktop_open(self, *args):
+ "launch file using specified action"
+ self.init_desktop()
+ return self._default_open(*args)
+
+ #-----------------------------------------------
+ #kde
+ #-----------------------------------------------
+ def has_kde_open(self):
+ return bool(self.find_exe("kfmclient"))
+
+ def kde_open(self, path, action, mimetype):
+ #kfmclient only supports two of this library's actions,
+ # "openURL" (which acts sort like view/open)
+ # and "exec" (which acts more like exec/edit)
+ if action == "exec":
+ kaction = "exec"
+ elif action == "browse":
+ kaction = "openURL"
+ elif self.kde_prefer_exec:
+ kaction = "exec"
+ else:
+ kaction = "openURL"
+ args = [ 'kfmclient', kaction, path]
+ if kaction == "openURL" and mimetype:
+ args.append(mimetype)
+ log.debug("launching opener: %r", args)
+ subprocess.Popen(args)
+
+ #-----------------------------------------------
+ #gnome
+ #-----------------------------------------------
+ def has_gnome_open(self):
+ return bool(self.find_exe("gnome-open"))
+
+ def gnome_open(self, path, action, mimetype):
+ subprocess.Popen(['gnome-open', path])
+
+ #-----------------------------------------------
+ #xdg
+ #-----------------------------------------------
+ def has_xdg_open(self):
+ return bool(self.find_exe("xdg-open"))
+
+ def xdg_open(self, path, action, mimetype):
+ subprocess.Popen(['xdg-open', path])
+
+ #=========================================================
+ #resource discovery
+ #=========================================================
+ def load_resources(self):
+ self.__super.load_resources()
+ env = self.env
+ get = os.environ.get
+
+ #FIXME: if there's an APPDATA, we're probably running under mingw / cygwin, might wanna deal w/ that
+
+ #detect if we're running under a user
+ home_dir = filepath(get("HOME"))
+ if home_dir and home_dir.exists:
+ env.login = get("USER") #XXX: is this the right one? USERNAME also defined.
+ env.shell_file=filepath(get("SHELL"))
+ env.home_dir=home_dir
+ env.mode = "user"
+ self._fill_user_info(env, is_env=True)
+ return
+
+ #check if we're running under initd
+ #XXX: should be a more reliable way
+ if 'HOME' not in os.environ and os.getuid() == 0:
+ #we're probably launched from an initd script,
+ #or a similarly restricted environment.
+ #so assume we're using root's home dir.
+ env.mode = "initd"
+ return
+
+ raise NotImplementedError, "bps3.host doesn't understand environment"
+
+ #-----------------------------------------------
+ #user related info
+ #-----------------------------------------------
+ def user_by_login(self, login, missing):
+ for info in iter_user_info():
+ if info['name'] == login:
+ return self._build_user_info(info)
+ if missing == "ignore":
+ return None
+ else:
+ raise KeyError, "no user with login: %r" % login
+
+ def user_by_uid(self, uid, missing):
+ for info in iter_user_info():
+ if info['uid'] == uid:
+ return self._build_user_info(info)
+ if missing == "ignore":
+ return None
+ else:
+ raise KeyError, "no user with uid: %r" % uid
+
+ def _build_user_info(info):
+ "build user info out of passwd info"
+ stats = info['stats']
+ user = UserProfile(
+ login=info['name'],
+ name=stats.split(',', 1)[0].strip() if ',' in stats else stats, #XXX: is this right?
+ uid=info['uid'],
+ gid=info['gid'],
+ shell_file=info['shell_file'],
+ home_dir=info['home_dir'],
+ )
+ self._fill_user_info(user)
+ return user
+
+ def _fill_user_info(self, info, is_env=False):
+ "fill out common dirs in user info"
+ #find desktop...
+ for name in ("Desktop", "DESKTOP"):
+ path = info.home_dir / name
+ if path.exists:
+ info.desktop_dir = path
+ break
+
+ #find documents...
+ for name in ("docs", "Docs", "documents", "Documents", "My Documents"):
+ path = info.home_dir / name
+ if path.exists:
+ info.docs_dir = path
+ break
+
+ info.state_dir = info.home_dir / ".config"
+ return info
+
+ #-----------------------------------------------
+ #program related resources
+ #-----------------------------------------------
+ def get_app_paths(self, name):
+ self.init_resources()
+ if self.env.mode == "initd":
+ raise RuntimeError, "applications should not be run from init.d"
+ return self.__super.get_app_paths(name)
+
+ def get_service_paths(self, name, login, home):
+ env = self.get_env_paths()
+ name, tail = self.norm_prog_name(name)
+ if login:
+ #if a login account has been assigned to service, use it's home directory
+ for info in iter_user_info():
+ if info['user'] == login:
+ home = info['home']
+ break
+ else:
+ raise KeyError, "user not found: %r" % (login,)
+ if home:
+ home = filepath(home)
+ state_dir = run_dir = home / "state" #XXX: don't like this name
+ return ProgPaths(
+ name=name, first_name=tail,
+ state_dir=state_dir,
+ run_dir=run_dir,
+ cache_dir=home / "cache",
+ lock_file=run_dir / (tail + ".pid"),
+ config_dir=home / "etc",
+ log_file=home / "log" / (tail + ".log"),
+ )
+ state_dir = filepath("/var/lib", name)
+ run_dir = filepath("/var/run", name)
+ return ServiceProfile(
+ name=name, first_name=tail,
+ state_dir=state_dir,
+ run_dir=run_dir,
+ cache_dir=filepath("/var/cache",name),
+ lock_file=run_dir / (tail + ".pid"),
+ config_dir=filepath("/etc", name),
+ log_file=filepath("/var/log", name + ".log"),
+ )
+
+ #=========================================================
+ #EOC
+ #=========================================================
+
+#=========================================================
+#posix-specific helper functions
+#=========================================================
+DEFAULT_PASSWD = "/etc/passwd" #default account database
+DEFAULT_SHADOW = "/etc/shadow" #default shadow file (accepted but not yet read - see iter_user_info)
+PASSWD_COLS = [ "name", None, "uid", "gid", "stats", "home", "shell" ] #None => column discarded by parser
+SHADOW_COLS = [ "name", "hash", "last_changed", "must_change", "warn_expire", "disabled_after", "disabled", None]
+    #ex: "root:!:14251:0:99999:7:::"
+    # xxx: see 'man shadow' for details on fields
+    #invalid hash (eg * or !) means no login permitted
+
+def _parse_passwd_row(row):
+ info = dict(entry for entry in zip(PASSWD_COLS, row.split(":")) if entry[0])
+ #FIXME: 'stats' contains name,phone, some other stuff, separated by ','
+ info['uid'] = int(info['uid'])
+ info['gid'] = int(info['gid'])
+ info['home'] = filepath(info['home'])
+ info['shell'] = filepath(info['shell'])
+ return info
+
+def iter_user_info(passwd=DEFAULT_PASSWD, shadow=DEFAULT_SHADOW):
+    """iterate through unix-style passwd & shadow files, returning rows as dict.
+
+    yields one dict per passwd row (see _parse_passwd_row for keys).
+    a missing passwd file is logged and treated as an empty file.
+    NOTE(review): the *shadow* parameter is accepted but currently unused;
+    the disabled block below sketches the intended shadow support.
+    """
+    try:
+        ph = file(passwd, "r")
+    except IOError, err:
+        if err.errno == 2: #no such file/dir
+            log.warning("no such passwd file: %r", passwd)
+            return
+        raise
+    try:
+##        sh = None
+##        if shadow:
+##            try:
+##                sh = file(passwd, "r")
+##            except IOError, err:
+##                if err.errno == 2: #no such file/dir
+##                    log.warning("no such shadow file: %r", shadow)
+##                elif err.errno == 13: #perm denied
+##                    log.debug("not permitted to open shadow file: %r", shadow)
+##                else:
+##                    raise
+        for row in ph:
+            if row: #skip blank lines
+                yield _parse_passwd_row(row.rstrip())
+    finally:
+        ph.close() #always release the handle, even if the caller abandons the generator
+
+DEFAULT_GROUP = "/etc/group"
+GROUP_COLS = ['name', None, 'gid', 'members']
+
+def _parse_group_row(row):
+ info = dict(entry for entry in zip(GROUP_COLS, row.split(":")) if entry[0])
+ #FIXME: 'stats' contains name,phone, some other stuff, separated by ','
+ info['gid'] = int(info['gid'])
+ info['members'] = [ m.strip() for m in info['members'].split(",") if m.strip() ]
+ return info
+
+def iter_group_info(group=DEFAULT_GROUP):
+    """iterate through unix-style group files, returning rows as dict.
+
+    yields one dict per group row (see _parse_group_row for keys).
+    a missing group file is logged and treated as an empty file.
+    NOTE(review): the disabled block below was copied from iter_user_info
+    and refers to shadow files - it does not apply here.
+    """
+    try:
+        ph = file(group, "r")
+    except IOError, err:
+        if err.errno == 2: #no such file/dir
+            log.warning("no such group file: %r", group)
+            return
+        raise
+    try:
+##        sh = None
+##        if shadow:
+##            try:
+##                sh = file(passwd, "r")
+##            except IOError, err:
+##                if err.errno == 2: #no such file/dir
+##                    log.warning("no such shadow file: %r", shadow)
+##                elif err.errno == 13: #perm denied
+##                    log.debug("not permitted to open shadow file: %r", shadow)
+##                else:
+##                    raise
+        for row in ph:
+            if row: #skip blank lines
+                yield _parse_group_row(row.rstrip())
+    finally:
+        ph.close() #always release the handle, even if the caller abandons the generator
+
+def resolve_uid(value, default=Undef, validate=True):
+ "given a user login string, or string containing a uid, returns matching uid as integer"
+ def helper():
+ for info in iter_user_info():
+ yield info['name'], info['uid']
+ return _resolve_id("user", helper, value, default=default, validate=validate)
+
+def resolve_gid(value, default=Undef, validate=True):
+ "given a group name as a string, or string containing a gid, returns matching gid as integer"
+ def helper():
+ for info in iter_group_info():
+ yield info['name'], info['gid']
+ return _resolve_id("group", helper, value, default=default, validate=validate)
+
+def _resolve_id(name, helper, value, default=Undef, validate=True):
+    """shared backend for resolve_uid / resolve_gid.
+
+    *name* is "user" or "group" (used in error messages only);
+    *helper* yields (symbolic name, numeric id) pairs.
+    accepts None / -1 (meaning "no id"), an int, a "#123" string
+    (explicit numeric form), or a symbolic name; returns the numeric id.
+    when *validate* is set, numeric inputs are checked against the helper
+    and *default* (or a KeyError) is used for unknown ids.
+    """
+    if validate:
+        def vf(value):
+            #confirm the numeric id actually exists
+            for n, v in helper():
+                if v == value:
+                    return value
+            if default is Undef:
+                raise KeyError, "unknown %s id: %r" % (name, value)
+            else:
+                return default
+    else:
+        def vf(value):
+            #pass numeric ids through unchecked
+            return value
+    if value is None or value == -1:
+        return -1 #the "leave unchanged" value used by os.chown
+    elif isinstance(value, int):
+        return vf(value)
+    elif isinstance(value, str):
+        if value.startswith("#"):
+            return vf(int(value[1:])) #"#123" => numeric id 123
+        else:
+            #symbolic name: look it up (no vf needed, match proves existence)
+            for n, v in helper():
+                if n == value:
+                    return v
+            if default is Undef:
+                raise KeyError, "unknown %s name: %r" % (name, value)
+            else:
+                return default
+    else:
+        raise TypeError, "%s name/id must be None, int, or string: %r" % (name, value)
+
+def resolve_user(value, default=Undef):
+ "given a uid, return user login string"
+ def helper():
+ for info in iter_user_info():
+ yield info['uid'], info['name']
+ return _resolve_name("user", helper, value, default=default)
+
+def resolve_group(value, default=Undef):
+ "given a gid, return group string"
+ def helper():
+ for info in iter_group_info():
+ yield info['gid'], info['name']
+ return _resolve_name("group", helper, value, default=default)
+
+def _resolve_name(name, helper, value, default=Undef):
+    """shared backend for resolve_user / resolve_group.
+
+    *name* is "user" or "group" (error messages only);
+    *helper* yields (numeric id, symbolic name) pairs.
+    accepts None / -1 (returns None), an int id, a "#123" string,
+    a numeric string, or an existing symbolic name (which is validated
+    and returned as-is); returns the symbolic name.
+    """
+    if value is None or value == -1:
+        return None
+    if isinstance(value, str):
+        try:
+            if value.startswith("#"):
+                value = int(value[1:])
+            else:
+                value = int(value)
+        except ValueError:
+            #try to resolve name
+            #NOTE(review): this branch just validates that the name exists,
+            #returning it unchanged - confirm that's the intended contract.
+            for i, n in helper():
+                if n == value:
+                    return n
+            if default is Undef:
+                raise KeyError, "unknown %s id: %r" % (name, value)
+            else:
+                return default
+    #else it's now an int
+    if isinstance(value, int):
+        for i, n in helper():
+            if i == value:
+                return n
+        if default is Undef:
+            raise KeyError, "unknown %s id: %r" % (name, value)
+        else:
+            return default
+    else:
+        raise TypeError, "%s name/id must be None, int, or string: %r" % (name, value)
+
+
+#=========================================================
+#
+#=========================================================
+
+#TODO: move this to fs.py ?
+#TODO: move this to fs.py ?
+def chown(targets, user=None, group=None, recursive=False):
+    """set ownership for a path, a sequence of paths, or (with *recursive*) an entire tree.
+
+    *user* / *group* accept anything resolve_uid / resolve_gid accept
+    (name, int, "#id" string, or None meaning "leave unchanged").
+    """
+    uid = resolve_uid(user)
+    gid = resolve_gid(group)
+    if uid == -1 and gid == -1:
+        return #nothing to change, skip the filesystem walk entirely
+    if is_seq(targets):
+        targets = (filepath(path).abspath for path in targets)
+    else:
+        targets = [ filepath(targets).abspath ]
+    for root in targets:
+        if root.isdir and recursive:
+            #chown the dir itself plus everything under it
+            for base, dirnames, filenames in root.walk():
+                os.chown(base, uid, gid)
+                for name in filenames:
+                    os.chown(base / name, uid, gid)
+        else:
+            os.chown(root, uid, gid)
+
+#=========================================================
+#signals
+#=========================================================
+##try:
+## import signal as sigmod
+##except ImportError:
+## sigmod = None
+
+def _send_signal(pid, signal):
+ "helper for sending a signal, checking if it was received"
+ try:
+ os.kill(pid, signal)
+ except os.error, detail:
+ if detail.errno == errno.ESRCH: #no such process
+ return False
+ else:
+ raise
+ return True
+
+##def send_signal(pid, signal, check, retry=None, timeout=None):
+## """helper for sending signals, with retry & timeout ability.
+##
+## :Parameters:
+## pid
+## pid to send signal to
+## signal
+## name / value of signal
+## check
+## function to check if signal was received.
+## prototype is ``check(pid,signal) -> bool``, should
+## return True if loop should continue trying to send the signal.
+## ``host.has_pid`` is used here.
+## retry
+## number of seconds between send attempts,
+## or None if only 1 try.
+## timeout
+## number of seconds before giving up.
+##
+## :Returns:
+## Returns True if signal was received,
+## False if timeout occurred.
+##
+## XXX: this is an experimental func, it may be removed in the future.
+## """
+## #NOTE: this code is borrowed straight from BaseBackend.kill_pid
+## signal = _resolve_signum(signal)
+## _send_signal(pid, signal)
+## now = time.time()
+## retry_after = now + retry if retry else None
+## timeout_after = now + timeout if timeout else None
+## delay = .1
+## while True:
+## time.sleep(delay)
+## if not check(pid, signal):
+## return True
+## now = time.time()
+## if retry_after and retry_after <= now:
+## _send_signal(pid, signal)
+## retry_after = now + retry
+## if timeout_after and timeout_after <= now:
+## return False
+
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/host/utils.py b/bps/host/utils.py
new file mode 100644
index 0000000..322e2d0
--- /dev/null
+++ b/bps/host/utils.py
@@ -0,0 +1,526 @@
+"""utility functions used by bps.host"""
+#==================================================================================
+#imports
+#==================================================================================
+#core
+import os
+import re
+import stat
+import signal as sigmod
+import sys
+#pkg
+from bps.fs import filepath
+from bps.cache import cached_function
+from bps.warndep import relocated_function, deprecated_function
+#local
+__all__ = [
+ #chmod wrappers
+ 'compile_mode', 'compile_mode_mask', 'repr_mode', 'chmod',
+
+ #umask wrappers
+ 'setumask', 'getumask',
+
+ #signal wrappers
+ 'has_signal', 'add_signal_handler', 'remove_signal_handler',
+ 'adapt_sig_term',
+]
+#==================================================================================
+#mode parsing utilities
+#==================================================================================
+
+#TODO: this should be moved to fs.py
+
+#---------------------------------------------------------------------
+#constants used for parsing & validating modes
+#---------------------------------------------------------------------
+
+###short-lived custom format
+##rwx_str = "( r -{0,2} | -? w -? | -{0,2} x | rw -? | -? wx | r -? x | rwx | -{1,3} )"
+##_line_re = re.compile(r"""
+## ^ \s*
+## (
+## (?P<special> u -? | -? g | ug | -{{1,2}} )
+## \s+
+## )?
+## (?P<u> {rwx}) \s+
+## (?P<g> {rwx}) \s+
+## (?P<o> {rwx})
+## \s* $
+##""".format(rwx=rwx_str), re.X)
+
+#note: this doesn't support chmod's 'X' and 't' bits, or some other bits of it's grammar
+#validates an entire symbolic mode string (series of scope/action/flag clauses)
+_sym_re = re.compile(r"""
+    ^ \s*
+    (
+        [ugoa]*
+        [+-=]
+        ( [rwxs]* | [ugo] )
+        (\s*,\s* | \s+ | \s*(,\s*)? $)
+    )*
+    $
+    """, re.X)
+
+#extracts one scope/action/flags clause at a time (used with finditer)
+_sym_elem_re = re.compile(r"""
+    \s*
+    (?P<scope>[ugoa]*)
+    (?P<action>[+-=])
+    (?P<flags> [rwxs]* | [ugo] )
+    (\s*,\s* | \s+ | \s*(,\s*)? $)
+    """, re.X)
+
+#NOTE: under windows, only IRUSR and IWUSR are applied
+PERM_BITLIST = (
+    #scope char, rbit, wbit, xbit, sbit
+    ('u', stat.S_IRUSR, stat.S_IWUSR, stat.S_IXUSR, stat.S_ISUID),
+    ('g', stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_ISGID),
+    ('o', stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH, 0),
+    )
+
+#NOTE: this _should_ be all the bits specified by stat module.
+PERM_BITMASK = 07777
+
+#derived table pairing each permission char with its stat bit, per scope
+PERM_CHARLIST = [
+    #scope char, ('r',rbit), ('w',wbit), ('x',xbit), ('s',sbit)
+    [row[0]] + zip("rwxs", row[1:])
+    for row in PERM_BITLIST
+    ]
+
+#---------------------------------------------------------------------
+#recognizers
+#---------------------------------------------------------------------
+@deprecated_function("bps.fs.is_mode_mask", removal="2009-10-01")
+def is_mode_mask(value):
+ return (
+ isinstance(value, (list, tuple)) and
+ len(value) == 2 and
+ isinstance(value[0], int) and (0 <= v <= PERM_BITMASK) and
+ isinstance(value[1], int) and (0 <= v <= PERM_BITMASK) and
+ (v[0] & v[1] == 0) #'bits' that are set shouldn't be allowed in 'mask'
+ )
+
+def _is_mode_mask_type(value):
+ return (
+ isinstance(value, (list, tuple)) and
+ len(value) == 2 and
+ isinstance(value[0], int) and
+ isinstance(value[1], int)
+ )
+
+@deprecated_function("bps.fs.is_mode_int", removal="2009-10-01")
+def is_mode_int(value):
+ return isinstance(value, (int, long)) and (0 <= v <= PERM_BITMASK)
+
+#---------------------------------------------------------------------
+#converting -> int form
+#---------------------------------------------------------------------
+@deprecated_function("bps.fs.parse_mode_mask()[0]", removal="2009-10-01")
+def compile_mode(mode):
+    """Compile a symbolic mode string into a mode integer,
+    build out of ``stat.S_Ixxx`` bits *or*\ ed together.
+
+    The input string is of the format accepted by gnu's *chmod* implementation.
+    Only the 'bits' half of compile_mode_mask's result is returned,
+    so "preserve previous mode" information ("u-rw" etc) is discarded.
+
+    ``repr_mode(bits)`` is the inverse of this function.
+    """
+    return compile_mode_mask(mode)[0]
+
+@deprecated_function("bps.fs.parse_mode_mask", removal="2009-10-01")
+def compile_mode_mask(mode):
+ """version of compile_mode which returns ``(bits,mask)``
+ where *bits* is the bits which are to be set in the final mode,
+ and *mask* is the bits which are to be preserved from the current mode
+ set on the file. This is needed to accurately reproduce mode strings
+ such as "u-rw", which requires knowledge of the previous mode.
+
+ The final mode is determined by the formula ``bits | (oldbits & mask)``,
+ where *oldbits* is the previous mode set on the file, as returned by ``os.stat(path).st_mode``.
+ Note that any bits not set in *bits* or *mask* will be set to 0.
+ Note also that if *mask* is 0, the original *oldbits* do not need to be retrieved.
+ """
+ if isinstance(mode, int):
+ if not is_mode_int(mode):
+ raise ValueError, "mode integer out of range" % (mode,)
+ return value
+ if _is_mode_mask_type(mode):
+ if not is_mode_mask(mode):
+ raise ValueError, "mode pair out of range/invalid: %r" % (mode,)
+ return mode
+ if not isinstance(mode, str):
+ raise TypeError, "mode must be int, pair of ints, or string: %r" % (mode,)
+ value = mode
+
+## #check for solid permission format
+## m = _line_re.match(value)
+## if m:
+## pos = 0
+## value = m.group("special") or ''
+## if 'u' in value:
+## pos |= stat.S_ISUID
+## if 'g' in value:
+## pos |= stat.S_ISGID
+## for row in PERM_CHARLIST:
+## flags = m.group(row[0]) #'u', 'g', or 'o' => rwxs string
+## for pchar, pbit in row[1:]:
+## if pchar in flags:
+## pos |= pbit
+## return pos, 0
+
+ #detect chmod-style symbolic string
+ if _sym_re.match(value.replace(",", " ")):
+ pos = 0 #bits set
+ used = 0 #bits set or cleared
+ for m in _sym_elem_re.finditer(value.replace(",", " ")):
+ scope, action, flags = m.group("scope","action","flags")
+ if not scope:
+ scope = "ugo"
+ #XXX: under correct chmod behavior, bits set in umask aren't affected in this case
+ elif 'a' in scope:
+ scope = "ugo"
+ if len(flags) == 1 and flags in "ugo":
+ #TODO: supporting this feature would require introspection of the file perm,
+ #making our compiled return value more complicated.
+ #unless this becomes needed, not going to bother.
+ raise NotImplementedError, "[+-=]ugo is not supported"
+ for row in PERM_CHARLIST:
+ if row[0] in scope:
+ pairs = row[1:]
+ if action == "+":
+ for pchar, pbit in pairs:
+ if pchar in flags:
+ pos |= pbit
+ used |= pbit
+ elif action == "-":
+ for pchar, pbit in pairs:
+ if pchar in flags:
+ pos = (pos|pbit) ^ pbit
+ used |= pbit
+ else:
+ assert action == "="
+ for pchar, pbit in pairs:
+ if pchar in flags:
+ pos |= pbit
+ else:
+ pos = (pos|pbit) ^ pbit
+ used |= pbit
+ return pos, PERM_BITMASK ^ used
+
+ #todo: detect octal-mode format
+
+ #can't parse format
+ raise ValueError, "can't parse mode string: %r" % (mode,)
+
+#---------------------------------------------------------------------
+#converting -> symbolic form
+#---------------------------------------------------------------------
+@deprecated_function("bps.fs.repr_mode_mask", removal="2009-10-01")
+def repr_mode(mode):
+ """given mode int (made of bits from :mod:``stat``), returns symbolic representation
+ in string form, ala gnu chmod's symbolic format.
+
+ ``compile_mode(mode_string)`` is the inverse of this function.
+ """
+ if _is_mode_mask_type(mode):
+ #FIXME: would like to have this work with mask bits
+ mode, mask = mode
+ else:
+ mask = 0
+ if isinstance(mode, int):
+ out = []
+ for row in PERM_CHARLIST:
+ #row: [ ugoa str, (pchar, pbit), ... ]
+ start = True
+ part = ''.join(
+ pchar
+ for pchar, pbit in row[1:]
+ if pbit and (mode & pbit)
+ )
+ if part:
+ out.append("%s=%s" % (row[0], part))
+ return ",".join(out)
+ raise TypeError, "unexpected value for mode: %r" % (mode,)
+
+#---------------------------------------------------------------------
+#helpers for using (bits,mask) version of mode.
+#---------------------------------------------------------------------
+def _compile_mode_func(mode):
+    "helper used by chmod: returns a function that applies the mode directive to a path"
+    if not mode:
+        #empty directive => no-op setter
+        return lambda path: None
+    bits, mask = compile_mode_mask(mode)
+    if mask:
+        #some bits must be preserved: read the current mode first
+        def setmode(path):
+            cur = os.stat(path).st_mode
+            os.chmod(path, bits|(cur & mask))
+    else:
+        #mode fully specified: no stat needed
+        def setmode(path):
+            os.chmod(path, bits)
+    return setmode
+
+##def _setmode(path, bits, mask):
+## if mask:
+## cur = os.stat(path).st_mode
+## os.chmod(path, bits|(cur&mask))
+## else:
+## os.chmod(path, bits)
+
+#==================================================================================
+#chmod & umask
+#==================================================================================
+
+@deprecated_function("bps.fs.chmod (note call syntax change)", removal="2009-10-01")
+def chmod(target, mode, recursive=False):
+    """set file permissions, using a syntax that's mostly compatible with GNU chmod.
+
+    *target* may be either a path, or a sequence of paths.
+
+    If *recursive* is True, *target* (or any path listed in it)
+    which is a directory will be recursively tranversed,
+    and the mode applied to all of it's contents in turn.
+
+    *mode* must be a string containing a comma-separated series
+    of symbolic permission operations. Each operation
+    is of the form ``[ugoa]?[+-=]r?w?x?s?``.
+
+    .. todo::
+
+        Given some usuage examples of the various mode formats.
+
+    *mode* may also be a dict, which specifies
+    different modes depending on the type of file.
+    This allows setting a different mode for dirs and for files,
+    and in the following example::
+
+        >> chmod("/home/user/tmp", dict(file="=rw", dir="=rwx"), recursive=True)
+
+    *mode* may also be a callable, in which case,
+    the callable should have the prototype ``mode_func(absolute_path) -> mode string``.
+    This allows for much greater customization of security policies.
+
+    .. todo::
+
+        Fix symbolic link behavior (followlinks, etc)
+    """
+    #build per-filetype setter functions up front, so the directive
+    #is compiled once no matter how many paths we visit
+    if isinstance(mode, (str, int)):
+        setdir = setfile = _compile_mode_func(mode)
+    elif isinstance(mode, dict):
+        #"all" entry is prepended to both the file and dir directives
+        allmode = (mode.get("all") or "") + ","
+        setfile = _compile_mode_func(allmode + (mode.get("file") or ""))
+        setdir = _compile_mode_func(allmode + (mode.get("dir") or ""))
+    else:
+        #callable: re-evaluate (and re-compile) the directive per path
+        def setfile(path):
+            value = mode(path)
+            if value:
+                bits, mask = compile_mode_mask(value)
+                if mask:
+                    os.chmod(path, bits|(os.stat(path).st_mode&mask))
+                else:
+                    os.chmod(path, bits)
+        setdir = setfile
+
+    #run through loop
+    for root in _norm_path_list(target):
+        if root.isfile:
+            setfile(root)
+        elif recursive:
+            #apply to the dir itself and everything below it
+            for base, dirnames, filenames in os.walk(root.abspath, topdown=True):
+                setdir(base)
+                for name in filenames:
+                    setfile(os.path.join(base, name))
+        else:
+            assert root.isdir
+            setdir(root)
+
+def _norm_path_list(source):
+ "helper for chmod/chown"
+ if isinstance(source, (tuple, list)):
+ return [ filepath(path).abspath for path in source ]
+ else:
+ return [ filepath(source).abspath ]
+
+@deprecated_function("bps.fs.setumask (note format change)", removal="2010-04-01")
+def setumask(mode, format="sym"):
+ "like os.umask, but accepts symbolic mode strings"
+ from bps.fs import parse_mode_mask, repr_mode_mask
+ bits, mask = parse_mode_mask(mode)
+ #XXX: _wish_ this was atomic
+ old = os.umask(bits)
+ if mask:
+ os.umask(bits | (old & mask))
+ if format == 'symbolic':
+ return repr_mode_mask(old)
+ return old
+
+@deprecated_function("bps.fs.getumask (note format change)", removal="2010-04-01")
+def getumask(format="sym"):
+ #XXX: _wish_ this was atomic, or that we could read umask easily
+ from bps.fs import repr_mode_mask
+ old = os.umask(0022)
+ os.umask(old)
+ if format == "sym":
+ return repr_mode_mask(old)
+ else:
+ return old
+
+#==================================================================================
+#signals
+#==================================================================================
+#TODO: where should this be moved? misc?
+
+#TODO: would like to raise error when trying to attach handler to SIGTERM under nt,
+# since it can't actually be caught.
+
+def _resolve_signum(signum):
+ "resolve signal name to os-specific value, raises ValueError if name is unknown"
+ if isinstance(signum, str):
+ try:
+ return int(signum)
+ except ValueError:
+ try:
+ signum = getattr(sigmod, signum.upper())
+ except AttributeError:
+ raise ValueError, "unknown signal name: %r" % (signum,)
+ if not isinstance(signum, int):
+ raise TypeError, "signum must be int"
+ return signum
+
+def has_signal(name):
+ "check if specific signal is available for OS"
+## if not sigmod:
+## warning("`signal` module not available, can't check for signal", RuntimeWarning)
+## return False
+ return hasattr(sigmod, name.upper())
+
+_master_signal_handlers = {} #map of signum -> master handler func
+def _get_master_handler(signum, create=True):
+    """helper which returns master handler function, with chain stored as attr, for specified signal.
+
+    the master handler simply calls each handler in its ``chain`` list
+    in order; handlers are added/removed by add_signal_handler /
+    remove_signal_handler. returns None when no master exists and
+    *create* is False.
+    """
+    global _master_signal_handlers
+    assert isinstance(signum, int)
+    if signum in _master_signal_handlers:
+        return _master_signal_handlers[signum] #reuse the existing master
+    if not create:
+        return None
+    chain = []
+    def master(s, f):
+        assert s == signum, "handler attached to wrong signal!"
+        exc_info = None
+        for handler in chain:
+            #XXX: could have a signal to trap errors?
+            #but decided trapping all by default is bad policy
+##            try:
+            handler(s, f)
+            #XXX: could have True result => don't call any more handlers
+##            except SystemExit:
+##                exc_info = sys.exc_info()
+##            except:
+##                import traceback
+##                print >> sys.stderr, "Error in signal handler: signum=%r handler=%r" % (signum, handler)
+##                traceback.print_exc()
+##                exc_info = sys.exc_info()
+##        if exc_info is not None:
+##            raise exc_info[0], exc_info[1], exc_info[2]
+    master.chain = chain #expose the chain so callers can mutate it
+    _master_signal_handlers[signum] = master
+    return master
+
+def add_signal_handler(signal, handler, prepend=False):
+    """attach a new handler to the specified signal.
+
+    when the signal is raised, all handlers are called
+    in the order they were attached, until one of them
+    returns ``True``, at which point, the signal is assumed
+    to be handled, and no other handlers are called.
+
+    :Parameters:
+        signal
+            Signal name (resolve from signal module), or number.
+        handler
+            A callback, with the prototype ``handler(signum,frame) -> bool success``.
+            If it returns ``True``, no more handlers will be called.
+            Otherwise, it may returns ``None`` or ``False``.
+        prepend
+            if True, handler will be put first in line to be called,
+            instead of last in line.
+
+    .. note::
+
+        If another signal handler has been attached directly when this function
+        is called, that handler will be removed, and automatically placed on the chain
+        before your handler is added.
+    """
+##    if not sigmod:
+##        warning("`signal` module not available, can't attach signal handler", RuntimeWarning)
+##        return
+    signum = _resolve_signum(signal)
+
+    #attach master handler
+    master = _get_master_handler(signum)
+    cur = sigmod.getsignal(signum)
+    if cur is not master:
+        #some other handler was installed directly: preserve it on the chain
+        if not isinstance(cur, int): #disregarding SIG_DFL SIG_IGN
+            master.chain.append(cur)
+        sigmod.signal(signum, master)
+
+    #add our handler
+    if prepend:
+        master.chain.insert(0, handler)
+    else:
+        master.chain.append(handler)
+    return True
+register_signal_handler = relocated_function("register_signal_handler", add_signal_handler)
+
+def remove_signal_handler(signal, handler):
+    """remove a handler attached to the specified signal.
+
+    * Returns True if handler successfully removed.
+    * Returns None if :mod:`signal` module not present.
+    * Raises a :exc:`KeyError` if the handler isn't attached to the signal,
+      either directly, or in a chain.
+    """
+##    if not sigmod:
+##        warning("`signal` module not available, can't remove signal handler", RuntimeWarning)
+##        return
+    signum = _resolve_signum(signal)
+
+    #check if handler is attached to master
+    master = _get_master_handler(signum, create=False)
+    if master and handler in master.chain:
+        master.chain.remove(handler)
+        if not master.chain: #remove master once chain is empty
+            cur = sigmod.getsignal(signum)
+            if cur is master:
+                sigmod.signal(signum, sigmod.SIG_DFL)
+        return True
+
+    #check if handler is attached directly
+    cur = sigmod.getsignal(signum)
+    if handler is cur:
+        if master and master.chain: #re-attach master if it's active
+            sigmod.signal(signum, master)
+        else:
+            sigmod.signal(signum, sigmod.SIG_DFL)
+        return True
+
+    #give up
+    raise KeyError, "handler not attached to signal!"
+
+_adapted = False #NOTE(review): set nowhere in this module - confirm before removing
+def adapt_sig_term(value=1):
+    """This attaches a handler to SIGTERM which adapts it into a ``SystemExit(1)`` error,
+    so that atexit functions properly when SIGTERM is sent to the process.
+    The optional value keyword lets you override the exit code used.
+
+    returns True if the handler was installed, False otherwise.
+
+    .. note::
+
+        If SIGTERM is not defined for the OS, this function will silently perform a NOOP.
+    """
+    if not has_signal("SIGTERM") or os.name == "nt":
+        #NOTE: nt's SIGTERM cannot be caught
+        return False
+    def handler(signum, frame):
+        raise SystemExit(value)
+    add_signal_handler("SIGTERM", handler)
+    return True
+
+#==================================================================================
+#EOF
+#==================================================================================
diff --git a/bps/host/windows.py b/bps/host/windows.py
new file mode 100644
index 0000000..8addb49
--- /dev/null
+++ b/bps/host/windows.py
@@ -0,0 +1,928 @@
+"""windows backend"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+from logging import getLogger
+import os.path
+import subprocess
+import sys
+from warnings import warn
+try:
+ import _winreg as winreg
+except ImportError:
+ winreg = None #doing this so docs can be built from linux
+from contextlib import contextmanager
+import threading
+#pkg
+from bps.types import BaseClass, Undef
+from bps.refs import SoftValueDict
+from bps.fs import filepath, posix_to_local
+from bps.host.base import BaseBackend, UserProfile
+#local
+log = getLogger(__name__)
+
+if hasattr(winreg, "ExpandEnvironmentStrings"): #added in py26
+ _ExpandEnvironmentStrings = winreg.ExpandEnvironmentStrings
+else:
+ _ExpandEnvironmentStrings = os.path.expandvars
+
+#=========================================================
+#nt
+#=========================================================
+#TODO: should implement a CygWin backend, and have NT_Backend check for it
+#TODO: kde4 can run under windows, should check for it.
+
+class WindowsBackend(BaseBackend):
+
+ #=========================================================
+ #instance attrs
+ #=========================================================
+ profile = None #profile name (nt, 95) determining resource path locations
+
+ #resource discovery
+ env_user = None #environ_user info
+
+ wine = False #are we running under wine?
+
+ #=========================================================
+ #init
+ #=========================================================
+ def __init__(self, **kwds):
+ self.__super.__init__(**kwds)
+
+ #check for wine
+## #this might not be a reliable test...
+## try: #keeping this protected till regpath gets more testing
+## if regpath("/local_machine/software/wine/drives").exists:
+## self.wine = True
+## log.info("WINE detected")
+## except:
+## log.error("error in regpath:", exc_info=True)
+ #TODO: if wine _is_ present, would like to choose user's real home dir / config dir etc
+
+ # need to check if cygwin sets same key,
+ # and it might be cleared anyways
+## pwd = os.environ.get("PWD")
+## if pwd and pwd.startswith("/"):
+## #best guess, we're probably running under wine
+## pass
+
+
+ #determine profile
+ major, minor, build, platform, text = version = sys.getwindowsversion()
+ if platform == 2: #nt/2k/xp
+ self.profile = "nt"
+ elif platform == 1: #windows 95/98/ME
+ self.profile = "95"
+ else:
+ #FIXME: don't have access to 3.1(platform=0) or CE(platform=3) for testing
+ raise EnvironmentError, "unsupported windows version: %r" % (version,)
+
+ #load custom exe_exts...
+ #first, load PATHEXT if present...
+ #';' separated list of extensions (eg: ".EXE;.BAT") which will be searched for by cmd.exe
+ exts = os.environ.get("PATHEXT")
+ if exts:
+ self.exe_exts = tuple(exts.split(os.path.pathsep))
+
+ #=========================================================
+ #process management
+ #=========================================================
+ def terminate_pid(self, pid, retry, kill, timeout):
+ #FIXME: would like to try _something_ for _termiante_pid()
+ #SEE: http://www.ddj.com/windows/184416547 for ideas about how
+ warn("stub: terminate_pid() not implemented for win32, using kill_pid()", RuntimeWarning)
+ return self.kill_pid(pid, retry, timeout)
+
+ def _kill_pid(self, pid):
+ #FIXME: need to pass sig in
+ #os.kill isn't available for win32, so use win32api
+ import win32api
+ handle = win32api.OpenProcess(1, 0, pid)
+ #TODO: log an error/warning if terminate fails, maybe even figure out why
+ #XXX: make sure this returns 'True' if signal sent, and 'False' if process not found
+
+ if win32api.TerminateProcess(handle, 0) == 0:
+ #XXX: MS sez check GetLastError() to find out why.
+ return False #could check, but assuming the signal failed to be sent cause proc was gone.
+ else: #assume it was sent
+ return True
+
+ def has_pid(self, pid):
+ #found at http://mail.python.org/pipermail/spambayes-checkins/2003-December/002383.html
+ import win32process
+ import win32con
+ try:
+ rc = win32process.GetExitCodeProcess(pid)
+ return rc == win32con.STILL_ACTIVE
+ except win32process.error:
+ return False
+
+ #=========================================================
+ #shell integration
+ #=========================================================
+ exe_exts = (".com", ".exe", ".bat",)
+
+ #TODO: find_exe() - saw something that said windows will only
+ # consider exact match if extension is specified (eg "app.exe", "app.bat"),
+ # but will look at all exe_exts if no extension is present.
+ # need to verify this, as well as what "foo.bar" would do
+
+ #=========================================================
+ #desktop interaction
+ #=========================================================
+ def detect_desktop(self):
+ #XXX: use orig when kde detection is integrated
+ return "windows"
+
+ def desktop_open(self, path, action, mimetype):
+ #XXX: if desktop not windows, need to use alt handler
+ #TODO: have code hunt down available actions in registry, for better choosing
+ waction = {
+ "open": "open", #duh
+ "view": "open", #view never really registered
+ "edit": "open", #edit never really registered
+ #print - rare
+ #exec - not seen
+ "browse": "explore",
+ }.get(action, action)
+ if waction == "exec":
+ log.warning("'exec' action has not been tested much on win32")
+ try:
+ os.startfile(path, waction)
+ except WindowsError, err:
+ if err.args[0] == 1155: # No application is associated with the specified file for this operation
+ if action is not None:
+ #fallback to default action
+ log.warning("no handler for action: action=%r path=%r", action, path)
+ os.startfile(path)
+ #FIXME: what if startfile fails again?
+ # should we raise special error, or just return false?
+ # _can_ it ever raise an error in this context?
+ return
+ raise
+
+ #=========================================================
+ #resource discovery
+ #=========================================================
+ def load_resources(self):
+ self.__super.load_resources()
+ if self.profile == "nt":
+ self.load_nt_resources()
+ else:
+ assert self.profile == "95"
+ self.load_95_resources()
+
+ def load_95_resources(self):
+ #FIXME: don't have this OS available for testing.
+ #where is everyting, with or w/o user profiles?
+ #provided a cheap guess here
+ WinDir = filepath(os.environ.get("WINDIR",None))
+ if not WinDir or WinDir.ismissing:
+ raise EnvironmentError, "can't find windows install"
+ profile = self.env
+ profile.state_dir = WinDir / "Application Data"
+ if profile.state_dir.ismissing:
+ profile.state_dir.makedirs()
+ profile.home_dir = WinDir #FIXME: could do better than this, even under win95
+
+ def load_nt_resources(self):
+ profile = self.env
+ get = os.environ.get
+ profile.login = get("USERNAME") #XXX: could guess defaults based on login
+
+ #find home directory... store in HomePath
+ while True:
+ #check for user profile
+ HomePath = filepath(get("USERPROFILE"))
+ if HomePath and HomePath.exists:
+ break
+ #check for HomeDrive / HomePath
+ drive = get("HOMEDRIVE")
+ path = get("HOMEPATH")
+ if drive and path:
+ HomePath = filepath(drive, path)
+ if HomePath.exists:
+ break
+ #give up
+ raise EnvironmentError, "can't find user's home directory"
+ profile.home_dir = HomePath
+
+ #check for appdata...
+ AppDataPath = filepath(get("APPDATA"))
+ if AppDataPath and AppDataPath.exists:
+ profile.state_dir = AppDataPath
+ else:
+ #TODO: like to try something else before we fall back to pre-nt behavior...
+ profile.state_dir = HomePath / "Application Data"
+ if profile.state_dir.ismissing:
+ profile.state_dir.makedirs()
+
+ self._fill_user_info(profile, is_env=True)
+
+ #-----------------------------------------------
+ #user related info
+ #-----------------------------------------------
+ def user_by_login(self, login, missing):
+ "return UserInfo for user w/ matching login"
+ raise NotImplementedError, "implement me!"
+
+ def user_by_uid(self, uid, missing):
+ "return UserInfo for user w/ matching uid"
+ if missing == "error":
+ raise KeyError, "UIDs are not assigned by windows: %r" % uid
+ else:
+ warn("UIDs are not assigned by windows, so no match is possible: %r" % uid)
+ return None
+
+ def _fill_user_info(self, info, is_env=False):
+ "fill out common dirs in user info"
+ #NOTE: if is_env
+ HomePath = info.home_dir
+
+ if self.profile == "nt":
+ #XXX: we could check reg if not is_env
+
+ #check for documents
+ for docs in (
+ filepath(os.environ.get("Documents",None)), #non-standard, used by LiteStep
+ #FIXME: like to get path from windows registry at this point.
+ HomePath / "My Documents",
+ ):
+ if docs and docs.exists:
+ info.docs_dir = docs
+ break
+
+ #XXX: we could check reg if not is_env
+
+ #check for desktop
+ for desktop in (
+ filepath(os.environ.get("Desktop", None)), #non-standard, used by LiteStep
+ #FIXME: like to get path from windows registry at this point
+ HomePath / "Desktop",
+ ):
+ if desktop and desktop.exists:
+ info.desktop_dir = desktop
+ break
+
+ #XXX: fill state_dir w/ app data?
+ return info
+
+ #-----------------------------------------------
+ #program related resources
+ #-----------------------------------------------
+ def get_service_paths(self, name, login, home):
+ raise NotImplementError, "no preset paths available for windows services"
+
+ #=========================================================
+ #EOC
+ #=========================================================
+
+#some code I saw for searching through windows processes by name
+##cmd = os.popen('query process')
+##x = cmd.readlines()
+##for y in x:
+## p = y.find('openoffice')
+## if p >= 0: # process running
+
+#=========================================================
+#registry helpers
+#=========================================================
#build forward/reverse lookup tables for the winreg HKEY_* roots
_hkey_values = {} #lowercase root name (eg "local_machine") -> winreg.HKEY_* int
_hkey_names = {} #winreg.HKEY_* int -> lowercase root name
def _build_hkey_maps():
    "populate _hkey_values / _hkey_names from the HKEY_* constants in winreg"
    prefix = "HKEY_"
    for attr in dir(winreg):
        if not attr.startswith(prefix):
            continue
        name = attr[len(prefix):].lower()
        value = getattr(winreg, attr)
        _hkey_values[name] = value
        _hkey_names[value] = name
_build_hkey_maps()
+
+_rp_cache_lock = threading.Lock()
+_rp_cache = SoftValueDict(expire=300, flush=150) #cache of existing regpaths, keyed by str
+
+def regpath(path, format=None):
+ """\
+ given a registry path, returns an object which wraps the registry path,
+ and allows easy introspection of it, in much the same way the :func:`bps.fs.filepath` works.
+
+ This attempts to wrap the windows registry path convention
+ using a normalized unix-path style, just to make it easier
+ to refer to and compare various registry paths.
+ The normalized path format is:
+
+ ``[//{system}]/{root}/...``
+
+ Where *system* is the name of the system being connected to,
+ and *root* is the ``HKEY_XXX`` constant, but *without* the ``HKEY_`` prefix,
+ e.g. ``local_machine`` instead of ``HKEY_LOCAL_MACHINE``.
+
+ .. warning::
+
+ This function, and the RegPath object, are still a design experiment.
+ The interface and methods are subject to change or be removed.
+ """
+ #basically this is just a wrapper for RegistryPath,
+ #except it caches old instances based on the *path*
+ #this isn't required by the class itself,
+ #it just saves time & memory
+ global _rp_cache, _rp_cache_lock
+ if isinstance(path, RegistryPath) or path is None:
+ return path
+ if format == "ms":
+ #assume it's a backslash separated string, beginning with hkey_xxx
+ #(ie, the format return by RegistryPath.raw_path)
+ assert '/' not in path, "not sure how to normalize path with / in name: %r" % (path,)
+ path = path.replace("\\", "/")
+ #when presented in this format, we'll allow strings to start with hkey_xxx instead of slash
+ temp = path.lower()
+ if not temp.startswith("hkey_"):
+ raise ValueError, "ms format reg paths must start with HKEY constant: %r" % (path,)
+ if temp[5:temp.find("/")] not in _hkey_values:
+ raise ValueError, "ms format reg paths must start with known hkey constant: %r" % (path,)
+ path = "/" + path
+ elif format and format != "bps":
+ raise ValueError, "unknown format: %r" % (format,)
+ _rp_cache_lock.acquire()
+ try:
+ if path in _rp_cache:
+ return _rp_cache[path]
+ _rp_cache.flush()
+ obj = _rp_cache[path] = RegistryPath(path)
+ return obj
+ finally:
+ _rp_cache_lock.release()
+
+class RegistryPath(BaseClass):
+ """this represents a path in the registry.
+
+
+ This class attempts to mimic the interface for filepaths
+ provided by :mod:`bps.fs`.
+
+ .. note::
+
+ This does *not* represent a handle to the registry
+ (see :class:`RegistryHandle` for that), but it's
+ various methods may open them.
+ """
+ #=========================================================
+ #instance attrs
+ #=========================================================
+ host = None #name of host, or None if local system
+
+ root = None #name of root key, always defined (lower case - local_machine, current_user, etc)
+ raw_root = None #name of root key, always defined (upper case with hkey - HKEY_LOCAL_MACHINE, etc)
+ raw_root_value = None #int value of winreg.HKEY_XXX for root
+
+ subpath = None #subpath relative to root, using "/" as separator
+ raw_subpath = None #subpath relative to root, using "\\" as separator
+
+ raw_path = None #full path "HKEY_XXX\..." in windows style.
+
+ _rootstr = None #root+host portion of path as a string
+ _str = None #normalized string of path, returned by __str__
+
+ #=========================================================
+ #init
+ #=========================================================
+ def __init__(self, path):
+ #normalize path
+ orig = path
+ path = path.lower()
+
+ #parse host
+ if path.startswith("//"):
+ idx = path.find("/", 2)
+ if idx == -1:
+ host = path[2:]
+ path = "/local_machine"
+ else:
+ host = path[2:idx]
+ path = path[idx:]
+ if host.lower() == "localhost":
+ host = None
+ else:
+ host = None
+ self.host = host
+
+ #parse root
+ if path.startswith("/"):
+ parts = path[1:].split("/")
+ if not parts:
+ raise ValueError, "no registry root specified in path: %r" % (orig,)
+ root = parts.pop(0)
+ else:
+ raise NotImplementedError, "relative paths not supported: %r" % orig
+ if root.startswith("hkey_"):
+ root = root[5:]
+ if root not in _hkey_values:
+ raise ValueError, "unknown registry root in path: %r" % orig
+ self.root = root
+ self.raw_root = "HKEY_" + root.upper()
+ self.raw_root_value = _hkey_values[root]
+
+ #parse remaining path
+ while '' in parts:
+ parts.remove('')
+ self.subpath = "/".join(parts)
+ self.raw_subpath = "\\".join(parts)
+
+ #build full path
+ if parts:
+ self.raw_path = self.raw_root + "\\" + self.raw_subpath
+ else:
+ self.raw_path = self.raw_root
+
+ #build string
+ out = ""
+ if self.host:
+ out += "//" + self.host
+ out += "/" + self.root
+ self._rootstr = out
+ if parts:
+ out += "/" + self.subpath
+ self._str = out
+
+ def __str__(self):
+ return self._str
+
+ def __repr__(self):
+ return "regpath(%r)" % self._str
+
+ def __eq__(self, other):
+ if isinstance(other, RegistryPath):
+ return self._str == other._str
+ elif isinstance(other, (str, unicode)):
+ try:
+ other = regpath(other)
+ except NotImplementedError, ValueError:
+ return False
+ return self._str == other._str
+ else:
+ return False
+
+ #=========================================================
+ #construction
+ #=========================================================
+ def __div__(self, other):
+ if isinstance(other, (list,tuple)):
+ if len(other) == 0:
+ return self
+ other = "/".join(other)
+ elif not other:
+ return self
+ return regpath(self._str + "/" + other)
+
+ __truediv__ = __div__
+
+ #'root' stored directly as attribute
+
+ def _getparent(self):
+ path = self.subpath
+ if path:
+ idx = path.rfind("/")
+ if idx == -1:
+ return regpath(self._rootstr)
+ else:
+ return regpath(self._rootstr + "/" + path[:idx])
+ else:
+ #XXX: return 'self' instead?
+ return None
+ parent = property(_getparent)
+
+ def _getname(self):
+ path = self.subpath
+ if path:
+ idx = path.rfind("/")
+ if idx == -1:
+ return path
+ else:
+ return path[idx+1:]
+ else:
+ return self.root
+ name = property(_getname)
+
+ #=========================================================
+ #registry tree navigation
+ #=========================================================
+ def _getexists(self):
+ h = self.open(missing='ignore')
+ if h is None:
+ return False
+ else:
+ h.close()
+ return True
+ exists = property(_getexists)
+
+ def _getismissing(self):
+ return not self.exists
+ ismissing = property(_getismissing)
+
+ #--------------------------------------------------------
+ #proxied from RegHandle object
+ #--------------------------------------------------------
+ def iterdir(self, full=False):
+ #NOTE: we have to proxy iterator so handle stays open
+ with self.open() as handle:
+ for elem in handle.iterdir(full=full):
+ yield elem
+
+ def listdir(self, full=False):
+ with self.open() as handle:
+ return handle.listdir(full=full)
+
+ def _getmtime(self):
+ with self.open() as handle:
+ return handle.mtime
+ mtime = property(_getmtime)
+
+ #TODO: remove() discard()
+## def remove(self, recursive=True):
+ ##winreg.DeleteKey()
+## def discard(self, **kwds):
+## if self.exists:
+## self.remove(**kwds)
+
+ #=========================================================
+ #registry values
+ #=========================================================
+
+ #--------------------------------------------------------
+ #proxied from RegHandle object
+ #--------------------------------------------------------
+ def iterkeys(self):
+ #NOTE: we have to proxy iterator so handle stays open
+ with self.open() as handle:
+ for key in handle:
+ yield key
+
+ def keys(self):
+ with self.open() as handle:
+ return handle.keys()
+
+ def get(self, key, default=None, missing="ignore", expand=False):
+ with self.open() as handle:
+ return handle.get(key, default, missing=missing, expand=expand)
+
+ def get_as_path(self, key, default=None, missing="ignore", expand=True):
+ with self.open() as handle:
+ return handle.get_as_path(key, default, missing=missing, expand=expand)
+
+ #=========================================================
+ #lowlevel access
+ #=========================================================
+ def open(self, mode="r", missing='error'):
+ "return a raw winreg handle"
+ try:
+ return reghandle(self, mode)
+ except WindowsError, err:
+ if missing == 'ignore' and err.args[0] == 2: #cannot find file specified
+ return None
+ else:
+ raise
+
+ #=========================================================
+ #eoc
+ #=========================================================
+
+#=========================================================
+#registry handle (winreg.HKEYType replacement)
+#=========================================================
def reghandle(path, mode="r"):
    """open and return a :class:`RegistryHandle` for *path*.

    *path* may be anything accepted by :func:`regpath`;
    *mode* is passed through unchanged to :class:`RegistryHandle`.
    """
    return RegistryHandle(path, mode)
+
+class RegistryHandle(BaseClass):
+ """replacment for winreg.HKEYType.
+
+ * uses close() instead of Close(), etc
+ * provides context manager support
+ * provides regpath attribute to retreive original path
+ * provides closed attribute
+
+ * supports file()-like mode strings in addition to registry constants.
+
+ ======= ==================================================
+ Mode Description
+ ------- --------------------------------------------------
+ ``r`` all read perms, equivalent to KEY_READ
+
+ ``w`` all write perms, equivalent to KEY_WRITE
+
+ ``rw`` all read & write permissions
+
+ ``*`` all permissions, equivlanet to KEY_ALL_ACCESS.
+ for most uses, ``rw`` will be sufficient
+ ======= ==================================================
+ """
+ #XXX: if there was a way to close a detached handle int,
+ # we wouldn't have to keep _hkey around and proxy it
+ _hkey = None #internal HKey we're using
+ _rkey = None #key to root of remote registry, if host is defined and subpath exists
+
+ def __init__(self, path, mode="r"):
+ self.path = path = regpath(path)
+ if not mode:
+ mode = "r"
+ if isinstance(mode, int): #allow user to pass in raw winreg.KEY_* values
+ self.mode = self.mode_value = mode
+ elif isinstance(mode, str):
+ if any(c not in "*rw" for c in mode):
+ raise ValueError, "unknown characters in mode: %r" % (mode,)
+ mstr = ''
+ mval = 0
+ if '*' in mode:
+ mstr = '*'
+ mval = winreg.KEY_ALL_ACCESS
+ else:
+ if 'r' in mode:
+ mstr += 'r'
+ mval |= winreg.KEY_READ
+ if 'w' in mode:
+ mstr += 'w'
+ mval |= winreg.KEY_WRITE
+ if not mval:
+ raise ValueError, "no mode specified: %r" (mode,)
+ self.mode = mstr
+ self.mode_value = mval
+ else:
+ raise TypeError, "mode must be str or int: %r" % (mode,)
+ log.debug("opening registry key: path=%r mode=%r", path, self.mode or self.mode_value)
+ if path.host:
+ #would probably want to keep a cache of the registry connections,
+ #so that we can re-use them
+ h = winreg.ConnectRegistry(path.host, path.raw_root_value)
+ if path.raw_subpath:
+ try:
+ self._hkey = winreg.OpenKey(h, path.raw_subpath, 0, self.mode_value)
+ finally:
+ h.Close() #FIXME: is this right? haven't tested this far
+ else:
+ self._hkey = h
+ else:
+ self._hkey = winreg.OpenKey(path.raw_root_value, path.raw_subpath, 0, self.mode_value)
+
+ def detach(self):
+ return self._hkey.Detach()
+ Detach = detach #for backwards compat
+
+ def close(self):
+ return self._hkey.Close()
+ Close = close #for backwards compat
+
+ def _gethandle(self):
+ return self._hkey.handle
+ handle = property(_gethandle)
+
+ def _getclosed(self):
+ return self._hkey.handle == 0
+ closed = property(_getclosed)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, v, c, t):
+ self.close()
+
+ def iterdir(self, full=False):
+ h = self.handle
+ idx = 0
+ try:
+ while True:
+ child = winreg.EnumKey(h, idx)
+ if full:
+ yield self.path / child
+ else:
+ yield child
+ idx += 1
+ except WindowsError, err:
+ if err.args[0] == 259: #no more data available
+ return
+ raise
+
+ def listdir(self, **kwds):
+ return list(self.iterdir(**kwds))
+
+ def __iter__(self):
+ return self.iterkeys()
+
+ def iterkeys(self):
+ "iterate through all keys under this path"
+ h = self.handle
+ idx = 0
+ try:
+ while True:
+ yield winreg.EnumValue(h, idx)[0]
+ idx += 1
+ except WindowsError, err:
+ if err.args[0] == 259: #no more data available
+ return
+ raise
+
+ def keys(self):
+ return list(self.iterkeys())
+
+ def iteritems(self):
+ for key in self:
+ yield key, self.get(key)
+
+ def items(self):
+ return list(self.iteritems())
+
+ def __contains__(self, key):
+ try:
+ winreg.QueryValueEx(self.handle, key)
+ return True
+ except WindowsError, err:
+ if err.args[0] == 2: #file not found
+ return False
+ raise
+
+ def __getitem__(self, key):
+ return self.get(key, missing="error")
+
+ def __setitem__(self, key, value):
+ return self.set(key, value)
+
+ def __delitem__(self, key):
+ try:
+ winreg.DeleteValue(self, key)
+ except WindowsError, err:
+ if err.args[0] == 2: #file not found
+ raise KeyError, "key not found: %r" % key
+ raise
+
+ def set(self, key, value, type=None):
+ #NOTE: this is not final call syntax
+ if type is None:
+ #TODO: this auto-type detection code could be more intelligent
+ if value is None:
+ type = winreg.REG_NONE
+ elif isinstance(value, (int, long)):
+ type = winreg.REG_DWORD
+ #XXX: we _could_ check previous stored type
+ elif isinstance(value, (str, unicode)):
+ if '\x00' in value:
+ type = winreg.REG_BINARY
+ else:
+ type = winreg.REG_SZ
+ else:
+ raise TypeError, "can't guess type from value: %r" % (value,)
+ #XXX: should we add some sanity checking here to make sure values match?
+ winreg.SetValueEx(self.handle, key, None, type, value)
+
+ def raw_get(self, key, default=None, missing="ignore"):
+ "raw get: returns ``(value,dt)`` or ``(default,None)`` if key is missing"
+ assert missing in ("ignore", "error")
+ #NOTE: this is not final call syntax
+ try:
+ return winreg.QueryValueEx(self.handle, key)
+ except WindowsError, err:
+ if err.args[0] == 2: #file not found
+ if missing == "ignore":
+ return default, None
+ else:
+ raise KeyError, "key not found in registry: path=%r key=%r" % (self.path, key)
+ raise
+
+ def get(self, key, default=None, missing="ignore", expand=False):
+ value, dt = self.raw_get(key, missing=missing)
+ if dt is None:
+ return default
+ if dt in (winreg.REG_SZ, winreg.REG_BINARY):
+ assert isinstance(value, (str, unicode)), value
+ return value
+ elif dt == winreg.REG_EXPAND_SZ:
+ assert isinstance(value, (str, unicode)), value
+ if expand and value:
+ value = _ExpandEnvironmentStrings(value)
+ return value
+ elif dt in (winreg.REG_DWORD, winreg.REG_DWORD_BIG_ENDIAN,
+ winreg.REG_DWORD_LITTLE_ENDIAN):
+ assert isinstance(value, (int, long)), value
+ return value
+ elif dt == winreg.REG_NONE:
+ #err, is that what this means?
+ assert value is None, value
+ return None
+ else:
+ #LINK, MULTI_SZ are the known ones we haven't implemented
+ raise NotImplementedError, "registry type not implemented: %r %r" % (value, dt)
+
+ def get_as_path(self, key, default=None, expand=True, **kwds):
+ value = self.get(key, default, expand=expand, **kwds)
+ return filepath(value)
+
+ def get_as_pathlist(self, key, default=None, expand=True, **kwds):
+ value = self.get(key, default, expand=expand, **kwds)
+ if value:
+ return [ filepath(elem) for elem in value.split(os.path.pathsep)]
+ else:
+ return []
+
+ def _getmtime(self):
+ subfiles, subkeys, wmtime = winreg.QueryInfoKey(self.handle)
+ if not wmtime:
+ return 0
+ #wmtime - int of 100s of nanoseconds since Jan 1, 1600
+ #this converts to epoch... constant at end is ~ 369.24 years,
+ #derived by setting reg key, and comparing mtime to time.time() when call returned.
+ return wmtime * 1.0e-7 - 11644473600
+ mtime = property(_getmtime)
+
+#=========================================================
+#ms office helpers
+#=========================================================
def _hid():
    "attempt at a host id -- reads the windows ProductId out of the registry"
    key = regpath("/local_machine/software/microsoft/windows/currentversion")
    return key.get("ProductId")
+
def detect_outlook():
    "detect outlook version & root (see :func:`detect_office_app` for return format)"
    return detect_office_app("outlook")
+
def detect_outlook_express():
    """detect outlook express version & root.

    :returns:
        ``None`` if outlook express (or the registry itself) can't be found,
        otherwise a dict with keys ``vstr``, ``version`` and ``path``.
    """
    if regpath("/local_machine").ismissing:
        log.warning("couldn't connect to windows registry")
        return None
    outlook = regpath("/local_machine/software/microsoft/outlook express")
    if outlook.ismissing:
        log.info("microsoft outlook express not found in registry")
        return None
    path = outlook.get_as_path("InstallRoot")
    if not path:
        #NOTE: original message contained a %r with no matching argument,
        # which makes the logging module raise a formatting error; pass path.
        log.warning("outlook express has bad InstallRoot: %r", path)
        return None
    if path.ismissing:
        log.info("outlook express install path missing: %r", path)
        return None
    vstr = outlook.get("MediaVer")
    if vstr:
        version = tuple(int(v) for v in vstr.split(","))
    else:
        version = None
    return dict(
        vstr=vstr,
        version=version,
        path=path,
        )
+
def detect_office_app(app):
    """detect ms office application, returning path to exe of newest version.

    *app* should be one of ``outlook``, ``word``, ``excel``... er, what others?

    :returns:
        ``None`` if the app (or registry) can't be found, otherwise a dict
        with keys ``version`` (float), ``vstr`` (raw string), ``path``.

    .. note::

        Whatever happens to the registry interface code,
        this function's interface should remain constant.

    .. todo::

        This could probably be expanded to return a lot more of the info it can gather.
    """
    #make sure registry is there...
    if regpath("/local_machine").ismissing:
        log.warning("couldn't connect to windows registry")
        return None
    #check for office...
    office = regpath("/local_machine/software/microsoft/office")
    if office.ismissing:
        log.info("microsoft office not found in registry")
        return None
    best = None #best we've found
    for vstr in office.iterdir():
        try:
            version = float(vstr)
        except ValueError:
            #dir should contain just office version numbers,
            #but also contains some dirs named "Common" and "Dispatch"
            continue
        log.debug("found microsoft office version %r", vstr)
        if best and version < best['version']: #skip older versions
            continue
        install = office / vstr / app / "installroot"
        if install.ismissing:
            log.debug("%s %r not installed", app, vstr)
            continue
        path = install.get_as_path("path")
        if not path:
            log.warning("%s %r has bad InstallRoot", app, vstr)
            #NOTE: this continue was missing in the original; falling through
            # dereferenced .ismissing on a falsy path (cf. the early-out in
            # detect_outlook_express for the same condition).
            continue
        if path.ismissing:
            log.info("%s %r install path missing: %r", app, vstr, path)
            continue
        best = dict(version=version, vstr=vstr, path=path)
        log.info("%s %r found at %r", app, vstr, path)
    return best
+
+#=========================================================
+#EOC
+#=========================================================
diff --git a/bps/logs/__init__.py b/bps/logs/__init__.py
new file mode 100644
index 0000000..ee4759f
--- /dev/null
+++ b/bps/logs/__init__.py
@@ -0,0 +1,53 @@
+"""bps.logs -- logging system extensions"""
+#=========================================================
+#imports
+#=========================================================
+#import for monkeypatching
+import bps.logs.loggers
+import bps.logs.capture
+
+#config utilities
+from bps.logs.config import setup_lib_logging, setup_std_logging, \
+ config_logging, add_handler
+##... parse_config
+
+#logger utilities
+from bps.logs.loggers import get_logger
+##from bps.logs.loggers import is_logger, parse_level_name, get_level_name
+
+#handler utilities
+##from bps.logs.handlers import is_handler, purge_handlers, has_default_handler
+
+#formatter utilities
+##from bps.logs.formatters import is_formatter
+
+#proxy logging object
+from bps.logs.proxy_logger import log, multilog, classlog
+
+#register ourselves as a library to quiet the log files
+setup_lib_logging("bps")
+setup_lib_logging("bps3")
+
+#=========================================================
+#
+#=========================================================
+__all__ = [
+ #preset configuration
+ 'setup_lib_logging', 'setup_std_logging',
+
+ #general configuration
+ 'config_logging',
+ 'add_handler',
+
+ #logger proxies
+ 'log', 'multilog', 'classlog',
+
+ #utility functions
+ 'get_logger',
+ #XXX: there are a LOT more utilities funcs, tucked away in the submodules,
+ # should we import them all to this module?
+]
+
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/logs/capture.py b/bps/logs/capture.py
new file mode 100644
index 0000000..5583724
--- /dev/null
+++ b/bps/logs/capture.py
@@ -0,0 +1,444 @@
+"""
+This module contains functions for capturing various information sources,
+and rerouting them through the logging system.
+
+sys.stderr
+==========
+By calling the `captureStdErr` function,
+the default ``sys.stderr`` stream can be replaced with a `Stream_Wrapper`
+instance which will collect anything written to stderr,
+and redirect it to a logger named ``stderr``.
+
+If logging is to be done to stderr, `captureStdErr` should be called
+AFTER the logging handlers have been set up, so that the handlers
+are attached to the original sys.stderr.
+
+NOTE:
+ As it stands now, once invoked, the redirection is permanent for
+ the life of the process, but no known technical reason is stopping the code
+ from being extended to remedy this.
+
+sys.stdout
+===========
+Like stderr, by calling the `captureStdOut` function,
+the default ``sys.stdout`` can be replaced
+with a `Stream_Wrapper` instance which will collect anything written to stdout,
+and redirect it to a logger named ``stdout``.
+
+This is a niche function, mainly useful only on win32 graphical applications,
+whose libraries may try to print things via ``print`` etc.
+In this case, developers may wish to see this output in the log files.
+
+NOTE:
+ As it stands now, once invoked, the redirection is permanent for
+ the life of the process, but no known technical reason is stopping the code
+ from being extended to remedy this.
+
+Warnings
+========
+By calling `captureWarnings`, any warnings issued by the `warnings` module
+will be redirected to display via the logger named "warnings".
+This may be undone by calling `releaseWarnings`.
+"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+import atexit
+from cStringIO import StringIO
+import sys
+import os
+import inspect
+import logging
+from time import time as get_time
+import threading
+import warnings
+#site
+#pkg
+from bps.logs.loggers import RAW, parse_level_name as parse_level
+from bps.cache import cached_function
+from bps.logs import loggers as logger_module
+from bps.meta import lookup_module
+#local
+log = logging.getLogger(__name__)
+__all__ = [
+ "capture_stdout", "release_stdout", "check_stdout",
+ "capture_stderr", "release_stderr", "check_stderr",
+ "capture_warnings", "release_warnings", "check_warnings",
+ "flush_buffers",
+]
+
+#=========================================================
+#HACK to flush stdout/stderr buffers whenever something is logged
+#this has the effect of keeping stdout/stderr writes grouped together naturally,
+#while still ordering them correctly with other log events.
+#=========================================================
#set of streams that need flushing before certain events (logging, etc)
_flush_streams = set()
_flush_lock = threading.RLock()

def flush_buffers():
    """thread-safe helper to flush all capture buffers...
    called by BpsLogger before any message is logged,
    but can be called pretty much anywhere.
    """
    #NOTE: non-blocking acquire -- if another thread (or this thread,
    #re-entrantly via logging inside flush) holds the lock, skip this
    #round rather than deadlock / double-flush.
    if _flush_lock.acquire(False):
        #FIX: release the lock even if a stream's flush() raises;
        #the original leaked the lock on error, permanently disabling flushing.
        try:
            for stream in _flush_streams:
                stream.flush(force=True)
        finally:
            _flush_lock.release()

#expose hook to bps.logs.loggers so it can flush without a circular import
logger_module.flush_buffers = flush_buffers
+
+#=========================================================
+#stream wrapper
+#=========================================================
class StreamWrapper(object):
    """stream-like object which proxies all its writes to a specified logger.

    Written data is buffered; complete lines are periodically handed to the
    logger named by *name* (see flush/_calc_autoflush for the buffering logic).
    """
    #=========================================================
    #instance constants
    #=========================================================
    name = None #: name of stream to use when logging
    header = "unmanaged logging output:" #: header to preface all writes with
    flush_threshold = 10 #number of seconds between flush calls before autoflushing
    write_threshold = 1 #max write delay to disable autoflush

    #=========================================================
    #instance attrs
    #=========================================================
    buf = None #buffer holding not-yet-logged text
    last_write = 0 #timestamp of last write() call
    last_flush = 0 #timestamp of last flush() call

    broken = False
    #set when flush was forced to break on a non-linebreak character
    #cleared by write when it adds content to the (now empty) buffer,
    #along with a '...' indicating this was a continuation

    #=========================================================
    #init
    #=========================================================
    def __init__(self, name=None):
        #FIX: original read ``name or source_attr`` but no 'source_attr'
        #exists anywhere in this module -- calling without a name raised
        #NameError. fail explicitly instead.
        if not name:
            raise ValueError("StreamWrapper requires a logger name")
        self.name = name
        assert isinstance(self.flush_threshold, (int,float))
        assert isinstance(self.write_threshold, (int,float))
        self.log = logging.getLogger(self.name)
        self.broken = False
        self.last_write = 0
        self.last_flush = 0
        self.buf = StringIO()

    #=========================================================
    #flushing
    #=========================================================
    def flush(self, force=False):
        "flush any complete lines out of buffer"
        #XXX: should capturing honor os.linesep? or assume it's always "\n",
        # using universal-newline style?

        #NOTE: it's important for recursion purposes that _write() be called after buffer state is set
        #read buffer
        self.last_flush = get_time()
        buf = self.buf
        content = buf.getvalue()
        #check if we're empty
        if content == '':
            return None
##        assert not self.broken, "if there's content, write() should have cleared broken flag"
        #check if we have a complete line
        if content[-1] == '\n':
            buf.reset()
            buf.truncate()
            self._write(content)
            return True
        #check if we have to force a flush
        if force:
            buf.reset()
            buf.truncate()
            self.broken = True
            self._write(content + "...\n")
            return True
        #just flush to end of last complete line
        idx = content.rfind('\n')+1
        if idx == 0:
            return False
        buf.reset()
        buf.truncate()
        buf.write(content[idx:])
        self._write(content[:idx])
        return True

    def _write(self, content):
        "backend method controlling where output goes... always receives full lines of some type"
        self.log.log(RAW, "%s\n%s\n", self.header, content)

    #=========================================================
    #writing
    #=========================================================
    def write(self, chunk):
        "buffer *chunk*, autoflushing first if the last burst of writes has gone stale"
        #autoflush if we haven't since last write, and last write was long enough ago...
##        self._write(chunk)
##        return len(chunk)
        cur = get_time()
        if self._calc_autoflush(cur):
            self.flush()
        self.last_write = cur
        if not chunk:
            return 0
        if self.broken:
            #previous flush cut a line in half; mark the continuation
            self.buf.write("...")
            self.broken = False
        return self.buf.write(chunk)

    def _calc_autoflush(self, cur):
        "decide whether write() should flush before buffering more data"
        #if we had a write w/in write_threshold time,
        #assume they're grouped together, and don't autoflush yet.
        if self.last_write + self.write_threshold >= cur:
            return False
        #make sure we've flushed w/in flush_threshold time...
        if self.last_flush + self.flush_threshold < cur:
            return True
        #else we flushed recently enough
        return False

    #=========================================================
    #EOF
    #=========================================================
+
+#=========================================================
+#sys.stderr capturing
+#=========================================================
#original sys.stderr, saved while capture is active (None => not capturing)
_orig_stderr = None
#lazily-created StreamWrapper that replaces sys.stderr (created once, reused)
_proxy_stderr = None

def capture_stderr():
    "reroute sys.stderr to logging system, see module documentation for details"
    global _orig_stderr, _proxy_stderr, _flush_streams, _flush_lock
    if _orig_stderr is None:
        _flush_lock.acquire()
        try:
            if _proxy_stderr is None:
                _proxy_stderr = StreamWrapper(name="sys.stderr")
                #would like to just call flush_buffers() at exit, but it's config is gone when atexit runs :(
                atexit.register(_proxy_stderr.flush, force=True)
            _flush_streams.add(_proxy_stderr)
            _orig_stderr = sys.stderr
            sys.stderr = _proxy_stderr
        finally:
            _flush_lock.release()
+
def release_stderr():
    "stop capturing of stderr, restoring the original stream"
    global _orig_stderr, _proxy_stderr
    if _orig_stderr:
        _flush_lock.acquire()
        try:
            assert _proxy_stderr
            #refuse to release if a third party swapped sys.stderr after us
            if sys.stderr is not _proxy_stderr:
                raise RuntimeError, "can't release: sys.stderr was modified since it was captured"
            _proxy_stderr.flush(force=True)
            sys.stderr = _orig_stderr
            _orig_stderr = None
            _flush_streams.discard(_proxy_stderr)
            #NOTE: would like to undo the atexit call
        finally:
            _flush_lock.release()
+
def check_stderr():
    "return True if stderr is being captured"
    global _orig_stderr
    return _orig_stderr is not None
+
+#=========================================================
+#sys.stdout capturing
+#=========================================================
+#TODO: could use a stacked proxy object, so once orig is captured,
+# we can release/capture again w/o conflicting with subsequent overrides
+# from other apps.
+
#original sys.stdout, saved while capture is active (None => not capturing)
_orig_stdout = None
#lazily-created StreamWrapper that replaces sys.stdout (created once, reused)
_proxy_stdout = None

def capture_stdout():
    "reroute sys.stdout to logging system, see module documentation for details"
    global _orig_stdout, _proxy_stdout, _flush_streams, _flush_lock
    if _orig_stdout is None:
        _flush_lock.acquire()
        try:
            if _proxy_stdout is None:
                _proxy_stdout = StreamWrapper(name="sys.stdout")
                #would like to just call flush_buffers() at exit, but it's config is gone when atexit runs :(
                atexit.register(_proxy_stdout.flush, force=True)
            _flush_streams.add(_proxy_stdout)
            _orig_stdout = sys.stdout
            sys.stdout = _proxy_stdout
        finally:
            _flush_lock.release()
+
def release_stdout():
    "stop capturing of stdout, restoring the original stream"
    global _orig_stdout, _proxy_stdout
    if _orig_stdout:
        _flush_lock.acquire()
        try:
            assert _proxy_stdout
            #refuse to release if a third party swapped sys.stdout after us
            if sys.stdout is not _proxy_stdout:
                raise RuntimeError, "can't release: sys.stdout was modified since it was captured"
            _proxy_stdout.flush(force=True)
            sys.stdout = _orig_stdout
            _orig_stdout = None
            _flush_streams.discard(_proxy_stdout)
            #NOTE: would like to undo the atexit call
        finally:
            _flush_lock.release()
+
def check_stdout():
    "return True if stdout is being captured"
    global _orig_stdout
    return _orig_stdout is not None
+
+#=========================================================
+#python warnings system
+#=========================================================
#format string (or callable) naming the logger warnings are sent to
warning_target = "%(source)s"
#format string used to render each warning before logging
warning_fmt = "%(category)s:\n\t message: %(message)s\n\tfilename: %(filename)s\n\t lineno: %(lineno)s"

#True if fmt/target need the module name resolved from the warning's filename
_inspect_filename = False
_orig_showwarning = None #: original warnings.showwarning stored if captureWarnings enabled.

def capture_warnings(fmt=None, target=None):
    """redirect all warnings through logging system via logger named 'warnings'.

    :Parameters:
        fmt
            format string controlling how warnings are printed out.
            the default simulates the original warning.formatwarning().
            format string should use the "%(keyword)s" format,
            available keywords are described below.

            For example, this string mimics the style of ``warnings.formatwarning``:
                "%(filename)s:%(lineno)s: %(category)s: %(message)s"

            By default this uses a multiline format.

        target
            Format string defining name of logger to send message to.
            this uses the same keywords as 'fmt'. This can also be a callable,
            which will be passed in all the same keywords, and should
            return the name of the logger to use.

            For example, this string sends all warnings to the warnings module:
                "warnings"

            By default, the following string is used,
            which uses a logger named after the module:
                "%(source)s"

    ``fmt`` and ``target`` strings will have the following keywords defined:

        message
            content of warning text, from the warning object.
        category
            __name__ of warning object's class
        filename
            filepath of module warning was issued in
        lineno
            line number in file where warning was issued
        modulepath
            full path of module (package + module name),
            or empty string if not derivable from filename
        modulename
            just the module name of the module
        source
            same as module name,
            but returns "warnings" instead of empty string.
            This keyword is probably a little more useful as a logger target
            than module is.
    """
    global _orig_showwarning, warning_fmt, warning_target, _inspect_filename
    #install our hook only once; later calls just update fmt/target
    if _orig_showwarning is None:
        _orig_showwarning = warnings.showwarning
        warnings.showwarning = _showWarning
    if fmt is not None:
        warning_fmt = fmt
    if target is not None:
        warning_target = target
    #check if we need to inspect the filename
    #(module-derived keywords are costly, so only resolve them when referenced)
    if callable(warning_target):
        _inspect_filename = True
    else:
        _inspect_filename = any(
            any(
                "%%(%s)" % key in fmt
                for key in ("modulepath", "modulename", "source")
            )
            for fmt in (warning_fmt, warning_target)
        )
+
whl = (sys.version_info >= (2, 6)) #warnings-has-line? 'line' arg introduced in py26

def _showWarning(message, category, filename, lineno, file=None, line=None):
    "replacement warnings.showwarning hook: routes warnings into the logging system"
    #NOTE: 'line' added in py26
    global warning_fmt, warning_target, _inspect_filename
    if file is not None:
        #use old version if writing to a file somewhere, can't use logging system for this
        if whl:
            #fixme: what if incompatible app patched this before us?
            return _orig_showwarning(message, category, filename, lineno, file, line)
        else:
            return _orig_showwarning(message, category, filename, lineno, file)
    #TODO: fill in default for 'line' like 26's showwarning does
    kwds = dict(
        message=message,
        category=category.__name__,
        filename=filename,
        lineno=lineno,
        line=line,
    )
    if _inspect_filename:
        #resolve modulepath/modulename/source keywords (only done when
        #capture_warnings() determined fmt/target actually reference them)
        path = _guess_module_from_path(filename)
        if path:
            if '.' in path:
                name = path.rsplit(".", 1)[1]
            else:
                name = path
            kwds.update(
                modulepath=path,
                modulename=name,
                source=path,
            )
        else:
            kwds.update(
                modulepath='',
                modulename='',
                source='sys.warnings',
            )
    text = warning_fmt % kwds
    if callable(warning_target):
        name = warning_target(**kwds)
    else:
        name = warning_target % kwds
    logging.getLogger(name).warning(text)
+
@cached_function(args=1)
def _guess_module_from_path(path):
    "guess full module name (w/ package) from filepath"
    #cached: may run once per warning issued for the same file
    return lookup_module(path, name=True)
+
def release_warnings():
    "undo capture_warnings(), restoring the original warnings.showwarning hook"
    global _orig_showwarning
    if _orig_showwarning is None:
        #not capturing -- nothing to do
        return
    #use identity, not equality, when checking our hook is still installed
    if warnings.showwarning is not _showWarning:
        #FIX: original message referenced a nonexistent "releaseWarnings()"
        log.error("release_warnings() failed, another application has overridden warnings.showwarning")
        return
    warnings.showwarning = _orig_showwarning
    _orig_showwarning = None
+
def check_warnings():
    "return True if warnings are being captured"
    global _orig_showwarning
    return _orig_showwarning is not None
+
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/logs/config.py b/bps/logs/config.py
new file mode 100644
index 0000000..2dc3192
--- /dev/null
+++ b/bps/logs/config.py
@@ -0,0 +1,1495 @@
+"""bps.logs.config -- configuration parsing"""
+#=========================================================
+#imports
+#=========================================================
+#core
+from warnings import warn
+import os
+import logging
+import ConfigParser
+#site
+#lib
+from bps.undef import Undef
+from bps.error.types import ParseError, InputError, ParamError, MissingPathError
+from bps.meta import is_str, is_seq, is_oseq, Params
+from bps.parsing.config import read_into_parser, parser_get_section, unescape_string
+from bps.stream import get_input_type
+from bps.text import asbool
+from bps.types import BaseClass
+#pkg
+from bps.logs import capture
+from bps.logs.formatters import is_formatter
+from bps.logs.handlers import set_startup_msg, null_handler, \
+ has_default_handler, purge_handlers, is_handler
+from bps.logs.loggers import parse_level_name, ROOT, is_logger, get_managed_loggers
+#local
+log = logging.getLogger(__name__)
+__all__ = [
+ #main frontends
+ 'setup_lib_logging',
+ 'setup_std_logging',
+ 'config_logging',
+ 'add_handler',
+ 'parse_config',
+
+## #helpers
+## 'patch_paste',
+]
+
+#=========================================================
+#misc
+#=========================================================
+#disabled till we figure out a way to call it before .ini loads :(
+##def patch_paste():
+## """monkeypatch Paste to use config_logging() instead of default logging system.
+## this lets you use BPS style logging config directives,
+## while still (hopefully) retaining backward compatibility with
+## the old format.
+## """
+## from bps.meta import monkeypatch
+## from paste.script.command import Command
+## @monkeypatch(Command)
+## def logging_file_config(self, config_file):
+## config_logging(config_file)
+
+#=========================================================
+#config frontends
+#=========================================================
def setup_lib_logging(name):
    """Suppress "No handler could be found" message
    by telling the logging system to silently ignore unhandled
    messages for the specified logger.

    This is done by attaching a null handler to the specified logger.
    This handler will not prevent any messages from being passed
    to other handlers, it merely prevents any messages passing through
    the named logger from appearing as "unhandled".

    It is recommended to call this function at the top
    of the root module of a library, so that any messages
    that are logged while the library is loading will not result
    in annoying error messages.
    """
    logging.getLogger(name).addHandler(null_handler)
+
def setup_std_logging(force=True, level=None, dev=False):
    """Quickly set up stderr logging for your application.

    This is mainly for setting up logging at the beginning of a command line script,
    before verbosity, levels, or handlers have been loaded.

    :Parameters:
        force
            If True (the default), clear out any existing default handlers,
            and set up our own. If False, aborts if a default handler exists.
        level
            The default logging level to set for the logging system.
            If not specified, defaults to "ERROR" (though see dev mode below).
        dev
            If devel mode is set, default level is upped to "WARNING",
            and a colorized formatter is used, both to aid in software development.

            If not set (the default), an uncolorized (and hence machine parseable) formatter
            is used, and the default level is set to "ERROR".

    This is essentially a stripped down version of :func:`logging.basicConfig`,
    designed to do the one core job efficiently. For more complex
    logging configuration, call :func:`config_logging`
    directly, it is a much more powerful function than this or basicConfig.
    If this is the case for you, see the source code for this function
    as a starting point, it's only 16 lines long.
    """
    if not force and has_default_handler():
        return None
    if dev:
        handler = "dev-console"
        if level is None:
            level = "WARNING"
    else:
        handler = "std-console"
        if level is None:
            level = "ERROR"
    config_logging(
        level=level,
        capture_warnings=True,
        default_handler=handler,
        stacklevel=2,
    )
    return True
+
def config_logging(source=None, source_format=None, stacklevel=1, **kwds):
    """parse a logging configuration and immediately apply it.

    accepts the same inputs as :func:`parse_config`;
    returns True if a config was parsed and applied, False otherwise.
    """
    parsed = parse_config(source=source, source_format=source_format,
                          stacklevel=stacklevel+1, **kwds)
    if parsed is None:
        return False
    parsed.apply()
    #xxx: could return False if the parsed config is a noop
    return True
+
def add_handler(logger="<root>", propagate=None, add=True, **kwds):
    "add handler to logger by description"
    #builds a throwaway LoggingConfig holding just this handler (and an
    #optional inline formatter dict), then applies it to the logging system.
    config = LoggingConfig(stacklevel=2)
    if isinstance(kwds.get("formatter"), dict):
        config.set_formatter("custom-fmt", **config.parse_formatter_desc(kwds['formatter']))
        kwds['formatter'] = "custom-fmt"
    config.set_handler("custom-hnd", **config.parse_handler_desc(kwds))
    config.set_propagate(logger, propagate)
    if add:
        config.add_outputs(logger, ['custom-hnd'])
    else:
        config.set_outputs(logger, ['custom-hnd'])
    config.apply()
+
+#=========================================================
+#parser frontend
+#=========================================================
def parse_config(
    #source input
    source=None, source_format=None, source_scope=None,

    #controls
##    restrict=None,
    errors="log",
    stacklevel=1,

    #kwd input
    **kwds):
    """parse logging configuration from file, string, or dictionary

    :param errors:
        Policy for dealing with errors.

        * ``"raise"`` -- all errors are raised
        * ``"log"`` -- the default, causes errors to be logged internally, and ``None`` returned

    :returns:
        This returns an instance of the :class:`LoggingConfig` subclass
        which handles parsing whichever input format the configuration
        was provided in.

        If ``errors='log'`` and an error occurs, returns ``None``.
    """
    #parse / normalize source
    if source is None:
        config = LoggingConfig(scope=source_scope, stacklevel=stacklevel+1)
    elif isinstance(source, LoggingConfig):
        #copy an existing config object into a fresh one
        config = LoggingConfig(scope=source_scope, stacklevel=stacklevel+1)
        config.add_kwds(source.__dict__)
    else:
        try:
            config = _parse_from_source(source, source_format, source_scope, errors, stacklevel+1)
        except (ParseError, InputError), err:
            if errors == "raise":
                raise
            log.warning(str(err))
            return None
        if config is None:
            return None

    #merge kwds
    if kwds:
        config.add_kwds(kwds)

    #done!
    return config
+
def _parse_from_source(source, source_format, scope, errors, stacklevel):
    "helper for parse_config(): load a config from a path / raw string / stream"
    #detect what type of input 'source' is...
    stype = get_input_type(source)
    assert stype in ("path", "raw", "stream")
    if stype == "path":
        if not os.path.exists(source):
            if errors == "raise":
                raise MissingPathError(filename=source)
            else:
                log.error("config file not found: filename=%r", source)
                return None

    #detect what format 'source' is...
    if source_format is None:
        source_format = _detect_source_format(source, stype)

    #dispatch to format-specific parser
    if source_format == "standard":
        return StandardConfig(source, stacklevel=stacklevel+1)
    elif source_format == "compact":
        return CompactConfig(source, scope=scope, stacklevel=stacklevel+1)
    else:
        #FIX: original interpolated the *builtin* 'format' function here
        #instead of the 'source_format' argument, yielding a useless message.
        raise InputError("unknown logging config format: %r" % (source_format,))
+
def _detect_source_format(source, stype):
    "helper for parse_config... tries to guess format used by source"
    #try to load as ini file
    try:
        parser = read_into_parser(source, errors="log", reset=True)
    except ConfigParser.MissingSectionHeaderError:
        parser = None
    if parser:
        #ask each known config style to sniff the parsed sections;
        #compact is checked first
        if CompactConfig.detect_cp(parser):
            return "compact"
        if StandardConfig.detect_cp(parser):
            return "standard"
    #give up
    if stype == "path":
        txt = "couldn't determine logging config format: filename=%r" % (source,)
    else:
        txt = "couldn't determine logging config format: stype=%r" % (stype,)
    raise InputError(txt)
+
+#=========================================================
+#logging config object - represents a parsed logging config snippet
+#=========================================================
+class LoggingConfig(BaseClass):
+ """class which represents a parsed logging config file.
+
+ it's mainly used as a framework for building up a parsed configured
+ via the subclasses later in this file.
+ """
+ #=========================================================
+ #class constants
+ #=========================================================
+
+ #default handler presets defined for all configurations
    #default handler presets defined for all configurations
    #('klass' is used rather than 'class' since the latter is a reserved word)
    default_handlers = {
        'console': dict(
            klass="StreamHandler",
            args="(sys.stderr,)",
            #XXX: would like to auto-detect if console is interactive, and choose formatter accordingly
            formatter="std-console",
        ),

        'dev-console': dict(
            klass="bps.logs.handlers.ConsoleHandler",
            args="(sys.stderr,)",
            formatter="dev-console",
        ),

        'std-console': dict(
            klass="StreamHandler",
            args="(sys.stderr,)",
            formatter="std-console",
        ),

        'null': dict(
            klass='bps.logs.handlers.NullHandler'
        ),
    }

    #list of prefixes to try when resolving handler names
    handler_prefixes = ['logging', 'logging.handlers', 'bps.logs.handlers']

    #default formatter presets defined for all configurations.
    default_formatters = {
        'dev-console': dict(
            klass="bps.logs.formatters.DevFormatter",
        ),
        'std-console': dict(
            klass="bps.logs.formatters.StdFormatter",
        ),
        'std-file': dict(
            klass="bps.logs.formatters.FileFormatter",
        ),
    }

    #list of module prefixes which will be searched when
    # resolving formatter class names
    formatter_prefixes = [ 'logging', 'bps.logs.formatters' ]

    #name used by add_default_handler
    DEFAULT_HANDLER_NAME = "__default_handler__"

    #global options which are boolean
    BOOL_OPTIONS = ("disable_existing_loggers",
                    "reset_handlers", "reset_loggers",
                    "capture_stdout", "capture_stderr", "capture_warnings")

    #all global options
    ALL_OPTIONS = ("warning_fmt", "warning_target") + BOOL_OPTIONS
+
+ #=========================================================
+ #init
+ #=========================================================
+ def __init__(self, scope=None, stacklevel=1):
+ self.stacklevel = stacklevel #stacklevel for warnings
+ self.options = {}
+ self.loggers = {}
+ self.formatters = {}
+ self.handlers = {}
+ self.scope = vars(logging)
+ if scope:
+ self.scope.update(scope)
+
+ #=========================================================
+ #global options
+ #=========================================================
    options = None # option name -> option value

    def set_option(self, name, value):
        "set a global option (must be one of ALL_OPTIONS)"
        assert name in self.ALL_OPTIONS, "unknown option: %r" % (name, )
        self.options[name] = value
+
    def get_option(self, name):
        """return a global option's effective value.

        note the implied options: reset_loggers forces reset_handlers on,
        and disable_existing_loggers forces reset_loggers on.
        """
        value = self.options.get(name)
        if name == "reset_handlers":
            if not value and self.get_option("reset_loggers"):
                value = True
        elif name == "reset_loggers":
            if not value and self.get_option("disable_existing_loggers"):
                value = True
        return value
+
+ #=========================================================
+ #logger configuration
+ #=========================================================
    #dict mapping logger name -> dict of logger options.
    #optional keys in logger options dict:
    #   propagate - True/False - change value of propagate flag
    #   level - string/int - change current log level
    #   outputs - list of handler names / instances
    #   add - if True, handlers should be added, not replacing existing.
    #NOTE: if handlers is not None and add is not True, existing handlers are purged.
    loggers = None

    def get_logger_config(self, name, default=Undef, create=False):
        "retrieve dict with config for specified logger"
        #NOTE(review): parse_logger_name is not among this module's visible
        #imports -- presumably defined later in the file; confirm.
        name = parse_logger_name(name)
        if name in self.loggers:
            return self.loggers[name]
        elif create:
            self.loggers[name] = config = {}
            return config
        elif default is Undef:
            raise KeyError, "logger config not found: %r" % (name,)
        else:
            return default
+
    def set_level(self, name, level):
        "set a given logging level"
        #normalize level (name or int) before storing
        level = parse_level_name(level)
        config = self.get_logger_config(name, create=True)
        config['level'] = level
+
    def set_propagate(self, name, value):
        "set propagate flag for logger (None means leave unchanged)"
        if value is None:
            return
        config = self.get_logger_config(name, create=True)
        config['propagate'] = value
+
+ def _check_outputs(self, handlers):
+ "validate outputs list"
+ if not all(
+ is_str(h) or is_handler(h)
+ for h in handlers
+ ):
+ raise ValueError, "output list must contain handler names or instances: %r" % (handlers,)
+
    def set_outputs(self, name, handlers):
        "set list of handlers for logger, replacing existing ones"
        self._check_outputs(handlers)
        config = self.get_logger_config(name, create=True)
        config['outputs'] = list(handlers)
        #replacing, so drop any 'append' flag left by an earlier add_outputs()
        if 'add' in config:
            del config['add']
+
    def add_outputs(self, name, handlers):
        "add list of handlers to logger, appending to existing ones"
        self._check_outputs(handlers)
        if not handlers:
            return
        config = self.get_logger_config(name, create=True)
        if 'outputs' in config:
            config['outputs'].extend(handlers)
            #keep existing mode flag
        else:
            config['outputs'] = list(handlers)
            config['add'] = True
+
    def clear_outputs(self, name):
        "remove all handlers from logger"
        config = self.get_logger_config(name, create=True)
        config['outputs'] = []
        if 'add' in config:
            del config['add']
+
    def validate_loggers(self):
        "make sure output names all exist"
        for name, config in self.loggers.iteritems():
            if 'outputs' in config:
                for hname in config['outputs']:
                    if self.get_handler_config(hname, None) is None:
                        raise ValueError, "%s: unknown handler %r" % (name, hname)
+
+ #=========================================================
+ #handlers
+ #=========================================================
    handlers = None #map of handler name -> handler constructor kwds
    scope = None #dict used as global scope for evaluating various strings

    def get_handler_config(self, name, default=Undef):
        "return dict w/ config for handler, or raise KeyError"
        name = parse_handler_name(name)
        if name in self.handlers:
            return self.handlers[name]
        elif name in self.default_handlers:
            #class-level presets are parsed lazily, on first request
            config = self.parse_handler_desc(self.default_handlers[name])
            self._check_handler_config(config)
            return config
        elif default is Undef:
            raise KeyError, "no handler named %r" % name
        else:
            return default
+
+ def _check_handler_config(self, kwds):
+ "validate handler config dict"
+ klass = kwds["klass"]
+ if not is_handler(klass, False):
+ raise ValueError, "%s: class keyword not a handler class: %r" % (name, klass,)
+ if 'args' in kwds:
+ kwds['args'] = Params.normalize(kwds['args'])
+ if 'formatter' in kwds:
+ f = kwds.get("formatter")
+ if not is_str(f) and not is_formatter(f, True):
+ raise ValueError, "%s: formatter keyword not formatter instance or name: %r" % (name, f)
+ #XXX: validate any more keys?
+ #XXX: raise error if unknown keys found?
+ # everything here is eventually passed to create_handler()
+
    def set_handler(self, name, **kwds):
        "add a handler configuration"
        name = parse_handler_name(name)
        #TODO: parse 'level' kwd?
        resolve_class_aliases(kwds)
        self._check_handler_config(kwds)
        self.handlers[name] = kwds
+
    def validate_handlers(self):
        "make sure formatter & target names all exist"
        for name, config in self.handlers.iteritems():
            f = config.get("formatter")
            if is_str(f) and self.get_formatter_config(f, None) is None:
                raise ValueError, "%s: unknown formatter %r" % (name, f)
            t = config.get("target")
            if t and self.get_handler_config(t, None) is None:
                raise ValueError, "%s: unknown target handler %r" % (name, t)
+
+ #=========================================================
+ #formatters
+ #=========================================================
    formatters = None #map of formatter name -> formatter constructor kwds

    def get_formatter_config(self, name, default=Undef):
        "return dict w/ config for formatter, or raise KeyError"
        if name in self.formatters:
            return self.formatters[name]
        if name in self.default_formatters:
            #class-level presets are parsed lazily, on first request
            config = self.parse_formatter_desc(self.default_formatters[name])
            self._check_formatter_config(config)
            return config
        if default is Undef:
            raise KeyError, "no formatter named %r" % name
        else:
            return default
+
+ def _check_formatter_config(self, kwds):
+ klass = kwds.get("klass")
+ if klass and not is_formatter(klass, False):
+ raise ValueError, "%s: class keyword not a formatter class: %r" % (name, klass)
+ #all other kwds passed to constructor ("format" passed as first positional arg if present)
+
    def set_formatter(self, name, **kwds):
        "add a formatter configuration"
        name = parse_formatter_name(name)
        #TODO: parse 'level' kwd?
        resolve_class_aliases(kwds)
        self._check_formatter_config(kwds)
        self.formatters[name] = kwds
+
+ #=========================================================
+ #applying configuration to logging system
+ #=========================================================
    def validate(self):
        "check that all cross-references (logger->handler, handler->formatter/target) resolve"
        self.validate_loggers()
        self.validate_handlers()
+
    def apply(self):
        "apply configuration to logging system"
        #NOTE(review): these caches are not read anywhere in the visible code;
        #presumably filled by helpers defined later in the module -- confirm.
        self._handlers = {}
        self._formatters = {}

        #lock logging module while we're doing all this
        logging._acquireLock()
        try:
            self.raw_apply()
        finally:
            logging._releaseLock()
+
    def raw_apply(self):
        "apply parsed config to the logging system; caller must hold the logging lock"
        #load option flags
        root = logging.getLogger("") #NOTE(review): 'root' appears unused below
        dxl = self.get_option("disable_existing_loggers")
        rl = self.get_option("reset_loggers") #reset all loggers to default state?
        rh = self.get_option("reset_handlers") #purge all handlers?
        if rl and not rh:
            #NOTE: get_option() should prevent this from ever happening.
            raise NotImplementedError, "reset_loggers w/o reset_handlers is not supported"

        #TODO: reimplement most of this using util functions
        # eg purge_all_handlers(), reset_all_loggers()

        #grab list of existing loggers
        if rh or rl or dxl:
            existing = get_managed_loggers()

        #do global purge of all handlers
        if rh or rl:
            for logger in existing:
                if rh and logger.handlers:
                    purge_handlers(logger) #removed handlers from logger, calls close
                if rl:
                    #restore logger to its pristine default state
                    logger.setLevel(logging.NOTSET)
                    logger.propagate = 1
                    logger.disabled = 0

        #update logger's levels, etc
        for name, config in self.loggers.iteritems():
            self.raw_apply_logger(name, **config)

        #replicate fileConfig()'s disable_existing_loggers behavior.
        #to replicate fileConfig w/ disable_existing_loggers, set reset_handlers=True
        if dxl:
            #since dxl implies rl + rh, we've already reset the level,prop,and disabled flags.
            #so all we have to do is set disabled=1 for any loggers with no configured parents.
            configured = self.loggers.keys()
            for logger in existing:
                name = logger.name
                if name in configured:
                    continue
                #leave logger enabled if any configured logger is an ancestor
                for test in configured:
                    if name.startswith(test + "."):
                        break
                else:
                    logger.disabled = 1

        #update capture options, but AFTER we've added our handlers
        self.raw_apply_capture_options()
+
+ def raw_apply_logger(self, name, level=None, propagate=None, outputs=None, add=None):
+ "apply any configuration changes to logger object"
+ logger = logging.getLogger(name)
+ if level is not None:
+ logger.setLevel(level)
+ if propagate is not None:
+ logger.propagate = int(propagate)
+ if outputs is not None:
+ if not add:
+ purge_handlers(logger)
+ for name in outputs:
+ if is_handler(name, True):
+ handler = name
+ else:
+ handler = self.get_handler(name)
+ logger.addHandler(handler)
+ logger.disabled = 0
+
+ def raw_apply_capture_options(self):
+ "apply bps.logs.capture configuration changes"
+ kwds = self.options
+
+ value = kwds.get('capture_warnings')
+ if value is True:
+ fmt = kwds.get("warning_fmt")
+ target = kwds.get("warning_target")
+ capture.capture_warnings(fmt=fmt, target=target)
+ elif value is False:
+ capture.release_warnings()
+
+ #check capture_stderr
+ value = kwds.get("capture_stderr")
+ if value is True:
+ capture.capture_stderr()
+ elif value is False:
+ capture.release_stderr()
+
+ #check capture_stdout
+ value = kwds.get("capture_stdout")
+ if value is True:
+ capture.capture_stdout()
+ elif value is False:
+ capture.release_stdout()
+
+ #=========================================================
+ #handler creation
+ #=========================================================
+ _handlers = None #dict of handler name -> instance used by apply_handlers
+
+ def get_handler(self, name):
+ "get handler, creating if needed"
+ if name not in self._handlers:
+ config = self.get_handler_config(name)
+ self._handlers[name] = self.create_handler(**config)
+ return self._handlers[name]
+
    def create_handler(self,
        #constructor options
        klass=None, args=None,
        #configuration options
        level=None,
        formatter=None,
        target=None,
        startup_msg=None, delay_startup_msg=True,
        ):
        """create handler from options.

        .. note::
            This function relies on set_handler/get_handler_config
            to take care of all normalization and type-checking
            of its inputs.

        :param klass:
            Handler class.

        :param args:
            Arguments to pass to handler constructor, in form of a Params object.

        :param level:
            Optional logging level for handler, interpreted via :func:`parse_level_name`

        :param startup_msg:
            Optional flag indicating handler should emit a "logging started" message
            when it runs. if not a boolean (True/False), assumed to contain a custom startup msg.

        :param delay_startup_msg:
            Optional flag to control whether startup_msg should be delayed
            until handler actually emits something, or be printed right away.
            Defaults to True (delayed until first message is logged).

        :param formatter:
            Optional formatter instance or name of formatter.

        :param target:
            Optionally specifies the name of another handler
            which should be retrieved and passed to this handler's setTarget() method.

        :Returns:
            a handler instance
        """
        #create handler
        if args is None:
            handler = klass()
        else:
            handler = klass(*args.args, **args.kwds)

        #set level
        if level is not None:
            handler.setLevel(parse_level_name(level))

        #set formatter
        if formatter:
            if isinstance(formatter, str):
                #NOTE(review): isinstance(...,str) won't match unicode names
                #under py2, while other paths use is_str() - confirm intended.
                formatter = self.get_formatter(formatter)
            elif not is_formatter(formatter, True):
                raise TypeError, "formatter param must be str, or Formatter: %r" % (formatter,)
            handler.setFormatter(formatter)

        #set startup msg
        if startup_msg:
            set_startup_msg(handler, startup_msg, delay=delay_startup_msg)

        #set/register target
        if target is not None:
            if hasattr(handler, "setTarget"):
                #recursively instantiate the target handler by name
                target = self.get_handler(target)
                handler.setTarget(target)
            else:
                log.warning("ignoring target for handler: handler=%r target=%r", handler, target)

        #done
        return handler
+
+ #=========================================================
+ #formatter creation
+ #=========================================================
+ _formatters = None
+
+ def get_formatter(self, name):
+ "get formatter, creating if needed"
+ if name not in self._formatters:
+ config = self.get_formatter_config(name)
+ #TODO: could fallback to trying name as class path?
+ self._formatters[name] = self.create_formatter(**config)
+ return self._formatters[name]
+
+ def create_formatter(self, **kwds):
+ klass = kwds.pop("klass", logging.Formatter)
+ if 'format' in kwds:
+ format = kwds.pop("format")
+ return klass(format, **kwds)
+ else:
+ return klass(**kwds)
+
    #=========================================================
    #parse logging config from keywords
    #=========================================================
    def add_kwds(self, source):
        """parse logging config from kwd arguments,
        ala the direct config_logging() style.

        This is mainly used as the base class for parsing
        the various supporting logging config formats,
        but it can also be used to parse programmatic input
        in the form of keywords passed into the constructor.

        In this second mode, the following keywords are recognized:

        level
            This specifies the master logging level used by the root logger.
            This is a shortcut for setting the root logger via the levels keyword.

        levels
            This should be a dictionary mapping logger names to logging levels.

        formatters
            This should be a dictionary mapping formatter names to dicts of formatter options,
            to be passed to compile_formatter(). The names may be referred to by the handlers.

        handlers
            This should be a dictionary mapping handlers names to dicts of handlers options,
            to be passed to compile_handler(). The names may be referred to by the output section.

        outputs
            This should be a dictionary mapping loggers to dictionary
            of handler options. One option is "handlers",
            which should be a list of handler names or handler objects.
            There is also the "propagate" boolean keyword,
            and the "replace" boolean keyword.

        default_handler
            This is a shortcut, which lets you specify the kwds for a single handler,
            which will be set up as the ONLY handler for the root logger.
            ``default_handler=dict(XXX)`` is the same as
            ``output="<root>=default only",handlers=dict(default=dict(XXX))``,
            but is run before ``outputs`` and ``handlers`` are processed.

        .. note::

            If changes are made to an already-existing instance,
            call ``self.reparse()`` to re-run internal syntactic validation
            and parsing routines.
        """
        #
        #parse options first
        #
        #options may live in a nested 'options' dict, or at the top level
        if 'options' in source:
            opts = source['options']
        else:
            opts = source
        for name in self.ALL_OPTIONS:
            if name in opts:
                self.set_option(name, opts[name])

        #
        #parse kwds that could be user-provided OR
        #being passed back in from LoggingConfig object
        #

        #formatters
        if 'formatters' in source:
            for name, desc in source['formatters'].iteritems():
                config = self.parse_formatter_desc(desc)
                self.set_formatter(name, **config)

        #handlers
        if 'handlers' in source:
            for name, desc in source['handlers'].iteritems():
                config = self.parse_handler_desc(desc)
                self.set_handler(name, **config)

        #loggers
        if 'loggers' in source:
            for name, desc in source['loggers'].iteritems():
                self.add_output_desc(name, desc)

        #
        #parse user-provided programmatic input
        #

        #check for master level
        if 'level' in source:
            self.set_level(ROOT, source['level'])

        #check for level dict, parse values to levels
        if 'levels' in source:
            levels = source['levels']
            if is_str(levels):
                #allow levels to be given as a "name=level, ..." string
                levels = parse_dict_string(levels, "\n,;", strip_comments=True)
            for name, level in levels.iteritems():
                self.set_level(name, level)

        #check for default handler
        if 'default_handler' in source:
            self.set_default_handler(source['default_handler'])

        #check for output dict
        if 'outputs' in source:
            for name, desc in source['outputs'].iteritems():
                #desc should be a dict or list
                if is_seq(desc):
                    #bare list is shorthand for dict(outputs=[...])
                    desc = dict(outputs=desc)
                self.add_output_desc(name, desc, outputs=True)
+
+ def add_output_desc(self, name, desc, outputs=False):
+ if not outputs:
+ if 'level' in desc:
+ self.set_level(name, desc['level'])
+ if 'propagate' in desc:
+ self.set_propagate(name, desc['propagate'])
+ if not outputs and 'outputs' in desc:
+ out = desc['outputs']
+ elif outputs and 'handlers' in desc:
+ out = desc['handlers']
+ else:
+ out = None
+ if out is not None:
+ if is_str(out):
+ out = splitcomma(out)
+ if desc.get('add'):
+ self.add_outputs(name, out)
+ else:
+ self.set_outputs(name, out)
+
    def parse_handler_desc(self, config):
        "parse programmatic-input format for handler config dict"
        #NOTE: used internally by get_handler_config & also by KwdConfig
        config = config.copy()
        resolve_class_aliases(config)
        klass = config['klass']
        if not is_handler(klass, False):
            #not already a handler class - resolve it as a dotted class path
            klass = resolve_class_path(klass, self.handler_prefixes)
        config['klass'] = klass
        if 'kwds' in config:
            warn("'kwds' option deprecated for handler desc, used args instead", stacklevel=self.stacklevel+2) #relative to add_kwds call
            if 'args' in config:
                raise ValueError, "args and kwds both specified"
            config['args'] = config.pop("kwds")
        if 'args' in config and is_str(config['args']):
            #string args are parsed into a Params object (positional+kwd bundle)
            config['args'] = Params.parse(config['args'], scope=self.scope)
        return config
+
+ def parse_formatter_desc(self, config):
+ "parse programmatic-input format for formatter config dict"
+ #NOTE: used internally by get_formatter_config & also by KwdConfig
+ config = config.copy()
+ resolve_class_aliases(config)
+ if 'klass' in config:
+ klass = config.get('klass')
+ if not is_formatter(klass, False):
+ klass = resolve_class_path(klass, self.formatter_prefixes)
+ config['klass'] = klass
+ return config
+
+ def set_default_handler(self, handler):
+ "register the default handler"
+ #this function provides a useful shorthand
+ #for adding a handler to the root logger.
+
+ if is_str(handler) or is_handler(handler, True):
+ self.set_outputs(ROOT, [handler])
+ elif isinstance(handler, dict):
+ config = self.parse_handler_desc(handler)
+ name = self.DEFAULT_HANDLER_NAME
+ self.set_handler(name, **config)
+ self.set_outputs(ROOT, [name])
+ else:
+ #TODO: accept handler instances?
+ raise TypeError, "default_handler must be dict/str: %r" % (handler,)
+
+ #=========================================================
+ #eoc
+ #=========================================================
+
+#=========================================================
+#compact format (see bps documentation for spec)
+#=========================================================
class CompactConfig(LoggingConfig):
    """parse compact logging config format, returning LoggingConfig object.

    :arg source:
        String, stream, or filepath containing
        an standard logging config ini file.

    :returns:
        Config in parsed form, as a LoggingConfig object.

    .. note::
        This merely creates a valid LoggingConfig object,
        it doesn't actually make any changes to the logging system
        (you must call the LoggingConfig.apply() method for that).
    """
    #=========================================================
    #class constants
    #=========================================================
    OPTIONS_SECTION = "logging:options"
    LEVEL_SECTION = "logging:levels"
    OLD_OUTPUT_SECTION = "logging:output" #deprecated section name, don't use
    OUTPUT_SECTION = "logging:outputs"
    HANDLER_SECTION = "logging:handler"
    FORMAT_SECTION = "logging:formatter"

    #HACK: have to use 'raw' mode for these keys,
    #since ConfigParser has no way to escape '%' options
    RAW_FORMATTER_KEYS = [ 'format', 'fmt', 'datefmt' ]
    RAW_OPTIONS = ESCAPE_OPTIONS = [ 'warning_fmt', 'warning_target' ]

    #=========================================================
    #detect
    #=========================================================
    @classmethod
    def detect_cp(cls, parser):
        "detect format from config-parser object"
        #presence of any bps-specific section marks the file as compact format
        for name in (cls.OPTIONS_SECTION, cls.LEVEL_SECTION, cls.OUTPUT_SECTION,
            cls.OLD_OUTPUT_SECTION):
            if parser.has_section(name):
                return True
        return False

    #=========================================================
    #parse
    #=========================================================
    def __init__(self, source, **kwds):
        "create new config from ini source"
        self.__super.__init__(**kwds)
        self.stacklevel += 1
        self.init_parser(source)
        self.parse_options()
        self.parse_levels()
        self.parse_outputs()
        self.parse_formatters()
        self.parse_handlers()

    def init_parser(self, source):
        "read *source* into self.cp, wrapping parse failures in InputError"
        #parse the file
        cp = self.cp = ConfigParser.ConfigParser()
        try:
            read_into_parser(source, parser=cp)
        except ValueError, err:
            raise InputError(str(err))

        #purge defaults (if this section is part of a larger ini file, eg pylons,
        #the defaults will add spurious logger names...
        #FIXME: really, we just want to purge defaults when getting options list,
        # could leave defaults alone when reading values.
        cp._defaults.clear()

    def parse_options(self):
        "parse logging:options section"
        cp = self.cp

        #parse options
        if cp.has_section(self.OPTIONS_SECTION):
            for key in cp.options(self.OPTIONS_SECTION):
                if key not in self.ALL_OPTIONS:
                    warn("unknown logging:options key encountered: %r" %(key, ), stacklevel=self.stacklevel+1)
                    continue
                raw = (key in self.RAW_OPTIONS)
                value = cp.get(self.OPTIONS_SECTION, key, raw)
                self.parse_option(key, value)

    def parse_option(self, key, value):
        "coerce raw option string to its proper type and store it"
        if key in self.ESCAPE_OPTIONS:
            value = unescape_string(value)
        if key in self.BOOL_OPTIONS:
            value = asbool(value)
        self.set_option(key, value)

    def parse_levels(self):
        "parse logging:levels section"
        cp = self.cp
        """
        spec defines format as:

        [logging:levels]
        lname = level
        lname = level #comment

        lname can be name of a logger, or <root>
        level can be name of level or number (NOTSET included).
        """
        if cp.has_section(self.LEVEL_SECTION):
            for lname in cp.options(self.LEVEL_SECTION):
                value = cp.get(self.LEVEL_SECTION, lname)
                self.set_level(lname, stripcomment(value))

    def parse_outputs(self):
        "parse logging:outputs section"
        cp = self.cp

        #parse *old* logger:output section
        #TODO: support for this section name should be removed after 2009-08-01
        if cp.has_section(self.OLD_OUTPUT_SECTION):
            warn("'logging:output' is deprecated, use 'logging:outputs' instead", DeprecationWarning, stacklevel=self.stacklevel+1)
            #each value is a comma-sep list of handler names, but that's accepted by set_logging_outputs()
            for lname in cp.options(self.OLD_OUTPUT_SECTION):
                value = cp.get(self.OLD_OUTPUT_SECTION, lname)
                self.parse_output(lname, value)

        #parse new logger:outputs section
        """
        spec defines format as:

        [logging:outputs]
        lname = handler_a, handler_b #comment
        lname = #this would purge all handlers
        lname = handler_a | propagate=True, add=True #keywords appended to end.
        """
        if cp.has_section(self.OUTPUT_SECTION):
            #each value is a comma-sep list of handler names, but that's accepted by set_logging_outputs()
            for lname in cp.options(self.OUTPUT_SECTION):
                value = cp.get(self.OUTPUT_SECTION, lname)
                self.parse_output(lname, value)

    def parse_output(self, lname, value):
        "parse logging:outputs line"
        value = stripcomment(value)
        kwds = parse_output_value(value)
        #propagate defaults to True when not given explicitly
        if 'propagate' in kwds:
            self.set_propagate(lname, asbool(kwds.pop('propagate')))
        else:
            self.set_propagate(lname, True)
        if 'add' in kwds:
            add = asbool(kwds.pop("add"))
        else:
            add = False
        outputs = kwds.pop("outputs")
        if kwds:
            #anything left over was an unrecognized "| key=value" flag
            warn("ignoring unknown flags in logging:outputs section: %r = %r" % (lname, kwds))
        if add:
            self.add_outputs(lname, outputs)
        else:
            self.set_outputs(lname, outputs)

    def parse_formatters(self):
        "parse logging:formatter:xxx sections"
        cp = self.cp

        """
        spec assumes the following...

        [logging:formatter:a]
        class = zzz #optional, defaults to 'logging.Formatter', resolved as class path
        format = zzz #optional, raw - first positional arg to formatter if specified.
        #all other keys taken as kwd arguments to pass constructor.
        #if key ends with "format" or "fmt" it will be read raw.
        #note that inline comments are NOT supported here.
        """

        prefix = self.FORMAT_SECTION + ":"
        for section in cp.sections():
            if section.startswith(prefix):
                fname = section[len(prefix):]
                self.parse_formatter(section, fname)

    def parse_formatter(self, section, fname):
        "parse a single logging:formatter:NAME section into a formatter config"
        cp = self.cp
        opts = cp.options(section)
        kwds = {}
        #XXX: what if user uses 'klass' instead?
        if 'class' in opts:
            kwds['klass'] = resolve_class_path(cp.get(section, 'class'), self.formatter_prefixes)
            opts.remove('class')
        for key in opts:
            #XXX: could create logging:options flag for setting additional raw kwds
            raw = (key in self.RAW_FORMATTER_KEYS)
            kwds[key] = cp.get(section, key, raw)
        self.set_formatter(fname, **kwds)

    def parse_handlers(self):
        "parse logging:handler:xxx sections"

        """
        spec assumes the following...

        [logging:handler:a]
        class = xxx #required, should be class path
        args = zzz #optional, should eval to tuple or fcall under vars(logging) context

        #all other kwds are sent to handler constructor.
        #common ones...
        formatter = yyy #optional
        level = xxx #optional
        target = yyy #optional, defaults to '' but only for MemoryHandler subclasses

        #note that inline comments are NOT supported here.
        """
        cp = self.cp
        prefix = self.HANDLER_SECTION + ":"
        for section in cp.sections():
            if section.startswith(prefix):
                hname = section[len(prefix):]
                self.parse_handler(section, hname)

    def parse_handler(self, section, hname):
        "parse a single logging:handler:NAME section into a handler config"
        cp = self.cp
        opts = cp.options(section)
        kwds = {}
        #XXX: what if user uses 'klass' instead?
        klass = cp.get(section, "class")
        kwds['klass'] = resolve_class_path(klass, self.handler_prefixes)
        opts.remove("class")
        if 'args' in opts:
            args = cp.get(section, "args")
            opts.remove("args")
            kwds['args'] = Params.parse(args, scope=self.scope)
        for key in opts:
            kwds[key] = cp.get(section, key)
        if 'startup_msg' in kwds:
            #try to coerce to bool; non-bool strings are kept as custom messages
            try:
                kwds['startup_msg'] = asbool(kwds['startup_msg'])
            except ValueError:
                pass
        if 'delay_startup_msg' in kwds:
            kwds['delay_startup_msg'] = asbool(kwds['delay_startup_msg'])
        self.set_handler(hname, **kwds)
+
+#=========================================================
+#std format (from original logging module)
+#=========================================================
class StandardConfig(LoggingConfig):
    """parse standard logging config format, returning LoggingConfig object.

    :arg source:
        String, stream, or filepath containing
        an standard logging config ini file.

    :returns:
        Config in parsed form, as a LoggingConfig object.

    .. note::
        This merely creates a valid LoggingConfig object,
        it doesn't actually make any changes to the logging system
        (you must call the LoggingConfig.apply() method for that).

    .. note::
        This function attempts to replicate the semantics of the original parser
        as closely as possible, any deviations are probably a bug.
    """

    @classmethod
    def detect_cp(cls, parser):
        """detect ini file contains standard logging config format"""
        return parser.has_section("loggers") and parser.has_section("logger_root")

    def __init__(self, source, **kwds):
        "create new config from ini source"

        self.__super.__init__(**kwds)

        #parse the file
        cp = ConfigParser.ConfigParser()
        try:
            read_into_parser(source, parser=cp)
        except ValueError, err:
            raise InputError(str(err))

        #parse logger declarations
        """
        std format assumes the following...

        [loggers] #required
        keys = a,b,c #required
        #all other opts ignored

        [logger_root] #required
        level = xxx #optional
        handlers = a,b,c #optional
        #handlers purged regardless of lists's presence
        #all other opts ignored

        [logger_a]
        qualname = yyy #required
        propagate = 1|0 #optional, defaults to 1
        level = xxx #optional
        handlers = a,b,c #optional
        #handlers purged regardless of list's presence
        #all other opts ignored
        """
        snames = splitcomma(cp.get("loggers", "keys"))
        #root section is always processed, even if not listed in keys
        if 'root' not in snames:
            snames.append("root")
        for sname in snames:
            section = "logger_" + sname
            opts = cp.options(section)
            if sname == "root":
                lname = ROOT
            else:
                lname = cp.get(section, "qualname")
            if 'level' in opts:
                self.set_level(lname, cp.get(section, 'level'))
            if sname != "root":
                if 'propagate' in opts:
                    self.set_propagate(lname, asbool(cp.getint(section, 'propagate')))
                else:
                    self.set_propagate(lname, True)
            if 'handlers' in opts:
                self.set_outputs(lname, splitcomma(cp.get(section, 'handlers')))
            else:
                #fileConfig always purges handlers, even with no list given
                self.clear_outputs(lname)

        #parse formatters
        """
        std format assumes the following...

        [formatters] #required
        keys = a,b,c #required

        [formatter_a] #required
        format = xxx #optional, raw, defaults to None
        datefmt = yyy #optional, raw, defaults to None
        class = zzz #optional, defaults to 'logging.Formatter', resolved as class path
        #all other opts ignored
        """
        snames = splitcomma(cp.get("formatters", "keys"))
        for sname in snames:
            section = "formatter_" + sname
            opts = cp.options(section)
            #NOTE(review): this rebinds the 'kwds' parameter of __init__;
            #harmless here since the parameter is no longer used, but confusing.
            kwds = {}
            if 'format' in opts:
                kwds['format'] = cp.get(section, "format", 1)
            if 'datefmt' in opts:
                kwds['datefmt'] = cp.get(section, "datefmt", 1)
            if 'class' in opts:
                kwds['klass'] = resolve_class_path(cp.get(section, "class"))
            self.set_formatter(sname, **kwds)

        #parse handlers
        """
        std format assumes the following...

        [handlers] #required
        keys = a,b,c #required

        [handler_a] #required
        class = xxx #required, should eval to class undef vars(logging) context OR be class path
        args = zzz #required, should eval to tuple under vars(logging) context
        formatter = yyy #optional, defaults to ''
        level = xxx #optional
        target = yyy #optional, defaults to '' but only for MemoryHandler subclasses
        """
        snames = splitcomma(cp.get("handlers", "keys"))
        for sname in snames:
            section = "handler_" + sname
            opts = cp.options(section)
            kwds = {}
            klass = cp.get(section, "class")
            #SECURITY NOTE: eval of config-supplied strings replicates stdlib
            #fileConfig() behavior - config files must come from a trusted source.
            try:
                kwds['klass'] = eval(klass, self.scope)
            except (AttributeError, NameError):
                kwds['klass'] = resolve_class(klass)
            args = cp.get(section, "args")
            kwds['args'] = eval(args, self.scope)
            for k in ("formatter", 'level', 'target'):
                if k in opts:
                    kwds[k] = cp.get(section, k)
            self.set_handler(sname, **kwds)

        #set option to replicate fileConfig()'s behavior.
        self.set_option("disable_existing_loggers", True)
+
+#=========================================================
+#private helpers
+#=========================================================
BAD_CHARS = "<>[]=#,|" #chars forbidden in logger/handler/formatter names

def parse_logger_name(value):
    """bps restricts logger names to not contain certain chars"""
    if value == ROOT: #resolve the alias
        return ""
    if value and any(c in value for c in BAD_CHARS):
        raise ValueError("invalid logger name: %r" % (value,))
    return value
+
def parse_formatter_name(name):
    """bps restricts formatter names to not contain certain chars"""
    if any(c in name for c in BAD_CHARS):
        raise ValueError("invalid formatter name: %r" % (name,))
    return name
+
def parse_handler_name(name):
    """bps restricts handler names to not contain certain chars"""
    if any(c in name for c in BAD_CHARS):
        raise ValueError("invalid handler name: %r" % (name,))
    return name
+
def parse_output_value(value):
    """parse line from compact format's logging:outputs section.

    :arg value: string containing logger's output config

    :returns:
        dict containing:
            outputs - list of handler names
        and any other flags which were set
    """
    kwds = {}
    #everything after a "|" is a flag list, eg "handler_a | add=True"
    idx = value.find("|")
    if idx > -1:
        flags = value[idx+1:]
        #should be string of "a=b, c=d" etc
        #NOTE: we use parse_param_string to coerce add/propagate to bool.
        #could use other method.
        ##flags = parse_dict_string(flags, ",")
        flags = Params.parse(flags)
        if flags.args:
            raise ValueError, "positional arguments not allowed"
        flags = flags.kwds

        #should now be dict of dict(a="b",c="d")
        kwds.update(flags)
        #strip flag suffix from end, leaving just the handler list
        value = value[:idx]
    if value.lower().endswith(" only"):
        #TODO: support for this flag should be removed after 2009-08-01
        warn("'only' suffix is ignored", DeprecationWarning)
        #this used to be an uglier way to signal propagate=False + add=False
        #but as of 4.1, add=False by default,
        #and most of the places "only" was used, propagate=False didn't matter
        value = value[:-5]
    kwds['outputs'] = splitcomma(value)
    return kwds
+
def resolve_class_aliases(kwds):
    """fold the 'class'/'class_' spelling aliases into the canonical 'klass' key.

    mutates *kwds* in place and also returns it, warning on each alias found.
    """
    for alias, msg in (("class", "'klass' kwd is preferred over 'class' kwd: %r"),
                       ("class_", "'klass' kwd is preferred over 'class_' kwd: %r")):
        if alias in kwds:
            warn(msg % (kwds,))
            kwds['klass'] = kwds.pop(alias)
    return kwds
+
+#=========================================================
+#generic helpers
+#=========================================================
+
def parse_dict_string(source, sep, strip_comments=False):
    """parses and returns a string->string dictionary from various formats.

    This is used by parse_config and friends to parse
    level & output description blocks, among other things.

    :arg source:
        The input string, may be any of the following:

        * a string containing a single ``"k=v"`` element
        * a string of ``"k=v"`` elements separated by the separator.
        * a string of ``"k=v #comment"`` or ``"k=v"`` elements separated by the separator
          (this only applies if strip_comments set to True).

    :arg sep:
        A sequence of potential separator strings to try.
        If more than one, all will be tried in turn,
        and the first one present in *source* wins.

    :param strip_comments:
        If True, inline comments starting with "#" will be stripped
        from the end of each value.

    :returns:
        A dictionary containing all the k->v pairs in the source,
        after all whitespace, blank elements, and comments have been stripped out.

    :raises ValueError: if it encounters a non-empty element that's not "k=v #comment"

    Usage Examples::
        >>> from bps.logs.config import parse_dict_string
        >>> parse_dict_string("a=1;b=2",";")
        { a="1", b="2" }
        >>> parse_dict_string("a=1 #blah\nd=2\n\n\n#blah","\n", strip_comments=True)
        { a="1", d="2" }
    """
    #NOTE: docstring fixes - the separator arg is named 'sep' (was documented
    #as 'seps'), and the claimed TypeError for non-string elements was never
    #actually raised by this code.
    assert is_str(source)

    #split string apart on the first separator actually present in it
    for s in sep:
        if s in source:
            parts = source.split(s)
            break
    else:
        parts = [ source ]

    #break apart k=v pairs in list
    kwds = {}
    for elem in parts:
        if strip_comments:
            #drop inline "#comment" suffix before parsing the element
            idx = elem.find("#")
            if idx > -1:
                elem = elem[:idx]
        idx = elem.find("=")
        if idx > -1:
            k = elem[:idx].strip()
            v = elem[idx+1:].strip()
            kwds[k] = v
        elif elem.strip():
            raise ValueError("unexpected element in string %r: %r" % (source, elem))

    return kwds
+
def splitcomma(value):
    "split comma-separated string into a list of stripped, non-empty tokens"
    result = []
    for piece in value.split(","):
        piece = piece.strip()
        if piece:
            result.append(piece)
    return result
+
def stripcomment(value):
    "strip inline '#' comment and surrounding whitespace from string"
    head, _, _ = value.partition("#")
    return head.strip()
+
def _try_import_module(name):
    "helper for resolve_class_path: import module, returning None if it doesn't exist"
    try:
        return __import__(name)
    except ImportError, err:
        #only swallow "module not found" errors for *this* module;
        #re-raise anything else (eg import errors raised inside the module)
        if str(err) == "No module named " + name:
            return None
        if '.' in name:
            #submodule import failures report only the trailing component
            if str(err) == "No module named " + name[name.rfind(".")+1:]:
                return None
        raise
+
def _try_import_data(name):
    "helper for resolve_class_path: resolve dotted path to object, or None if missing"
    parts = name.split('.')
    used = parts.pop(0)
    found = _try_import_module(used)
    if found is None:
        return None
    for part in parts:
        used = used + '.' + part
        try:
            found = getattr(found, part)
        except AttributeError:
            #attribute missing - it may be a not-yet-imported submodule,
            #so import it and retry the lookup once.
            if _try_import_module(used) is None:
                return None
            found = getattr(found, part)
    return found
+
+#XXX: would this be generally useful enough to add to bps.meta ?
+def resolve_class_path(name, prefixes=[]):
+ "resolve class path, trying various prefixes"
+ if '.' in name:
+ cls = _try_import_data(name)
+ if cls is not None:
+ return cls
+ for prefix in prefixes:
+ cls = _try_import_data(prefix + "." + name)
+ if cls is not None:
+ return cls
+ raise ImportError, "No object path named %s" % (name,)
+
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/logs/formatters.py b/bps/logs/formatters.py
new file mode 100644
index 0000000..5b09abf
--- /dev/null
+++ b/bps/logs/formatters.py
@@ -0,0 +1,432 @@
+"""This module provides additional logging Formatter classes."""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+import atexit
+from cStringIO import StringIO
+import sys
+import os
+import logging
+from logging import getLogger, Formatter
+from time import time as get_time
+import threading
+#site
+#pkg
+from bps.cache import cached_method
+from bps.logs.loggers import RAW
+from bps.meta import is_class
+from bps.error.utils import format_exception
+#local
+log = getLogger(__name__)
__all__ = [
    #formatter helper funcs
    'is_formatter',

    #custom formatters
    "FancyFormatter",
    "FileFormatter",
    #NOTE: fixed a missing comma that silently concatenated "StdFormatter"
    #and "DevFormatter" into the single bogus name "StdFormatterDevFormatter";
    #also added FileFormatter, which is defined below but wasn't exported.
    "StdFormatter",
    "DevFormatter",
]
+
+#=========================================================
+#formatter factory, used by config_logging
+#=========================================================
def is_formatter(value, instance=None):
    "returns True if value is Formatter object or class"
    #XXX: really formatTime etc aren't required, should use a negative test to distinguish it
    #``instance`` filter: None accepts both classes and instances,
    #True requires an instance, False requires a class.
    #NOTE: ``^`` binds tighter than ``not``, so this reads as
    #``not (is_class(value) ^ instance)`` -- reject when value's class-ness
    #matches the kind the caller does *not* want.
    if instance is not None and not is_class(value) ^ instance:
        return False
    #duck-type check: anything exposing the core Formatter methods counts
    return all(hasattr(value, attr) for attr in ("formatTime", "formatException", "format"))
+
+#=========================================================
+#standard formatter
+#=========================================================
class FancyFormatter(Formatter):
    """
    Logging formatter with a couple of bells and whistles:
        * sanely recovers from errors which occur during formatting
          - eg, when a logger is passed an invalid format string
        * can specially format multiline log messages
        * can add ANSI color coding to output
        * can adapt logging format based on error level
        * more detailed exception traceback information, using `format_exception.formatException`

    FancyFormatter does not take a "fmt" option.
    Instead, its display format is assembled via
    a "header" format, the log message itself,
    and a series of conditionally present "footer" formats.
    This allows for colorization, and some other features.

    FancyFormatter defines a few additional LogRecord attributes
    which can be used:
        %(shortlevelname)s
            fixed 3-digit form of logging level name.
            If a name is not defined in cls.short_level_names,
            the default level name is used.

        %(multiline_header)s
            If a multiline log message is detected,
            and multiline formatting is enabled,
            this will contain the first line of the message
            ONLY if it appears to be a header line (see `detect_header`),
            else this will be an empty string.

        %(multiline_content)s
            If a multiline log message is detected,
            and multiline formatting is enabled,
            this will contain all of the message
            not included in %(multiline_header)s,
            else this will be an empty string.

    :Parameters:
        header_fmt
            [optional]
            The format string of the header written before the message proper.
            If None, the class default is used.

        date_fmt
            [optional]
            This behaves the same as Formatter's ``datefmt``,
            except for the fact that the default value can be specified
            at the class level. If None or "iso", you get the ISO8601 format.

        error_fmt
            [optional]
            This specifies the format of the footer added after %(message)s
            for all log levels of ERROR or higher. By default this is None,
            but see HumanFormatter for a usage example.

        multiline_fmt
            [optional]
            Formatting used for %(multiline_content)s,
            The default will normally be the desired format,
            but other formats may be used, or it may be set
            to False, which disables multiline detection entirely.

        colorize
            [optional]
            Enables the use of ANSI color codes (or html, if overridden),
            which are used to format the various parts of the log message.
            Setting colorize=True will enable them.

        header_color
            [optional]
            If colorize=True, this string will be inserted before the header_fmt,
            as well as before the error_fmt.

        message_color
            [optional]
            If colorize=True, this string will be inserted after the header_fmt.

        exception_color
            [optional]
            If colorize=True, this string will be inserted before printing the exc_info.

        multiline_color
            [optional]
            If colorize=True, this string will be inserted before printing the multiline_content.

        reset_color
            [optional]
            If colorize=True, this string will be inserted at the end of the log message.

    TODO
        * document this formatter's options.
        * make the per-level error message creation more flexible.
    """
    #=========================================================
    #class/instance attrs
    #=========================================================

    #formats
    header_fmt = "[%(asctime)s:%(shortlevelname)s:%(name)s] "
    date_fmt = None
    error_fmt = None
    multiline_fmt = "\n%(multiline_content)s\n"

    #color codes
    colorize = False
    header_color = "\x1B[0m"
    header_color_dbg = "\x1B[1;34m"
    header_color_inf = "\x1B[1;32m"
    header_color_wrn = "\x1B[1;33m"
    header_color_err = "\x1B[1;31m" #covers warning, error and critical
    message_color = "\x1B[0m"
    message_color_dbg = "\x1B[1;30m"
    exception_color = "\x1B[1;31m"
    multiline_color = "\x1B[32m"
    reset_color = "\x1B[0m"

    #DOS color code reference
    # "\x1B[1;%dm" - %d values as follows
    # 29 : bright white
    # 30 : grey
    # 31 : red
    # 32 : green
    # 33 : yellow
    # 34 : blue
    # 35 : pink
    # 36 : aqua

    # grey w/blue background - "\x1B[7m" - classic dos look; kindof stands out
    # blue w/grey background - "\x1B[7m" - really stands out; hard to miss

    #misc options
    #NOTE(review): "ocurred" misspelling below is preserved -- it's text
    #emitted at runtime, fix it deliberately (not cosmetically) if desired.
    error_msg = "[message not logged: a %(name)s error ocurred during formatting]"
    multiline_tab = ' ' #: char to replace tabs w/ in multiline content
    multiline_pad = ' ' #: padding to add to lines of multiline content

    #dictionary mapping std level names to short form
    short_level_names = {
        "DEBUG": "DBG",
        "INFO": "INF",
        "WARNING": "WRN",
        "ERROR": "ERR",
        "CRITICAL": "CRI",
    }

    #=========================================================
    #init
    #=========================================================
    def __init__(self,
            header_fmt=None, date_fmt=None, error_fmt=None, multiline_fmt=None,
            colorize=None,
            header_color=None, message_color=None,
            exception_color=None, multiline_color=None,
            reset_color=None,
            ):
        #set options -- each keyword, when given, shadows the class-level default
        if header_fmt is not None:
            self.header_fmt = header_fmt
        if date_fmt is not None:
            self.date_fmt = date_fmt
        if error_fmt is not None:
            self.error_fmt = error_fmt
        if multiline_fmt is not None:
            self.multiline_fmt = multiline_fmt
        if colorize is not None:
            self.colorize = colorize
        if header_color is not None:
            self.header_color = header_color
        if message_color is not None:
            self.message_color = message_color
        if exception_color is not None:
            self.exception_color = exception_color
        if multiline_color is not None:
            self.multiline_color = multiline_color
        if reset_color is not None:
            self.reset_color = reset_color
        #send date_fmt -> Formatter.datefmt
        #("iso" and None both map to logging's default ISO8601 rendering)
        df= self.date_fmt
        if df == "iso":
            df = None
        Formatter.__init__(self, fmt=None, datefmt=df)

    #=========================================================
    #format
    #=========================================================
    #NOTE: moved format out of attr and to func param, in case
    # this formatter got called recursively (it happened!)
    #: depth counter guarding against recursive formatting errors
    _err_nest = 0

    def format(self, record):
        "wrapper which catches any renderRecord() errors"

        #trap any errors when formatting (typically, msg/args mismatch)
        try:
            fmt = self.prepareRecord(record)
            return self.renderRecord(fmt, record)
        except Exception, err:
            #something in logging system failed, so log info to this module's log.
            #FIXME: add threading lock around this?
            #in case error is occurring recursively, just let things
            #fail if we recurse too much (2 is arbitrary)
            #NOTE: if we just let this go, recursion error would hide real error
            if self._err_nest > 2:
                raise
            try:
                self._err_nest += 1
                self.logFormatError(err, record)
            finally:
                self._err_nest -=1

            #return a generic message
            try:
                return self.error_msg % dict(name=type(err).__name__)
            except:
                return "[message not logged: an error ocurred during formatting]"

    def prepareRecord(self, record):
        "prepare record & calculated attributes"
        #NOTE: this code was adapted from python 2.5.2 Formatter.format
        #NOTE: mutates *record* in place, attaching the derived attributes
        #(message, multiline_*, shortlevelname, asctime, exc_text) that the
        #format string returned by getFormat() may reference.
        record.message = record.getMessage()
        self._prepareMultiline(record)
        fmt = self.getFormat(record)
        if "%(shortlevelname)" in fmt:
            record.shortlevelname = self.short_level_names.get(record.levelname, record.levelname)
        if "%(asctime)" in fmt:
            record.asctime = self.formatTime(record, self.datefmt) #NOTE: this reads datefmt not date_fmt
        if record.exc_info:
            # Cache the traceback text to avoid converting it multiple times
            # (it's constant anyway)
            if not record.exc_text:
                record.exc_text = self.formatException(record.exc_info)
        return fmt

    def _prepareMultiline(self, record):
        "helper for prepareRecord, sets the multiline support attrs"
        #multiline_content defaults to '' so getFormat() can test its truth;
        #multiline_header is only set when a newline is actually present.
        record.multiline_content = ''
        if not self.multiline_fmt:
            return
        message = record.message
        if '\n' not in message:
            return
        lines = message.replace("\t",self.multiline_tab).split("\n")
        if detect_header(lines[0]):
            record.multiline_header = lines.pop(0).rstrip()
        else:
            record.multiline_header = ''
        record.multiline_content = '\n'.join(
            self.multiline_pad + line.rstrip() for line in lines)

    def renderRecord(self, fmt, record):
        "takes care of actually rendering record, after all attrs are prepared"
        #NOTE: this code was adapted from python 2.5.2 Formatter.format
        s = fmt % record.__dict__
        if record.exc_text:
            if not s.endswith("\n"):
                s += "\n"
            if self.colorize:
                s += self.exception_color
            s += record.exc_text + "\n"
        if self.colorize:
            #reset terminal colors at end of every colorized message
            s += self.reset_color
        return s

    def logFormatError(self, err, record):
        #log details of a failed format() call to this module's own logger
        #build message text ourselves, to isolate any more internal failures
        try:
            text = (
                "error formatting log record:\n"
                "  record: %r\n"
                "     msg: %r\n"
                "    args: %r\n"
                "  lineno: %r\n"
                "filename: %r"
                ) % (
                record,
                getattr(record, "msg", None),
                getattr(record, "args", None),
                getattr(record, "lineno", None),
                getattr(record, "filename", None),
                )
        except Exception, err:
            #NOTE(review): this rebinds ``err``, shadowing the original error
            #passed in -- harmless here since only the new error's type is used.
            #we've already failed once, let's not try to log this error
            text = "error formatting log record:\n<%s error formatting info about error formatting!>" % (type(err).__name__)

        #now try to log it
        log.critical(text, exc_info=True)

    #=========================================================
    #generate log format
    #=========================================================
##    def kf_getFormat(record):
##        return (
##            bool(record.multiline_content),
##            bool(record.exc_info),
##            bool(record.levelno),
##        )
##
##    @cached_method(key=kf_getFormat)
    def getFormat(self, record):
        "builds format string according to formatter options and record attrs"
        #assembles, in order: [header color] header [message color]
        #message-or-multiline-header [error footer] [multiline body].
        fmt = StringIO()
        write = fmt.write
        colorize = self.colorize
        multiline = bool(record.multiline_content and self.multiline_fmt)
        if colorize:
            #pick per-level header color
            if record.levelname == "DEBUG":
                col = self.header_color_dbg
            elif record.levelname == "INFO":
                col = self.header_color_inf
            elif record.levelname == "WARNING":
                col = self.header_color_wrn
            elif record.levelname in ("ERROR", "CRITICAL"):
                col = self.header_color_err
            else:
                col = self.header_color
            write(col)
        write(self.header_fmt)
        if colorize:
            #pick per-level message color (warnings/errors reuse header colors)
            if record.levelname == "DEBUG":
                col = self.message_color_dbg
            #elif record.levelname == "INFO":
            #    col = self.header_color_inf
            elif record.levelname == "WARNING":
                col = self.header_color_wrn
            elif record.levelname in ("ERROR", "CRITICAL"):
                col = self.header_color_err
            else:
                col = self.message_color
            write(col)
        if multiline:
            write("%(multiline_header)s")
        else:
            write("%(message)s")
        #error footer shown for exceptions, and for ERROR+ levels (but not RAW)
        if self.error_fmt and (record.exc_info or (record.levelno != RAW and record.levelno >= logging.ERROR)):
            if colorize:
                write(self.header_color)
            write(self.error_fmt)
        if multiline:
            if colorize:
                write(self.multiline_color)
            write(self.multiline_fmt)
        #NOTE: reset_color added by formatRecord method
        return fmt.getvalue()

    #=========================================================
    #format exceptions
    #=========================================================
    def formatException(self, exc_info):
        #delegate to bps' richer traceback renderer
        return format_exception(exc_info)

    #=========================================================
    #EOC
    #=========================================================
+
def detect_header(first):
    """helper for detecting a 'header' line in multiline content.

    A line counts as a header when, after trailing whitespace is removed,
    it is empty or ends with ":" or "...".
    """
    stripped = first.rstrip()
    if not stripped:
        return True
    return stripped.endswith((":", "..."))
+
+#=========================================================
+#debugging
+#=========================================================
class FileFormatter(FancyFormatter):
    """helpful preset containing an easily parseable format (suited to log files)"""
    header_fmt = "[%(asctime)s %(shortlevelname)s %(name)s] "
    error_fmt = " [%(module)s %(lineno)d %(funcName)s]"
    date_fmt = "%Y-%m-%d %H:%M:%S"
+
class StdFormatter(FancyFormatter):
    """helpful preset containing a more human-readable format"""
    header_fmt = "<%(asctime)s> %(shortlevelname)s %(name)s: "
    error_fmt = " [Module %(module)r, line %(lineno)d, in %(funcName)s]"
    #NOTE(review): year:month:day with ":" separators is unusual --
    #possibly intended to be "%Y-%m-%d"; confirm before changing.
    date_fmt = "%Y:%m:%d %H:%M:%S"
+
class DevFormatter(StdFormatter):
    """helpful preset for when doing debugging on the cmd line, and logging via stderr.

    .. note::
        This formatter embeds ansi escape codes.
        It is recommended to use it with :class:`bps.logs.handlers.ConsoleHandler`,
        which processes them appropriately for the platform & terminal.
    """
    date_fmt = "%H:%M:%S" #terse time-only stamps for interactive use
    colorize = True #enables the ANSI color codes defined on FancyFormatter
+
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/logs/handlers.py b/bps/logs/handlers.py
new file mode 100644
index 0000000..d4bdf38
--- /dev/null
+++ b/bps/logs/handlers.py
@@ -0,0 +1,257 @@
+"""This module provides additional logging Handler classes."""
+#=========================================================
+#imports
+#=========================================================
+#core
+try:
+ import codecs
+except ImportError:
+ codecs = None
+import os.path
+import logging
+from logging import StreamHandler
+import os
+import time
+#site
+#pkg
+from bps.meta import is_class
+#local
+log = logging.getLogger(__name__)
__all__ = [
    #handler utilities
    'set_startup_msg',
    'is_handler',
    'purge_handlers',
    'has_default_handler',

    #custom handlers
    'WatchedFileHandler',
    #NOTE: ConsoleHandler is a public, documented class defined below but
    #was missing from __all__; added so ``from bps.logs.handlers import *``
    #exports it.
    'ConsoleHandler',
    'NullHandler',
]
+
+#=========================================================
+#handler helpers
+#=========================================================
def set_startup_msg(handler, msg=True, delay=True):
    """
    Modifies a handler so it emits a message
    indicating when the logging system started up.

    :param handler: the handler to modify
    :param msg: ``True`` selects the default banner text; a custom
        %-template string may be given instead (``%(asctime)s`` is filled in).
    :param delay: if ``True`` (default), the banner is only emitted the first
        time the handler is asked to log something; otherwise it is
        emitted immediately.
    """
    #build the banner text, interpolating the current timestamp
    if msg is True:
        msg = "--- LOGGING STARTED %(asctime)s ---"
    msg %= dict(
        asctime = time.strftime("%Y-%m-%d %H:%M:%S")
        )
    #record that will be emitted -- note it bypasses all level/filter checks
    startup_record = logging.LogRecord("", logging.INFO, "__main__", 0, msg, (), None)

    if not delay:
        handler.emit(startup_record)
        return

    #monkeypatch emit() with a one-shot wrapper: on first use it restores
    #the original emit, sends the startup record, then the real record.
    real_emit = handler.emit
    def one_shot_emit(record):
        handler.emit = real_emit
        real_emit(startup_record)
        return real_emit(record)
    handler.emit = one_shot_emit
+
def is_handler(value, instance=None):
    "returns True if value is Handler object or class"
    #``instance`` filter: None accepts both classes and instances,
    #True requires an instance, False requires a class.
    #NOTE: ``^`` binds tighter than ``not``, so this reads as
    #``not (is_class(value) ^ instance)`` -- reject when value's class-ness
    #matches the kind the caller does *not* want.
    if instance is not None and not is_class(value) ^ instance:
        return False
    #TODO: make this more comprehensive,
    #but for now these attrs should be good enough though
    return all(hasattr(value, attr)
               for attr in ("setLevel", "format", "handle", "setFormatter"))
+
def purge_handlers(logger, close=True):
    """remove all handlers attached to *logger*; returns the list of removed handlers.

    :param logger: a logger object or a logger name
    :param close: if ``True`` (default), each removed handler is also
        flushed and closed, leaving it inoperative.
    """
    #FIXME: this assumes handlers aren't bound to 2+ loggers.
    # probably safe to assume, but could bite us in the future.
    if isinstance(logger, str):
        logger = logging.getLogger(logger)
    removed = list(logger.handlers)
    for handler in removed:
        if not close:
            logger.removeHandler(handler)
            continue
        #hold the handler's I/O lock while detaching, flushing & closing it
        handler.acquire()
        try:
            logger.removeHandler(handler)
            handler.flush()
            handler.close() #close resources, remove from logging tracker
        finally:
            handler.release()
    #WARNING: if 'close' is True, all these handlers are inoperative (unless .shared is True)
    return removed
+
def has_default_handler():
    "check if a handler has been attached to the root logger"
    root = logging.getLogger("")
    return len(root.handlers) > 0
+
+#=========================================================
+#WatchedFileHandler
+#=========================================================
+#NOTE: as of py26, there is a core handler that provides this, and has the same name!
+#it would be useful to load that one instead, unless it lacks some feature of this one (doubtful)
+
class WatchedFileHandler(StreamHandler):
    """
    This is a re-implementation of the core FileHandler,
    which has the added ability to detect if the file it's writing
    to has been clobbered (eg, by logrotate), and reopen it before logging.

    It would inherit from the original FileHandler,
    but too many things needed rearranging

    """
    def __init__(self, filename, mode='a', encoding=None, shared=True):
        """
        Open the specified file and use it as the stream for logging.

        :param filename: path of the log file (stored as an absolute path)
        :param mode: file open mode, default ``'a'`` (append)
        :param encoding: ignored when the ``codecs`` module is unavailable.
            NOTE(review): the value is stored but never applied -- the file
            is opened with the plain ``file()`` builtin below, so encoding
            currently has no effect; confirm whether codecs.open was intended.
        :param shared: if True, assume multiple writers to file; if False,
            assume this should be the only one.
        """
        if codecs is None:
            encoding = None
        self.encoding = encoding
        self.mode = mode
        self.shared = shared #if True, assume multiple writers to file, if False, assume this should be only one.
        self.filename = os.path.abspath(filename)
        self.stream = None #file handle
        self.stat = None #stat() of last time we flushed
        self.prepare() #go ahead and open the stream, so we can pass it to StreamHandler
        StreamHandler.__init__(self, self.stream)

    def check(self):
        """
        Check if stream needs to be (re)opened.

        Returns True when the stream is closed/missing, or when the
        on-disk file no longer matches the stat snapshot taken at the
        last flush (truncated, replaced, or otherwise changed).
        """
        #make sure file exists and stream is open
        if not self.stream or self.stream.closed:
            return True
        if not os.path.exists(self.filename):
            return True
        cur = os.stat(self.filename)
        orig = self.stat

        #check if file has been truncated
        #NOTE(review): when shared is True and the file *grew* (another
        #writer appended), the first test is False but the elif still
        #returns True -- so any size change triggers a reopen regardless
        #of ``shared``. Possibly unintended; confirm before changing.
        if self.shared and cur.st_size < orig.st_size:
            return True
        elif cur.st_size != orig.st_size:
            return True

        #check if ctime has changed
        if cur.st_ctime != orig.st_ctime:
            return True

        #check if file has been replaced
        if cur.st_dev != orig.st_dev:
            return True
        if cur.st_ino != orig.st_ino:
            return True

        #NOTE: if ctime was faked, and size was restored to EXACTLY what it was, we might be fooled.
        return False

    def prepare(self):
        """
        Opens the stream if closed,
        and Reopens the stream if the file has been clobbered.
        """
        if self.check():
            if self.stream:
                self.stream.close()
            #NOTE: ``file()`` is the py2 builtin (removed in py3)
            self.stream = file(self.filename, self.mode)
            #flush to make sure file is created, and update self.stat
            self.flush()

    def flush(self):
        #flush the underlying stream, then snapshot its stat info so
        #check() can later detect external modification
        try:
            StreamHandler.flush(self)
        except ValueError, err:
            #only ever seen under windows
            if os.name == "nt" and str(err) == "I/O operation on closed file":
                return
            raise
        #now that we've flushed, save the latest stat info to check next time
        if self.stream and not self.stream.closed:
            self.stat = os.fstat(self.stream.fileno())

    def emit(self, record):
        self.prepare() #reopen stream if needed
        return StreamHandler.emit(self, record)

    def close(self):
        """
        Closes the stream.
        """
        self.flush()
        self.stream.close()
        StreamHandler.close(self)
+
+#=========================================================
+#
+#=========================================================
+
#helper used by ConsoleHandler: wrap *stream* so ANSI escape codes are
#rendered, passed through, or stripped, depending on platform & tty-ness
from bps.unstable.ansi import AnsiStripper
if os.name == "nt":
    from bps.unstable.winconsole import AnsiConsoleWriter
    def _create_ansi_wrapper(stream):
        #only a real console can render colors via the win32 api;
        #anything else (pipe, redirected file) gets the codes stripped
        if not ( hasattr(stream,"isatty") and stream.isatty() ):
            return AnsiStripper(stream)
        else:
            return AnsiConsoleWriter(stream)
else:
    def _create_ansi_wrapper(stream):
        #if not a tty, assume it has no capabilities
        if not ( hasattr(stream,"isatty") and stream.isatty() ):
            return AnsiStripper(stream)
        #XXX: we could use curses if available to check terminfo
        #otherwise assume it's ansi capable
        return stream
+
class ConsoleHandler(StreamHandler):
    """
    This stream handler should be used when writing to a console (such as stderr).
    It can handle ANSI escape codes (such as emitted by DevFormatter),
    and does one of three things:
        * [nt] use the windows api to colorize the display appropriately
        * [posix] pass ansi codes on through if target stream looks capable.
        * otherwise ansi codes are stripped out
    """
    def __init__(self, *a, **k):
        StreamHandler.__init__(self, *a, **k)
        #NOTE: all the work is done by the platform-specific AnsiWriter class;
        #the stream StreamHandler picked (or was given) is wrapped after init.
        self.stream = _create_ansi_wrapper(self.stream)
+
+#=========================================================
+#null handler for discarding records
+#=========================================================
+
class NullHandler(logging.Handler):
    """Handler which silently discards every record it is given.

    :param shared: when True, marks the handler as shared, which turns
        close() into a no-op so one instance can be reused safely.
    """
    shared = False #flag used to indicate handler should never be closed.

    def __init__(self, shared=False):
        self.shared = shared
        logging.Handler.__init__(self)

    def emit(self, record):
        #discard everything
        pass

    def close(self):
        #shared instances are kept alive so other users aren't affected
        if not self.shared:
            logging.Handler.close(self)
+
+#singleton use by setup_lib_logging() among others
+null_handler = NullHandler(shared=True)
+
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/logs/loggers.py b/bps/logs/loggers.py
new file mode 100644
index 0000000..8e4f757
--- /dev/null
+++ b/bps/logs/loggers.py
@@ -0,0 +1,256 @@
+"""
+This module installs a custom logger class,
+which enhances the default logger class.
+features include:
+ * setLogLevel() supports strings as well as numeric levels
+ * logger object can be used in comparisions w/ string & int levels
+ to check if it would log a message. eg::
+ if logger > "warning":
+ logger.warn("foo")
+ * integration with bps.logs.capture to help keep stdout/stderr flushed correctly.
+This module is automatically imported by bps.logs,
+so there is no real reason to import it manually.
+"""
+#=========================================================
+#imports
+#=========================================================
+#core
+import logging
+#site
+#pkg
+flush_buffers = None #NOTE: to prevent a cyclic import, this is filled in by the capture module
+#local
+__all__ = [
+ 'is_logger',
+ 'get_logger',
+## 'will_log_level',
+ 'parse_level_name',
+ 'get_level_name',
+]
+
ROOT = "<root>" #special alias for root logger

#=========================================================
#register special log level that capture module uses
# to display captured output
#=========================================================
#NOTE: 45 sits between ERROR (40) and CRITICAL (50), so captured output
#is almost always displayed without tripping normal level filters.
RAW=45
logging.addLevelName(RAW, "RAW")
+
+#=========================================================
+#helpers
+#=========================================================
def is_logger(obj):
    """test if *obj* appears to be a logger (duck-typed check)"""
    #NOTE: not all attrs tested for, just enough to identify it
    required = ("name", "getEffectiveLevel", "addHandler", "log", "warn")
    for attr in required:
        if not hasattr(obj, attr):
            return False
    return True
+
def get_logger(value=None):
    """return logger object.

    unlike logging.getLogger(),
    this accepts a logger object, a logger name, or the ROOT alias,
    and always normalizes to the corresponding Logger object.
    """
    if is_logger(value):
        #re-fetch by name so proxies etc normalize to the real logger
        target = value.name
    elif value == ROOT:
        target = ""
    else:
        target = value
    return logging.getLogger(target)
+
def get_managed_loggers(resolve=True):
    """returns name of all loggers which have been referenced up to this point.

    this is mainly useful for code which wants to inspect
    the existing logging configuration, as any loggers
    not returned have not been references, so can't have any special configuration.

    :param resolve: if ``False``, returns logger names instead of Logger objects.
    """
    #NOTE: this is poking into the logging module's internals.
    # any changes to that module will require updating here.
    # this mainly exists to isolate such nosiness to single function.
    #NOTE(review): _acquireLock/_releaseLock are private logging APIs and
    #``iteritems`` is py2-only -- both need attention for a py3 port.
    logging._acquireLock()
    try:
        #ld maps logging name -> Logger or PlaceHolder,
        # the latter of which lacks a ".handlers" attr
        ld = logging.root.manager.loggerDict
        return [
            logger if resolve else name
            for name, logger in ld.iteritems()
            if hasattr(logger, "handlers")
        ]
    finally:
        logging._releaseLock()
+
+##def purge_all_handlers():
+## """purge all handlers in system"""
+## for logger in get_managed_loggers():
+## if logger.handlers:
+## purge_handlers(logger.name) #removed handlers from logger, calls close
+## logger.propagate = 1
+## logger.disabled = 0
+##
+##def reset_all_loggers():
+## "reset all logger levels, handlers, and settings"
+## #FIXME: would be good to track libs that called setup_lib_logging(), and preserve those
+## for logger in get_managed_loggers():
+## if logger.handlers:
+## purge_handlers(logger)
+## logger.level = logging.NOTSET
+## logger.propagate = 1
+## logger.disabled = 0
+
+##def is_logger_managed(name):
+## """check if logger has been previously referenced"""
+## #NOTE: this is poking into the logging module's internals.
+## # any changes to that module will require updating here.
+## # this mainly exists to isolate such nosiness to single function.
+## logging._acquireLock()
+## try:
+## logger = logging.root.manager.loggingDict.get(name)
+## return logger and hasattr(logger, "handlers")
+## except:
+## logging._releaseLock()
+
+##def will_log_level(name, level):
+## "return ``True`` if specified logger would log info at a given level or above."
+## #FIXME: this won't work for proxy logger.
+## #should resolve it via inspect?
+## logger = get_logger(name)
+## level = parse_level_name(level)
+## return logger.getEffectiveLevel() <= level
+
def parse_level_name(value):
    """parse logging level string.

    Given a string containing an int log level or a log level name,
    returns the corresponding integer log level.

    Integer passed in will be returned unchanged.
    Unlike the logging package, this function is case-insensitive.

    raises ValueError if value can't be parsed.

    Usage Example::

        >>> from bps import logs
        >>> logs.parse_level_name("NOTSET")
        0
        >>> logs.parse_level_name("INFO")
        20
        >>> logs.parse_level_name("debug")
        10
        >>> logs.parse_level_name("35")
        35
        >>> logs.parse_level_name(20)
        20
        >>> logs.parse_level_name("BADVALUE")
        Traceback (most recent call last):
            File "<stdin>", line 1, in <module>
        ValueError: unknown logging level value: 'BADVALUE'
    """
    #check for int (also accepts numeric strings such as "35")
    try:
        return int(value)
    except (ValueError, TypeError):
        pass
    #check for registered level name (case-insensitive; getLevelName returns
    #the int when handed a registered name, else a "Level X" string)
    #NOTE(review): non-string, non-numeric input raises AttributeError on
    #.upper() rather than ValueError -- confirm whether that matters to callers.
    result = logging.getLevelName(value.upper())
    if isinstance(result, int):
        return result
    #give up -- py2/py3-compatible raise form (was py2-only ``raise ValueError, msg``)
    raise ValueError("unknown logging level value: %r" % (value,))
+
def get_level_name(value):
    """reverse of parse_level; returns registered name of logging level.

    Usage Example::

        >>> from bps import logs
        >>> logs.get_level_name(0)
        'NOTSET'
        >>> logs.get_level_name(10)
        'DEBUG'
        >>> logs.get_level_name("info")
        'INFO'
        >>> logs.get_level_name("BADVALUE")
        Traceback (most recent call last):
            File "<stdin>", line 1, in <module>
        ValueError: unknown logging level value: 'BADVALUE'
    """
    #normalize input to an int first (raises ValueError for unknown input)
    value = parse_level_name(value)
    name = logging.getLevelName(value)
    #unregistered levels come back as "Level N" -- collapse those to "N"
    if name.startswith('Level ') and name[6:].isdigit():
        #just return number as string
        return str(value)
    return name
+
+#=========================================================
+#patch logger class
+#=========================================================
#remember whatever logger class was installed, so BpsLogger extends it
_OldLogger = logging.getLoggerClass()

class BpsLogger(_OldLogger):
    "helper class bps creates from original logger class to flush captured streams"
    def handle(self, record):
        #NOTE(review): flush_buffers is None until bps.logs.capture fills it
        #in (see module top); handling a record before that import happens
        #would raise TypeError -- confirm import order guarantees this.
        flush_buffers() #call bps.logs.capture's flush hook
        return _OldLogger.handle(self, record)

##    def getLogger(self):
##        #NOTE: right now, this is merely a helper for the proxy loggers
##        #but in future, could add logger resolution ala absimport
##        return self

##    def setLevel(self, value):
##        v = _OldLogger.setLevel(self, parse_level(value))
##        _trigger_callbacks(self.name)
##        return v

    def __cmp__(self, other):
        #allow ``logger > "warning"`` style comparisons against the
        #logger's effective level.
        #NOTE: __cmp__/cmp are py2-only; py3 ignores this method entirely.
        if isinstance(other, int):
            return cmp(self.getEffectiveLevel(), other)
        elif isinstance(other, str):
            return cmp(self.getEffectiveLevel(), parse_level_name(other))
        else:
            #do this so __eq__ works correctly
            return cmp(id(self), id(other))

    def __hash__(self):
        #identity hash, matching the identity-based fallback in __cmp__
        return id(self)

#install BpsLogger so all loggers created from here on use it
logging.setLoggerClass(BpsLogger)
+
+#=========================================================
+#
+#=========================================================
+#NOTE: experimental hook for notifying apps when logging level changes
+#
+##_callbacks = {}
+##
+##def _trigger_callbacks(name):
+## global _callbacks
+## chain = _callbacks.get(name)
+## if chain:
+## for func in chain:
+## func()
+##
+##def on_level_change(name, func):
+## "register a callback called when logger changes level"
+## #XXX: this is an attempted callback system, not finalized at all
+## if hasattr(name, "name"):
+## name = name.name
+## global _callbacks
+## chain = _callbacks.get(name)
+## if name in _callbacks:
+## chain = _callbacks[name]
+## else:
+## chain = _callbacks[name] = []
+## chain.append(func)
+
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/logs/proxy_logger.py b/bps/logs/proxy_logger.py
new file mode 100644
index 0000000..0ea3c4d
--- /dev/null
+++ b/bps/logs/proxy_logger.py
@@ -0,0 +1,84 @@
+"""proxy logger"""
+#=========================================================
+#imports
+#=========================================================
+#core
+import inspect
+from logging import getLogger
+#site
+#lib
+#pkg
+#local
+__all__ = [
+ 'log', 'multilog', 'classlog',
+]
+#=========================================================
+#
+#=========================================================
+
+#TODO: needs unittests
+
+#TODO: should detect any environments (jython?) under which
+# our globals hack won't work, and make ProxyLogger alias
+# MultiProxyLogger instead for those cases.
+
class ProxyLogger(object):
    """
    FIXME: explain what this does much more clearly, give examples.

    This class is for the lazy programmer who doesn't want to create a new
    logger class for every module, but still get the benefits of having logging
    messages mapped to the module name.

    The single instance of this class, ``log``, determines the ``__name__``
    of the module which called the instance's method, and proxies the
    logger returned by getLogger(__name__) on a per-call basis.

    Additionally, when first called from a module,
    it will replace itself in the module's globals with the actual logger,
    to speed up future calls.

    By my count this is 3 ugly hacks rammed together.
    But the behavior makes logging kinda nice :)
    """
    def __getattribute__(self, attr):
        #grab the *caller's* module globals to learn which module is logging.
        #NOTE(review): ``inspect.currentframe(1)`` with a depth argument is a
        #py2-ism (where currentframe aliases sys._getframe); py3's
        #inspect.currentframe() takes no argument -- needs changing for a port.
        globals = inspect.currentframe(1).f_globals
        name = globals.get("__name__", "unnamed-module")
        log = getLogger(name)
        #self-replacing hack: swap the proxy out of the caller's globals
        #for the real logger, so later calls skip the frame inspection
        if globals.get("log") is self:
            globals['log'] = log
        return getattr(log, attr)
+
class MultiProxyLogger(object):
    """
    This class is just like ProxyLogger,
    except it doesn't magically replace itself in the global scope
    when it's first invoked.

    This is useful when importing a log object that's going to be
    imported again from other contexts (eg: pylons base.py -> controllers)
    """
    def __getattribute__(self, attr):
        #resolve the caller's module name on *every* access
        #NOTE(review): same py2-only ``inspect.currentframe(1)`` idiom
        #as ProxyLogger -- confirm before porting to py3.
        globals = inspect.currentframe(1).f_globals
        name = globals.get("__name__", "unnamed-module")
        log = getLogger(name)
        return getattr(log, attr)
+
class ClassLogger(object):
    """
    Property-like descriptor which proxies a logger named after the full
    module path and name of the class through which it is accessed.
    """
    def __get__(self, obj, cls):
        #logger name is "<module>.<ClassName>", whether accessed via the
        #class itself or via an instance
        return getLogger("%s.%s" % (cls.__module__, cls.__name__))
+
+#=========================================================
+#create the single instance anyone will ever need
+#=========================================================
+log = ProxyLogger()
+multilog = MultiProxyLogger()
+classlog = ClassLogger()
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/meta.py b/bps/meta.py
new file mode 100644
index 0000000..d2a1fbb
--- /dev/null
+++ b/bps/meta.py
@@ -0,0 +1,1190 @@
+"""bps.meta - introspection utilities"""
+#===================================================
+#imports
+#===================================================
+from __future__ import absolute_import
+#core
+import os.path
+import inspect
+from functools import update_wrapper, partial
+from logging import getLogger; log = getLogger(__name__)
+import time
+import sys
+from warnings import warn
+from weakref import WeakKeyDictionary
+import types #python module, NOT bps.types
+#needed imports
+#legacy imports
+from bps.undef import Undef
+from bps.error.types import AbstractMethodError
+
#public API of this module, as seen by star-imports
__all__ = [
    #interfaces
    'isseq', 'isnum', 'isstr',

    #introspection & monkeypatching
    'is_overridden',
    'find_attribute',
##    'get_module',
##    'get_module_exports',
    'monkeypatch',
    'monkeypatch_mixin',
    'instrument_super',

    #other decorators
    'abstract_method', 'abstract_property', 'AbstractMethodError',
    'decorate_per_instance',
]
#NOTE(review): several helpers defined below (is_seq/is_num/is_str spellings,
#is_class, is_pair, hasattrs, get_cls_kwds, func_accepts_key, lookup_module,
#class_property, fallback_property, fallback_method) are not exported here --
#confirm whether the omission is intentional.
+
+#=========================================================
+#interface tests
+#=========================================================
+#XXX: should these groups be moved into bps.types?
+# which place would new users expect them to be located?
+
+#XXX: should we standardize on is_xxx or isxxx; or support both?
+# these funcs are so lightweight, isxxx is probably better
+
#known concrete types for each informal "interface" tested below.
#BUGFIX: py3 has no separate 'long' type, so guard its use; the original
#referenced it unconditionally, breaking the intended py3 branch below.
try:
    NumericTypes = (int, float, long) #XXX: add decimal?
except NameError: #py3: 'long' was merged into 'int'
    NumericTypes = (int, float)
SequenceTypes = (list, tuple, set) #heterogenous sequence types (ie, sequences excluding string)
OrderedTypes = (list, tuple) #heterogenous ordered sequences types
#BUGFIX: the original compared the *string* ``sys.version`` against a tuple,
#which never selected the py3 branch on py2 and raises TypeError on py3;
#``sys.version_info`` is the documented tuple form for version checks.
if sys.version_info >= (3, 0):
    StringTypes = (str,)
    ClassTypes = (type,)
    _classobj = None #no old-style classes under py3
else:
    StringTypes = types.StringTypes
    _classobj = types.ClassType
    ClassTypes = (types.ClassType, types.TypeType)

def is_seq(obj):
    "tests if *obj* is a known heterogenous sequence type (list/tuple/set, not string)"
    return isinstance(obj, SequenceTypes)
isseq = is_seq

def is_oseq(obj):
    "tests if *obj* is a known ordered heterogenous sequence (list/tuple)"
    return isinstance(obj, OrderedTypes)
isoseq = is_oseq

def is_num(obj):
    "tests if *obj* is a known numeric type"
    return isinstance(obj, NumericTypes)
isnum = is_num

#add is_integral / is_real ?

def is_str(obj):
    "tests if *obj* is a known string type"
    return isinstance(obj, StringTypes)
isstr = is_str

def is_class(obj):
    "test if *obj* is old or new style class object"
    return isinstance(obj, ClassTypes)
isclass = is_class

def is_pair(obj):
    "check if object is an ordered pair (list or tuple of length 2)"
    return is_oseq(obj) and len(obj) == 2
ispair = is_pair

def is_iter(obj):
    "check if object is an iterator/generator"
    #BUGFIX: py3 renamed the iterator-protocol method to __next__;
    #accept either spelling so the test works on both interpreters.
    return (hasattr(obj, "next") or hasattr(obj, "__next__")) \
        and hasattr(obj, "__iter__")

def hasattrs(obj, include=None, exclude=None):
    """helper for testing if object matches expected protocol by checking for whole sets of attrs.

    :param include: attrs which must ALL be present (falsy = no requirement)
    :param exclude: attrs which must ALL be absent (falsy = no requirement)
    :returns: ``True`` only if every *include* attr exists and no *exclude* attr does.
    """
    return (not include or all(hasattr(obj, attr) for attr in include)) \
        and not (exclude and any(hasattr(obj, attr) for attr in exclude))
+
+#=========================================================
+#class inspection
+#=========================================================
def is_overridden(attr, owner, parent):
    """check if a method has been shadowed.

    :arg attr:
        the method name to check
    :arg owner:
        the object (instance or class) to check
    :param parent:
        parent class to compare against.
    :returns:
        returns ``True`` if *attr*, as defined by *parent*,
        is overridden by *owner* or any class between it and parent.
        Otherwise, returns ``False``.
    """
    #TODO: this currently only deals with methods, could extend if needed
    #TODO: could make parent default to immediate parent of owner
    new = getattr(owner, attr)
    old = getattr(parent, attr)
    #py2 wraps methods in bound/unbound method objects, so compare the
    #underlying functions via im_func.  BUGFIX: py3 returns plain functions
    #with no im_func attribute; fall back to comparing the objects
    #themselves rather than crashing on ``old.im_func``.
    return getattr(new, "im_func", new) is not getattr(old, "im_func", old)
+
def find_attribute(owner, target, first=True, required=False):
    """search class hierarchy of *owner* to find which attribute *target*
    is being stored under.

    Given a *target* object, and an *owner* object to search (either the class or an instance),
    try to determine what attribute of the class the property is stored under.
    By default this will return the first attribute the target object is found at.
    If it is not found, ``None`` will be returned.

    This is useful for property constructors which need to introspect
    and find what attribute they have been stored under at runtime (see example below).

    :Parameters:
        owner
            The object which should be scanned for the property.
            This may be an instance or a class.

        target
            The property (or other object such as a function) to search
            through the attrs of the class hierarchy for.

        first : bool
            If set to ``False``, *all* attributes that match
            will be returned as a list. Otherwise only
            the first match will be returned.

        required : bool
            If true, a RuntimeError will be raised if the target cannot be found.

    An example of how to use this in a property class::

        >>> class LazyConstructor(object):
        ...     def __init__(self, func):
        ...         self.func = func
        ...         self.name = None
        ...     def __get__(self, obj, cls):
        ...         if obj is None:
        ...             return self
        ...         if self.name is None: #cache result for later
        ...             self.name = find_attribute(cls, self, required=True)
        ...         assert self.name not in obj.__dict__
        ...         value = obj.__dict__[self.name] = self.func()
        ...         #we should never get called again for this object
        ...         return value

    """
    #resolve instances to their class type if needed
    if not isinstance(owner, ClassTypes):
        owner = type(owner)
    if hasattr(owner, "__mro__"):
        mro = owner.__mro__[:-1] #remove 'object' cause we don't need to search it
    elif hasattr(owner, "__bases__"):
        #py2 old-style class; build the mro via inspect
        assert isinstance(owner, types.ClassType)
        mro = inspect.getmro(owner)
    else:
        #BUGFIX: use call-style raise (valid py2 & py3) instead of the
        #py2-only ``raise X, msg`` form, matching the py3 support elsewhere.
        raise RuntimeError("unknown object type: %r" % (owner,))
    #traverse class dicts using MRO, bypassing getattr & property code
    #NOTE: items() instead of py2-only iteritems(); equivalent behavior.
    matches = []
    for cls in mro:
        for key, value in cls.__dict__.items():
            if value is target:
                if first:
                    return key
                matches.append(key)
    if first:
        log.warning("find_attribute failed: owner=%r target=%r", owner, target)
        if not required:
            return None
    elif matches or not required:
        #first=False: return all matches (possibly the empty list)
        return matches
    raise RuntimeError("object %r does not appear in the class dictionary of %r" % (target, owner))
+
def get_cls_kwds(cls):
    """Return list of keyword arguments accepted by the specified class's constructor.

    This performs it's job by recursively examining
    the __init__ methods in the specified class and it's bases.

    :arg cls: new-style class to inspect (must have ``__mro__``).
    :returns: list of keyword names, in no particular order.

    .. todo::
        Could look for a class attribute such as "__kwds__" or something,
        but should survey if any major projects have set up a de facto standard first.
    """
    kwds = set() #set of kwds we've seen
    self_kwds = set() #set of kwds used by class as 'self' argument (usually just contains 'self')

    #for each class in MRO, read arguments of it's init method
    #NOTE(review): inspect.getargspec was removed in py3.11 -- py2-era API assumed here.
    for c in cls.__mro__:
        f = c.__dict__.get("__init__")
        if not isinstance(f, types.FunctionType):
            #skip slot wrappers / non-python __init__ (eg object.__init__)
            continue
        #only if it's a function object do we check
        #list of names, and **kwds slot.
        names, _, varkwds, _ = inspect.getargspec(f)
        if not names:
            continue
        self_name = names[0]
        if self_name not in kwds:
            #only store a function's "self" argument
            #if that kwd wasn't being shadowed by a higher-level
            #__init__ method... that way, when we remove all self-names
            #below, we don't remove shadowed self-names
            self_kwds.add(self_name)
        kwds.update(names)
        if not varkwds:
            #if it doesn't support varkwds,
            #assume we don't need to go higher up class hierarchy.
            break
    #remove all kwds being used for "self"
    kwds.difference_update(self_kwds)
    return list(kwds)
+
+##def get_func_kwds(func):
+## """Return list of legal kwd args for the given function"""
+## return inspect.getargspec(func)[0]
+
def func_accepts_key(func, key):
    """test if function accepts a given kwd parameter.

    :arg func:
        function or class object.
        (if a class, the __init__ method is examined).
    :arg key:
        the key (or list of keys) which must be accepted.

    :returns:
        ``True`` if the keyword is accepted, else ``False``.
        a ``**kwds`` slot on the function makes this return ``True``
        for any key.
    """
    #check this is a class
    method = False
    if isclass(func):
        #TODO: make use get_cls_kwds, but need to make sure nothing
        #relies on this function's **kwds behavior.
##        return key in get_cls_kwds(func)
        #py2: unwrap the unbound method to reach the raw function object
        func = func.__init__.im_func
        method = True
    elif isinstance(func, partial):
        #unwrap (possibly nested) partials down to the base callable
        #NOTE(review): args already bound by the partial are still reported
        #as accepted -- confirm that is the intended semantics.
        while isinstance(func, partial):
            func = func.func
    #NOTE(review): inspect.getargspec was removed in py3.11; py2-era API assumed.
    args, varargs, varkw, defaults = inspect.getargspec(func)
    if bool(varkw):
        #XXX: is there some de facto protocol to check for this?
        # for now, just assume it accepts everything
        return True
    if method:
        #drop the implicit 'self' slot before matching
        args = args[1:]
    if is_seq(key):
        return all(k in args for k in key)
    else:
        return key in args
+
+#=========================================================
+#module inspection
+#=========================================================
def get_module(name):
    """import and return the module with the given absolute dotted *name*.

    unlike a bare ``__import__(name)``, this hands back the *leaf* module
    (eg ``get_module("os.path")`` returns ``os.path``, not ``os``).
    """
    #a non-empty fromlist makes __import__ return the leaf module itself
    fromlist = ['dummy']
    return __import__(name, None, None, fromlist, 0)
+
def get_module_exports(module):
    """return list of attrs exported from module by default.

    This is the same as ``list(module.__all__)`` IF the module defined that variable.
    Otherwise, this returns a list approximating the default python behavior
    (every attribute whose name lacks a leading underscore).

    :arg module: a module object, or an absolute dotted module name.
    """
    if is_str(module):
        #resolve dotted names to the actual module object
        module = get_module(module)
    try:
        exports = module.__all__
    except AttributeError:
        return [attr for attr in dir(module) if not attr.startswith("_")]
    return list(exports)
+
#lookup tables used by lookup_module() to map file extensions back to modules
_script_exts = set([".py", ".pyc", ".pyo"]) #extensions for python source/bytecode
if os.name == "posix":
    _cmod_exts = set([".so"]) #compiled extension modules on posix
elif os.name == "nt":
    _cmod_exts = set([".pyd"]) #compiled extension modules on windows
    _script_exts.add(".pyw") #windows no-console scripts
else:
    #TODO: what do other os's use?
    _cmod_exts = set()
+
def lookup_module(path, name=False):
    """find loaded module given path it came from.

    given a path to a .py file or a package directory,
    this attempts to find the loaded module that was derived
    from the file. this attempts to be the reverse of ``module.__path``,
    using inspection of ``sys.modules``. it's not perfect,
    but there's no analog in the inspect module.

    :arg path:
        path to python file or package directory.

    :param name:
        optionally this function can return the full name
        of the module, rather than the module itself.

    :returns:
        name or module instance (see name flag) if
        a loaded module was found which matches path.

        if no corresponding module has been loaded yet,
        or the path does not correspond to a python module,
        returns ``None``.
    """
    #NOTE(review): the 'global' declaration is unnecessary -- both names are
    #only read here, never rebound.
    global _script_exts, _cmod_exts
    #FIXME: gotta be a better way to figure this info out,
    #inspect doesn't seem to have the right func for the job.

    if os.path.isfile(path):
        #figure out what type of file this is
        root, ext = os.path.splitext(path)
        root = os.path.abspath(root)
        if ext in _script_exts:
            #if it's a script, search for all known script extensions
            targets = set(root + ext for ext in _script_exts)
        elif ext in _cmod_exts:
            #if it's a compiled module, search for all known compiled module extensions
            targets = set(root + ext for ext in _cmod_exts)
        else:
            #no idea what to do
            log.warning("lookup_module(): path has unknown extension: %r", path)
            return None
        #membership test applied to each module's __file__ below
        test = targets.__contains__
    elif os.path.isdir(path):
        #assume it's a package dir, and set root to be the init file inside the directory
        root = os.path.abspath(os.path.join(path, "__init__"))
        targets = set(root + ext for ext in _script_exts)
        test = targets.__contains__
    elif not os.path.exists(path):
        log.warning("lookup_module(): path doesn't exist: %r", path)
        return None
    else:
        log.warning("lookup_module(): unsupported file type: %r", path)
        return None

    #try and find target in loaded modules
    #NOTE: would like to use iteritems(), but dict changes size on us :(
    #FIXME: there may be multiple matches (eg: posixpath and os.path),
    # and this just returns the first one it finds. we could do better,
    # say returning all of them if requested, or preferring one
    # whose name matches the pathname (ie, original over aliases)
    for mod_name, module in sys.modules.items():
        mod_file = getattr(module, "__file__", None)
        #XXX: do we need to run mod_path through abspath ?
        if mod_file and test(mod_file):
            log.debug("lookup_module(): resolved path to module: %r => %r", path, mod_name)
            if name:
                return mod_name
            else:
                return module

    #give up
    log.warning("lookup_module(): can't resolve path to loaded module: %r", path)
    return None
+
+#=========================================================
+#class manipulation
+#=========================================================
+##def get_private_attr(obj, attr, default=Undef, name=Undef):
+## if name is Undef:
+## if isinstance(obj,type):
+## name = obj.__name__
+## else:
+## name = obj.__class__.__name__
+## if name[0] == "_":
+## attr = "%s__%s" % (name,attr)
+## else:
+## astr = "_%s__%s" % (name,attr)
+## if default is Undef:
+## return getattr(obj,astr)
+## else:
+## return getattr(obj,astr,default)
+##
+##def set_private_attr(obj, attr, value, name=Undef):
+## if name is Undef:
+## if isinstance(obj,type):
+## name = obj.__name__
+## else:
+## name = obj.__class__.__name__
+## if name[0] == "_":
+## attr = "%s__%s" % (name,attr)
+## else:
+## astr = "_%s__%s" % (name,attr)
+## return getattr(obj,astr,value)
+
+#=========================================================
+#monkeypatching
+#=========================================================
def monkeypatch(target, attr=None, wrap=False, clobber=True):
    """Decorator to aid in monkeypatching.

    The decorated function will be patched into `target`
    under the attribute same name as the wrapped function.
    The attribute can be overriden via the `attr` kwd.

    This was posted by GVR somewhere on the internet.
    *It's not just evil, it's easy-to-use evil!*
    Pretend this isn't here unless you really need it.

    :Parameters:
        target
            the target object which we're replacing an attribute of
        attr
            [optional]
            attribute to be replaced. if not specified,
            taken from the name of the function this decorates.
        wrap
            if true, original value will be passed in as first positional argument,
            if false (the default), it will be discarded.
        clobber
            By default, this function will overwrite any existing
            value stored in the target attribute. If this is set
            to ``False``, an error will be raised if the attribute
            contains data.

    Usage::

        >>> from bps.meta import monkeypatch
        >>> #say we have a class...
        >>> class MyClass(object):
        ...     def a(self, x=10):
        ...         return x+1
        >>> m = MyClass()
        >>> m.a()
        11
        >>> #and later we want to patch method 'a'
        >>> @monkeypatch(MyClass)
        ... def a(self, x=10):
        ...     return x*2
        >>> m.a()
        20
        >>> #say we want to patch it (again) while calling previous copy
        >>> @monkeypatch(MyClass, wrap=True)
        ... def a(orig, self, x=10):
        ...     return orig(self,x)+5
        >>> m.a()
        25
    """
    def builder(func):
        dest = attr or func.__name__
        if not clobber and getattr(target, dest, None):
            raise AttributeError("monkeypatch target already exists: target=%r attr=%r" % (target, dest))
        if not wrap:
            setattr(target, dest, func)
            return
        original = getattr(target, dest)
        if isinstance(target, type):
            #partial objects don't bind as methods when stored on a class,
            #so use a plain closure wrapper instead
            def wrapper(*args, **kwds):
                return func(original, *args, **kwds)
            update_wrapper(wrapper, original)
            setattr(target, dest, wrapper)
        else:
            setattr(target, dest, partial(func, original))
        #NOTE: builder deliberately returns None in every path (matching the
        #original implementation), so the decorated name in the calling
        #module ends up bound to None rather than to the patch function.
    return builder
+
+def monkeypatch_mixin(target, first=False):
+ """Modify a class by appending another class to it's list of bases.
+ This is mainly useful for monkeypatching a mixin class.
+
+ :arg target: class to be patched
+ :param first: if mixin should be placed at beginning of bases, not end
+
+ Usage::
+
+ >>> from bps.meta import monkeypatch_mixin
+ >>> #say you have a class...
+ >>> class MyClass(object):
+ >>> pass
+ >>> MyClass.__bases__
+ (object,)
+ >>> #and somewhere else, you want to patch one in
+ >>> class OtherClass(object):
+ >>> pass
+ >>> monkeypatch_mixin(MyClass)(OtherClass)
+ >>> MyClass.__bases__
+ (object,OtherClass)
+
+ .. note::
+ If target is subclass of mixin,
+ this function will silently do nothing.
+ """
+ def builder(mixin):
+ #check if it's already merged in
+ if issubclass(target, mixin):
+ return mixin
+
+ #check if we can't due to circular ref
+ if issubclass(mixin, target):
+ raise TypeError, "mixin %r cannot derive from target %r" % (mixin, target)
+
+ #TODO: figure out greatest common ancestor,
+ # and (if it's in target.__bases__, just replace it w/ mixin)
+ if first:
+ target.__bases__ = (mixin,) + target.__bases__
+ else:
+ target.__bases__ += (mixin,)
+
+ return mixin
+
+ return builder
+
+#=========================================================
+#source code inspection
+#=========================================================
_cache = {} #maps cls -> (path, first, last); FIXME: make weakkeyref?
def get_class_range(cls):
    """given a class, returns a tuple of ``(path,first,last)``, defined as follows:

    path
        filepath that class was defined in (may not exist, just used for identification);
        ``None`` for builtins.

    first
        smallest firstlineno of any method defined in the class

    last
        largest firstlineno of any method defined in the class

    .. note::
        This function is mainly used by :func:`instrument_super`.
        It may have some strange behaviors related to that use-case,
        which may need to be cleared up when other use-cases are found.
    """
    global _cache
    if cls in _cache:
        return _cache[cls]

    #first, figure out path of class
    path = getattr(sys.modules.get(cls.__module__), "__file__", None)
    if path is None:
        #builtin, return fake info
        log.debug("get_class_range(%r): <builtin>", cls)
        result = _cache[cls] = (None, None, None)
        return result
    npath = _get_base_path(path)

    #find largest & smallest firstlineno of all of class's methods
    #which are defined in class path
    first = None
    last = None
    for k, v in cls.__dict__.items():
        if hasattr(v, "__get__") and not isinstance(v, (MultipleSuperProperty, SingleSuperProperty)):
            #resolve descriptors (classmethod/staticmethod/etc) to their payload
            v = v.__get__(None, cls)
        if hasattr(v, "im_func"):
            c = v.im_func.func_code
        elif hasattr(v, "func_code"):
            c = v.func_code
        else:
            continue
        if _get_base_path(c.co_filename) != npath:
            #method was defined in some other file; ignore it
            continue
        if first is None or c.co_firstlineno < first:
            first = c.co_firstlineno
        if last is None or c.co_firstlineno > last:
            last = c.co_firstlineno
    log.debug("get_class_range(%r): path=%r start=%r end=%r", cls, path, first, last)
    #BUGFIX: the original never wrote results back to _cache, so the
    #"if cls in _cache" fast path above was dead code and every call
    #re-scanned the class dict; store the tuple before returning.
    result = _cache[cls] = (path, first, last)
    return result
+
+def _get_base_path(path):
+ """helper func used to normalize filepaths
+ returned by getfile(cls) and func.co_filename paths"
+ """
+ return os.path.splitext(os.path.abspath(path))[0]
+
+#=========================================================
+#super descriptor
+#=========================================================
class SingleSuperProperty(object):
    """descriptor implementing a read-only ``__super`` attribute for exactly
    one class.

    helper for instrument_super(); every access yields a ``super()`` object
    resolved relative to the single class supplied at construction time.
    """
    __thisclass__ = None #class super() should resolve relative to

    def __init__(self, cls):
        self.__thisclass__ = cls

    def __get__(self, obj, cls):
        #class-level access falls back to class-bound super()
        target = obj if obj is not None else cls
        return super(self.__thisclass__, target)

    def __set__(self, obj, value):
        raise ValueError("__super attributes are read-only")

    def __delete__(self, obj):
        raise ValueError("__super attributes are read-only")
+
class MultipleSuperProperty(object):
    """helper for instrument_super() which provides
    a __super attribute which uses different __thisclass__ values
    depending on which class accesses the attribute.

    it takes as input the class name it's managing __$NAME__super for,
    and (via stack inspection), picks the class with that name which seems
    to contain the calling code. the algorithm works reliably for most cases,
    including multiple classes in the same file, sharing the same name.

    however, situtations such as explicitly accessing the private namespace
    from code which lies outside the class are not handled, as there seems
    to be no "best" behavior in such a situation. this case is pretty rare,
    however.

    another drawback is this is a rather complicated and expensive algorithm.
    luckily, instrument_super() only uses this property
    when multiple classes are sharing the same private namespace.
    """
    name = None #: name of classes whose shared namespace this manages __super for

    def __init__(self, name):
        self.name = name

    def __get__(self, obj, cls):
        #get list of all parent classes with the desired name
        name = self.name
        choices = [
            c for c in cls.__mro__
            if c.__name__ == name
        ]
        #NOTE: raise statements below use the call form (valid py2 & py3)
        #instead of the py2-only ``raise X, msg`` form.
        if len(choices) == 0:
            raise RuntimeError("no classes named %r found in mro of %r" % (name, cls))
        elif len(choices) == 1:
            #common case: the name is unambiguous within this mro
            thisclass = choices[0]
        else:
            #remove candidates with different module path
            #NOTE(review): currentframe(1) relies on the py2 alias
            #inspect.currentframe -> sys._getframe; py3's inspect.currentframe()
            #takes no arguments -- confirm before porting.
            frame = inspect.currentframe(1)
            path = frame.f_globals.get("__file__")
            choices = [
                c for c in choices if
                getattr(sys.modules.get(c.__module__), "__file__", None) == path
            ]
            if len(choices) == 0:
                raise RuntimeError("no classes named %r from file %r found in mro of %r" % (name, path, cls))
            elif len(choices) == 1:
                thisclass = choices[0]
            else:
                #now the unreliable part:
                #try and guess which class defined the frame's code,
                #based on the line numbers used by the candidates's methods.
                target = frame.f_code.co_firstlineno
                def match_class(cls):
                    _path, first, last = get_class_range(cls)
                    assert _path == path, "class unexpected changed path"
                    if first is None:
                        return True #XXX: discard cls if we have no info?
                    return first <= target and last >= target
                choices = [
                    c for c in choices
                    if match_class(c)
                ]
                if len(choices) == 0:
                    raise RuntimeError("no classes named %r from file %r including line %r found in mro of %r" % (name, path, target, cls))
                elif len(choices) == 1:
                    thisclass = choices[0]
                else:
                    #FIXME: what do we do now?
                    #multiple classes in same file, seemingly both including target lineno.
                    #kinda weird.
                    log.warning("multiple matches for thisclass: name=%r path=%r line=%r choices=%r", name, path, target, choices)
                    #only case i can think of where this could occur is two nested classes w/ same name,
                    #so for now, we pick the one w/ largest starting lineno
                    #FIXME: what about case where one of choices had first=None?
                    thisclass = choices[0]
                    first = get_class_range(thisclass)[1]
                    for choice in choices[1:]:
                        v = get_class_range(choice)[1]
                        if v > first:
                            thisclass = choice
                            first = v

        #ok, thisclass has been chosen, so generate super()
        if obj is None:
            obj = cls
        return super(thisclass, obj)

    def __set__(self, obj, value):
        raise ValueError("__super attributes are read-only")

    def __delete__(self, obj):
        #BUGFIX: the original declared ``__delete__(self, obj, cls)``; the
        #descriptor protocol invokes __delete__ with (self, obj) only, so the
        #extra parameter made every delete attempt raise TypeError instead of
        #the intended ValueError (cf. SingleSuperProperty.__delete__).
        raise ValueError("__super attributes are read-only")
+
def instrument_super(cls, optimize=True):
    """Sets up a ``__super`` descriptor in the private namespace of the specified class.

    This function should be able to instrument any class which inherits from :class:`object`.

    :param optimize:
        Setting this to ``False`` disables the fast __super implementation,
        when normally, the choice will be autodetected.
        This is mainly a helper for when autodetection fails.

    Usage Example::

        >>> class MyClass(object):
        ...     def __init__(self, **kwds):
        ...         self.__super.__init__(**kwds)
        >>> instrument_super(MyClass) #calling this makes self.__super work, above.

    .. note::
        This is not needed if you are inheriting from :class:`bps.types.BaseClass`,
        as that class takes care of calling this function for all subclasses.

    .. note::
        Since this method of automatic ``super()`` support relies on the class's
        private namespace being unique, two classes in the mro with the same
        name will have to share a single attribute. The code behind this function
        attempts to compensate for this case, but occasionally may get confused.

    .. warning::
        This function assumes it will be called on a parent
        class before it's child classes, or never called for the parent classes.
        The remaining case (where it's called to instrument a parent class
        AFTER it's been called on a child class) messes up the autodetection algorithm.

    """
    #name-mangled attribute this class's methods see as ``self.__super``
    attr = "_%s__super" % cls.__name__.lstrip("_")
    #if another class sharing this private namespace already defines __super
    #(ie, the attr is inherited but absent from our own dict), we must use
    #the slower name-disambiguating implementation.
    #XXX: this check fails to detect if parent __super is initialized AFTER child __super
    shared = hasattr(cls, attr) and attr not in cls.__dict__
    if optimize and not shared:
        prop = SingleSuperProperty(cls)
    else:
        prop = MultipleSuperProperty(cls.__name__)
    #set new __super property
    setattr(cls, attr, prop)
+
+#=========================================================
+#other decorators
+#=========================================================
def abstract_method(func):
    """Method decorator which indicates this is a placeholder method which
    should be overridden by subclass.

    This is mainly useful when defining framework classes that must be
    subclassed before they will be useful.

    If called directly, this method will raise an :exc:`AbstractMethodError`
    (which is a subclass of :exc:`NotImplementedError`).
    """
    template = "object %(self)r method %(name)r is abstract, and cannot be called"
    def wrapper(self, *args, **kwds):
        #wrapper.__name__ reflects the wrapped func (set by update_wrapper)
        raise AbstractMethodError(template % dict(self=self, name=wrapper.__name__))
    update_wrapper(wrapper, func)
    return wrapper

abstractmethod = abstract_method #for compat with python syntax
+
class class_property(object):
    """Decorator providing a read-only property computed from the *class*
    rather than the instance (roughly classmethod+property combined).
    """

    def __init__(self, func):
        self.im_func = func #stored under the py2 method-style attribute name

    def __get__(self, obj, cls):
        getter = self.im_func
        return getter(cls)
+
class fallback_property(object):
    """Read-only descriptor calling ``func(obj, cls)``, where *obj* is
    ``None`` when accessed on the class itself (a classmethod+fallback_method
    hybrid, limited to read-only).
    """
    def __init__(self, func):
        self.im_func = func #exposed under the py2 method attribute name

    def __get__(self, obj, cls):
        compute = self.im_func
        return compute(obj, cls)
+
class fallback_method(object):
    """Decorator which lets method act like a class OR instance method.

    function will be called with prototype ``func(obj,cls,*args,**kwds)``,
    where ``obj`` is ``None`` if invoked from a class.
    """
    def __init__(self, func):
        self.im_func = func #underlying function, py2 method-style attr name
        self.__name__ = func.__name__
        self.__doc__ = func.__doc__
        #per-class cache of class-bound wrappers; weak keys so classes
        #aren't kept alive just because they were accessed through us
        self._cache = WeakKeyDictionary()

    def _bind_func(self, obj, cls):
        """build a wrapper that calls im_func(obj, cls, ...).

        *obj* is ``None`` for class-level binding.  the wrapper mimics a
        py2 bound method by exposing im_func/im_self/im_class attrs.
        """
        func = self.im_func
        def method(*a, **k):
            return func(obj, cls, *a, **k)
        update_wrapper(method, func)
        ##if obj is None:
        ##    method.__repr__ = lambda : "<fallback_method %s.%s; bound to class>" % (cls.__name__, func.__name__)
        ##else:
        ##    method.__repr__ = lambda : "<fallback_method %s.%s; bound to instance %r>" % (cls.__name__, func.__name__, obj)
        method.__name__ = "<fallback_method_wrapper>" #NOTE: would like to implement repr() above, but can't.

        method.im_func = func
        method.im_self = obj
        method.im_class = cls
        return method

    def __get__(self, obj, cls):
        if obj is not None:
            #create method and store in object, so next access calls it directly (shadowing this descriptor)
            method = self._bind_func(obj, cls)
            setattr(obj, self.__name__, method)
            return method

        #old style classes can't do weakrefs, so can't cache our result
        elif _classobj and isinstance(cls, _classobj):
            return self._bind_func(None, cls)

        #can't shadow ourselves in subclass, so using internal cache for methods
        else:
            cache = self._cache
            method = cache.get(cls)
            if method is None:
                method = cache[cls] = self._bind_func(None, cls)
            return method
+
def abstract_property(name, doc=None):
    """Property constructor which indicates this is a placeholder attribute which
    should be overridden by subclass.

    This is mainly useful when defining framework classes that must be
    subclassed before they will be useful.

    If read or written from an instance, this method will raise an :exc:`AbstractMethodError`
    (which is a subclass of :exc:`NotImplementedError`).

    :arg name: attribute name, used in the error message (required).
    :param doc: optional docstring for the generated property.
    """
    if not name:
        raise ValueError("property name must be specified")
    doc = doc or ("abstract property %r" % (name,))
    template = "object %(self)r attribute %(name)r is abstract, and must be implemented by a subclass"
    def trap(self, *args, **kwds):
        #shared by getter, setter, and deleter alike
        raise AbstractMethodError(template % dict(self=self, name=name))
    return property(trap, trap, trap, doc)
+
def decorate_per_instance(decorator, attr=True, bind="function"):
    """creates a decorator to be applied per-instance to the instance method.

    .. note::
        What this function does may be considered "slightly deep magic",
        at least by those who don't normally traffic in such things.
        It performs some python trickery which (if it's not the trickery you need)
        can safely be ignored.

    This is a decorator-decorator: that is, instead of wrapping an
    already decorated function, it's designed to wrap the decorator itself.

    What is returned is in fact a descriptor object,
    which lazily uses the provided decorator to wrap
    the function on a per-instance basis, rather than
    wrap it a single time for the underlying function,
    which is what an unmodified decorator does.

    This allows decorators such as :func:`cached_method` to have
    separate scopes for each instance, instead of one shared
    scope for the entire function (such as :func:`cached_function`).

    :param decorator:
        This should be a decorator function. It will be called
        for every instance of the class whose method we are decorating.

    :param attr:
        If attr is ``True`` (the default), the name of
        the decorated function will be assumed to be the final name
        of the attribute. If attr is ``None``, an attempt
        will be made to determine the attr at runtime.
        Otherwise, attr should be a string which will override
        the assumed attr.

    :param bind:
        This controls how the decorator will be attached to the instance.
        There are currently two possible values:

        ``function``
            The default behavior:

            The decorator will be passed the underlying function,
            and it's return value passed to python to create the bound method.
            This mode has the following properties:

            * The local scope of the decorator will be per-instance,
              not per function. This is what distinguishes this meta decorator
              from simply decorating the original function directly.

            * If your decorator stores any state in ``func.__dict__``,
              it will have to share that dict between all object instances.
              If this is not desirable, consider using ``method`` mode, below.

            * Like a normal function, *self* will always be the first argument
              when your decorated function is called.

        ``method``
            The decorator will be passed a bound instance method
            instead of the original function, and it's return value
            will be returned as if it were the desired method.
            This has the following properties:

            * The decorator scope will still be per-instance like function mode.

            * ``func.__dict__`` will be unique per instance, since the function
              provided will in fact be an instancemethod object that is unique
              per instance, as opposed to function mode.

            * Since this mode occurs after *self* has been bound into the method,
              *self* will NOT be present as the first argument in your decorator
              (though it can be accessed via the instancemethod object's im_self).

    :returns:
        This returns a decorator which
        (when passed a function), will return
        an :class:`instance_decorating_descriptor` object
        that will lazily bind the method on a per-instance
        basis.

    A usage example::

        >>> from bps.cache import cached_function
        >>> from bps.meta import decorate_per_instance
        >>> class MyClass(object):
        ...     #this is essentially how cached_method() is implemented...
        ...     @decorate_per_instance(cached_function(args=1))
        ...     def myfunc(self, value):
        ...         return value*2

    .. caution::

        The current implementation of this function violates normal decorator
        behavior, because the result of it's constructor is not a callable function,
        but a descriptor object. Because of this, it should only be used as the
        topmost (outermost) decorator applied to a function, since most decorators
        will not be able to handle a descriptor object.
    """
    #the actual decorator: wraps *func* in the lazy per-instance descriptor
    def instance_decorator(func):
        return instance_decorating_descriptor(func, decorator, attr=attr, bind=bind)
    return instance_decorator
+
class instance_decorating_descriptor(object):
    """property which decorates methods on a per-instance basis.

    this is the backend for :func:`decorate_per_instance`,
    see that function for details.

    :arg func: the undecorated function to wrap.
    :arg decorator: decorator applied lazily on first per-instance access.
    :param attr:
        attribute name this descriptor is stored under;
        ``True`` (the default) means "use ``func.__name__``",
        ``None`` means "autodiscover via find_attribute() on first access".
    :param bind:
        binding mode, either ``"function"`` or ``"method"``
        (see :func:`decorate_per_instance` for the semantics of each).
    """
    def __init__(self, func, decorator, attr=True, bind="function"):
        assert func
        self.func = func
        if attr is True:
            self.attr = func.__name__
        else:
            self.attr = attr #none or a string
        # we could also clone func's docstring / other things update_wrapper does
        assert decorator
        self.decorator = decorator
        if bind == "function":
            self.create = self.create_function
        elif bind == "method":
            self.create = self.create_method
        else:
            #NOTE: call-style raise keeps this valid under both py2 and py3
            raise ValueError("unknown bind mode: %r" % (bind,))

    create = None #filled in by init

    def create_function(self, obj, cls):
        "create() implementation using func mode binding"
        #decorate the raw function first, then bind the result to the instance
        func = self.decorator(self.func)
        return instancemethod(func, obj, cls)

    def create_method(self, obj, cls):
        "create() implementation using meth mode binding"
        #bind the raw function to the instance first, then decorate the bound method
        meth = instancemethod(self.func, obj, cls)
        return self.decorator(meth)

    def __get__(self, obj, cls):
        #take first opportunity to figure out what attribute we're stored in
        if self.attr is None:
            self.attr = find_attribute(cls, self, required=True)
        #deal with class-level access
        if obj is None:
            #XXX: could return a decorated but unbound instance
            #XXX: could do some clever alternate attribute storage for class methods
            return self
        #create decorated method, and put it in object's dictionary
        # to shadow this descriptor, so we won't get called again
        wrapped_method = obj.__dict__[self.attr] = self.create(obj, cls)
        return wrapped_method
+
#hack to get the type that creates bound method objects (types.MethodType);
#instance_decorating_descriptor was chosen just because it was there --
#this type can be gotten from any method's __get__.
instancemethod = type(instance_decorating_descriptor.__get__)
+
+#=========================================================
+#functional stuff
+#=========================================================
class Params(object):
    """object which represents a set of args and kwds.

    Like partial() but without the function.

    The args attribute will contain the list of positional args.
    The kwds attribute will contain the dict of keyword args.
    """

    @classmethod
    def normalize(cls, value):
        "given tuple, dict, or Params object, return a Params object"
        if hasattr(value, "args") and hasattr(value, "kwds"):
            #assume it's already a Params obj
            return value
        elif hasattr(value, "keys"):
            #assume it's a dict of kwd args
            return cls(**value)
        #XXX: if is_str, call parse?
        else:
            #assume it's a sequence / iterable of positional args
            return cls(*value)

    @classmethod
    def parse(cls, source, kwds=True, scope=None):
        """parse params string, returning Params object.

        :arg source: the source string to parse
        :arg kwds: whether kwd arguments should be accepted (defaults to True)
        :param scope: optional dictionary to use as global scope when evaluating string.

        :returns:
            Params object containing the parsed args and kwds.

        :raises ValueError: if string can't be parsed into Params.

        Usage Example::

            >>> from bps.meta import Params
            >>> x = Params.parse("1,2,'a',t=5")
            >>> x
            Params(1,2,'a',t=5)
            >>> x.args
            [1, 2, 'a']
            >>> x.kwds
            {'t': 5}

        .. warning::
            This currently uses 'eval', so it shouldn't be considered secure.
            In the future, a simpler parser may be written for safety.
        """
        assert is_str(source), "expected string: %r" % (source,)
        if not source.strip():
            return cls()
        if kwds:
            grab = cls
        else:
            #discard kwd support by only forwarding positional args
            def grab(*a):
                return cls(*a)
        if scope:
            g = scope.copy()
        else:
            g = {}
        g['__grab'] = grab
        try: #assume it's already parenthesized
            result = eval("__grab " + source, g)
            if isinstance(result, cls):
                return result
            #else probably had format of "(1,2),3", so wrong value was returned.
        except SyntaxError:
            pass
        #try wrapping with parens
        try:
            result = eval("__grab (" + source + ")", g)
        except SyntaxError:
            #NOTE: call-style raise keeps this valid under both py2 and py3
            raise ValueError("bad params string: %r" % (source,))
        if isinstance(result, cls):
            return result
        raise ValueError("bad params string: %r" % (source,))

    def __init__(self, *args, **kwds):
        "create new params object from args"
        self.args = list(args)
        self.kwds = kwds

    def __getitem__(self, key):
        "look up a positional arg by integer index, or a kwd arg by name"
        if isinstance(key, int):
            #it's an argument
            return self.args[key]
        else:
            #it's a keyword
            return self.kwds[key]

    #XXX: could do a get() method, __contains__ method, etc
    #XXX: could have __iter__ yield the ints then kwds for use with __getitem__

    def append_modified(self, kwds, default=None):
        "append all specified kwds, but only if value doesn't match default"
        target = self.kwds
        #NOTE: items() instead of iteritems() keeps this py3-compatible
        for k, v in kwds.items():
            if v != default:
                target[k] = v

    def append(self, *args, **kwds):
        "append positional parameters, update kwd parameters"
        if args:
            self.args.extend(args)
        if kwds:
            self.kwds.update(kwds)

    def insert(self, pos, *args, **kwds):
        "insert positional parameters at *pos*, update kwd parameters"
        if args:
            self.args[pos:pos] = args
        if kwds:
            self.kwds.update(kwds)

    def clone(self, *args, **kwds):
        "create new params object with args appended to existing args"
        #NOTE: uses type(self) rather than hard-coded Params, so that
        #subclasses clone to their own type
        other = type(self)(*self.args, **self.kwds)
        if args:
            other.args.extend(args)
        if kwds:
            other.kwds.update(kwds)
        return other

    def clear(self):
        "remove all positional and keyword parameters in place"
        del self.args[:]
        self.kwds.clear()

    def render(self, offset=0):
        """render parenthesized parameters.

        ``Params.parse(p.render())`` should always return
        a params object equal to the one you started with.

        ``p.render(1)`` is useful for method arguments,
        when you want to exclude the first argument
        from being displayed.
        """
        txt = ''
        for a in self.args[offset:]:
            txt += "%r, " % (a,)
        kwds = self.kwds
        for k in sorted(kwds):
            txt += "%s=%r, " % (k, kwds[k])
        if txt.endswith(", "):
            txt = txt[:-2]
        return txt

    def render_class(self, class_):
        "render a nice repr for the class using the current parameters"
        if not hasattr(class_, "__bases__"): #allow obj to be passed in for convience
            class_ = class_.__class__
        return "%s.%s(%s)" % (class_.__module__, class_.__name__, self)

    def __str__(self):
        return self.render()

    def __repr__(self):
        return "Params(%s)" % self.render()

    def __eq__(self, other):
        #duck-typed: anything exposing matching args & kwds compares equal
        if hasattr(other, "args") and hasattr(other, "kwds"):
            return self.args == other.args and self.kwds == other.kwds
        return False

    def __ne__(self, other):
        return not self.__eq__(other)
+
+#=========================================================
+#
+#=========================================================
diff --git a/bps/misc.py b/bps/misc.py
new file mode 100644
index 0000000..e81e44c
--- /dev/null
+++ b/bps/misc.py
@@ -0,0 +1,487 @@
+"""bps.misc -- assorted functions that dont fit in another category"""
+#===================================================
+#imports
+#===================================================
+#core
+from functools import update_wrapper
+import time
+import re
+#pkg
+from bps.types import Undef
+from bps.meta import find_attribute
+#local
+__all__ = [
+ #property constructors
+ 'indirect_property',
+ 'constructor_property',
+
+ #http
+ 'parse_agent_string',
+ 'agent_string_has_product',
+
+ #other
+ 'stepped_delay',
+]
+
+#=========================================================
+#property constructors
+#=========================================================
class indirect_property(object):
    """descriptor which acts like property(), but resolves methods at instance time.

    One of the drawbacks of the builtin :func:``property`` is that it stores
    the functions directly. Thus, if a subclass overrides the method
    which is also being used by a property's fget,
    the property object will still use the original function.

    This is a drop-in replacement for property which takes in
    attribute *names* (strings) instead of actual functions. It does
    runtime resolution of the attributes, so that the named
    methods can be safely overridden (even on a per-instance basis)
    and still have the properties use the correct code.

    .. note::
        Due to the repeated additional lookup, this is slower
        than a normal property, so use it only if you have to.
    """
    #TODO: default doc based on attr names

    def __init__(self, fget=None, fset=None, fdel=None, doc=None):
        #fget/fset/fdel are attribute *names*, resolved per access
        self.fget = fget
        self.fset = fset
        self.fdel = fdel
        if doc:
            self.__doc__ = doc

    def __get__(self, obj, cls):
        if obj is None:
            return self
        if self.fget:
            return getattr(obj, self.fget)()
        #match property()'s behavior when no getter was configured
        raise AttributeError("unreadable attribute")

    def __set__(self, obj, value):
        if self.fset:
            getattr(obj, self.fset)(value)
        else:
            raise AttributeError("readonly attribute")

    def __delete__(self, obj):
        if self.fdel:
            #BUGFIX: was ``getattr(obj, self.fdel)(value)`` -- *value* is
            #undefined in this scope (deleters take no argument),
            #so every delete raised NameError.
            getattr(obj, self.fdel)()
        else:
            raise AttributeError("can't delete attribute")
+
class constructor_property(object):
    """lazy-initialized attribute.

    A descriptor which calls *func* the first time the attribute is read
    on a given instance, stores the result in the instance's ``__dict__``
    (shadowing this descriptor), and returns it. Subsequent reads hit the
    stored value directly, until ``del obj.attr`` removes it.

    usage::
        >>> from bps.misc import constructor_property
        >>> class Example(object):
        >>>     a = constructor_property(dict)
        >>> e = Example()
        >>> e.__dict__.get("a")
        None
        >>> e.a
        {}
        >>> e.__dict__.get("a")
        {}

    :arg func:
        function / class to call when the attribute is first accessed.
    :arg name:
        attribute name this descriptor is stored under
        (autodiscovered on first access if omitted).
    :param passref:
        if True, *func* is called with the instance as first argument
        (``func(self)``) rather than without arguments (``func()``).
    """
    def __init__(self, func, name=None, passref=False):
        self.func = func
        self.name = name
        self.passref = passref

    def __get__(self, obj, cls):
        if obj is None:
            #class-level access returns the descriptor itself
            return self
        name = self.name
        if name is None:
            name = self.name = find_attribute(cls, self, required=True)
        assert name not in obj.__dict__
        value = self.func(obj) if self.passref else self.func()
        #cache in the instance dict; we should never fire again for obj
        obj.__dict__[name] = value
        return value
+
class class_property(object):
    """read-only property computed from the owning class.

    Acts like a combination of ``classmethod`` and a read-only
    ``property``: accessing the attribute (via the class or an
    instance) calls *fget* with the class as its only argument.
    """
    def __init__(self, fget):
        self.fget = fget
    def __get__(self, owner, cls):
        #*owner* (the instance, or None for class-level access) is
        #deliberately ignored; the value depends only on the class
        return self.fget(cls)
+
+#=========================================================
+#
+#=========================================================
+def _iter_decay(lower, upper, half):
+ "helper for stepped_delay"
+ #default delay loop using "lower" "upper" and "half"
+ #equation: delay[idx] = upper - (upper-lower) * (decay ** idx)
+ #such that:
+ # delay[0] == lower
+ # delay[half] = (upper+lower)/2
+ # delay[idx] < upper
+ #
+ #this means decay = (1/2)**(1/half)
+ #
+ if half:
+ decay = .5**(1.0/half)
+ else:
+ decay = .9 ## approx ~ half=7
+ value = upper-lower
+ while True:
+ yield upper-value
+ value *= decay
+
def stepped_delay(timeout=None, count=None, steps=None, lower=.1, upper=90, half=None):
    """generate a stepped delay loop; useful when polling a resource repeatedly.

    This function provides a delay loop
    for such things as polling a filesystem for changes, etc.
    It provides an initially short delay which slowly backs off.
    It's designed to be used as an iterator, so that all logic
    stays within your application.

    You can either specify a custom sequence of delay values via *steps*,
    or use the default exponential decay algorithm, which
    begins with a delay of *lower*, and slowly increases,
    approaching a delay time of *upper*.

    :param timeout:
        If specified, the loop will stop after *timeout* seconds
        have passed, no matter how many repetitions have been run.

    :param count:
        If specified, the loop will stop after *count* repetitions
        (the initial zero-delay round counts as the first one).

    :param steps:
        If specified, this should be a sequence
        of delay values to use. When the sequence runs
        out, the last delay value will be repeated.
        If *steps* is not used, a default exponential
        decay algorithm will be used.

    :param lower:
        [ignored if *steps* is specified]
        This specifies the starting delay.

    :param upper:
        [ignored if *steps* is specified]
        This specifies the upper bound on the delay;
        the delay asymptotically approaches this bound.

    :param half:
        [optional, ignored if *steps* is specified]
        If specified, adjusts the rate of the exponential delay
        increase such that it will take exactly *half*
        rounds through the iterator before the delay
        is at the half-way mark between *lower* and *upper*.

    :Returns:
        This loop yields tuples of ``(index,delay)``,
        where *index* is the number of passes that have been made,
        and *delay* is the amount of time it slept before
        yielding the last tuple. It will increase the delay
        used each time before it yields a new tuple.
        If the loop ends due to *timeout* or *count*,
        the iterator will raise :exc:`StopIteration`.

    Usage Example::

        >>> import time
        >>> from bps.misc import stepped_delay
        >>> for i,d in stepped_delay(count=10, lower=.1, upper=10):
        >>>     print i,d,time.time()
        >>>     #... do stuff, calling break if done with loop
        >>> else:
        >>>     print "loop exit w/o success"

    .. todo::
        Could allow delay to be reset to initial value
        by sending ``"reset"`` back to the yield statement.
    """

    #run first round without any delay
    yield 0, 0

    #BUGFIX: count=1 previously yielded *two* tuples (indices 0 and 1),
    #since the count check below only fires after the second yield.
    #count=0 / None still means "no repetition limit", as before.
    if count and count <= 1:
        return

    #prepare delay value generator
    if steps:
        #ignore 'lower', 'upper', and 'half'
        def loopgen():
            for value in steps:
                yield value
            while True: #repeat last value
                yield value
        loop = loopgen()
    else:
        if upper <= lower: #allow us to set really small 'upper' and auto-scale lower
            lower = .1 * upper
        loop = _iter_decay(lower, upper, half)

    #run main delay loop
    if timeout:
        end = time.time() + timeout
    for idx, delay in enumerate(loop):
        time.sleep(delay)
        yield idx+1, delay
        #check if it's time to abort
        if count and idx+2 >= count:
            return
        if timeout and time.time() >= end:
            return
+
#=========================================================
#http agent string
#=========================================================
#collapse any run of whitespace to a single space
_clean_re = re.compile(r"\s+")

#match one leading segment of an agent string: either a "name/version"
#product token, or a parenthesized "(comment)" stanza.
_agent_re = re.compile(
    r"""
    ^
    \s*
    (
        (?P<product>
            (?P<name>
                [^\s()/]+ # technically this should only be TOKEN chars
            )
            (
                /
                (?P<version>
                    [^\s()]+ #XXX: what _is_ allowed here? TOKEN?
                )
            )?
        )
        |
        (
            \(
            (?P<comment>
                [^)]+ #technically this should only be TOKEN chars
            )
            \)
        )
    )
    \s*
    """, re.I|re.X)

def parse_agent_string(value, normalize=True):
    """parse a HTTP user agent string.

    This parses an HTTP User Agent string,
    returning a list of agents identified in the string, in order.

    :type value: str
    :param value:
        The agent string to parse

    :type normalize: bool
    :param normalize:
        This flag (enabled by default)
        turns on any special-case heuristics for known
        atypical user agent strings, as well
        as converting the string to lower case.
        It can be disabled to get the unmangled results.

    :returns:
        A list of dictionaries, one for each product found.
        The first dictionary is usually considered the primary.
        This code assumes comments will always *follow* the product description
        they are attached to, but if this rule is violated,
        a "blank" product entry will be inserted, where all relevant keys
        except "comments" will be ``None``. Other than that case,
        the following keys should be filled out for each dictionary:

        product
            This will contain the raw product name, eg "Mozilla/5.0".
        name
            This will contain just the name of the product
            (assuming it has the format "name/version").
            If the product couldn't be parsed this way, name's contents are undefined.
        version
            This will contain the version of the product,
            (assuming it has the format "name/version").
            If the product couldn't be parsed this way, version's contents are undefined.
        comments
            This is present if a comment stanza followed
            the product definition. This will be a list of strings,
            as read from the comment and separated by semicolons.
            If no comment is present, the key will not be included.

    Usage Example::

        >>> from bps.misc import parse_agent_string
        >>> parse_agent_string("Mozilla/5.0 (X11; U) Firefox/3.0.11")
        [
            { 'name': 'mozilla', 'version': '5.0',
              'product': 'mozilla/5.0',
              'comments': ['x11', 'u'],
              },
            { 'name': 'firefox', 'version': '3.0.11',
              'product': 'firefox/3.0.11',
              }
        ]

    .. seealso:

        :rfc:`2068` is the authoritative agent string format spec.
    """
    #NOTE: this code makes the assumption
    #that a comment will always be FOLLOWING (and is associated with) the preceding product.
    #this goes against the grammar of RFC2068, but is the de facto case.
    #thus, if an unexpected comment is encountered, an empty product entry will be created.

    #BUGFIX: this module never defined a ``log`` object, so the two
    #warning paths below raised NameError; resolve a logger locally.
    import logging
    log = logging.getLogger(__name__)

    orig = value
    value = _clean_re.sub(" ", value).strip()
    if normalize:
        value = value.lower()
    out = []
    while value:
        m = _agent_re.match(value)
        if m:
            comment = m.group("comment")
            if comment:
                comments = [elem.strip() for elem in comment.split(";")]
                if out and isinstance(out[-1], dict) and 'comments' not in out[-1]:
                    #attach comment stanza to the preceding product
                    out[-1]['comments'] = comments
                else:
                    log.warning("unexpected comment segment in agent: %r %r", comment, orig)
                    out.append(dict(product=None, name=None, version=None, comments=comments))
            else:
                product, name, version = m.group("product", "name", "version")
                out.append(dict(product=product, name=name, version=version))
            value = value[m.end():]
        else:
            #can this _ever_ happen?
            log.warning("failed to parse agent segment: %r of %r", value, orig)
            value = ''
    #TODO: detect the "+http://homepage" elements at end of comment list,
    # move out to "url" kwd
    #TODO: detect platform info
    #TODO: detect firefox, opera, konq, safari, chrome,
    # and move their products to the front
    return out
+
+def _parse_agent_version(value):
+ if value is None:
+ return None
+ #XXX: use a real version parser here.
+ if isinstance(value, str):
+ try:
+ return tuple(int(x) for x in value.split("."))
+ except ValueError:
+ return None
+ elif isinstance(value, int):
+ return tuple(value)
+ #should be tuple of ints.
+ return value
+
def agent_string_has_product(agent, name, min_version=None):
    """tests if agent string references a product name.

    This wrapper for :func:`parse_agent_string`
    checks if a given product is found in the provided string.
    This is a simple function, more complex cases may require
    rolling your own test function.

    :param agent:
        The raw agent string, OR the output of parse_agent_string.
    :param name:
        The name of the product to check for.
    :param min_version:
        Optional minimum version.
        For this to work, min_version must be an integer,
        tuple of integers, or a period-separated string.

    :returns:
        ``True`` if a match is found, ``False`` otherwise.
    """
    wanted = name.lower()
    threshold = _parse_agent_version(min_version)
    entries = parse_agent_string(agent) if isinstance(agent, str) else agent
    for entry in entries:
        if entry['name'] != wanted:
            continue
        if threshold and not (threshold <= _parse_agent_version(entry['version'])):
            #name matched but version requirement didn't; keep scanning
            continue
        return True
    #TODO: IE detect here or in extended?
    return False
+
+#=========================================================
+#code scraps
+#=========================================================
+
+#need to clean this up a little, but might be useful
+##def formatFuncStr(fname, *args, **kwds):
+## if isinstance(fname, str):
##        pass
##    elif callable(fname):
+## fname = fname.__name__
+## else:
+## fname = str(fname)
+##
+## body = ""
+## if args:
+## for a in args:
+## if body != "": body += ","
+## body += repr(a)
+## if kwds:
+## for k,v in kwds.items():
+## if body != "": body += ","
+## body += "%s=%r" % (k,v)
+## return "%s(%s)" % (fname,body)
+
+#=========================================================
+#
+#=========================================================
diff --git a/bps/numeric.py b/bps/numeric.py
new file mode 100644
index 0000000..e0f8505
--- /dev/null
+++ b/bps/numeric.py
@@ -0,0 +1,1192 @@
+"""bps.numeric -- numeric extensions for bps3.
+
+this is mainly an extension of the 'math' library.
+"""
+#=========================================================
+#imports
+#=========================================================
+#core
+from __builtin__ import pow as intpow
+import sys
+from decimal import Decimal
+from itertools import izip
+trunc = int #clobbered by real implementation under py26+
+from math import * #NOTE: we import everything from math so this module can act as a drop-in replacement
+#pkg
+from bps.meta import isnum, isseq
+from bps.rng import random, srandom
+#local
+__all__ = [
+
+ #numeric format conversions
+ 'int_to_base',
+## 'base_to_int',
+
+ 'float_to_base',
+## 'base_to_float',
+
+ 'int_to_roman',
+ 'roman_to_int',
+
+ #byte strings
+ 'int_to_bytes', 'bytes_to_int',
+ 'list_to_bytes', 'bytes_to_list',
+ 'xor_bytes', 'and_bytes', 'or_bytes', 'neg_bytes', 'binop_bytes',
+
+ #numthy
+ 'gcd', 'lcm', 'factors',
+
+ #prime
+ 'is_prime', 'get_factors',
+ 'next_prime', 'prev_prime', 'iter_primes',
+
+ #misc
+ 'sdivmod', 'splitfrac',
+ 'limit', 'avgsd', 'digits',
+
+]
+
#calc bits-per-float... (usually 53)
#NOTE: counts mantissa bits by finding the smallest exponent at which
#``1 + 2**-BPF`` is no longer distinguishable from 1.
BPF = 0
while (1+2**-BPF) > 1:
    BPF += 1

#empty byte string constant, used by the byte-manipulation helpers
EMPTY_BYTES = ""
+
+#=========================================================
+#misc
+#=========================================================
def sdivmod(x, y):
    """divmod variant which preserves the sign of the remainder.

    unlike ``divmod``, the remainder carries the sign of *x*,
    e.g. ``sdivmod(-7, 2) == (-3, -1)`` rather than ``(-4, 1)``.
    """
    quot, rem = divmod(x, y)
    #NOTE: rem is already < 0 when x is a negative Decimal instance,
    #in which case no adjustment is needed
    if x < 0 and rem > 0:
        return quot + 1, rem - y
    return quot, rem
+
def splitfrac(v):
    """split number into integer portion and fractional portion.

    :returns: ``(int_part as int, frac_part as original type)``
    """
    #NOTE: the reason this is present instead of various other solutions:
    # modf(v) - always returns (float,float); it's frequently needed for the
    #   int part to be an integer, and modf coerces Decimal to float, whereas
    #   this preserves Decimal in the fractional portion.
    # divmod(v,1) or v%1 - doesn't handle negative values correctly,
    #   and int part is not an integer.
    # sdivmod(v,1) - int part is not an integer, too complex for common case.
    import numbers
    if isinstance(v, numbers.Integral):
        #covers both int and (py2) long, unlike the old (int, long) check,
        #and keeps the module importable under py3 where long is gone
        return v, 0
    #int() truncates toward zero for floats and Decimals, same as trunc()
    ip = int(v)
    return ip, v - ip
+
def limit(value, lower, upper):
    """constrains value to specified range.

    :arg value: value to clamp
    :arg lower: smallest value allowed
    :arg upper: largest value allowed

    :returns:
        value, if it's between lower & upper.
        otherwise returns the appropriate limit.

    :raises ValueError: if ``lower > upper``.

    Usage Example::

        >>> from bps.numeric import limit
        >>> limit(-1,0,1)
        0
        >>> limit(.5,0,1)
        .5
        >>> limit(100,0,1)
        1
    """
    if lower > upper:
        #NOTE: call-style raise keeps this valid under both py2 and py3
        raise ValueError("lower must be <= upper")
    if value < lower:
        return lower
    if value > upper:
        return upper
    return value
+
def digits(value, base=10):
    """Returns minimum number of digits required to represent value under a given base.

    :arg value: integer value to check.
    :arg base: base to count the digits for (defaults to base 10).

    :returns:
        Returns minimum number of digits needed under specified base.
        Negative numbers will be converted to positive.
        ``0`` is special-cased to always return ``1``.
        Thus this will always return a value >= 1.

    Usage Example::

        >>> from bps.numeric import digits
        >>> digits(99,10)
        2
        >>> digits(100,10)
        3
        >>> digits(7,2)
        3
        >>> digits(8,2)
        4
        >>> digits(255,16)
        2
    """
    if value == 0:
        return 1
    if value < 0: #ignore the minus sign
        value = -value
    #NOTE: previously computed as ceil(log(value+1, base)); float rounding
    #can mis-count near exact powers of the base (and for very large ints).
    #pure integer division is exact for any magnitude.
    count = 0
    while value:
        value //= base
        count += 1
    return count
+
def avgsd(args, sample=False):
    """calc average & standard deviation of a sequence of numbers.

    :arg args: iterable of numbers (one-shot iterators are materialized first).
    :param sample: if True, use the sample (n-1) standard deviation
        instead of the population standard deviation.
    :returns: tuple ``(average, standard_deviation)``.
    :raises IndexError: if the sequence is empty.
    """
    if not hasattr(args, "__len__"):
        args = list(iter(args))
    if not args:
        #NOTE: call-style raise keeps this valid under both py2 and py3
        raise IndexError("empty list passed in")
    num = len(args)
    avg = sum(args) / float(num)
    if sample and num > 1:
        num -= 1 #Bessel's correction
    sigma = sqrt(sum((x - avg)**2 for x in args) / num)
    return avg, sigma
+
+#===================================================
+#number theory functions
+#===================================================
def gcd(a, b):
    """returns the greatest common divisor of the integers *a* and *b*.

    the result is always non-negative; ``gcd(0, 0)`` is 0.
    """
    #BUGFIX: normalize *a* as well as *b*, so the result is non-negative
    #even when the euclidean loop exits immediately
    #(previously gcd(-4, 0) returned -4).
    if a < 0:
        a = -a
    if b < 0:
        b = -b
    while b:
        a, b = b, (a % b)
    return a
+
def lcm(a, b):
    """returns least common multiple of the integers *a* and *b*"""
    #lcm(a,b) == |a*b| / gcd(a,b); guard the degenerate all-zero case,
    #since gcd(0,0) == 0 would otherwise divide by zero
    if not (a or b):
        return 0
    prod = abs(a * b)
    divisor = gcd(a, b)
    assert not prod % divisor
    return prod // divisor
+
def factors(value):
    """return list of factors of integer *value*

    :arg value:
        The integer to factor.
        This can be negative, 0, or positive.

    :returns:
        This will return a list of prime & exponent pairs.
        For example, ``factors(48)`` would return ``[(2,4),(3,1)]``,
        signifying that ``(2**4) * (3**1) == 48``.

    Invariants:

    * All factors will be listed in ascending order,
    * All exponents will be > 0.

    Special Cases:
    * If the value is prime, a single entry will be returned,
      being ``[(value,1)]``.
    * Negative values will be made positive first.
    * The numbers 0 and 1 will return empty lists.

    Usage Example::

        >>> from bps.numeric import factors
        >>> #factors for a prime are just itself
        >>> factors(5) # 5**1
        [(5, 1)]
        >>> #factors for a larger number...
        >>> factors(104) # 2**3 * 13**1
        [(2, 3), (13, 1)]
        >>> #factors for a negative act just like the positive
        >>> factors(-10)
        [(2, 1), (5, 1)]
    """
    #TODO: find more efficient factoring algorithm

    if value < 0:
        value = -value
    if value < 2:
        return []

    #check if prime (should be quick)
    if is_prime(value):
        return [ (value, 1) ]

    #pull off prime factors as we find them.
    #PERF: once prime*prime > value, any remaining value > 1 must itself
    #be prime, so we can stop there instead of iterating primes all the
    #way up to a large remaining prime factor (as the old loop did).
    out = []
    for prime in iter_primes():
        if prime * prime > value:
            break
        count = 0
        while value % prime == 0:
            value //= prime
            count += 1
        if count:
            out.append((prime, count))
    if value > 1:
        #remaining cofactor is prime
        out.append((value, 1))
    return out
+
+#===================================================
+#prime iteration
+#===================================================
+
#the first 64 primes (2 through 311), used for quick trial-division
#screening before falling back to the probabilistic miller-rabin test.
_small_primes = [
    2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,
    59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,
    137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,
    227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,
    ]
+
+#---------------------------------------------------
+#primality testing
+#---------------------------------------------------
def is_mr_prime(value, rounds=None):
    """tests for primality using the miller-rabin test.

    :arg value:
        The value to test for primality
    :param rounds:
        The number of rounds to use.
        The chances of a false positive are ``1/(4**rounds)``.

        By default, the number of rounds will be chosen
        such that the chances of a false positive are ``1/(value**2)``,
        so the larger the prime, the more certain you can be :)

    :returns:
        ``False`` if the number is confirmed composite.
        ``True`` if the number is probably prime (w/in bounds
        set by number of rounds).
    """
    #boundary cases: 0, 1 and negatives aren't prime; 2 & 3 are;
    #all other even numbers are composite
    if value < 2:
        return False
    elif value == 2 or value == 3:
        return True
    elif value & 1 == 0:
        return False

    #determine number of rounds
    if rounds is None:
        #pick default number of rounds based on size of value.
        #add 2 rounds for every bit in number...
        #since prob of false positive is 4**(-rounds),
        #prob of false positive for n is 4**(-log(n,2)),
        #or (n**-2).
        rounds=max(256, int(log(value,2)))
##        max_rounds = value//4+1
##        if rounds > max_rounds:
##            rounds = max_rounds
    if rounds >= value-10:
        #value is small enough that random sampling would mostly repeat:
        #run through all witnesses [2..value-1] deterministically instead
        rounds = value-2
        rounds_mode = 1
    else:
        #randomly pick witnesses
        rounds_mode = 0

    #find positive ints s & d s.t.
    # (2**s)*d == value-1, with d % 2 == 1
    vone = value-1
    assert vone & 1 == 0
    s = 0
    d = vone
    while d & 1 == 0:
        d >>= 1
        s += 1
    assert d & 1
    assert s > 0
    assert (1<<s)*d == vone
    r_range = xrange(1, s)

    #test some candidates
    #TODO: prevent repetitions? deal w/ small modulus?
    #PERF: hoist the bound method lookup out of the witness loop
    randrange = random.randrange
    for i in xrange(rounds):
        #generate random candidate
        if rounds_mode == 0:
            a = randrange(2, vone)
        else:
            assert rounds_mode == 1
            a = i+2

        #check if a**d = 1 or -1 mod value
        x = intpow(a, d, value)
        if x == 1 or x == vone:
            #satisfied condition for primality witness,
            #it's not a composite witness
            #try another one
            continue

##        #just to save some speed, run fermat primality test first
##        # if a**vone is not 1, then a**(d*2**r) will never be 1 OR -1 for any r,
##        # so there's no point in doing mr loop, and intpow is cheaper
##        if intpow(a, vone, value) != 1:
##            return False
##        #...so now we only let primes & carmichael numbers through

        #check if a**(2**r*d) for 0<=r<=s-1, but above check already done for r=0
        for r in r_range:
            x = intpow(x, 2, value)
            if x == 1:
                #no chance it'll ever be -1, it's a composite witness
                return False
            if x == vone:
                #satisfied condition for primality witness,
                #it's not a composite witness
                #try another one
                break #break 'r' loop; continue 'i' loop
        else:
            #no chance it'll ever be -1, it's a composite witness
            return False

    #probably prime, chance we're wrong is 1/4**rounds
    return True
+
def is_prime(value):
    """Test if integer is prime.

    .. note::
        Implementation wise, this does a quick check
        against some smaller primes, and then falls
        back to the miller-rabin test.
    """
    if value < 2:
        return False
    #trial division against the small-prime table; as a side effect,
    #any survivor no larger than p*p must itself be prime, since no
    #prime <= p divides it
    for p in _small_primes:
        if not value % p:
            return value == p
        if value <= p * p:
            return True
    #fallback to miller-rabin
    return is_mr_prime(value)
+
+#---------------------------------------------------
+#prime iteration
+#---------------------------------------------------
+
+###don't use, not cryptographically useful
+##def rand_prime(bits, is_prime=is_prime, rng=srandom):
+## """generate a random prime.
+##
+## :param bits:
+## the minimum number of bits,
+## s.t. ``log(prime,2) >= bits``
+##
+## """
+## rng.reseed()
+## #generate odd number between 1<<(bits-1) and 1<<bits
+## cur = (rng.getrandbits(bits-1)<<1) + 1
+## while True:
+## if is_prime(cur):
+## return cur
+## cur += 2
+
def iter_primes(start=0, stop=None, count=None, rounds=None):
    """generator which returns sequential primes.

    :param start:
        Starting point for generating primes.
        The first value generated will be the smallest
        prime which is greater than or equal to *start*.
        This defaults to ``0``, which means
        all primes starting with ``2`` will be generated in order.

    :param stop:
        If specified, the generator will stop
        after yielding the largest prime which is
        strictly less than *stop*.

    :param count:
        If specified, the generator will stop
        after yielding *count* prime numbers.
        If a *stop* is also specified,
        the generator will halt on whichever
        condition is reached first.

    :param rounds:
        Optionally lock the number of rounds
        used by the Miller-Rabin test.
        This is generally not needed.

    :returns:
        A generator which will yield ascending prime numbers
        according to the parameters above.
    """
    if stop or count:
        #wrap ourselves with a counter: delegate to the unbounded
        #generator below, enforcing the stop/count limits here.
        #(the inner generator never terminates on its own, so this
        #loop only exits via the two return statements.)
        idx = 0
        for prime in iter_primes(start, rounds=rounds):
            if stop and prime >= stop:
                return
            yield prime
            idx += 1
            if count and idx >= count:
                return

    #yield from small primes list to get things started
    global _small_primes
    top = _small_primes[-1]
    if start <= top:
        for prime in _small_primes:
            if prime < start:
                continue
            yield prime
        #the loop above always runs to completion, so prime == top here;
        #resume the search at the next odd candidate above the table
        cur = prime+2
    else:
        #start is above the table: begin at start, forced odd
        cur = start|1
    #iterate one by one, using modified is_prime() to test:
    #trial-divide by the small primes first (cheap), then miller-rabin
    assert cur > top
    assert cur & 1 == 1
    while True:
        for prime in _small_primes:
            if cur % prime == 0:
                break
        else:
            if is_mr_prime(cur, rounds=rounds):
                yield cur
        cur += 2
+
def next_prime(value, rounds=None):
    "return the smallest prime strictly greater than the specified value"
    highest_known = _small_primes[-1]
    if value <= highest_known:
        #consult the precomputed table first
        for candidate in _small_primes:
            if candidate > value:
                return candidate
        #value equals the last table entry; resume search just above it
        value = highest_known + 2
    elif value & 1:
        value += 2
    else:
        value += 1
    assert value > highest_known
    assert value & 1
    #walk odd candidates upward: trial-divide by the small primes,
    #then confirm survivors with miller-rabin
    while True:
        if all(value % p for p in _small_primes):
            if is_mr_prime(value, rounds=rounds):
                return value
        value += 2
+
def prev_prime(value, rounds=None):
    "return the largest prime strictly less than the specified value, or ``None``"
    #largest prime in the precomputed table
    top = _small_primes[-1]

    #pick from small primes list
    if value <= top:
        for prime in reversed(_small_primes):
            if prime >= value:
                continue
            return prime
        #no table prime below value: value must be <= 2, so no answer exists
        assert value <= 2
        return None

    #step value down to the next odd candidate below it
    if value & 1:
        value -= 2
    else:
        value -= 1
    assert value & 1
    assert value >= top

    #iterate downward one by one, until we reach top of preset list:
    #trial-divide by the small primes, then confirm with miller-rabin
    while value > top:
        for prime in _small_primes:
            if value % prime == 0:
                break
        else:
            if is_mr_prime(value, rounds=rounds):
                return value
        value -= 2
    #search descended all the way to the table's largest prime
    return top
+
+#=========================================================
+#int <-> binary encoded string
+#=========================================================
+def _log256_ceil(value):
+ "helper for mchr & mord"
+ #FIXME: probably a nice clever way to do this w/ integer / bitshifting
+ if value < 0:
+ return 0
+ return int(ceil(log(value, 256)))
+
def int_to_bytes(num, bytes=None, upper=None, order="big"):
    """Returns a multi-character string corresponding to the ordinal *num*.

    Bit String Encoding:
    This is the multi-character equivalent of :func:`chr`, with some additional features.
    It takes in a positive integer, and returns a string representation,
    packed into a specified number of bytes.

    :arg num:
        The positive integer to encode.
    :param bytes:
        Optionally, the number of bytes to encode integer into.
        If specified, this will be the length of the returned string.
        If not specified, this will default to the minimum number
        required to encode the number.
    :param upper:
        Upper bound allowed for the number (exclusive).
        If not specified, but *bytes* is, upper will default
        to the largest number representable by that number of bytes.
        If num is equal to or larger than upper, a ValueError will be raised.
    :param order:
        Byte ordering: "big", "little", "native".
        The default is "big", since this the common network ordering,
        and "native" as the default would present poor cross-platform predictability.

    :returns:
        The number encoded into a string, according to the options.

    Usage Example::

        >>> from bps.numeric import bytes_to_int, int_to_bytes
        >>> int_to_bytes(1234, bytes=4)
        '\\x00\\x00\\x04\\xd2'

        >>> int_to_bytes(1234, bytes=4, order="little")
        '\\xd2\\x04\\x00\\x00'

        >>> bytes_to_int('\\x00\\x00\\x04\\xd2')
        1234
    """
    #TODO: would a "bits" keyword be useful?
    if bytes is not None:
        #encode in specified number of bytes
        if bytes < 1:
            raise ValueError, "bytes must be None or >= 1: %r" % (bytes,)
        #largest value (exclusive) representable in that many bytes
        bupper = 256**bytes
        if upper is None:
            upper = bupper
        elif upper > bupper:
            raise ValueError, "upper bound too large for number of bytes: bytes=%r upper=%r" % (bytes, upper)
    else:
        #no byte count given: derive it from the upper bound,
        #which itself defaults to just large enough for num
        if upper is None:
            upper = num+1
        bytes = _log256_ceil(upper)
    if upper < 0:
        raise ValueError, "top must be >= 0: %r" % (upper,)
    if num < 0 or num >= upper:
        raise ValueError, "expected number between 0 and %d: %d" % (upper-1, num)
    if order == "native":
        order = sys.byteorder
    #build the sequence of bit offsets to extract:
    #most significant byte first for "big", least significant first for "little"
    if order == "big": #encode bytes-1 byte first, 0 byte last
        itr = xrange(bytes*8-8, -8, -8)
    else: #encode 0 byte first, bytes-1 byte last
        assert order == "little"
        itr = xrange(0, bytes*8, 8)
    return EMPTY_BYTES.join(
        chr((num >> offset) & 255)
        for offset in itr
    )
+
def list_to_bytes(value, bytes=None, order="big"):
    """Returns a multi-character string corresponding to a list of byte values.

    Companion to :func:`int_to_bytes`, except that this encodes a list
    of per-byte integers instead of a single integer.

    :arg value:
        The list of integers to encode.
        Every element must lie in ``range(0,256)``,
        or a ValueError will be raised.

    :param bytes:
        Optionally, the number of bytes to encode to.
        If specified, this will be the length of the returned string,
        with the encoded list zero-padded on its most significant side.

    :param order:
        Byte ordering: "big" (the default), "little", or "native".

    :returns:
        The list encoded into a string, according to the options.

    Usage Example::

        >>> from bps.numeric import list_to_bytes, bytes_to_list
        >>> list_to_bytes([4, 210], 4)
        '\\x00\\x00\\x04\\xd2'

        >>> list_to_bytes([4, 210], 4, order="little")
        '\\xd2\\x04\\x00\\x00'
    """
    #make sure all elements have valid values
    for elem in value:
        if not (0 <= elem <= 255):
            raise ValueError("value must be list of integers in range(0,256): %r" % (value,))

    #validate bytes / upper
    if bytes is None:
        bytes = len(value)
        if not bytes:
            raise ValueError("empty list not allowed")
    elif bytes < 1:
        raise ValueError("bytes must be None or >= 1: %r" % (bytes,))
    elif len(value) > bytes:
        raise ValueError("list too large for number of bytes: bytes=%r len=%r" % (bytes, len(value)))

    #encode list in big endian mode, compute null padding
    encoded = ''.join(chr(elem) for elem in value)
    padding = '\x00' * (bytes - len(encoded))

    #pad/reverse as needed for endianess
    if order == "native":
        order = sys.byteorder
    if order == "big":
        return padding + encoded
    assert order == "little"
    return encoded[::-1] + padding
+
def bytes_to_int(value, order="big"):
    """decode a string into an integer representation of it's binary values.

    This returns a positive integer, as decoded from the string;
    it is the inverse of :func:`int_to_bytes`.

    :arg value:
        The string to decode.
    :param order:
        The byte ordering, defaults to "big".
        See :func:`int_to_bytes` for more details.

    :returns:
        The decoded positive integer.
    """
    if not value:
        return 0
    if order == "native":
        order = sys.byteorder
    if order == "little":
        #normalize to big-endian by reversing
        value = value[::-1]
    else:
        assert order == "big"
    #fold the bytes in, most significant first
    out = 0
    for ch in value:
        out = (out << 8) | ord(ch)
    return out
+
def bytes_to_list(value, order="big"):
    """decode a string into a list of numeric values representing each of it's bytes.

    Companion to :func:`bytes_to_int`; instead of a single integer,
    it returns one integer per byte, most significant byte first.

    :arg value:
        The string to decode.
    :param order:
        The byte ordering, defaults to "big".
        See :func:`int_to_bytes` for more details.

    :returns:
        The decoded list of byte values.
    """
    if order == "native":
        order = sys.byteorder
    if order == "little":
        #reverse so the most significant byte comes first
        return list(map(ord, reversed(value)))
    assert order == "big"
    return list(map(ord, value))
+
+def _bytes_align(left, right, order):
+ "helper used by xor_bytes, and_bytes, etc. to align strings"
+ l = len(left)
+ r = len(right)
+ if l != r:
+ if order is None:
+ raise ValueError, "strings are not same size: %r %r" % (l, r)
+ if order == "native":
+ order = sys.byteorder
+ if order == "big":
+ #right-align strings by padding left with nulls
+ if l < r:
+ left = ("\x00" * (r-l)) + left
+ else:
+ right = ("\x00" * (l-r)) + right
+ else:
+ assert order == "little"
+ #left-align strings by padding right with nulls
+ if l < r:
+ left += ("\x00" * (r-l))
+ else:
+ right += ("\x00" * (l-r))
+ assert len(left) == len(right)
+ return left, right
+
def binop_bytes(left, right, op, order=None):
    """perform an arbitrary per-byte logical operation on two bit strings.

    :arg left:
        left bit string
    :arg right:
        right bit string
    :arg op:
        callable with the syntax ``op(left_value, right_value) -> result_value``;
        it is invoked once per byte pair, receiving and returning integers.

    :param order:
        how strings of unequal length are aligned:
        ``None`` (the default) requires equal lengths, raising
        :exc:`ValueError` otherwise; ``"big"`` right-aligns the
        shorter string (null-padded on the left); ``"little"``
        left-aligns it (null-padded on the right); ``"native"``
        uses the platform's byte order.
        Either way, the least significant digits line up,
        and the result is as long as the longer input.

    :returns:
        The resulting bit string
    """
    left, right = _bytes_align(left, right, order)
    result = []
    for lc, rc in izip(left, right):
        result.append(chr(op(ord(lc), ord(rc))))
    return "".join(result)
+
+#bytes_xor
def xor_bytes(left, right, order=None):
    """XOR two bit strings together.

    This is the equivalent of ``int_to_bytes(bytes_to_int(left) ^ bytes_to_int(right))``.

    :arg left:
        The first bit string to perform the operation on.
    :arg right:
        The second bit string to perform the operation on.
    :param order:
        The byte ordering used to align strings of different sizes.
        See :func:`binop_bytes` for details.
    """
    left, right = _bytes_align(left, right, order)
    out = []
    for lc, rc in izip(left, right):
        out.append(chr(ord(lc) ^ ord(rc)))
    return "".join(out)
+
+#bytes_and
def and_bytes(left, right, order=None):
    """AND two bit strings together.

    This is the equivalent of ``int_to_bytes(bytes_to_int(left) & bytes_to_int(right))``.

    :arg left:
        The first bit string to perform the operation on.
    :arg right:
        The second bit string to perform the operation on.
    :param order:
        The byte ordering used to align strings of different sizes.
        See :func:`binop_bytes` for details.
    """
    left, right = _bytes_align(left, right, order)
    out = []
    for lc, rc in izip(left, right):
        out.append(chr(ord(lc) & ord(rc)))
    return "".join(out)
+
+#bytes_or
def or_bytes(left, right, order=None):
    """OR two bit strings together.

    This is the equivalent of ``int_to_bytes(bytes_to_int(left) | bytes_to_int(right))``.

    :arg left:
        The first bit string to perform the operation on.
    :arg right:
        The second bit string to perform the operation on.
    :param order:
        The byte ordering used to align strings of different sizes.
        See :func:`binop_bytes` for details.
    """
    left, right = _bytes_align(left, right, order)
    out = []
    for lc, rc in izip(left, right):
        out.append(chr(ord(lc) | ord(rc)))
    return "".join(out)
+
+#bytes_neg
def invert_bytes(value):
    """invert a bit string (1->0, 0->1)

    This is the equivalent of ``int_to_bytes(~bytes_to_int(value))``.
    """
    #XORing a byte with 0xff flips all 8 bits (255 ^ b == 255 - b for b in 0..255)
    return "".join(chr(0xff ^ ord(ch)) for ch in value)
+
+#=========================================================
+#counting systems
+#=========================================================
+
#digit alphabet shared by int_to_base() and float_to_base();
#a digit's index in this string is its numeric value (supports bases 2-36)
_chars = "0123456789abcdefghijklmnopqrstuvwxyz"
+
def int_to_base(value, base=10, pad=0):
    """convert integer to specified base.

    The builtin python function :func:`int`
    has the option for converting integers from string
    format using a variety of bases.

    This is the inverse, which converts an integer
    into a string of the specified base.

    :arg value:
        The integer to convert
    :arg base:
        The base to use. Must be between 2 and 36, inclusive.
    :param pad:
        Optionally add zeros to left of number until string is at least ``pad``
        characters long (any "-" sign is added after padding).

    It should always be true that ``int(int_to_base(n,b),b) == n``.

    Usage Example::

        >>> from bps.numeric import int_to_base
        >>> int_to_base(123456789,32)
        '3lnj8l'
        >>> int('3lnj8l',32)
        123456789
        >>> int_to_base(123456789,16)
        '75bcd15'
        >>> 0x75bcd15
        123456789
    """
    if base < 2 or base > 36 or int(base) != base:
        raise ValueError("base must be between 2 and 36, inclusive: %r" % (base,))
    if value == 0:
        return "0"
    neg = value < 0
    if neg:
        value = -value
    #collect digits least-significant first, then join once
    #(avoids the quadratic cost of repeated string prepending)
    digits = []
    while value > 0:
        #divmod with floor division, rather than int(value/base),
        #so the result stays correct even if true division is in effect
        value, rem = divmod(value, base)
        digits.append(_chars[rem])
    out = "".join(reversed(digits))
    if pad and len(out) < pad:
        out = ('0' * (pad - len(out))) + out
    if neg:
        out = "-" + out
    return out
+
base_to_int = int #convenience alias: the builtin int() already parses arbitrary bases, making it the inverse of int_to_base()
+
def float_to_base(value, base, fsize=-1, ftrim=True):
    """convert float to a string in the specified base.

    :arg value: the float to convert
    :arg base: target base; must be between 2 and 36, inclusive
    :param fsize:
        number of fractional digits to render;
        ``0`` renders the integer part only,
        ``-1`` (the default) renders digits up to the system float precision.
    :param ftrim:
        if true (the default), trailing zero digits are trimmed
        from the fractional part (a single "0" is always kept).
    """
    if base < 2 or base > 36 or int(base) != base:
        raise ValueError, "base must be between 2 and 36, inclusive: %r" % (base,)
    #split into int & frac parts
    fp, ip = modf(value)
    assert int(ip) == ip
    #format int part
    text = int_to_base(int(ip), base)
    if fsize == 0:
        return text
    text += "."

    #determine default fsize
    if fsize == -1: ##or fsize == -2:
        #show digits to max system precision:
        #take the float's mantissa bits, minus those consumed by the
        #integer part, and convert the remainder to digits in 'base'
        bits = BPF
        if ip:
            bits -= 1+int(floor(log(abs(ip), 2))) #account for integer bits
        if bits < 0:
            fsize = 0
        else:
            fsize = int(ceil(log(1<<bits, base))) #num digits under base
        #TODO: under fsize==-1 + ftrim,
        # could implement "pick shortest repr" algorithm
        # from py3.1
    elif fsize < -1:
        raise ValueError, "fsize must be >= -1: %r" % (fsize,)

    #scale fp up to fsize digits, rounding the last digit to nearest
    r, m = modf(abs(fp) * (base**fsize))
    m = int(m)
    if r >= .5:
        m += 1

    #render fractional digits in reverse order (least significant first)
    out = ''
    for d in xrange(fsize):
        out += _chars[m % base]
        m //= base

    #trim what will be trailing zeroes (leading here, since reversed),
    #then reverse and return
    if ftrim:
        out = out.lstrip("0")
        if not out:
            out = "0"
    return text + out[::-1]
+
+#todo: base_to_float
+
+#=========================================================
+#roman numerals
+#=========================================================
+
+##def int_to_barred_roman(value):
+## """convert integer to parsed list of roman numerals,
+## suitable for rendering with overscores via post-processing.
+## """
+## #return tuple of roman numerals s.t.
+## #last string in list should have no overscore,
+## #2nd from last should have 1 overscore,
+## #3rd from last should have 2 overscores, etc.
+## out = []
+## def helper(value, bars):
+## if value >= 4000:
+## #more than 4000, got to invoke special rules
+## if value % 10000 < 4000:
+## #put the thousands w/ current bar level, since <4000
+## x = value//1000
+## helper(x - (x%10), bars+1)
+## value %= 10000
+## else:
+## #else put the thosands w/ next bar level, since >=4000
+## helper(value//1000, bars+1)
+## value %= 1000
+## if value > 0:
+## temp = int_to_roman(value)
+## out.append((temp, bars))
+## helper(value, 0)
+## return out
+
#the seven roman letters, ordered from lowest to highest value
_roman_level = "IVXLCDM"
#the "decimal" letters (powers of ten) -- the only letters allowed to repeat
_roman_decimal = "IXCM"
#integer value of each individual letter
_roman_values = dict(I=1, V=5, X=10, L=50, C=100, D=500, M=1000)
#(token, value) pairs for standard subtractive notation, in descending value order
_roman_standard = [
    ('M', 1000), ('CM', 900), ('D', 500), ('CD', 400),
    ('C', 100), ('XC', 90), ('L', 50), ('XL', 40),
    ('X', 10), ('IX', 9), ('V', 5), ('IV', 4),
    ('I', 1),
    ]
#(token, value) pairs for purely additive notation, in descending value order
_roman_additive = [
    ('M', 1000), ('D', 500), ('C', 100), ('L', 50),
    ('X', 10), ('V', 5), ('I', 1),
    ]
+
def int_to_roman(value, dialect="standard"): ##, bar="~"):
    """convert integer to roman numerals.

    :arg value:
        positive integer to convert
    :param dialect:
        ``"standard"`` (the default) uses subtractive notation (eg ``IV``);
        ``"additive"`` uses purely additive notation (eg ``IIII``).
    :raises ValueError:
        if value is below 1, or too large to represent (>= 4000 for "M"-based dialects)
    """
    #disable till there's a need, and a parser...
## if mode == "barred":
## return "".join(
## "".join(
## (bar * count) + c
## for c in elem
## )
## for elem, count in int_to_barred_roman(value)
## )
    if dialect == "additive":
        pairs = _roman_additive
        max_mult = 4
    else:
        assert dialect == "standard"
        pairs = _roman_standard
        max_mult = 3
    #exclusive upper bound: four of the largest token
    max_value = 4*pairs[0][1]
    if value < 1:
        raise ValueError, "value too small: %r" % (value,)
    if value >= max_value:
        raise ValueError, "value too large: %r >= %r" % (value, max_value)
    out = ''
    #greedily consume the largest tokens first
    for char, mag in pairs:
        if value >= mag:
            mult = value//mag
            assert 1 <= mult <= max_mult
            #^ thanks to 9/5/4 pairs, we shouldn't ever have mults larger
            assert mult == 1 or char in _roman_decimal
            #^ thanks to 1 pairs, 9/5/4 pairs should only have 0/1 mult
            out += char * mult
            value -= mag * mult
    assert value == 0
    return out
+
def roman_to_int(value, strict=False):
    """convert roman numerals to integer.

    This function accepts all properly formed roman numerals (eg ``"xiv"``),
    and will also attempt grammatically incorrect strings (eg ``"iiiiv"``),
    but rejects any which aren't interpretable as a valid positive integer (eg ``"vvx"``).
    Such invalid values will result in a ValueError.

    :param strict:
        If this is set to ``True``, the input must be a proper roman numeral.
        That is to say, the subtraction rule only allows for
        a single "I" before a "V" or "X", a single "X" before a "L" or "C",
        and a single "C" before a "D" or "M", and all additive letters
        must occur in decreasing value if they occur at all.
        Under strict mode, any violations of this rule will cause a ValueError.
    """
    orig = value
    #normalize: strip surrounding whitespace, accept lowercase input
    value = value.strip().upper()
    if not value:
        raise ValueError, "invalid literal for int_from_roman: %r" % (orig,)
    if any(c not in _roman_level for c in value):
        raise ValueError, "invalid literal for int_from_roman: %r" % (orig,)
    if strict:
        return _strict_parse_roman(orig, value)
    else:
        #lenient recursive parser: scan right-to-left starting from the
        #last char, with a stop level no letter can reach
        return _parse_roman(orig, value, len(value)-1, 999)[1]
+
def _strict_parse_roman(orig, value):
    """strict-mode parser used by roman_to_int().

    :arg orig: the original user input (used only in error messages)
    :arg value: the normalized (stripped, uppercased) numeral string
    :returns: the parsed integer
    :raises ValueError: if *value* is not a proper roman numeral
    """
    out = 0
    for char, mag in _roman_standard:
        #only the single decimal letters I, X, C, M may repeat.
        #the len() guard is essential: "IX", "XC" and "CM" are
        #substrings of _roman_decimal ("IXCM"), so a bare
        #``char in _roman_decimal`` wrongly let the subtractive
        #pairs repeat too (eg "IXIX" parsed as 18 in strict mode).
        if len(char) == 1 and char in _roman_decimal:
            count = 0
            while value.startswith(char):
                if count == 4:
                    #max of 4 repetitions is allowed to permit additive style,
                    #5 is just plain too many
                    raise ValueError("invalid syntax for int_from_roman: %r" % (orig,))
                out += mag
                value = value[len(char):]
                count += 1
        elif value.startswith(char): # VLD and the subtractive pairs can occur only once
            out += mag
            value = value[len(char):]
    if value:
        raise ValueError("invalid syntax for int_from_roman: %r" % (orig,))
    return out
+
def _parse_roman(orig, value, idx, stop_level):
    """lenient recursive right-to-left parser used by roman_to_int().

    :arg orig: the original user input (used only in error messages)
    :arg value: the normalized numeral string
    :arg idx: index of the rightmost character still to be consumed
    :arg stop_level: letter level at which to stop and hand back to the caller
    :returns:
        ``(idx, total)`` -- index of the first unconsumed character
        (``-1`` if the whole string was consumed) and the parsed value
    """
    out = 0
    cur_level = -1
    while idx > -1:
        char = value[idx]
        level = _roman_level.find(char)
        if level >= stop_level:
            #if we hit higher level, return value and cursor
            return idx, out
        if level < cur_level:
            #if dropped down from last level, this is beginning of a subtraction stanza,
            #which will last for all chars from to the left of idx (including idx),
            #until (but excluding) the first char w/ the same level as cur_level
            idx, diff = _parse_roman(orig, value, idx, cur_level)
            out -= diff
            if out < 1:
                #subtraction drove the running total non-positive -> not a valid numeral
                raise ValueError, "invalid syntax for int_from_roman: %r" % (orig,)
        else:
            #else we're at old level or better
            out += _roman_values[char]
            cur_level = level
            idx -= 1
    return -1, out
+
+#=========================================================
+#misc
+#=========================================================
+
+##def iter_fibonacci():
+## yield 1
+## yield 1
+## last = 1
+## cur = 1
+## while True:
+## last, cur = cur, last+cur
+## yield cur
+
+
+##def seqsum(*seqs):
+## """generate a list that contains the element-wise sum of all the sequences passed in.
+## the result will be as long as the longest sequence passed in,
+## and any shorter input sequences will be implicitly right-padded with zeros.
+## """
+#### if len(seqs) == 1 and isseq(seqs[0]):
+#### seqs = seqs[0]
+## if not seqs:
+## return []
+## if isnum(seqs[0]):
+## assert len(seqs) % 2 == 0
+## #assume it's a sequence of [weight1, seq1, weight2, seq2 ... ],
+## size = max(len(seq) for seq in seqs[1::2])
+## return [
+## sum(
+## seqs[col] * (
+## seqs[col+1][idx] if idx < len(seqs[col+1]) else 0
+## )
+## for col in xrange(0, len(seqs), 2)
+## )
+## for idx in xrange(size)
+## ]
+## elif isseq(seqs[0]) and len(seqs[0]) == 2 and isseq(seqs[0][1]):
+## #assume it's a sequence of [ (weight1, seq1), (weight2, seq2) ... ],
+## size = max(len(row[1]) for row in seqs)
+## return [
+## sum(
+## weight * (seq[idx] if idx < len(seq) else 0)
+## for weight, seq in seqs
+## )
+## for idx in xrange(size)
+## ]
+## else:
+## #assume it's a sequence of [seq1, seq2...]
+## if len(seqs) == 1:
+## return list(seqs[0])
+## size = max(len(seq) for seq in seqs)
+## return [
+## sum((seq[idx] if idx < len(seq) else 0) for seq in seqs)
+## for idx in xrange(size)
+## ]
+
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/parsing/__init__.py b/bps/parsing/__init__.py
new file mode 100644
index 0000000..311c349
--- /dev/null
+++ b/bps/parsing/__init__.py
@@ -0,0 +1 @@
+"""bps.parsing -- reading and writing various file formats"""
diff --git a/bps/parsing/config.py b/bps/parsing/config.py
new file mode 100644
index 0000000..b203679
--- /dev/null
+++ b/bps/parsing/config.py
@@ -0,0 +1,178 @@
+"""bps.parsing.config -- ConfigParser helpers"""
+#=========================================================
+#imports
+#=========================================================
+#core
+from cStringIO import StringIO
+import re
+from logging import getLogger; log = getLogger(__name__)
+import ConfigParser
+import os.path
+#site
+#pkg
+from bps.stream import get_input_type
+#local
+__all__ = [
+## "unescape_string",
+## "section_to_dict",
+## "no_parser_defaults",
+]
+
+#=========================================================
+#parser object helpers
+#=========================================================
def read_into_parser(source, parser=None, errors="strict", reset=False, source_type=None):
    """load input into parser instance

    This is a helper for loading inputs into a ConfigParser instance,
    since its provided read() method is somewhat annoying.

    :param source:
        The source to load data from.
        This may be any of:

        * a path to a local file
        * a string containing the raw data
        * an open stream (file, buffer) object

        Which one of these it is will be autodetected,
        and the appropriate parser methods invoked.

    :param parser:
        The parser to load data into.
        If not specified, a new :class:`ConfigParser.ConfigParser` instance
        is created, populated, and returned.

    :param errors:
        What to do when errors occur:

        ============ =============================================
        Value        Action
        ------------ ---------------------------------------------
        ``"strict"`` errors are raised; this is the default
        ``"ignore"`` errors are silently ignored and ``None`` is returned
        ``"log"``    errors are logged and ``None`` is returned
        ============ =============================================

    :param reset:
        If true, and source is a stream,
        it will be reset back to its original position
        after the data has been loaded.
        This is useful when you want to "peek" at the data in the stream.

    :param source_type:
        Optional hint forwarded to :func:`bps.stream.get_input_type`,
        presumably to skip autodetection of the source kind
        (confirm against bps.stream).

    :raises ValueError:
        if the file couldn't be parsed as a cfg/ini file

    :returns:
        parser object on success, ``None`` if errors occurred but were ignored.
    """
    if parser is None:
        parser = ConfigParser.ConfigParser()
    #dispatch on what kind of source we were handed
    t = get_input_type(source, source_type=source_type)
    if t == "raw":
        parser.readfp(StringIO(source))
    elif t == "stream":
        #remember the position so we can rewind after reading
        if reset:
            pos = source.tell()
        parser.readfp(source)
        if reset:
            source.seek(pos, 0)
    else:
        assert t == "path"
        if not os.path.exists(source):
            if errors == "ignore":
                return None
            elif errors == "log":
                log.error("ini file not found: %r", source)
                return None
            else:
                raise ValueError, "ini file not found: %r" % (source,)
        #ConfigParser.read() returns the list of files successfully read;
        #an empty result means the file couldn't be parsed
        if not parser.read([source]):
            if errors == "ignore":
                return None
            elif errors == "log":
                log.error("failed to read ini file: %r", source)
                return None
            else:
                raise ValueError, "failed to read ini file: %r" % (source,)
    return parser
+
def parser_get_section(parser, section, raw=None):
    """convert a section of a ConfigParser instance to a dict.

    :arg parser: the ConfigParser instance to read from
    :arg section: name of the section to extract
    :param raw:
        controls value interpolation:
        ``None`` / ``False`` interpolates all values (the default);
        ``True`` returns every value raw (uninterpolated);
        a collection of key names returns just those keys raw.

    :returns: dict mapping each option name in the section to its value
    """
    out = {}
    for key in parser.options(section):
        #pass raw=True only when raw is True, or this key was listed in raw
        out[key] = parser.get(section, key, raw=(raw and (raw is True or key in raw)))
    return out
+
+#=========================================================
+#helpers
+#=========================================================
def unescape_string(value):
    """evaluate python backslash-escapes (``\\n`` etc) inside a string.

    When a value is defined in an ini file using '\\n' and similar escapes,
    the backslashes come back literally; this helper runs the string
    through python's own literal parser so the escapes take effect.

    ``None`` is passed through unchanged.
    """
    if value is None:
        return value
    #Escape any double-quote chars with an EVEN number of preceding backslashes,
    #so eval() can't be exploited by malicious input; but DONT escape any
    #double-quote chars with an ODD number of preceding backslashes,
    #those are already properly escaped.
    #SECURITY NOTE: this still relies on eval() -- only feed it trusted config data.
    quoted = re.sub(r'(^|[^\\])((\\\\)*)"', r'\1\2\\"', value)
    return eval('"%s"' % (quoted,))
+
+#=========================================================
+#unused
+#=========================================================
+##
+###XXX: is this worth keeping in the long run?
+
#NOTE: just "path,section" currently used, rest can be redesigned
#should expand to load whole file if possible
def read_to_dict(path, section, cls=ConfigParser.ConfigParser, defaults=None, raw=None):
    """load the specified section from an ini source and return it as a dict.

    :arg path: source accepted by :func:`read_into_parser` (path, raw string, or stream)
    :arg section: section name to extract; ``"DEFAULT"`` returns the parser's defaults
    :param cls: parser class to instantiate (defaults to ConfigParser.ConfigParser)
    :param defaults: optional dict merged into the parser's defaults before extraction
    :param raw: forwarded to :func:`parser_get_section` to control interpolation
    """
    p = cls()
    read_into_parser(path, p)
    if defaults:
        p.defaults().update(defaults)
    if section == "DEFAULT":
        #XXX: what about RAW here?
        return p.defaults()
    else:
        return parser_get_section(p, section, raw=raw)
+
+##class no_parser_defaults(object):
+## """context manager that disables ConfigParser defaults
+## for the duration it's scope.
+## value returned by manager is the defaults we're ignoring
+## (please treat as readonly!!!)
+## FIXME: this currently isn't threadsafe!!!
+## would have to put a lock inside parser object, say.
+##
+## :Parameters:
+## parser
+## parser to act on
+## keep
+## optional list of keys whose defaults should be kept
+## """
+## "with-statement manager that disables parser's defaults"
+## def __init__(self, parser, keep=None):
+## if hasattr(parser, "cfg"): #hack for ConfigDict
+## parser = parser.cfg
+## self.parser = parser
+## self.keep = keep
+##
+## def __enter__(self):
+## self.defaults = self.parser._defaults
+## self.parser._defaults = {}
+## if self.keep:
+## for k in self.keep:
+## if k in self.defaults:
+## self.parser._defaults[k] = self.defaults[k]
+## return self.defaults
+##
+## def __exit__(self, *exc_info):
+## self.parser._defaults = self.defaults
+
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/refs.py b/bps/refs.py
new file mode 100644
index 0000000..d469d7f
--- /dev/null
+++ b/bps/refs.py
@@ -0,0 +1,646 @@
+"""bps.refs -- weak reference objects and proxy objects"""
+#=========================================================
+#imports
+#=========================================================
+#core
+from contextlib import contextmanager
+from sys import getrefcount
+import thread
+from time import time as cur_time
+from weakref import ref as make_weakref
+from warnings import warn
+#pkg
+from bps.undef import Undef
+from bps.error.types import ProxyNestError, ProxyEmptyError
+from bps.types import CustomDict
+#local
+__all__ = [
+ #decorators
+ 'weakref_property',
+
+ #weak ref classes
+ 'WeakSet',
+ 'SoftValueDict',
+
+ #proxy objects
+ 'ProxyObject',
+ 'proxy_using_object',
+ 'is_proxy_active',
+]
+#=========================================================
+#decorators
+#=========================================================
class weakref_property(object):
    """descriptor exposing a weakly-referenced object as a plain attribute.

    Values assigned through this property are stored as weak references
    inside a hidden instance attribute; reads transparently dereference
    them. Reading returns ``None`` when nothing is stored or when the
    referent has been garbage collected, and assigning ``None`` clears
    any stored reference.

    :param attr:
        Name of the instance attribute used to hold the weak reference.
        When omitted, a name derived from this property's id() is used.
    """
    #XXX: really should be called 'weakref_attribute'
    name = None #: the attribute we'll store the weakref in

    def __init__(self, attr=None):
        if attr is not None:
            self.attr = attr
        else:
            #FIXME: will this cause problems for, say, pickling?
            # shouldn't be pickling weakrefs, but could anything
            # be relying on the attr name only to have it shift
            # when the id changes?
            self.attr = "_weakref_property__%d" % (id(self),)

    def __get__(self, owner_obj, owner_cls):
        #class-level access returns the descriptor itself
        if owner_obj is None:
            return self
        ref = getattr(owner_obj, self.attr, None)
        #a dead weakref resolves to None, same as "nothing stored"
        return ref() if ref else None

    def __set__(self, owner_obj, value):
        #storing None purges any existing weakref
        ref = None if value is None else make_weakref(value)
        setattr(owner_obj, self.attr, ref)

    def __delete__(self, owner_obj):
        delattr(owner_obj, self.attr)
+
+#=========================================================
+#constructors
+#=========================================================
+
+#=========================================================
+#classes
+#=========================================================
+try:
+ #introduced in py27
+ from weakref import WeakSet
+except ImportError:
+ #TODO: make sure this conforms to py27 weakset
+ #esp ensure non-standard methods like iterrefs() match whatever py27 has (if available)
+
+ class WeakSet(object):
+ """A :class:`set` compatible object which stored weak references
+ to all of it's elements.
+
+ .. warning::
+ This class is not fully fleshed out, has some implementation
+ issues, and may need development work for all but the simplest
+ use-cases. Many of these issues can be fixed if the need arises.
+
+ * the | & ^ operators haven't been implemented (yet).
+ * there may be some glitches in some methods (needs UTs)
+ * this class doesn't derive from :class:`set`, but it should
+
+ all standard set methods should be present and act the same,
+ with the minor change that ones which previously returned
+ a set() instance now return a WeakSet() instance.
+
+ this class provides one additional method not found in the normal
+ set class:
+
+ .. automethod:: iterrefs
+
+ """
+ #TODO: inherit from set ?
+ #TODO: implement the other set methods correctly
+ #TODO: implement __or__, __and__, etc
+
+ def __init__(self, source=None):
+ data = self.data = set() #contains weakrefs to real data
+
+ #when weakref goes away, this callback will be called,
+ #which takes care of updating set.
+ def remove(elem, dref=make_weakref(data)):
+ data = dref()
+ if data is not None:
+ data.discard(elem)
+ self._remove = remove
+
+ #load initial data
+ if source:
+ self.update(source)
+
+ def __contains__(self, elem):
+ try:
+ r = make_weakref(elem)
+ except TypeError: #elem not weakref'able
+ return False
+ #NOTE: this relies on weakrefs being comparable based on obj's __eq__ method
+ return r in self.data
+
+ # __and__
+ # __eq__
+ # __iand__
+ # __ior__
+ # __ixor__
+
+ def __iter__(self):
+ for ref in self.data:
+ elem = ref()
+ if elem is not None:
+ yield elem
+
+ def __len__(self):
+ c = 0
+ for ref in self.data:
+ if ref() is not None:
+ c += 1
+ return c
+
+ # __ne__
+
+ def __nonzero__(self):
+ for ref in self.data:
+ if ref() is not None:
+ return True
+ return False
+
+ # __or__
+ # __rand__
+ # __ror__
+ # __rxor__
+ # __xor__
+
+ def add(self, elem):
+ self.data.add(make_weakref(elem, self._remove))
+
+ def clear(self):
+ self.data.clear()
+
+ def copy(self):
+ return WeakSet(self)
+
+ def difference(self, other):
+ "return new weakset containing elements in ``self`` which are also in ``other``"
+ return WeakSet(elem for elem in other if elem not in self)
+
+ def difference_update(self, other):
+ "remove elements from ``self`` which are also in ``other``"
+ discard = self.data.discard
+ for elem in other:
+ try:
+ ref = make_weakref(elem)
+ except TypeError:
+ continue
+ discard(ref)
+
+ def discard(self, elem):
+ try:
+ ref = make_weakref(elem)
+ except TypeError:
+ return
+ self.data.discard(ref)
+
+ def flush(self, force=True):
+ #NOTE: this is holdover from old implementation
+ # which didn't use weakref callback hook to clean unused refs.
+ warn("WeakSet.flush is deprecated & now a noop, it longer needs to be called", DeprecationWarning, 2)
+
+ def intersection(self, other):
+ return WeakSet(elem for elem in other if elem in self)
+
+ def intersection_update(self, other):
+ "remove elements from ``self`` which are not in ``other``"
+ #FIXME: probably have error since we're modifying set as we iterate it.
+ #might be better to use difference & different update
+ discard = self.data.discard
+ for ref in self.data:
+ elem = ref()
+ if elem is not None and elem not in other:
+ discard(ref)
+
+ #NOTE: this was introduced in py26, should we expose it for py25?
+ def isdisjoint(self, other):
+ "true if two sets have NO elements in common"
+ for elem in other:
+ if elem in self:
+ return False
+ return True
+
+ def issubset(self, other):
+ "whether other contains all elements of self"
+ if not hasattr(other, "__contains__"):
+ other = set(other)
+ return all(elem in other for elem in self)
+
+ def issuperset(self, other):
+ "whether self contains all elements of other"
+ return all(elem in self for elem in other)
+
+ def iterrefs(self):
+ "iterate through all weakrefs [not part of set standard]"
+ return iter(self.data)
+
+ def pop(self):
+ pop = self.data.pop
+ while True:
+ ref = pop() #raises KeyError when underlying set is empty
+ elem = ref()
+ if elem is not None:
+ return elem
+
+ def remove(self, elem):
+ try:
+ ref = make_weakref(elem)
+ except TypeError:
+ #raise KeyError since this could never be a key
+ raise KeyError, elem
+ try:
+ self.data.remove(ref)
+ except KeyError:
+ raise KeyError, elem
+
+ def symmetric_difference(self, other):
+ "return elems in self OR in other, but not both"
+ out = self.copy()
+ out.symmetric_difference_update(other)
+ return out
+
+ def symmetric_difference_update(self, other):
+ add, remove = self.add, self.remove
+ for elem in other:
+ if elem in self:
+ remove(elem)
+ else:
+ add(elem)
+
+ def union(self, other):
+ target = self.copy()
+ target.update(other)
+ return target
+
+ def update(self, other):
+ add = self.data.add
+ remove = self._remove
+ for elem in other:
+ add(make_weakref(elem,remove))
+
#TODO: this will need much more work before it's publically used
class SoftValueDict(CustomDict):
    """This dict operates much like :class:`weakref.WeakValueDictionary`,
    except that it attempts to provide something similar to java's soft references
    (which are real references, but purged if memory is needed).

    Currently, the implementation does not rely on memory at all,
    but rather uses a somewhat inefficient cache / timed expiration,
    so that infrequently accessed references will be dropped.

    In order for this to work, ``self.flush()`` must be called occasionally,
    in order to flush stale references.

    .. todo::
        * The main use of this class is by :class:`bps.fs.filepath`,
          so the class may not be as useful in other places.
        * Document this better
        * Work up a better soft-reference algorithm.

    .. warning::

        This uses a pretty ugly reference counting hack internally,
        it's almost guaranteed to break for someone... but the break
        will (at worst) mean this dict acts like a strong reference dict.

    :param expire:
        Number of seconds a key can remain unreferenced
        before it's deemed "stale" and cleared on the next flush cycle.
    :param flush:
        How many seconds between flush cycles.
        Unless ``self.flush(force=True)`` is used,
        calling ``self.flush()`` will only cause a flush to occur
        every *flush* seconds, allowing this function to be
        safely called *a lot*.
    """
    #=================================================
    #init
    #=================================================
    def __init__(self, expire=300, flush=150):
        self.expire = expire        #seconds of disuse before an entry may be purged
        self.flush_delay = flush    #minimum seconds between flush cycles
        self.next_flush = 0         #timestamp before which flush() is a noop
        CustomDict.__init__(self)

    def flush(self, force=False):
        "flush expired entries"
        #TODO: could "amortize" the cost of the flush over all the calls?
        cur = cur_time()
        #NOTE: previous revision had this comparison inverted (``cur >``),
        # which skipped every scheduled flush and flushed early calls instead.
        if not force and cur < self.next_flush:
            return
        self.next_flush = cur + self.flush_delay
        cutoff = cur - self.expire
        #NOTE: dict.iteritems is used deliberately, to bypass our
        # __getitem__ (which would refresh every entry's access time).
        purge = [
            key
            for key, (atime, value) in dict.iteritems(self)
            if getrefcount(value) <= 4 and atime < cutoff
            #4 refs held by: self, ``value`` in current frame, getrefcount frame, and iteritems frame
        ]
        for key in purge:
            del self[key]

    #=================================================
    #first level
    #=================================================
    def __getitem__(self, key):
        #entries are stored as mutable [atime, value] pairs
        entry = CustomDict.__getitem__(self, key)
        entry[0] = cur_time() #fresh access time
        return entry[1]

    def __setitem__(self, key, value):
        return CustomDict.__setitem__(self, key, [cur_time(), value])

    #__delitem__ left alone

    #=================================================
    #EOC
    #=================================================
+
+#=========================================================
+#proxy objects
+#=========================================================
+#TODO: a potential use-case involves global default values
+# which can then be overridden on a per-thread basis.
+#
+import threading
+
+#TODO: definitely need ProxyObject unittests
+
class ProxyObject(object):
    """This is a global proxy object.

    Once an instance is created, proxy targets are pushed onto the stack.
    Any attribute access will be proxied to the last target object pushed on to the stack.
    Targets must be removed in LIFO order from the stack.

    This is mainly useful for when you absolutely have to have
    a global object, but need to import it before the object itself exists.

    :param name:
        Optional unique name to give the proxy instance you've created.
        This will be reported in the text of any raised errors,
        and via ``repr()``.

    :param default:
        Optional object which will be used as default target
        when the stack is empty.

    :param threaded:
        If ``True``, the instance will maintain a unique stack of targets
        for each thread. If ``False`` (the default), a single stack will
        be shared across all threads in the process.

    .. automethod:: _current_obj
    .. automethod:: _pop_object
    .. automethod:: _push_object

    .. note::
        This class tries to adhere to the interface used by
        `Paste's StackObjectProxy <http://pythonpaste.org/modules/registry.html#paste.registry.StackedObjectProxy>`_,
        which it was directly inspired by.

    """
    #=========================================================
    #init
    #=========================================================
    def __init__(self, name=None, default=Undef, threaded=False):
        #NOTE: all state is written into __dict__ directly, since the
        # __setattr__ defined below forwards normal writes to the target.
        self.__dict__['_BpsProxyObject_name'] = name or ("Unnamed 0x%x" % id(self))
        self.__dict__['_BpsProxyObject_default'] = default
        self.__dict__['_BpsProxyObject_threaded'] = threaded
        if threaded:
            self.__dict__['_BpsProxyObject_local'] = threading.local()
        else:
            self.__dict__['_BpsProxyObject_stack'] = []

    #=========================================================
    #stack management
    #=========================================================
    def _get_stack(self, create=False):
        """[private] return the live target stack.

        Returns the per-thread stack when ``threaded=True``, else the
        process-wide one. In threaded mode, returns ``None`` when the
        calling thread has no stack yet and *create* is ``False``.
        """
        if self.__dict__['_BpsProxyObject_threaded']:
            local = self.__dict__['_BpsProxyObject_local']
            stack = getattr(local, "stack", None)
            if stack is None and create:
                stack = local.stack = []
            return stack
        return self.__dict__['_BpsProxyObject_stack']

    def _current_obj(self):
        """Returns object on top of proxy stack.

        If the stack is empty, the default object will be used.
        If there is no default object, :exc:`bps.error.types.ProxyEmptyError` will be raised.
        """
        stack = self._get_stack()
        if stack:
            return stack[-1]
        default = self.__dict__['_BpsProxyObject_default']
        if default is Undef:
            raise ProxyEmptyError(
                'No object registered for global proxy %r'
                % self.__dict__['_BpsProxyObject_name'])
        return default

    def _push_object(self, obj):
        """push another object onto the proxy stack"""
        self._get_stack(create=True).append(obj)

    def _pop_object(self, obj=Undef):
        """pop top object off proxy stack, and return it.

        If the stack is empty, an :exc:`IndexError` will be raised.

        Objects are popped off in LIFO order.

        If the parameter *obj* is specified, it will be checked against
        the removed object, and if it does not match, :exc:`bps.error.types.ProxyNestError` will be raised.
        """
        #NOTE: the stack is created on demand here so that popping from a
        # never-pushed threaded proxy raises IndexError (empty list), the
        # same as the non-threaded case.
        cur = self._get_stack(create=True).pop()
        if obj is not Undef and obj is not cur:
            raise ProxyNestError(
                "Unexpected object popped from %s proxy %r: popped %r, but expected %r"
                % (
                    ["global","thread"][self.__dict__['_BpsProxyObject_threaded']],
                    self.__dict__['_BpsProxyObject_name'],
                    cur, obj)
                )
        return cur

    def _object_stack(self):
        """return copy of current object stack as a list"""
        stack = self._get_stack()
        if stack is None:
            return []
        return stack[:]

    #=========================================================
    #methods that have to be overridden for proxying to work
    #=========================================================

    #proxy attribute access
    def __getattr__(self, attr):
        "proxy all attribute reads to the proxy target"
        return getattr(self._current_obj(), attr)

    def __setattr__(self, attr, value):
        "proxy all attribute writes to the proxy target"
        setattr(self._current_obj(), attr, value)

    def __delattr__(self, attr):
        "proxy all attribute deletes to the proxy target"
        delattr(self._current_obj(), attr)

    #=========================================================
    #special attributes
    # exposing these attributes speeds things up, but more
    # importantly, Python performs various "capability" checks
    # (eg is an object callable) by looking at the class dict,
    # so __getattr__ is never hit. Thus, if we don't expose
    # these, Python won't detect them.
    #=========================================================

    #NOTE: only going to uncomment these after testing
    # that _all_ are need for the above behavior.
    # confirmed for __call__, but need unittests

    #proxy item access
    ##def __getitem__(self, key):
    ##    return self._current_obj()[key]
    ##
    ##def __setitem__(self, key, value):
    ##    self._current_obj()[key] = value
    ##
    ##def __delitem__(self, key):
    ##    del self._current_obj()[key]

    #proxy 'in' operator
    ##def __contains__(self, key):
    ##    return key in self._current_obj()

    ###proxy the special methods
    ##def __len__(self):
    ##    return len(self._current_obj())

    def __call__(self, *args, **kw):
        "proxy calls to the current target (must live in class dict to be detected)"
        return self._current_obj()(*args, **kw)

    ##def __iter__(self):
    ##    return iter(self._current_obj())
    ##
    ##def __nonzero__(self):
    ##    return bool(self._current_obj())

    #=========================================================
    #methods that expose some information about the proxy
    #=========================================================
    def __dir__(self):
        "reports list of all of proxy object's attrs as well as target object's attributes (if any)"
        attrs = set(dir(self.__class__))
        attrs.update(self.__dict__)
        try:
            obj = self._current_obj()
        except ProxyEmptyError:
            #no target: just report the proxy's own attributes
            pass
        else:
            attrs.update(dir(obj))
        return sorted(attrs)

    def __repr__(self):
        "tries to report target's repr, falls back to proxy object id"
        try:
            obj = self._current_obj()
        except (ProxyEmptyError, AttributeError):
            #NOTE: AttributeError caught in case repr() was called
            # before object created or after partly torn down.
            return object.__repr__(self)
        else:
            return repr(obj)

    #=========================================================
    #eoc
    #=========================================================
+
+#TODO: alter_proxy_config()
+# which can change from threads->non-threaded, change name & default
+
def is_proxy_active(proxy):
    """checks if a proxy object currently has a target set.

    :arg proxy: the proxy object to test.

    :returns:
        ``True`` if the proxy can currently resolve a target
        (whether from its stack, or from a configured default);
        ``False`` if resolving would raise :exc:`ProxyEmptyError`.
    """
    #TODO: make this work w/ Paste proxies?
    #TODO: add default=False to exclude default from check, somehow?
    try:
        proxy._current_obj()
    except ProxyEmptyError:
        return False
    return True
+
@contextmanager
def proxy_using_object(proxy, obj):
    """context manager that temporarily makes *proxy* target *obj*.

    Pushes *obj* onto the proxy's stack on entry, and pops it back
    off when the context exits (even if an exception escapes).

    Usage Example::

        >>> from bps.refs import ProxyObject, proxy_using_object
        >>> app = ProxyObject()
        >>> obj = object()
        >>> with proxy_using_object(app,obj):
        >>>     #during this context, 'app' will proxy 'obj',
        >>>     #and it will stop proxying 'obj' when the context ends

    :param proxy:
        The proxy instance
    :param obj:
        The object it should proxy for the duration of the context.

    :returns:
        Returns the original proxy object,
        after having *obj* pushed onto its stack.
    """
    proxy._push_object(obj)
    try:
        yield proxy
    finally:
        #pass obj so nesting violations are detected on the way out
        proxy._pop_object(obj)
+
+#TODO: ProxyBinding, for pushing/popping from a group of proxies at once
+
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/rng.py b/bps/rng.py
new file mode 100644
index 0000000..595516d
--- /dev/null
+++ b/bps/rng.py
@@ -0,0 +1,218 @@
+"""bps.rng - random generators"""
+#================================================================
+#imports
+#================================================================
+#core
+from hashlib import sha512
+import os
+import random as _random
+#site
#py2 compat shim: ensure the name ``bytes`` exists
#(older pythons lack it; where present it aliases str anyway)
try:
    bytes
except NameError:
    bytes = str
+#local
+__all__ = [
+ 'random',
+ 'srandom',
+ 'drandom',
+]
+
#check if we have urandom support
#NOTE: os.urandom raises NotImplementedError on platforms without an
# OS-level entropy source, so probe once at import time and remember.
try:
    os.urandom(1)
    has_urandom = True
except NotImplementedError:
    has_urandom = False
+
+#calibrate the precision of the host's time
+## tick = time.time()
+## #FIXME: ~ 1/10 chance we'll miss last digit this way, 1/100 the 2nd to last, etc
+## # would love a way to properly interrogate the time (or sys) module for this info.
+## mag = 1
+## while tick != int(tick):
+## mag *= 2
+## tick *= 2
+## #NOTE: log(mag,2) gives precision in bits
+
+
+#================================================================
+#more random methods
+#================================================================
+class BaseRandom(_random.Random):
+ """enhances builtin PRNG with extra features"""
+## state_bits = 19937 #approx bits of state in generator
+
+ #================================================================
+ #prng enhancements
+ #================================================================
+ def reseed(self):
+ """attempt to scramble PRNG state by adding in entropy from time & other sources"""
+ self.jumpahead(self.genseed())
+
+ def genseed(self):
+ """generate a good random seed value from a number of entropy sources.
+
+ .. note:
+ While this function returns 64 byte integer,
+ the sources it draws from have maybe 32-42 bits worth of new entropy.
+ Since python's prng has 19937 bits worth of state,
+ this is probably barely enough to reseed with effective randomness,
+ and really, more sources should be found.
+ """
+ #want to scramble the prng as best as possible...
+ #do this by gathering together all the entropy we can,
+ #ordering the least-predictable information first,
+ #and then run it through sha512.
+
+ t = threading.current_thread()
+ #NOTE: entropy estimated below are relative to an attacker
+ # who is on the same system, with another process running,
+ # but who is not able to access the internal state of _this_ process.
+ # this is all very heuristic and ugly, but you don't have urandom,
+ # do you? so we going to do our best...
+ #
+ #NOTE: characters are put in string w/ most predictabile chars at start,
+ # then string is reversed and digested
+ text = "%x\x00%s\x00%x\x00%s\x00%x\x00%.15f" % (
+ os.getpid(),
+ #the current pid, for the heck of it
+
+ t.name,
+ id(t),
+ #id and name of current thread, for thread-uniqueness
+
+ self.getrandbytes(32),
+ #feed a little from existing generator
+ #just added to help mix things up.
+
+ id(object()),
+ #id of a freshly created object, to make timing attacks
+ #just a little harder
+ #at least 16 bits of useful entropy
+
+ time.time(),
+ #the current time, for some tasty entropy.
+ #about 16 bits of useful entropy
+ )
+## print repr(text)
+ return int(sha512(text[::-1]).hexdigest(), 16) # 64 byte long, < 4 bytes new entropy :(
+
+ #================================================================
+ #extra methods usuable by all rngs
+ #================================================================
+ def getrandbytes(self, size):
+ """return string of *size* number of random bytes"""
+ #TODO: make this faster?
+ bits = size<<3
+ value = self.getrandbits(bits)
+ return ''.join(
+ chr((value >> offset) & 0xff)
+ for offset in xrange(0, bits, 8)
+ )
+
+ def weighted_choice(self, source):
+ """pick randomly from a weighted list of choices.
+
+ The list can be specified in a number of formats (see below),
+ but in essence, provides a list of choices, each with
+ an attached (non-negative) numeric weight. The probability of a given choice
+ being selected is ``w/tw``, where ``w`` is the weight
+ attached to that choice, and ``tw`` is the sum of all weighted
+ in the list.
+
+ :param source:
+ weighted list of choices to select from.
+
+ * source can be dict mapping choice -> weight.
+ * source can be sequence of ``(choice,weight)`` pairs
+ * source can be sequence of weights, in which case
+ a given index in the sequence will be chosen based on the weight
+ (equivalent too ``enumerate(source)``).
+
+ :returns:
+ The selected choice.
+
+ .. note::
+ * Choices with a weight of ``0`` will NEVER be chosen.
+ * Weights should never be negative
+ * If the total weight is 0, an error will be raised.
+ """
+ if not source:
+ raise IndexError, "no choices"
+ if hasattr(source, "items"):
+ #assume it's a map of choice=>weight
+ total = sum(source.itervalues())
+ if total == 0:
+ raise ValueError, "zero sum weights"
+ pik = 1+self.randrange(0, total)
+ cur = 0
+ for choice, weight in source.iteritems():
+ cur += weight
+ if cur >= pik:
+ return choice
+ else:
+ raise RuntimeError, "failed to sum weights correctly"
+ source = source.items()
+ elif isinstance(source[0], (int, float, long)):
+ #assume it's a sequence of weights, they just want the index
+ total = sum(source)
+ if total == 0:
+ raise ValueError, "zero sum weights"
+ pik = 1+self.randrange(0, total)
+ cur = 0
+ for idx, weight in enumerate(source):
+ cur += weight
+ if cur >= pik:
+ return idx
+ else:
+ raise RuntimeError, "failed to sum weights correctly"
+ else:
+ #assume it's a sequence of (choice,weight) pairs
+ total = sum(elem[1] for elem in source)
+ if total == 0:
+ raise ValueError, "zero sum weights"
+ pik = 1+self.randrange(0, total)
+ cur = 0
+ for choice, weight in source:
+ cur += weight
+ if cur >= pik:
+ return choice
+ else:
+ raise RuntimeError, "failed to sum weights correctly"
+
+ #================================================================
+ #eof
+ #================================================================
+
+#================================================================
+#custom randoms
+#================================================================
class SystemRandom(_random.SystemRandom, BaseRandom):
    "new SystemRandom with additional methods mixed in"
    #NOTE: SystemRandom draws from os.urandom, so reseeding is meaningless;
    # reuse the stdlib's no-op _stub to disable reseed().
    #NOTE(review): _stub is a CPython implementation detail -- confirm it
    # exists on other interpreters before relying on this.
    reseed = _random.SystemRandom._stub
+
class DeadRandom(BaseRandom):
    "rng with no external entropy sources besides seed(), useful for predictable unittests"
    def reseed(self):
        #deliberate no-op: output stream stays fully determined by seed()
        pass
+
+#================================================================
+#
+#================================================================
+
#pseudo random with entropic seeding
random = BaseRandom()  #module-level shared instance

#strongest random (system random if available, else prandom)
#NOTE(review): when urandom is unavailable, srandom silently degrades to
# the PRNG above -- callers needing real entropy should check has_urandom.
if has_urandom:
    srandom = SystemRandom()
else:
    srandom = random

#entropy-free prng for testing purposes
drandom = DeadRandom()
+
+#================================================================
+#
+#================================================================
diff --git a/bps/security/__init__.py b/bps/security/__init__.py
new file mode 100644
index 0000000..6a12dcb
--- /dev/null
+++ b/bps/security/__init__.py
@@ -0,0 +1 @@
+"""bps.security -- security related functions"""
diff --git a/bps/security/_bcrypt.py b/bps/security/_bcrypt.py
new file mode 100644
index 0000000..d44d1eb
--- /dev/null
+++ b/bps/security/_bcrypt.py
@@ -0,0 +1,727 @@
+"""bps.security._bcrypt
+
+Derivation
+==========
+This is a pure-python implementation of the bcrypt hash algorithm.
+It's based off the java bcrypt implementation jBcrypt 0.2,
+downloaded from http://www.mindrot.org/projects/jBCrypt/.
+
+The original jBcrypt was released with the following license::
+
+ Copyright (c) 2006 Damien Miller <djm@mindrot.org>
+
+ Permission to use, copy, modify, and distribute this software for any
+ purpose with or without fee is hereby granted, provided that the above
+ copyright notice and this permission notice appear in all copies.
+
+ THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+The original jBcrypt source was converted line-by-line from java,
+and then adapted to be more efficient for python by Eli Collins <elic@astllc.org>
+
+This is meant mainly as a fall-back for BPS to use,
+if you need speedy bcrypt support, install the python-bcrypt
+library, and BPS will use _it_ instead of this module.
+
+The bcrypt unit-tests in bps.test.test_security_bcrypt
+will test all available backends.
+
+Usage
+=====
+This class attempts to be compatible with py-bcrypt
+(at least as far as BPS is concerned),
+since this is merely a fallback for BPS when py-bcrypt
+is not available::
+
+ import bps.security._bcrypt as bcrypt
+
+ # Hash a password for the first time
+ hashed = bcrypt.hashpw(password, bcrypt.gensalt())
+
+ # gensalt's log_rounds parameter determines the complexity
+ # the work factor is 2**log_rounds, and the default is 12
+ hashed = bcrypt.hashpw(password, bcrypt.gensalt(10))
+
+ # Check that an unencrypted password matches one that has
+ # previously been hashed
+ if bcrypt.hashpw(plaintext, hashed) == hashed:
+ print "It matches"
+ else:
+ print "It does not match"
+"""
+#=========================================================
+#imports
+#=========================================================
+#core
+from cStringIO import StringIO
+#pkg
+from bps.rng import srandom
+from bps.numeric import bytes_to_list, list_to_bytes
+#local
+__all__ = [
+ 'hashpw',
+ 'gensalt',
+]
+#=========================================================
+#bcrypt/blowfish constants
+#=========================================================
+
# BCrypt parameters
BCRYPT_SALT_LEN = 16        #raw salt size in bytes (before base64 encoding)
BCRYPT_DEFAULT_ROUNDS = 10  #default log2 cost used by gensalt()
BCRYPT_MIN_ROUNDS = 4       #bounds enforced by crypt_raw() and clamped to by gensalt()
BCRYPT_MAX_ROUNDS = 31

# Blowfish parameters
BLOWFISH_NUM_ROUNDS = 16    #feistel rounds per encipher() call
+
+# Initial contents of key schedule
+BLOWFISH_P = [
+ 0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344,
+ 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89,
+ 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c,
+ 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917,
+ 0x9216d5d9, 0x8979fb1b
+]
+
+BLOWFISH_S = [
+ 0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7,
+ 0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99,
+ 0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16,
+ 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e,
+ 0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee,
+ 0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013,
+ 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef,
+ 0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e,
+ 0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60,
+ 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440,
+ 0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce,
+ 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a,
+ 0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e,
+ 0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677,
+ 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193,
+ 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032,
+ 0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88,
+ 0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239,
+ 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e,
+ 0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0,
+ 0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3,
+ 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98,
+ 0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88,
+ 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe,
+ 0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6,
+ 0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d,
+ 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b,
+ 0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7,
+ 0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba,
+ 0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463,
+ 0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f,
+ 0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09,
+ 0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3,
+ 0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb,
+ 0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279,
+ 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8,
+ 0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab,
+ 0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82,
+ 0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db,
+ 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573,
+ 0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0,
+ 0x10fa3d98, 0xfd2183b8, 0x4afcb56c, 0x2dd1d35b,
+ 0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790,
+ 0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8,
+ 0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4,
+ 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0,
+ 0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7,
+ 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c,
+ 0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad,
+ 0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1,
+ 0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299,
+ 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9,
+ 0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477,
+ 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf,
+ 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49,
+ 0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af,
+ 0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa,
+ 0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5,
+ 0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41,
+ 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915,
+ 0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400,
+ 0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915,
+ 0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664,
+ 0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a,
+ 0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623,
+ 0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266,
+ 0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1,
+ 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e,
+ 0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6,
+ 0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1,
+ 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e,
+ 0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1,
+ 0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737,
+ 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8,
+ 0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff,
+ 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd,
+ 0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701,
+ 0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7,
+ 0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41,
+ 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331,
+ 0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf,
+ 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af,
+ 0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e,
+ 0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87,
+ 0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c,
+ 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2,
+ 0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16,
+ 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd,
+ 0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b,
+ 0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509,
+ 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e,
+ 0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3,
+ 0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f,
+ 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a,
+ 0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4,
+ 0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960,
+ 0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66,
+ 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28,
+ 0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802,
+ 0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84,
+ 0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510,
+ 0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf,
+ 0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14,
+ 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e,
+ 0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50,
+ 0x40685a32, 0x3c2ab4b3, 0x319ee9d5, 0xc021b8f7,
+ 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8,
+ 0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281,
+ 0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99,
+ 0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696,
+ 0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128,
+ 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73,
+ 0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0,
+ 0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0,
+ 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105,
+ 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250,
+ 0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3,
+ 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285,
+ 0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00,
+ 0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061,
+ 0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb,
+ 0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e,
+ 0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735,
+ 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc,
+ 0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9,
+ 0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340,
+ 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20,
+ 0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7,
+ 0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934,
+ 0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068,
+ 0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af,
+ 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840,
+ 0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45,
+ 0xbfbc09ec, 0x03bd9785, 0x7fac6dd0, 0x31cb8504,
+ 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a,
+ 0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb,
+ 0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee,
+ 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6,
+ 0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42,
+ 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b,
+ 0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2,
+ 0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb,
+ 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527,
+ 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b,
+ 0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33,
+ 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c,
+ 0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3,
+ 0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc,
+ 0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17,
+ 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564,
+ 0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b,
+ 0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115,
+ 0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922,
+ 0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728,
+ 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0,
+ 0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e,
+ 0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37,
+ 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d,
+ 0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804,
+ 0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b,
+ 0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3,
+ 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb,
+ 0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d,
+ 0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c,
+ 0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350,
+ 0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9,
+ 0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a,
+ 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe,
+ 0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d,
+ 0xd1fd8346, 0xf6381fb0, 0x7745ae04, 0xd736fccc,
+ 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f,
+ 0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61,
+ 0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2,
+ 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9,
+ 0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 0x915f95e2,
+ 0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c,
+ 0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e,
+ 0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633,
+ 0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10,
+ 0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169,
+ 0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52,
+ 0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027,
+ 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5,
+ 0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62,
+ 0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634,
+ 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76,
+ 0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24,
+ 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc,
+ 0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4,
+ 0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c,
+ 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837,
+ 0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0,
+ 0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b,
+ 0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe,
+ 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b,
+ 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4,
+ 0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8,
+ 0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6,
+ 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304,
+ 0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22,
+ 0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4,
+ 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6,
+ 0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9,
+ 0xc72fefd3, 0xf752f7da, 0x3f046f69, 0x77fa0a59,
+ 0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593,
+ 0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51,
+ 0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28,
+ 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c,
+ 0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b,
+ 0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28,
+ 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c,
+ 0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd,
+ 0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a,
+ 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319,
+ 0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb,
+ 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f,
+ 0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991,
+ 0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32,
+ 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680,
+ 0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166,
+ 0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae,
+ 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb,
+ 0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5,
+ 0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47,
+ 0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370,
+ 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d,
+ 0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84,
+ 0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048,
+ 0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8,
+ 0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd,
+ 0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9,
+ 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7,
+ 0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38,
+ 0x0339c32a, 0xc6913667, 0x8df9317c, 0xe0b12b4f,
+ 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c,
+ 0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525,
+ 0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1,
+ 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442,
+ 0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964,
+ 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e,
+ 0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8,
+ 0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0x0fe3f11d,
+ 0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f,
+ 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299,
+ 0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02,
+ 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc,
+ 0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614,
+ 0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a,
+ 0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6,
+ 0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b,
+ 0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0,
+ 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060,
+ 0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e,
+ 0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9,
+ 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f,
+ 0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6
+];
+
+# bcrypt IV: "OrpheanBeholderScryDoubt"
+BF_CRYPT_CIPHERTEXT = [ # 6 int
+ 0x4f727068, 0x65616e42, 0x65686f6c,
+ 0x64657253, 0x63727944, 0x6f756274
+]
+
+#(unencoded) length of bcrypt chk length
+BCRYPT_CHK_LEN = len(BF_CRYPT_CIPHERTEXT)*4-1
+
+#=========================================================
+#base64 encoding
+#=========================================================
+
# Table for bcrypt's Base64 encoding (note: NOT the standard MIME alphabet)
CHARS = "./ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
#reverse lookup: character -> 6-bit value
CHARSIDX = dict( (c, i) for i, c in enumerate(CHARS))

def encode_base64(d):
    """Encode a byte array using bcrypt's slightly-modified base64 encoding scheme.

    Note that this is *not* compatible with the standard MIME-base64 encoding,
    and (unlike MIME base64) no '=' padding characters are emitted.

    :param d:
        the bytes to encode (unicode is encoded to utf-8 first)
    :returns:
        the bytes as encoded using bcrypt's base64
    """
    if isinstance(d, unicode):
        d = d.encode("utf-8")
        #ensure ord() returns something w/in 0..255

    rs = StringIO()
    write = rs.write
    dlen = len(d)
    didx = 0

    #every loop pass consumes up to 3 bytes and emits up to 4 chars
    while True:
        #encode first byte -> 1 char (6 bits) w/ 2 bits left over
        if didx >= dlen:
            break
        c1 = ord(d[didx])
        write(CHARS[(c1 >> 2) & 0x3f])
        c1 = (c1 & 0x03) << 4
        didx += 1

        #encode 2 bits + second byte -> 1 char (6 bits) w/ 4 bits left over
        if didx >= dlen:
            write(CHARS[c1])
            break
        c2 = ord(d[didx])
        write(CHARS[c1 | (c2 >> 4) ])
        c2 = (c2 & 0x0f) << 2
        didx += 1

        #encode 4 bits left over + third byte -> 2 chars, no bits left over
        if didx >= dlen:
            write(CHARS[c2])
            break
        c3 = ord(d[didx])
        write(CHARS[c2 | (c3 >> 6)])
        write(CHARS[c3 & 0x3f])
        didx += 1

    return rs.getvalue()

def decode_base64(s):
    """Decode bytes encoded using bcrypt's base64 scheme.

    :param s:
        string of bcrypt-base64 encoded bytes

    :returns:
        string of decoded bytes (bits of a trailing partial byte are dropped)

    :raises ValueError:
        if characters outside the bcrypt base64 alphabet are passed in
    """
    rs = StringIO()
    write = rs.write
    slen = len(s)
    sidx = 0

    def char64(c):
        "look up 6 bit value in table"
        try:
            return CHARSIDX[c]
        except KeyError:
            #NOTE: uses raise E(msg) instead of the deprecated "raise E, msg"
            #statement form, so this parses under python 3 as well as python 2
            raise ValueError("invalid chars in base64 string")

    #every loop pass consumes up to 4 chars and emits up to 3 bytes
    while True:

        #decode char 1 + char 2 -> 1 byte + 4 bits left over
        if sidx >= slen-1:
            break
        c2 = char64(s[sidx+1])
        write(chr((char64(s[sidx]) << 2) | (c2 >> 4)))
        sidx += 2

        #decode 4 bits left over + 3rd char -> 1 byte + 2 bits left over
        if sidx >= slen:
            break
        c3 = char64(s[sidx])
        write(chr(((c2 & 0x0f) << 4) | (c3 >> 2)))
        sidx += 1

        #decode 2 bits left over + 4th char -> 1 byte
        if sidx >= slen:
            break
        write(chr(((c3 & 0x03) << 6) | char64(s[sidx])))
        sidx += 1

    return rs.getvalue()
+
+#=========================================================
+#helpers
+#=========================================================
def streamtoword(data, offp):
    """Cyclically extract a 32-bit word of key material from *data*.

    :param data: the byte string to extract the data from; the offset wraps
        around to the start when the end is reached
    :param offp: a "pointer" (as a one-entry list) to the current offset into
        data; updated in place so successive calls continue where the last
        one left off
    :returns: the next big-endian 32-bit word of material from data
    """
    word = 0
    off = offp[0]
    dlen = len(data)

    #exactly four bytes per word; iterating a literal tuple avoids the
    #py2-only xrange builtin while behaving identically under python 2
    for _ in (0, 1, 2, 3):
        word = (word << 8) | ord(data[off])
        off = (off + 1) % dlen

    offp[0] = off
    return word
+
def encode_hash(minor, rounds, salt, chk=None):
    """Assemble a bcrypt hash string: ``$2<minor>$<rounds>$<salt><chk>``.

    *minor* values of ``None``/empty/``'\\x00'`` denote the original ``$2$``
    scheme and are omitted; *salt* (and *chk*, when given) are raw bytes and
    get base64-encoded here.
    """
    version = minor if minor and minor != '\x00' else ''
    out = '$2%s$%02d$%s' % (version, rounds, encode_base64(salt))
    if chk:
        out += encode_base64(chk)
    return out
+
+#=========================================================
+#hash class
+#=========================================================
class BCrypt(object):
    """Pure-python implementation of the bcrypt / blowfish core.

    An instance holds a private working copy of the blowfish key schedule
    (``P``) and S-boxes (``S``) for the duration of a crypt_raw() run.
    """

    #private copy of keys (populated by init_key(); class-level None marks "not keyed yet")
    P = None
    S = None

    def encipher(self, lr, off):
        """Blowfish encipher a single 64-bit block encoded as two 32-bit halves.

        :param lr: an array containing the two 32-bit half blocks (modified in place)
        :param off: the position in the array of the blocks

        NOTE(review): unlike the java original, python ints don't wrap at 32
        bits, so intermediates below can carry extra high bits; every
        consumer of these values (the ``& 0xff`` S-box indices here and the
        per-byte masks in crypt_raw) reads only the low 32 bits, which match
        the reference implementation.
        """
        P, S = self.P, self.S

        l = lr[off]
        r = lr[off+1]

        l ^= P[0];
        for i in xrange(0, BLOWFISH_NUM_ROUNDS-1, 2):
            # Feistel substitution on left word
            n = S[(l >> 24) & 0xff]
            n += S[0x100 | ((l >> 16) & 0xff)]
            n ^= S[0x200 | ((l >> 8) & 0xff)]
            n += S[0x300 | (l & 0xff)]
            r ^= n ^ P[i+1]

            # Feistel substitution on right word
            n = S[(r >> 24) & 0xff]
            n += S[0x100 | ((r >> 16) & 0xff)]
            n ^= S[0x200 | ((r >> 8) & 0xff)]
            n += S[0x300 | (r & 0xff)]
            l ^= n ^ P[i+2]

        #swap the halves and apply the final subkey
        lr[off] = r ^ P[BLOWFISH_NUM_ROUNDS + 1]
        lr[off + 1] = l

    def init_key(self):
        """Initialise the blowfish key schedule with fresh copies of the
        standard P-array and S-boxes."""
        self.P = list(BLOWFISH_P)
        self.S = list(BLOWFISH_S)

    def key(self, key):
        """Key the blowfish cipher: xor *key* into the P-array, then
        regenerate the whole P-array and S-boxes from the evolving cipher's
        own output.

        :param key: a byte string containing the key (cycled as needed)
        """
        #key byte[]
        #return void
        koffp = [0]
        lr = [0, 0]
        P, S = self.P, self.S
        plen = len(P)
        slen = len(S)

        #fold key material into the subkeys
        for i in xrange(plen):
            P[i] = P[i] ^ streamtoword(key, koffp)

        #replace P, then S, two words at a time with the cipher's keystream
        for i in xrange(0, plen, 2):
            self.encipher(lr, 0)
            P[i] = lr[0]
            P[i + 1] = lr[1]

        for i in xrange(0, slen, 2):
            self.encipher(lr, 0)
            S[i] = lr[0]
            S[i + 1] = lr[1]

    def ekskey(self, data, key):
        """Perform the "enhanced key schedule" step described by
        Provos and Mazieres in "A Future-Adaptable Password Scheme",
        http://www.openbsd.org/papers/bcrypt-paper.ps

        Like :meth:`key`, but additionally xors words cycled from *data*
        (the salt) into the half-blocks before each encipher call.

        :param data: salt information
        :param key: password information
        """
        P, S = self.P, self.S
        koffp = [0]
        doffp = [0]
        lr = [0, 0]
        plen = len(P)
        slen = len(S)

        for i in xrange(plen):
            P[i] = P[i] ^ streamtoword(key, koffp)

        for i in xrange(0, plen, 2):
            lr[0] ^= streamtoword(data, doffp)
            lr[1] ^= streamtoword(data, doffp)
            self.encipher(lr, 0)
            P[i] = lr[0]
            P[i + 1] = lr[1]

        for i in xrange(0, slen, 2):
            lr[0] ^= streamtoword(data, doffp)
            lr[1] ^= streamtoword(data, doffp)
            self.encipher(lr, 0)
            S[i] = lr[0]
            S[i + 1] = lr[1]

    def crypt_raw(self, password, salt, log_rounds):
        """perform central password hashing step in bcrypt scheme.

        :param password: the password to hash (byte string)
        :param salt: the binary salt to use (exactly BCRYPT_SALT_LEN bytes)
        :param log_rounds: the log2 of the number of rounds
        :returns: list of byte values (ints in 0..255) containing hashed password
        :raises ValueError: if log_rounds or the salt length is out of range
        """
        # password byte[]
        # salt byte[]
        # log_rounds int
        # returns byte[]
        cdata = list(BF_CRYPT_CIPHERTEXT)
        clen = len(cdata)

        if log_rounds < BCRYPT_MIN_ROUNDS or log_rounds > BCRYPT_MAX_ROUNDS:
            raise ValueError, "Bad number of rounds"
        if len(salt) != BCRYPT_SALT_LEN:
            raise ValueError, "Bad salt length: %r" % salt

        #expensive key setup: one salted schedule, then 2**log_rounds
        #alternating re-keyings with the password and the salt
        self.init_key()
        self.ekskey(salt, password)
        for i in xrange(1 << log_rounds):
            self.key(password)
            self.key(salt)

        #encipher the magic IV blocks 64 times to produce the checksum
        for i in xrange(64):
            for j in xrange(clen>>1):
                self.encipher(cdata, j << 1)

        #unpack the 32-bit words into big-endian bytes
        ret = [0] * (clen*4)
        j = 0
        for i in xrange(clen):
            ret[j] = ((cdata[i] >> 24) & 0xff)
            ret[j+1] = ((cdata[i] >> 16) & 0xff)
            ret[j+2] = ((cdata[i] >> 8) & 0xff)
            ret[j+3] = (cdata[i] & 0xff)
            j += 4
        return ret
+
+#=========================================================
+#frontends
+#=========================================================
+
def hashpw(password, salt):
    """Hash a password using the OpenBSD bcrypt scheme.

    :param password:
        the password to hash (encoded to utf-8 internally)
    :param salt:
        the salt to hash with (generated via gensalt), e.g.
        ``"$2a$10$<22 chars of bcrypt base64>"``
    :returns:
        the hashed password in modular-crypt format,
        ``$2<minor>$<rounds>$<salt><checksum>``
    :raises ValueError:
        if the salt string is malformed
    """
    #recognized formats:
    # "$2$rd$saltchk"
    # "$2a$rd$saltchk"

    #extract version number
    if not salt.startswith("$2"):
        raise ValueError("Invalid salt version")
    if salt[2] == '$':
        minor = '\x00'
        off = 3
    else:
        minor = salt[2]
        if minor != 'a' or salt[3] != '$':
            raise ValueError("Invalid salt revision")
        off = 4

    #extract number of rounds
    #(length guard keeps a truncated salt from escaping as an IndexError)
    if len(salt) < off+3 or salt[off+2] != '$':
        raise ValueError("Missing salt rounds")
    rounds = int(salt[off:off+2])

    #extract salt string
    real_salt = salt[off+3:off+25] #25-3=22, 22*3/4=16.5, 16.5=.5 + BCRYPT_SALT_LEN
    saltb = decode_base64(real_salt)
    if len(saltb) < BCRYPT_SALT_LEN:
        raise ValueError("Missing salt bytes")
    elif len(saltb) > BCRYPT_SALT_LEN:
        saltb = saltb[:BCRYPT_SALT_LEN]

    #prepare password - revision 'a' appends a trailing NUL byte
    passwordb = password.encode("utf-8")
    if minor >= 'a':
        passwordb += '\x00'

    #encrypt password
    hashed = BCrypt().crypt_raw(passwordb, saltb, rounds)

    #return hash string (only the first BCRYPT_CHK_LEN bytes are encoded)
    chk = list_to_bytes(hashed[:BCRYPT_CHK_LEN])
    if minor < 'a':
        minor = ''
    return encode_hash(minor, rounds, saltb, chk)
+
def gensalt(log_rounds=BCRYPT_DEFAULT_ROUNDS, random=srandom):
    """Generate a salt for use with the BCrypt.hashpw() method.

    :param log_rounds:
        the log2 of the number of rounds of hashing to apply - the work
        factor therefore increases as 2**log_rounds; values outside
        [BCRYPT_MIN_ROUNDS, BCRYPT_MAX_ROUNDS] are clamped into range
    :param random:
        a random.Random-compatible rng instance to use,
        by default uses ``bps.rng.srandom``
    :returns:
        the encoded random salt value
    """
    try:
        #getrandbytes() is a bps extension, not part of random.Random
        raw = random.getrandbytes(BCRYPT_SALT_LEN)
    except AttributeError:
        #fall back to drawing the salt one byte at a time
        raw = ''.join(chr(random.randrange(0, 256)) for i in xrange(BCRYPT_SALT_LEN))
    log_rounds = min(max(log_rounds, BCRYPT_MIN_ROUNDS), BCRYPT_MAX_ROUNDS)
    return encode_hash('a', log_rounds, raw)
+
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/security/_gpw_data.py b/bps/security/_gpw_data.py
new file mode 100644
index 0000000..6427dbb
--- /dev/null
+++ b/bps/security/_gpw_data.py
@@ -0,0 +1,1580 @@
+"""this module contains frequency tables for the gpw password generation algorithm,
+as compiled from various dictionary files.
+
+new tables can be built with the compile_gpw_data() function.
+existing tables can be retrieved via the get_gpw_data() function.
+new tables which are compiled can be added to the end of this module,
+and then integrated into the get_gpw_data() function.
+"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+import re
+#pkg
+from bps import *
+#local
+__all__ = [
+ 'get_gpw_data',
+]
+default_alphabet = "abcdefghijklmnopqrstuvwxyz"
+#=========================================================
+#main entry point
+#=========================================================
+_hooks = {} #registry mapping language name -> zero-arg factory returning its data table
+_cache = {} #memoized results of the factories in _hooks
+def get_gpw_data(language=None):
+    "retrieve data tables for specified language"
+    global _hooks, _cache
+    #normalize name to lowercase; None selects the default table
+    language = language.lower() if language else None
+    #fall back to the generic "gpw" table for unregistered languages.
+    #NOTE(review): assumes a "gpw" entry is registered in _hooks later
+    # in this module - if it isn't, the lookup below raises KeyError.
+    if language not in _hooks:
+        log.warning("unsupported gpw language: %r", language)
+        language = "gpw"
+    #build the table lazily on first request, then serve from cache
+    if language in _cache:
+        return _cache[language]
+    else:
+        data =_cache[language] = _hooks[language]()
+        return data
+
+#=========================================================
+#original data table -
+#=========================================================
+def orig_data():
+ "data tables from Tom Van Vleck's implementation"
+ data = dict(alphabet=default_alphabet, tris_total=125729, words=20278, chars=166285)
+ #NOTE: words & chars estimated assuming 8.2 cpw on average
+ data['tris'] = [
+ [
+ [2,0,3,0,0,0,1,0,0,0,0,1,1,1,0,0,0,3,2,0,0,0,0,0,0,0], #A A
+ [37,25,2,5,38,0,0,2,46,1,0,304,0,2,49,0,0,24,24,0,19,0,0,0,14,0], #A B
+ [26,1,64,2,107,0,1,94,67,0,173,13,5,1,35,1,13,32,3,114,23,0,0,0,45,0], #A C
+ [35,7,3,43,116,6,3,8,75,14,1,16,25,3,44,3,1,35,20,1,10,25,9,0,18,0], #A D
+ [2,0,2,1,0,1,3,0,0,0,0,10,0,2,3,0,0,12,6,0,2,0,0,0,0,0], #A E
+ [5,0,0,0,14,50,2,0,3,0,2,5,0,2,7,0,0,5,1,39,1,0,0,0,1,0], #A F
+ [30,1,0,1,182,0,42,5,30,0,0,7,9,42,51,3,0,24,3,0,21,0,3,0,3,0], #A G
+ [12,0,0,0,20,0,0,0,3,0,0,5,4,2,13,0,0,2,0,0,1,0,0,0,0,0], #A H
+ [2,0,10,26,2,1,10,0,2,1,2,87,13,144,0,2,0,93,30,23,0,3,1,0,0,0], #A I
+ [4,0,0,0,3,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0], #A J
+ [11,0,1,1,98,1,0,1,15,0,0,3,0,0,5,1,0,3,0,1,2,0,3,0,8,0], #A K
+ [78,20,34,45,124,21,24,5,109,0,28,237,31,3,53,23,0,7,16,69,29,26,5,0,26,2], #A L
+ [70,57,1,1,98,3,0,1,68,0,0,3,38,2,43,69,0,3,14,3,12,0,2,0,14,0], #A M
+ [114,6,156,359,103,8,146,12,141,2,57,4,0,89,61,1,4,1,124,443,29,6,1,3,28,9], #A N
+ [0,0,1,0,0,0,0,0,0,0,0,3,1,0,0,0,0,3,2,2,2,0,0,0,0,0], #A O
+ [29,3,0,1,59,1,0,86,25,0,1,14,1,1,37,94,0,9,22,30,8,0,0,0,9,0], #A P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14,0,0,0,0,0], #A Q
+ [124,64,101,233,115,12,47,5,188,3,61,55,68,34,46,25,6,94,48,189,5,22,5,1,172,2], #A R
+ [19,3,32,0,71,0,1,81,49,0,22,3,19,2,19,34,4,0,152,211,12,0,1,0,17,1], #A S
+ [50,3,41,2,863,4,0,144,352,0,5,14,6,3,144,0,0,60,13,106,57,1,5,0,8,5], #A T
+ [0,5,23,35,5,5,38,1,0,1,3,33,4,23,0,4,1,35,52,56,0,1,0,7,0,1], #A U
+ [35,0,0,1,108,0,0,0,49,0,0,1,0,0,19,0,0,0,0,0,3,1,0,0,6,0], #A V
+ [30,10,0,4,3,6,2,2,2,0,10,13,4,15,3,0,0,6,3,5,0,0,0,0,2,0], #A W
+ [3,0,0,0,4,0,0,0,22,0,0,1,0,0,7,2,0,0,1,1,0,0,3,0,3,0], #A X
+ [11,8,1,5,16,5,1,2,2,0,0,10,7,4,13,1,0,3,5,7,3,0,5,0,0,0], #A Y
+ [10,0,0,1,22,0,0,0,10,0,0,0,0,0,7,0,0,0,0,2,2,0,0,0,4,11], #A Z
+ ],[
+ [0,17,74,11,1,2,19,4,8,0,10,68,7,73,1,7,0,110,54,55,9,1,3,1,12,1], #B A
+ [7,0,0,0,16,0,0,0,10,0,0,24,0,0,9,0,0,2,3,0,2,0,0,0,14,0], #B B
+ [2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], #B C
+ [2,0,0,0,2,0,0,0,2,0,0,0,0,0,3,0,0,1,0,0,3,0,0,0,0,0], #B D
+ [51,1,14,34,18,11,16,7,9,0,1,85,5,48,2,2,2,199,36,41,0,4,5,1,6,2], #B E
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0], #B F
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #B G
+ [0,0,0,0,1,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,1,0,0,0,0,0], #B H
+ [34,8,22,21,8,3,9,1,0,3,1,50,7,45,16,4,2,29,22,59,4,4,0,0,0,3], #B I
+ [0,0,0,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0], #B J
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #B K
+ [57,0,0,0,519,0,0,0,35,0,0,0,0,0,47,0,0,0,0,0,32,1,0,0,3,0], #B L
+ [0,0,0,0,1,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0], #B M
+ [1,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0], #B N
+ [62,7,4,21,3,2,9,3,8,1,1,46,8,63,58,2,0,55,15,20,46,6,17,10,19,0], #B O
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0], #B P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #B Q
+ [110,0,0,0,77,0,0,0,100,0,0,0,0,0,78,0,0,0,0,0,28,0,0,0,10,0], #B R
+ [0,0,6,0,16,0,0,0,7,0,0,0,0,0,12,0,0,0,0,27,2,0,0,0,0,0], #B S
+ [1,0,0,0,3,1,0,0,0,0,0,4,0,0,1,0,0,3,0,0,0,0,0,0,0,0], #B T
+ [0,3,21,16,3,5,14,0,12,1,2,52,7,20,2,0,1,104,44,54,0,0,0,3,1,5], #B U
+ [0,0,0,0,3,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #B V
+ [0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #B W
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #B X
+ [1,0,0,0,3,0,1,2,0,0,0,4,0,0,0,3,0,6,8,3,0,0,2,0,0,2], #B Y
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #B Z
+ ],[
+ [1,47,17,33,1,3,4,5,7,1,3,120,40,120,1,59,1,171,60,150,19,20,1,0,5,0], #C A
+ [0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0], #C B
+ [23,0,0,0,22,0,0,5,13,0,0,13,0,0,26,0,0,7,0,0,27,0,0,0,0,0], #C C
+ [1,0,1,0,1,0,0,0,0,0,0,0,0,0,8,0,0,0,0,0,0,0,0,0,0,0], #C D
+ [23,6,4,17,6,6,1,2,13,0,0,50,12,109,7,43,0,76,63,22,1,0,4,0,2,1], #C E
+ [2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #C F
+ [0,0,0,0,1,0,0,0,2,0,0,0,0,0,2,0,0,4,1,0,1,0,0,0,0,0], #C G
+ [165,10,2,3,176,4,3,1,141,0,0,26,20,16,102,1,0,63,8,10,44,0,13,0,20,0], #C H
+ [76,15,8,33,24,16,3,0,0,0,0,38,5,45,50,28,0,29,38,71,6,8,0,0,0,0], #C I
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #C J
+ [17,16,2,3,90,4,1,7,20,1,1,45,8,8,12,9,0,3,32,6,6,0,13,0,22,0], #C K
+ [95,0,0,0,84,0,0,0,50,0,0,0,0,0,54,0,0,0,0,0,34,0,0,0,3,0], #C L
+ [1,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0], #C M
+ [2,0,0,0,1,0,0,0,4,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0], #C N
+ [33,16,40,22,14,10,11,12,9,1,1,101,218,421,24,56,2,129,37,40,86,22,25,4,4,2], #C O
+ [1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0], #C P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13,0,0,0,0,0], #C Q
+ [101,0,0,0,112,0,0,0,75,0,0,0,0,0,88,0,0,0,0,1,41,0,0,0,25,0], #C R
+ [0,0,0,0,0,0,0,0,3,0,0,0,0,1,2,0,0,0,1,2,0,0,0,0,0,0], #C S
+ [44,0,0,0,12,2,0,0,113,0,0,0,2,0,94,0,0,46,0,0,42,0,1,0,3,0], #C T
+ [3,12,2,6,6,6,0,0,8,0,0,102,42,10,9,15,0,72,51,41,1,0,0,0,0,0], #C U
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #C V
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #C W
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #C X
+ [5,1,20,0,0,0,1,0,0,0,0,3,0,2,2,4,0,3,2,9,0,0,0,0,0,0], #C Y
+ [2,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #C Z
+ ],[
+ [0,7,16,7,1,2,13,6,18,0,3,54,23,59,0,10,0,31,6,40,8,13,3,0,32,3], #D A
+ [9,0,0,0,7,0,0,0,3,0,0,2,0,0,8,0,0,1,0,0,8,0,0,0,2,0], #D B
+ [5,0,0,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,0,2,0,0,0,0,0], #D C
+ [8,0,0,0,30,0,0,3,19,0,0,38,0,0,4,0,0,4,0,0,1,0,0,0,16,0], #D D
+ [34,37,82,14,17,41,11,4,5,2,0,88,62,170,14,40,4,183,99,39,6,20,16,6,1,2], #D E
+ [6,0,0,0,0,0,0,0,6,0,0,2,0,0,5,0,0,2,0,0,4,0,0,0,0,0], #D F
+ [4,0,0,0,73,0,0,0,2,0,1,1,1,0,0,0,0,1,0,0,2,0,1,0,3,0], #D G
+ [8,0,0,0,9,0,0,0,4,0,0,0,0,0,10,0,0,0,0,0,0,0,0,0,0,0], #D H
+ [100,10,104,12,33,26,31,1,1,0,1,22,22,65,57,15,0,20,138,53,20,31,1,6,0,1], #D I
+ [4,0,0,0,2,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,7,0,0,0,0,0], #D J
+ [0,0,0,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #D K
+ [9,0,0,0,79,0,0,0,12,0,0,0,0,0,7,0,0,0,0,0,1,0,0,0,3,0], #D L
+ [13,0,0,0,3,0,0,0,21,0,0,0,0,0,11,0,0,0,0,0,1,0,0,0,0,0], #D M
+ [7,0,0,0,9,0,0,0,3,0,0,0,0,0,1,0,0,0,0,6,0,0,0,0,0,0], #D N
+ [1,5,21,10,6,3,20,1,3,0,0,30,38,54,17,7,0,39,11,10,30,5,54,5,1,3], #D O
+ [6,0,0,0,1,0,0,1,3,0,0,1,0,0,7,0,0,1,0,0,0,0,0,0,0,0], #D P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0], #D Q
+ [74,0,0,0,47,0,0,0,53,0,0,0,0,0,80,0,0,0,0,0,22,0,0,0,8,0], #D R
+ [1,0,3,0,10,0,0,9,5,0,1,3,10,0,16,8,0,0,0,31,1,0,2,0,0,0], #D S
+ [3,0,0,0,1,0,0,6,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0], #D T
+ [10,7,52,2,5,3,4,0,2,0,1,33,14,15,5,11,1,19,15,8,1,0,0,0,0,1], #D U
+ [3,0,0,0,13,0,0,0,7,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0], #D V
+ [19,0,0,0,10,0,0,0,19,0,0,0,0,0,8,0,0,2,0,0,0,0,0,0,2,0], #D W
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #D X
+ [4,2,1,2,3,1,2,0,1,0,1,4,4,12,0,0,0,0,8,1,0,0,1,0,0,0], #D Y
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0], #D Z
+ ],[
+ [0,39,34,110,0,12,13,3,0,0,50,68,38,71,0,13,1,117,80,112,28,19,7,0,0,1], #E A
+ [32,5,0,0,31,0,0,0,8,0,0,6,0,0,28,0,0,32,2,3,29,0,0,0,4,0], #E B
+ [33,0,9,2,51,0,0,39,49,0,47,26,0,0,59,0,0,35,2,206,42,0,0,0,2,0], #E C
+ [29,7,1,16,45,5,22,3,88,0,0,8,9,4,24,2,0,27,8,4,27,0,7,0,13,0], #E D
+ [2,4,13,63,1,6,1,4,10,0,19,23,13,66,1,42,0,43,9,34,1,4,6,0,0,8], #E E
+ [14,0,1,2,36,33,0,0,22,0,0,15,0,0,24,0,0,14,1,13,35,0,0,0,5,0], #E F
+ [48,1,0,0,36,1,15,2,38,0,0,7,4,4,26,0,0,38,0,0,19,0,0,0,4,0], #E G
+ [14,0,0,0,24,0,0,0,6,0,0,0,1,0,18,0,0,4,0,0,4,0,0,0,3,0], #E H
+ [8,0,5,13,2,1,42,0,1,1,2,13,7,59,1,1,0,10,25,22,0,7,0,0,0,2], #E I
+ [4,0,0,0,4,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,3,0,0,0,0,0], #E J
+ [2,1,0,1,6,0,0,0,4,0,0,0,0,1,1,0,0,0,2,3,0,0,0,0,1,0], #E K
+ [76,7,6,57,131,19,7,3,125,0,4,238,22,1,48,15,0,4,27,26,17,19,2,0,7,0], #E L
+ [87,53,1,0,84,0,0,0,102,0,0,3,8,8,56,64,0,0,4,0,19,0,1,0,8,0], #E M
+ [78,17,68,159,128,8,35,14,96,2,2,4,5,54,57,3,2,9,127,624,33,10,8,0,11,16], #E N
+ [0,0,8,10,0,6,7,1,2,0,0,23,10,38,0,16,0,14,6,4,41,3,2,2,0,1], #E O
+ [26,1,1,0,27,0,0,32,45,0,0,21,1,0,35,9,0,35,10,65,13,0,2,0,3,0], #E P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,59,0,0,0,0,0], #E Q
+ [217,57,66,22,190,41,70,13,200,3,14,40,134,117,113,42,2,123,167,135,23,58,22,1,123,1], #E R
+ [17,7,74,6,58,1,3,25,82,0,3,6,17,5,34,52,7,0,222,278,18,2,1,0,6,0], #E S
+ [78,3,19,0,129,4,0,93,105,0,1,3,2,2,50,1,0,73,5,113,17,0,4,0,32,4], #E T
+ [0,4,7,6,1,0,4,0,0,0,2,3,17,4,0,15,0,46,20,18,0,2,1,0,0,0], #E U
+ [29,0,0,0,121,0,0,0,56,0,0,0,0,0,26,0,0,2,1,0,2,2,0,0,3,1], #E V
+ [33,4,3,4,16,2,0,5,24,0,0,3,3,3,23,2,0,3,15,4,0,0,1,0,2,0], #E W
+ [29,0,43,0,20,0,0,14,21,0,0,0,0,0,15,78,1,0,0,72,12,0,0,1,2,0], #E X
+ [7,3,1,4,25,2,0,2,0,0,1,4,6,4,4,1,0,2,3,0,0,1,4,0,0,0], #E Y
+ [1,0,0,0,9,0,0,0,1,0,0,0,0,0,4,0,0,1,0,0,1,1,0,0,2,3], #E Z
+ ],[
+ [1,10,39,5,2,1,1,3,18,0,2,35,10,27,0,0,0,36,13,18,10,0,2,3,4,1], #F A
+ [2,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #F B
+ [1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #F C
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0], #F D
+ [18,5,24,6,12,0,2,0,6,0,1,25,6,18,2,0,0,114,17,15,4,2,2,0,1,0], #F E
+ [10,2,0,0,51,0,0,2,45,0,0,21,4,0,13,0,0,9,7,0,7,0,0,0,8,0], #F F
+ [1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #F G
+ [2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #F H
+ [9,9,58,18,42,7,11,0,0,0,0,29,2,53,0,0,0,40,41,18,0,2,0,10,0,3], #F I
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], #F J
+ [2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #F K
+ [64,0,0,0,50,0,0,0,21,0,0,0,0,0,60,0,0,0,0,0,42,0,0,0,15,0], #F L
+ [6,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #F M
+ [0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #F N
+ [5,1,8,2,1,0,7,0,6,0,0,34,1,8,32,2,0,165,5,0,25,1,2,7,1,0], #F O
+ [0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #F P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #F Q
+ [64,0,0,0,66,0,0,0,35,0,0,0,0,0,35,0,0,0,0,0,11,0,0,0,3,0], #F R
+ [1,0,0,0,2,0,0,2,0,0,1,0,0,0,1,1,0,0,0,2,0,0,0,0,0,0], #F S
+ [1,1,1,0,19,0,0,3,1,0,0,0,1,0,3,0,0,1,9,0,0,0,4,0,8,0], #F T
+ [0,0,4,2,1,0,9,0,0,2,0,119,7,24,0,0,0,28,31,6,0,0,0,0,0,2], #F U
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #F V
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #F W
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #F X
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #F Y
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #F Z
+ ],[
+ [0,20,5,11,3,2,11,3,13,0,0,68,24,60,1,5,0,63,23,68,15,8,5,0,2,5], #G A
+ [4,0,0,0,1,0,0,0,3,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0], #G B
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #G C
+ [2,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0], #G D
+ [23,3,2,4,12,1,1,3,4,0,0,32,8,141,39,4,0,96,29,33,1,1,4,0,5,0], #G E
+ [0,0,0,0,1,0,0,0,3,0,0,0,0,0,0,0,0,1,0,0,3,0,0,0,0,0], #G F
+ [8,0,0,0,20,0,0,1,60,0,0,24,0,0,3,1,0,6,4,0,0,0,0,0,12,0], #G G
+ [18,4,1,1,12,2,1,1,2,0,1,4,0,3,12,1,0,1,3,153,2,0,3,0,1,0], #G H
+ [23,21,16,6,7,2,9,0,0,0,0,24,7,103,17,1,0,10,26,19,3,10,0,0,0,1], #G I
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #G J
+ [0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0], #G K
+ [49,0,0,0,73,0,0,0,25,0,0,0,0,0,38,0,0,0,0,0,13,0,0,0,17,0], #G L
+ [23,0,0,0,12,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,3,0,0,0,1,0], #G M
+ [26,1,0,0,28,0,0,0,20,0,0,0,0,0,26,2,0,0,0,1,7,0,0,0,0,0], #G N
+ [6,4,3,16,6,1,10,1,5,0,0,22,1,49,20,3,0,34,12,23,16,7,5,0,1,0], #G O
+ [0,0,0,0,1,0,0,0,3,0,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0], #G P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #G Q
+ [216,0,0,0,97,0,0,0,43,0,0,0,0,0,50,0,0,0,0,0,14,0,0,0,3,0], #G R
+ [2,2,0,0,0,0,0,2,2,0,1,1,0,0,2,1,0,0,0,18,0,0,1,0,0,0], #G S
+ [2,0,0,0,0,0,0,8,3,0,0,0,0,0,17,0,0,1,0,0,0,0,0,0,0,0], #G T
+ [28,1,1,0,49,1,1,0,41,0,0,26,15,24,2,0,0,14,22,6,0,0,0,0,3,1], #G U
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #G V
+ [5,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0], #G W
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #G X
+ [1,0,0,0,0,0,0,0,0,0,0,0,7,3,0,6,0,5,0,0,0,0,0,0,0,0], #G Y
+ [2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #G Z
+ ],[
+ [2,26,15,20,6,8,22,3,31,0,11,90,66,171,3,25,0,142,30,49,20,11,20,0,13,8], #H A
+ [4,0,0,0,3,0,0,0,1,0,0,2,0,0,12,0,0,2,0,0,4,0,0,0,1,0], #H B
+ [1,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0], #H C
+ [2,0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,4,0,0,0,0,0,0,0,0], #H D
+ [123,5,22,33,37,5,3,0,27,0,0,87,65,86,17,7,1,311,57,42,11,11,14,8,11,2], #H E
+ [2,0,0,0,0,0,0,0,3,0,0,0,0,0,2,0,0,0,0,0,10,0,0,0,0,0], #H F
+ [1,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0], #H G
+ [1,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], #H H
+ [22,22,56,15,23,6,19,0,0,1,1,73,20,79,17,41,0,36,53,39,3,11,0,0,0,6], #H I
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #H J
+ [0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], #H K
+ [5,0,0,0,11,0,0,0,8,0,0,0,0,0,22,0,0,1,0,0,1,0,0,0,1,0], #H L
+ [21,0,0,0,15,0,0,0,6,0,0,0,1,0,7,0,0,0,2,0,1,0,0,0,0,0], #H M
+ [3,0,0,0,8,0,0,0,9,0,0,0,0,1,3,0,0,0,4,0,2,0,0,0,0,0], #H N
+ [13,18,13,25,17,5,13,0,7,1,4,101,62,62,44,29,0,130,45,33,81,8,28,0,6,2], #H O
+ [3,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0], #H P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0], #H Q
+ [20,0,0,0,23,0,0,0,40,0,0,1,0,0,72,0,0,0,0,0,13,0,0,0,3,0], #H R
+ [3,0,1,0,0,0,0,2,1,0,0,0,0,0,3,0,0,0,0,5,0,0,0,0,0,0], #H S
+ [3,0,2,1,21,9,1,7,5,0,0,1,4,3,4,1,0,2,7,1,1,0,3,0,6,0], #H T
+ [3,13,7,6,3,5,12,1,0,0,0,7,37,26,0,3,0,37,24,15,0,0,0,2,2,1], #H U
+ [0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #H V
+ [17,0,0,0,5,0,0,2,5,0,0,0,0,0,9,0,0,0,0,0,0,0,0,0,0,0], #H W
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #H X
+ [5,1,1,39,1,0,3,0,1,0,0,13,9,0,0,25,0,9,29,9,0,0,0,1,0,0], #H Y
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #H Z
+ ],[
+ [0,33,20,8,1,0,17,5,1,0,2,169,20,230,0,3,0,30,13,91,0,1,1,2,0,1], #I A
+ [11,19,0,0,38,0,0,0,22,0,0,131,1,2,10,0,0,20,1,0,23,0,0,0,2,0], #I B
+ [161,0,3,0,113,0,0,62,113,0,142,15,0,4,46,0,0,12,5,53,42,0,0,0,7,0], #I C
+ [51,2,0,31,232,0,30,0,46,1,0,5,1,8,10,1,0,1,10,5,11,0,7,0,9,0], #I D
+ [0,1,17,6,1,16,11,1,0,0,1,52,4,70,0,1,0,66,18,50,7,17,6,0,0,2], #I E
+ [7,0,0,0,31,45,0,0,27,0,0,9,0,1,10,0,0,2,0,24,10,0,0,0,71,0], #I F
+ [48,0,0,0,41,0,30,147,30,0,0,4,15,57,20,1,0,23,3,1,15,0,1,0,2,2], #I G
+ [1,0,0,0,2,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #I H
+ [1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #I I
+ [3,0,0,0,2,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0], #I J
+ [6,0,0,0,17,0,0,0,3,0,1,0,0,0,3,0,0,0,0,1,2,0,0,0,1,0], #I K
+ [60,10,6,36,106,6,5,7,90,0,13,253,14,0,24,1,0,1,10,31,6,6,5,0,10,0], #I L
+ [76,26,0,0,94,1,0,1,53,0,0,1,38,1,30,133,0,1,8,0,17,0,0,0,2,0], #I M
+ [212,12,143,168,396,83,435,26,94,8,43,9,6,44,70,3,10,2,139,205,35,46,4,4,15,1], #I N
+ [2,2,20,10,1,0,9,0,0,0,0,28,12,604,0,8,0,25,13,24,139,3,2,3,0,1], #I O
+ [20,5,0,0,26,2,0,16,16,1,0,33,6,0,13,39,0,5,19,28,5,0,1,0,1,0], #I P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,36,0,0,0,0,0], #I Q
+ [41,2,39,24,106,7,9,0,19,0,11,20,24,1,24,8,0,39,11,31,3,5,8,0,10,0], #I R
+ [35,5,71,4,110,4,2,189,56,1,13,12,93,5,55,33,3,6,85,271,4,1,1,0,8,0], #I S
+ [136,1,34,1,184,5,0,77,158,0,1,4,6,5,70,1,0,31,2,105,72,0,1,0,142,19], #I T
+ [0,0,1,0,0,0,0,0,0,0,0,1,121,1,0,0,0,1,19,0,0,0,0,0,0,0], #I U
+ [57,0,0,0,292,0,0,0,37,0,0,0,0,0,12,0,0,1,0,0,3,0,0,0,2,0], #I V
+ [3,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], #I W
+ [1,0,0,0,2,1,1,0,3,0,0,0,0,0,4,0,0,0,0,9,1,0,0,0,1,0], #I X
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #I Y
+ [9,0,0,0,13,0,0,0,0,0,0,0,0,0,7,0,0,0,0,0,1,1,0,0,0,16], #I Z
+ ],[
+ [0,2,32,1,1,0,3,3,2,0,3,1,8,17,0,2,0,5,2,0,2,3,2,1,1,2], #J A
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J B
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J C
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J D
+ [4,0,24,1,1,3,0,1,0,2,0,2,0,6,2,0,0,11,9,5,0,0,6,0,0,0], #J E
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J F
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J G
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J H
+ [0,1,0,0,0,1,4,0,0,0,0,2,4,3,0,0,0,0,0,4,0,1,0,0,0,0], #J I
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J J
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J K
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J L
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J M
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J N
+ [4,2,6,0,3,0,3,12,10,0,1,6,0,5,0,0,0,10,10,1,13,4,2,0,7,0], #J O
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J Q
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J R
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J S
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J T
+ [3,3,0,19,0,0,8,0,2,2,2,8,5,24,0,1,0,15,9,5,0,1,0,2,0,0], #J U
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J V
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J W
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J X
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J Y
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #J Z
+ ],[
+ [0,3,0,6,1,2,8,2,1,1,1,9,4,13,2,3,0,18,4,17,2,1,2,1,5,2], #K A
+ [3,0,0,0,3,0,0,0,2,0,0,0,0,0,11,0,0,1,0,0,1,0,0,0,0,0], #K B
+ [2,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0], #K C
+ [3,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0], #K D
+ [4,3,0,7,28,3,3,2,1,0,0,20,5,55,3,3,0,59,18,56,2,1,4,0,27,0], #K E
+ [1,0,0,0,1,0,0,0,1,0,0,0,0,0,3,0,0,0,0,0,3,0,0,0,0,0], #K F
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0], #K G
+ [9,0,0,0,2,0,0,0,0,0,0,0,1,0,8,0,0,1,0,1,0,0,0,0,0,0], #K H
+ [5,2,3,9,15,1,1,0,0,0,1,10,10,87,2,4,0,11,15,13,0,2,2,0,0,0], #K I
+ [2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #K J
+ [1,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0], #K K
+ [15,0,0,0,46,0,0,0,13,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,2,0], #K L
+ [13,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #K M
+ [5,0,0,0,11,0,0,0,10,0,0,0,0,0,24,0,0,0,0,0,8,0,0,0,0,0], #K N
+ [1,1,2,3,2,4,0,2,1,0,1,3,1,7,1,2,0,6,2,1,7,4,5,2,0,0], #K O
+ [2,0,0,0,0,0,0,0,4,0,0,4,0,0,5,0,0,0,0,0,0,0,0,0,0,0], #K P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #K Q
+ [10,0,0,0,3,0,0,0,3,0,0,0,0,0,6,0,0,0,0,0,5,0,0,0,2,0], #K R
+ [2,2,1,0,1,0,1,9,5,0,1,0,4,0,8,3,0,0,0,11,4,0,1,0,1,0], #K S
+ [3,0,0,0,0,0,0,2,3,0,0,0,0,0,5,0,0,2,0,0,0,0,0,0,0,0], #K T
+ [0,0,0,2,0,0,0,1,0,0,0,5,1,1,0,8,0,2,1,1,0,0,1,0,1,0], #K U
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #K V
+ [9,0,0,0,4,0,0,1,2,0,0,0,0,0,7,0,0,0,0,0,0,0,0,0,0,0], #K W
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #K X
+ [2,0,0,0,1,0,0,1,0,1,0,4,0,0,2,0,0,2,1,0,1,0,3,0,0,0], #K Y
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #K Z
+ ],[
+ [1,46,84,43,3,2,46,9,52,0,10,3,64,242,4,23,1,157,92,210,45,21,23,9,42,11], #L A
+ [12,0,0,0,17,0,0,0,3,0,0,2,0,0,13,0,0,4,0,0,4,0,0,0,2,0], #L B
+ [9,0,0,0,6,0,0,12,4,0,0,1,1,0,19,0,0,2,0,1,7,0,0,0,2,0], #L C
+ [2,3,2,0,41,4,0,1,16,0,0,1,2,3,13,1,0,8,9,2,3,0,5,0,3,0], #L D
+ [94,25,75,44,36,13,55,9,26,1,1,9,55,121,22,22,0,77,84,115,12,29,14,30,75,1], #L E
+ [9,1,0,0,4,1,1,1,12,0,0,1,0,0,7,0,0,8,1,2,8,0,1,0,0,0], #L F
+ [16,0,0,0,12,0,0,0,10,0,0,0,0,0,6,0,0,6,0,0,0,0,0,0,0,0], #L G
+ [7,0,0,0,6,0,0,0,2,0,0,0,0,0,7,0,0,0,0,0,0,0,0,0,0,0], #L H
+ [82,33,140,26,43,37,73,0,0,1,6,11,46,238,50,40,13,5,90,127,12,36,0,3,0,7], #L I
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], #L J
+ [7,0,0,0,4,0,0,3,9,0,0,2,0,1,2,0,0,0,3,0,0,0,3,0,8,0], #L K
+ [128,12,2,4,169,7,2,4,152,1,0,0,7,0,100,2,0,1,10,2,41,0,7,0,53,0], #L L
+ [27,0,0,2,11,0,0,2,9,0,0,0,1,0,13,0,0,0,4,0,3,0,0,0,3,0], #L M
+ [0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,3,0,0,0,0,0], #L N
+ [23,23,65,15,7,4,132,3,32,0,2,7,29,69,50,36,11,74,33,53,66,16,80,1,12,1], #L O
+ [11,0,0,0,3,1,0,21,5,0,0,0,1,0,6,0,0,3,1,4,0,0,0,0,1,0], #L P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #L Q
+ [2,0,0,0,1,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,2,0,0,0,6,0], #L R
+ [7,1,0,0,16,0,0,8,23,0,1,0,1,0,20,3,0,0,1,23,0,0,1,0,2,0], #L S
+ [22,1,0,0,23,0,0,14,34,0,0,0,2,0,23,0,0,9,3,0,8,1,1,0,18,5], #L T
+ [5,17,26,18,31,5,13,0,5,2,4,8,68,31,15,5,0,21,68,56,0,4,0,13,0,1], #L U
+ [19,0,0,1,46,0,0,0,9,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0], #L V
+ [8,0,0,0,2,0,0,1,2,0,0,0,0,0,9,0,0,0,0,0,0,0,0,0,1,0], #L W
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #L X
+ [2,4,12,2,2,2,3,7,2,0,1,3,13,11,2,11,0,2,31,15,1,0,4,0,0,0], #L Y
+ [2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #L Z
+ ],[
+ [0,10,59,34,3,0,57,7,31,3,25,104,6,326,2,4,0,144,49,192,10,2,3,11,14,7], #M A
+ [31,1,0,1,44,0,0,0,32,0,0,31,0,1,27,1,0,32,1,0,21,0,0,0,0,0], #M B
+ [3,1,17,6,2,2,9,3,5,0,9,3,3,4,2,1,0,0,0,0,0,0,0,0,0,0], #M C
+ [0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0], #M D
+ [30,6,8,45,3,2,14,1,4,0,1,51,19,283,10,4,0,125,39,128,0,2,9,3,4,1], #M E
+ [0,0,0,0,3,0,0,0,3,0,0,2,0,0,4,0,0,0,0,0,4,0,0,0,0,0], #M F
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #M G
+ [0,0,0,0,3,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,1,0,0,0,0,0], #M H
+ [19,0,93,54,8,2,19,0,0,1,2,76,9,194,4,0,1,21,96,109,10,0,0,5,0,1], #M I
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #M J
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #M K
+ [1,0,0,0,3,0,0,0,6,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0], #M L
+ [40,0,0,0,46,0,0,0,33,0,0,0,0,0,32,0,0,0,0,0,17,0,0,0,12,0], #M M
+ [12,0,0,0,4,0,0,0,10,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,1,0], #M N
+ [4,10,13,28,4,1,14,3,11,0,6,47,10,168,16,3,0,107,40,45,56,8,1,1,1,2], #M O
+ [52,3,0,0,71,1,1,26,18,0,4,71,0,0,50,0,0,41,9,43,19,0,0,0,7,0], #M P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0], #M Q
+ [2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0], #M R
+ [0,1,2,1,5,1,0,2,3,0,1,0,2,0,8,2,0,0,1,10,1,0,0,0,2,0], #M S
+ [0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0], #M T
+ [0,0,7,11,6,3,6,0,2,0,2,55,11,29,2,1,0,18,53,30,0,0,0,0,0,3], #M U
+ [0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #M V
+ [2,0,0,0,2,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], #M W
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #M X
+ [0,0,11,0,5,0,1,0,0,0,0,1,0,2,7,0,0,7,7,4,0,0,0,0,0,0], #M Y
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #M Z
+ ],[
+ [2,24,33,23,6,3,30,6,20,0,9,115,29,59,2,31,0,94,28,159,19,10,5,0,1,5], #N A
+ [5,0,1,0,20,0,0,0,1,0,0,4,0,0,7,0,0,4,1,0,10,0,0,0,0,0], #N B
+ [25,0,0,0,190,0,0,87,51,0,1,18,0,0,62,0,0,16,0,36,21,0,0,0,8,0], #N C
+ [75,11,4,1,162,6,3,7,102,1,1,22,10,2,57,9,2,46,30,4,37,0,11,0,20,0], #N D
+ [34,12,36,12,29,17,16,4,14,0,0,45,16,20,25,8,6,88,80,84,32,12,37,18,45,3], #N E
+ [15,0,0,0,30,0,0,0,38,0,0,23,0,0,26,0,0,10,0,0,19,0,0,0,0,0], #N F
+ [22,8,0,3,114,6,0,15,18,0,3,51,5,0,20,2,0,24,24,28,38,0,2,0,9,0], #N G
+ [18,0,0,0,16,0,0,0,6,0,0,0,0,0,15,0,0,0,0,0,2,0,0,0,3,0], #N H
+ [90,9,148,14,33,27,35,4,1,0,5,12,25,44,26,21,7,4,87,94,29,11,0,4,0,4], #N I
+ [2,0,0,0,3,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,13,0,0,0,0,0], #N J
+ [6,0,1,0,22,4,1,1,10,0,0,12,2,0,1,1,0,2,2,3,0,0,0,0,9,0], #N K
+ [9,0,0,0,8,0,0,0,5,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,1,0], #N L
+ [8,0,0,0,5,0,0,0,2,0,0,0,0,0,7,0,0,0,0,0,0,0,0,0,0,0], #N M
+ [39,0,0,0,74,0,0,0,52,0,1,0,0,0,23,0,0,0,1,0,14,0,1,0,25,0], #N N
+ [4,18,21,10,4,4,15,0,11,0,0,30,60,34,11,11,0,80,32,47,52,18,24,7,2,2], #N O
+ [0,0,0,0,1,0,0,0,1,0,0,4,0,0,6,0,0,0,0,0,2,0,0,0,0,0], #N P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,22,0,0,0,0,0], #N Q
+ [3,0,1,0,1,0,0,0,6,0,0,0,0,0,6,0,0,0,0,0,3,0,0,0,6,0], #N R
+ [26,4,23,2,73,17,3,12,96,0,5,8,13,0,60,25,0,1,3,79,39,4,4,0,5,0], #N S
+ [143,1,1,1,175,2,2,64,209,0,0,13,3,1,65,1,0,114,3,0,32,0,2,0,21,1], #N T
+ [12,6,16,6,11,3,6,0,5,0,1,15,35,9,6,3,0,9,25,31,1,0,0,0,0,1], #N U
+ [15,0,0,0,43,0,0,0,20,0,0,0,0,0,17,0,0,0,0,0,4,0,0,0,1,0], #N V
+ [12,0,0,0,3,0,0,2,4,0,0,0,0,0,6,0,0,1,0,0,0,0,0,0,0,0], #N W
+ [0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0], #N X
+ [5,3,1,1,0,0,0,1,0,0,0,7,14,0,4,1,1,1,3,1,1,1,2,1,0,0], #N Y
+ [10,0,0,0,5,0,0,0,5,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,5,0], #N Z
+ ],[
+ [1,0,20,30,0,2,5,2,0,0,9,9,8,18,0,4,1,51,13,44,1,1,0,2,0,0], #O A
+ [17,24,2,2,28,2,0,1,32,4,0,19,0,1,16,0,0,5,26,3,8,3,1,0,2,0], #O B
+ [50,0,28,0,38,0,0,47,26,0,129,14,0,0,33,0,0,25,0,34,20,0,0,0,8,0], #O C
+ [17,3,3,15,59,3,13,4,47,0,1,13,2,1,22,3,0,8,11,0,21,0,8,0,35,0], #O D
+ [0,6,1,7,0,3,0,1,6,0,1,10,3,13,1,0,1,10,15,6,2,7,0,3,1,0], #O E
+ [7,0,0,0,4,63,0,0,10,0,0,4,1,0,6,0,0,1,0,15,4,0,0,0,1,0], #O F
+ [34,2,0,1,44,1,22,3,15,1,0,11,3,11,7,0,0,80,1,2,18,0,1,0,83,0], #O G
+ [10,0,0,0,8,0,0,0,6,0,0,1,5,9,5,0,0,2,0,0,0,0,0,0,1,0], #O H
+ [3,1,12,53,1,1,2,0,0,0,1,27,0,51,0,0,0,11,39,8,0,0,0,1,0,0], #O I
+ [1,0,0,0,5,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0], #O J
+ [5,2,1,0,48,0,0,1,7,0,1,4,0,0,3,1,0,0,5,0,3,0,1,0,6,0], #O K
+ [71,4,6,83,111,8,5,3,121,0,14,124,16,1,132,6,0,1,18,24,43,16,2,0,46,1], #O L
+ [89,50,1,0,174,5,0,1,76,0,0,2,64,7,56,125,1,1,4,0,4,0,2,0,22,0], #O M
+ [129,3,64,82,181,52,86,3,124,10,11,7,3,46,75,1,6,10,107,149,8,38,9,1,54,5], #O N
+ [0,2,4,92,0,22,4,1,0,0,68,42,42,44,0,19,0,21,21,68,0,3,0,0,0,2], #O O
+ [28,1,2,0,71,0,2,82,32,1,3,16,1,1,45,29,0,17,14,21,10,0,2,0,19,0], #O P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14,0,0,0,0,0], #O Q
+ [122,26,31,96,138,7,34,2,143,0,61,8,85,76,61,59,1,58,46,211,11,4,9,0,116,1], #O R
+ [31,4,24,0,107,0,3,18,102,0,2,7,9,1,18,42,2,0,63,127,5,1,2,0,8,0], #O S
+ [45,7,11,0,64,2,1,88,63,0,0,10,3,1,42,4,0,17,7,63,9,0,3,0,11,0], #O T
+ [3,11,17,13,3,3,62,1,6,0,0,32,1,137,0,11,1,86,445,103,0,7,0,1,0,2], #O U
+ [26,0,0,0,109,0,0,0,27,0,1,0,0,0,7,0,0,0,0,0,0,0,0,0,2,0], #O V
+ [18,14,2,13,48,6,0,8,8,0,1,28,7,83,1,8,0,5,13,2,2,0,1,0,4,1], #O W
+ [2,1,3,0,5,1,1,3,26,0,0,0,0,1,1,0,0,0,0,1,0,1,1,0,14,0], #O X
+ [15,1,4,6,3,1,0,0,1,0,0,3,0,1,4,1,0,1,2,1,0,0,0,0,0,0], #O Y
+ [2,0,0,0,9,0,0,0,0,0,0,0,0,0,7,0,0,0,0,0,0,0,0,0,3,1], #O Z
+ ],[
+ [0,8,38,11,1,0,18,0,17,0,2,50,5,73,1,23,1,176,50,101,18,5,7,1,10,2], #P A
+ [3,0,0,0,3,0,0,0,0,0,0,1,0,0,6,0,0,2,1,0,3,0,0,0,0,0], #P B
+ [0,0,0,0,0,0,0,1,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0], #P C
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0], #P D
+ [51,1,62,34,19,4,8,0,3,1,2,47,2,108,4,10,0,292,22,50,3,1,8,2,2,4], #P E
+ [0,0,0,0,1,0,0,0,2,0,0,1,0,0,0,0,0,1,0,0,3,0,0,0,0,0], #P F
+ [2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0], #P G
+ [56,0,0,2,88,0,0,0,76,0,0,3,0,1,97,0,0,13,1,3,5,0,0,0,79,0], #P H
+ [21,0,74,25,33,1,19,0,0,0,6,27,3,74,12,11,2,37,27,57,3,2,0,2,0,2], #P I
+ [1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #P J
+ [0,0,0,0,2,0,0,0,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #P K
+ [150,0,0,0,121,0,0,0,59,0,0,0,0,0,33,0,0,0,0,0,29,0,0,0,11,0], #P L
+ [6,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0], #P M
+ [0,0,0,0,4,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0], #P N
+ [2,1,19,10,12,2,7,0,31,0,12,111,14,55,23,17,0,97,126,52,20,3,13,3,2,0], #P O
+ [16,0,0,0,48,0,0,1,20,0,0,32,1,0,25,0,0,32,3,0,1,0,0,0,16,0], #P P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #P Q
+ [39,0,0,0,166,0,0,0,104,0,0,0,0,0,273,0,0,0,0,0,12,0,0,0,1,0], #P R
+ [4,1,3,0,17,0,0,5,22,0,1,1,2,0,13,0,0,0,0,14,6,0,1,0,35,0], #P S
+ [16,0,1,0,9,0,0,3,107,0,0,0,0,0,33,0,0,3,0,0,19,0,0,0,4,0], #P T
+ [1,8,4,8,3,6,4,0,1,0,1,41,8,22,0,9,0,39,18,28,0,0,0,0,0,1], #P U
+ [0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #P V
+ [3,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0], #P W
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #P X
+ [1,2,0,0,0,0,3,0,1,0,1,3,0,0,1,0,0,20,0,3,0,0,1,0,0,0], #P Y
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #P Z
+ ],[
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0], #Q A
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q B
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q C
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q D
+ [0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q E
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q F
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q G
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q H
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q I
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q J
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q K
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q L
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q M
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q N
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q O
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q Q
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q R
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q S
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q T
+ [110,0,0,0,100,0,0,0,128,0,0,0,0,0,13,0,0,0,0,0,0,0,0,0,3,0], #Q U
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q V
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q W
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q X
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q Y
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Q Z
+ ],[
+ [0,72,130,95,8,35,73,14,85,3,10,121,95,313,2,119,1,26,66,277,19,45,28,2,28,13], #R A
+ [32,0,0,0,26,0,0,0,35,0,0,4,0,0,44,0,0,3,1,0,9,0,0,0,5,0], #R B
+ [18,0,2,0,47,0,0,86,25,0,3,11,0,0,13,0,0,1,2,7,38,0,0,0,4,0], #R C
+ [22,5,1,0,26,1,0,4,42,0,0,4,0,2,17,1,0,5,9,4,3,0,4,0,7,0], #R D
+ [166,26,106,99,114,52,55,20,25,4,4,60,69,143,20,72,8,11,257,119,14,56,34,7,23,2], #R E
+ [11,0,0,0,15,1,0,0,9,0,0,7,0,0,8,0,0,4,0,0,12,0,0,0,0,0], #R F
+ [26,0,0,0,63,0,0,5,25,0,0,11,1,0,18,0,0,2,2,0,13,0,0,0,11,0], #R G
+ [11,0,0,0,19,0,0,0,5,0,0,0,0,0,18,0,0,0,0,0,2,0,0,0,3,0], #R H
+ [182,54,210,87,79,38,65,1,0,1,6,49,65,166,82,61,1,0,151,141,29,44,1,6,1,10], #R I
+ [0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,2,0,0,0,0,0], #R J
+ [4,2,0,1,19,0,0,3,9,0,0,6,3,2,5,3,0,1,10,2,0,0,1,0,6,0], #R K
+ [24,2,0,4,28,0,0,0,36,0,0,0,0,0,14,1,0,0,2,1,2,0,1,0,8,0], #R L
+ [97,1,2,0,29,2,0,3,65,0,0,2,0,0,39,1,0,0,1,1,10,0,1,0,5,0], #R M
+ [53,5,0,0,50,4,0,3,29,0,1,0,6,0,16,1,0,0,9,5,7,0,2,0,4,0], #R N
+ [46,40,79,40,18,22,56,4,32,5,10,76,90,167,84,127,2,14,127,74,127,42,63,17,15,3], #R O
+ [10,0,0,0,21,0,0,33,10,0,0,5,1,0,25,0,0,12,8,8,5,0,0,0,1,0], #R P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,0,0,0,0,0], #R Q
+ [53,0,0,0,92,0,0,5,85,0,0,0,0,0,47,0,0,0,0,0,14,0,0,0,60,0], #R R
+ [26,2,2,2,84,1,0,16,44,0,4,2,3,1,43,12,1,0,0,32,14,1,2,0,2,0], #R S
+ [39,2,2,0,61,5,3,101,99,0,0,11,7,3,32,0,0,17,12,1,27,0,2,0,24,7], #R T
+ [5,21,30,31,15,6,12,0,18,0,0,10,46,41,1,28,0,3,83,22,0,1,1,1,0,1], #R U
+ [31,0,0,0,37,0,0,0,28,0,0,0,0,0,5,0,0,0,0,0,1,0,0,0,2,0], #R V
+ [15,0,0,0,6,0,0,0,12,0,0,0,0,0,15,0,0,0,0,0,0,0,0,0,0,0], #R W
+ [0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #R X
+ [5,3,3,5,3,0,1,0,0,0,0,10,11,4,12,16,0,0,9,4,0,0,2,0,0,0], #R Y
+ [2,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], #R Z
+ ],[
+ [2,44,23,16,1,10,21,4,16,1,7,80,17,89,1,10,0,36,10,43,22,10,13,5,7,0], #S A
+ [9,0,0,0,4,0,0,0,2,0,0,0,0,0,6,0,0,2,0,0,18,0,0,0,3,0], #S B
+ [81,0,0,0,65,0,1,78,37,0,0,5,1,0,88,0,0,92,0,0,40,0,0,0,3,0], #S C
+ [11,0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,2,0,0,2,0,0,0,0,0], #S D
+ [38,14,47,18,33,7,8,3,11,0,1,63,39,101,5,28,14,83,28,41,12,19,15,15,19,1], #S E
+ [3,0,0,0,7,0,0,0,5,0,0,0,0,0,7,0,0,0,0,0,6,0,0,0,1,0], #S F
+ [0,0,0,0,2,0,0,0,2,0,0,0,0,0,2,0,0,5,1,0,2,0,0,0,0,0], #S G
+ [97,9,1,0,79,3,0,0,75,0,1,4,16,3,81,2,0,27,0,1,20,1,6,0,17,0], #S H
+ [55,56,44,80,28,15,38,0,0,0,2,50,40,78,148,7,1,7,99,89,9,76,0,8,0,3], #S I
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0], #S J
+ [9,0,0,0,24,0,0,0,35,0,0,0,2,0,3,0,0,1,0,0,5,0,0,0,23,0], #S K
+ [42,0,0,0,35,0,0,0,29,0,0,1,0,0,29,0,0,0,0,0,13,0,0,0,2,0], #S L
+ [57,0,0,0,30,0,0,0,31,0,0,0,0,0,25,0,0,0,0,0,14,0,0,0,2,0], #S M
+ [21,0,0,0,12,0,0,0,12,0,0,0,0,0,19,0,0,0,0,4,6,0,0,0,2,0], #S N
+ [6,4,26,12,6,10,4,1,8,1,0,67,65,190,8,21,0,71,0,11,34,6,3,0,3,1], #S O
+ [63,1,0,0,116,0,0,41,82,0,0,24,0,0,69,0,0,34,1,0,16,0,0,0,3,0], #S P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,52,0,0,0,0,0], #S Q
+ [4,0,0,0,1,0,0,0,1,0,0,0,0,0,2,0,0,0,1,0,3,0,0,0,0,0], #S R
+ [50,3,2,0,77,3,0,4,151,0,0,5,11,1,42,2,0,4,0,4,17,0,13,0,19,0], #S S
+ [258,6,4,1,291,9,1,11,240,1,0,25,12,2,205,6,0,255,3,0,58,2,7,0,36,0], #S T
+ [14,38,17,6,7,11,6,0,11,0,0,39,35,37,1,42,0,71,30,4,0,0,0,0,0,4], #S U
+ [0,0,0,0,5,0,0,0,6,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0], #S V
+ [37,0,0,0,31,0,0,0,28,0,0,0,0,0,21,0,0,2,0,0,2,0,0,0,0,0], #S W
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #S X
+ [0,2,32,1,1,0,1,0,0,0,1,18,19,30,0,2,0,9,5,1,0,0,0,0,0,1], #S Y
+ [0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #S Z
+ ],[
+ [0,74,44,8,3,9,45,8,68,0,15,130,36,181,1,23,0,128,22,185,13,11,9,13,4,0], #T A
+ [7,0,0,0,4,0,0,0,4,0,0,0,0,0,6,0,0,3,0,0,3,0,0,0,0,0], #T B
+ [5,0,0,0,0,0,0,112,0,0,0,2,0,0,5,0,0,1,0,0,1,0,0,0,1,0], #T C
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0], #T D
+ [52,9,29,37,66,9,17,6,16,0,2,65,49,185,18,20,0,588,61,23,9,9,9,16,1,0], #T E
+ [6,0,0,0,1,0,0,0,5,0,0,1,0,0,6,0,0,1,0,0,24,0,0,0,0,0], #T F
+ [4,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,0,0], #T G
+ [68,6,1,5,274,8,1,2,62,0,1,9,13,3,90,4,1,61,8,2,31,0,16,0,49,0], #T H
+ [99,35,342,16,35,45,34,0,0,0,3,67,75,183,419,28,9,18,75,88,9,128,0,0,0,2], #T I
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #T J
+ [2,0,0,0,1,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0], #T K
+ [18,0,0,0,102,0,0,0,5,0,0,2,0,0,3,0,0,0,0,0,2,0,0,0,3,0], #T L
+ [25,0,0,0,8,0,0,0,3,0,0,0,0,0,11,0,0,0,0,0,3,0,0,0,0,0], #T M
+ [3,0,0,0,9,0,0,0,5,0,0,0,0,0,2,0,0,0,0,4,1,0,0,0,0,0], #T N
+ [5,6,34,11,8,7,26,0,14,0,9,38,65,238,26,56,0,319,19,16,36,3,36,7,3,2], #T O
+ [2,0,0,0,1,0,0,0,1,0,0,2,0,0,3,0,0,5,0,0,0,0,0,0,0,0], #T P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #T Q
+ [315,0,0,0,98,0,0,0,246,0,0,0,0,0,201,0,0,0,0,0,68,0,1,0,64,0], #T R
+ [2,2,2,1,10,2,0,3,4,0,1,0,13,0,9,3,0,0,0,8,5,2,5,0,3,0], #T S
+ [44,0,0,0,154,1,1,2,53,0,1,45,0,0,33,0,0,10,8,0,4,1,0,0,25,0], #T T
+ [41,14,9,41,8,5,4,0,10,0,0,19,30,29,13,10,0,159,35,22,0,0,0,1,1,0], #T U
+ [3,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #T V
+ [14,0,0,0,12,0,0,1,23,0,0,0,0,0,15,0,0,0,0,0,2,0,0,1,0,0], #T W
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #T X
+ [2,1,2,0,0,0,1,0,1,0,0,14,2,0,0,34,0,14,3,0,0,0,2,1,0,0], #T Y
+ [1,0,0,0,5,0,1,0,2,0,0,1,1,0,1,1,0,1,1,0,0,0,0,0,0,0], #T Z
+ ],[
+ [0,4,7,21,0,1,5,1,4,0,5,51,2,26,0,1,0,48,9,37,0,2,4,0,3,0], #U A
+ [8,18,0,1,20,0,0,2,18,2,0,23,5,0,2,1,0,10,15,8,7,2,0,0,1,0], #U B
+ [10,0,14,0,23,0,0,31,29,0,55,16,0,0,7,0,0,9,1,47,5,0,0,0,2,0], #U C
+ [17,1,0,24,67,0,18,0,39,0,0,4,0,0,8,0,0,1,10,0,2,0,2,0,7,1], #U D
+ [6,9,0,1,5,5,4,1,0,1,0,21,1,33,1,1,0,19,22,15,2,0,0,0,3,6], #U E
+ [1,0,0,0,0,58,0,0,0,0,0,1,1,0,1,0,0,0,0,3,1,0,0,0,0,0], #U F
+ [19,1,0,0,21,0,34,80,3,0,0,4,2,2,6,0,0,1,1,0,11,0,0,0,0,0], #U G
+ [3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0], #U H
+ [3,2,14,14,6,0,1,0,0,0,0,32,0,31,1,8,0,19,44,64,1,4,0,2,0,3], #U I
+ [1,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,3,0,0,0,0,0], #U J
+ [1,0,0,1,12,0,0,0,3,0,1,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0], #U K
+ [136,4,11,11,46,14,7,0,35,0,10,67,5,2,23,16,0,1,24,73,16,3,1,0,5,1], #U L
+ [22,52,3,1,51,5,0,1,32,0,0,2,28,11,8,48,1,0,8,1,6,2,0,0,0,0], #U M
+ [21,6,73,131,25,5,46,2,55,0,33,4,2,13,4,2,0,2,15,82,1,0,2,0,5,0], #U N
+ [0,0,0,1,0,0,0,0,3,0,0,2,0,3,0,2,0,16,3,5,29,0,0,0,2,0], #U O
+ [4,4,1,2,31,1,1,14,10,0,1,13,1,0,8,24,0,13,13,24,2,0,2,0,2,0], #U P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0], #U Q
+ [75,27,21,17,149,8,60,1,66,2,11,17,11,55,28,15,1,51,43,43,9,15,3,0,28,1], #U R
+ [31,5,29,2,105,0,1,53,64,0,17,3,0,1,8,12,1,0,34,115,6,0,0,0,4,0], #U S
+ [45,1,14,1,69,0,1,55,77,0,0,8,3,3,49,0,0,13,7,51,11,0,2,0,6,2], #U T
+ [0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0], #U U
+ [0,0,0,0,8,0,0,0,5,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0], #U V
+ [2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #U W
+ [0,0,0,0,4,0,0,0,2,0,0,1,0,0,1,0,0,0,0,5,4,0,0,0,0,0], #U X
+ [1,0,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0], #U Y
+ [2,0,0,0,4,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,12], #U Z
+ ],[
+ [0,9,20,8,1,0,14,2,8,1,3,69,2,57,0,1,0,31,18,36,5,0,0,0,0,0], #V A
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #V B
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #V C
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #V D
+ [6,2,5,4,4,3,6,4,5,0,1,47,4,120,3,1,0,271,46,24,0,0,1,5,10,0], #V E
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #V F
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #V G
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #V H
+ [37,4,33,23,21,2,8,0,2,0,3,43,0,47,18,0,0,16,65,30,5,16,0,2,0,1], #V I
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #V J
+ [0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #V K
+ [2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], #V L
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #V M
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #V N
+ [0,0,23,0,0,0,3,0,9,0,5,48,2,6,1,0,0,10,4,9,10,1,3,0,6,0], #V O
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #V P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #V Q
+ [0,0,0,0,5,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0], #V R
+ [0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #V S
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #V T
+ [0,0,0,0,0,0,0,0,0,0,0,13,0,0,0,0,0,2,2,0,0,0,0,0,0,0], #V U
+ [0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0], #V V
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #V W
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #V X
+ [0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0], #V Y
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], #V Z
+ ],[
+ [1,4,7,8,0,3,12,3,18,0,8,53,5,20,0,4,0,100,27,55,1,9,1,4,71,1], #W A
+ [6,0,0,0,7,0,0,0,1,0,0,0,0,0,10,0,0,3,0,0,1,0,0,0,0,0], #W B
+ [3,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0], #W C
+ [0,0,0,0,5,0,0,0,1,0,0,0,0,0,4,0,0,3,0,0,1,0,0,0,3,0], #W D
+ [30,5,1,9,33,0,2,1,19,0,0,51,0,11,0,2,0,36,21,7,0,2,0,0,2,0], #W E
+ [1,0,0,0,0,0,0,0,3,0,0,3,0,0,4,0,0,0,0,0,3,0,0,0,0,0], #W F
+ [0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #W G
+ [18,0,0,0,47,0,0,0,52,0,0,0,0,0,19,0,0,0,0,0,1,0,0,0,1,0], #W H
+ [0,0,14,18,5,5,15,0,0,0,0,40,2,83,0,2,0,8,38,47,0,4,0,1,0,2], #W I
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #W J
+ [0,0,0,0,0,0,0,0,2,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0], #W K
+ [3,0,0,0,9,0,0,0,5,0,0,0,0,0,1,0,0,0,1,1,0,0,0,0,3,0], #W L
+ [8,0,0,0,5,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #W M
+ [0,1,1,1,6,1,1,2,3,0,0,0,0,0,0,2,0,1,10,4,1,0,2,0,3,0], #W N
+ [0,1,0,0,3,1,0,0,0,0,3,10,17,8,54,1,0,121,1,1,3,2,1,0,0,0], #W O
+ [1,0,0,0,1,0,0,0,1,0,0,1,0,0,5,0,0,0,0,0,1,0,0,0,0,0], #W P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #W Q
+ [7,0,0,0,12,0,0,0,25,0,0,0,0,0,10,0,0,0,0,0,0,0,0,0,6,0], #W R
+ [0,1,1,0,2,0,0,1,1,0,1,2,2,0,5,3,0,1,1,4,1,0,2,0,1,0], #W S
+ [1,0,0,0,1,0,0,3,1,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0], #W T
+ [0,0,0,0,0,0,0,1,0,0,0,1,1,1,0,1,0,2,0,0,0,0,0,0,0,0], #W U
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #W V
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], #W W
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #W X
+ [2,0,0,0,5,0,0,0,0,0,0,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0], #W Y
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0], #W Z
+ ],[
+ [0,0,5,1,0,1,3,0,0,0,0,4,6,6,0,0,0,0,3,6,0,1,0,0,0,0], #X A
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0], #X B
+ [3,0,0,0,11,0,0,3,7,0,0,7,0,0,3,0,0,5,0,0,7,0,0,0,0,0], #X C
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #X D
+ [0,0,7,1,0,0,2,0,1,0,0,2,6,9,0,0,0,6,1,1,0,0,0,0,1,0], #X E
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0], #X F
+ [0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0], #X G
+ [7,0,0,0,0,0,0,0,4,0,0,0,0,0,4,0,0,0,0,0,2,0,0,0,0,0], #X H
+ [8,2,12,8,4,2,2,0,0,0,0,2,11,4,8,0,0,0,9,2,0,1,1,0,0,0], #X I
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #X J
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #X K
+ [0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #X L
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #X M
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #X N
+ [0,0,0,1,0,0,3,0,0,0,0,1,1,10,0,1,0,6,1,5,0,0,0,0,0,0], #X O
+ [8,0,0,0,27,0,0,0,5,0,0,18,0,0,12,0,0,7,0,0,3,0,0,0,0,0], #X P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0], #X Q
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #X R
+ [0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #X S
+ [6,1,0,0,22,0,0,1,7,0,0,0,0,0,7,0,0,31,0,0,9,0,0,0,1,0], #X T
+ [4,1,0,2,0,0,0,0,0,0,0,3,0,0,0,1,0,6,0,0,0,0,0,0,0,0], #X U
+ [0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #X V
+ [0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0], #X W
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], #X X
+ [0,0,0,0,0,0,2,0,0,0,0,6,0,0,0,0,0,2,0,0,0,0,0,0,0,0], #X Y
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #X Z
+ ],[
+ [0,0,5,5,0,0,1,1,0,0,2,11,3,29,1,4,1,20,1,3,0,0,3,0,0,0], #Y A
+ [4,0,0,4,7,0,0,0,2,0,0,0,0,0,9,0,0,3,0,0,3,0,0,0,0,0], #Y B
+ [4,0,0,0,18,0,0,31,4,0,0,19,0,0,12,0,0,0,0,0,0,0,0,0,0,0], #Y C
+ [4,1,0,0,12,0,0,0,2,0,0,0,0,2,1,0,0,37,0,0,0,0,0,0,0,0], #Y D
+ [11,3,0,1,1,1,1,0,1,0,0,13,1,6,2,1,0,19,7,6,0,1,1,0,0,0], #Y E
+ [1,0,0,0,1,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,4,0,0,0,0,0], #Y F
+ [0,0,0,1,2,0,0,0,2,0,0,1,3,1,8,0,0,3,0,0,1,0,0,0,2,0], #Y G
+ [0,0,0,0,4,0,0,0,0,0,0,0,0,0,10,0,0,0,0,0,0,0,0,0,1,0], #Y H
+ [0,0,0,1,1,0,0,0,0,0,0,0,0,9,0,2,0,0,2,0,0,0,0,0,0,0], #Y I
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Y J
+ [0,0,0,0,3,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0], #Y K
+ [15,0,0,0,22,0,0,0,13,0,1,19,0,0,11,1,0,0,2,0,3,6,0,0,0,0], #Y L
+ [18,4,1,0,20,0,0,0,5,0,0,0,3,7,11,20,0,0,0,0,2,0,0,0,1,0], #Y M
+ [14,0,11,3,12,0,3,1,2,0,0,0,0,3,11,0,0,0,0,6,0,0,0,2,1,0], #Y N
+ [0,0,2,2,0,4,6,0,0,0,5,2,1,18,0,4,0,8,4,5,17,1,1,0,0,1], #Y O
+ [2,0,0,0,24,0,0,17,5,0,0,2,0,2,21,0,0,5,7,16,3,0,0,0,1,0], #Y P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0], #Y Q
+ [15,0,0,2,6,1,0,0,21,0,0,0,0,2,29,0,0,2,0,1,4,0,0,0,1,0], #Y R
+ [3,1,3,0,12,0,0,1,38,0,0,1,2,0,4,3,0,0,6,39,2,0,0,0,0,0], #Y S
+ [2,0,0,0,16,0,0,16,10,0,0,0,0,0,12,0,0,0,0,2,0,0,0,0,1,0], #Y T
+ [0,0,3,0,0,0,3,1,0,0,2,1,0,1,0,1,0,0,2,0,0,0,0,0,0,0], #Y U
+ [1,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Y V
+ [10,0,1,0,3,0,0,2,4,0,0,0,0,0,5,0,0,3,0,0,0,0,0,0,0,0], #Y W
+ [0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Y X
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Y Y
+ [2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0], #Y Z
+ ],[
+ [1,3,2,0,0,0,5,1,1,0,1,4,1,11,0,1,0,19,0,0,0,1,0,0,0,1], #Z A
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Z B
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Z C
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Z D
+ [5,1,2,1,1,0,0,0,1,0,1,7,0,12,0,0,0,13,3,3,1,0,1,0,0,0], #Z E
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Z F
+ [0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Z G
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Z H
+ [1,1,2,0,7,0,5,0,0,0,0,5,4,6,1,1,0,2,1,1,1,0,0,0,0,0], #Z I
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Z J
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Z K
+ [0,0,0,0,16,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0], #Z L
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Z M
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Z N
+ [3,0,0,2,2,0,1,0,7,0,0,0,3,10,5,2,0,5,0,0,1,1,0,0,0,0], #Z O
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Z P
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Z Q
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], #Z R
+ [0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Z S
+ [0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Z T
+ [0,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0], #Z U
+ [0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], #Z V
+ [0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], #Z W
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], #Z X
+ [0,1,0,0,0,0,4,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0], #Z Y
+ [7,0,0,0,1,0,0,0,7,0,0,17,0,0,2,0,0,0,0,0,0,0,1,0,5,0], #Z Z
+ ]]
+ return data
+_hooks['gpw'] = orig_data
+
+#=========================================================
+#american english
+#=========================================================
+def american_english_data():
+ "gpw data generated from '/usr/share/dict/american-english'"
+ data = {'alphabet': 'abcdefghijklmnopqrstuvwxyz', 'chars': 590707, 'tris_total': 446791, 'words': 71958}
+ data['tris'] = [
+ [
+ [0,3,2,0,1,0,1,0,1,0,1,6,4,3,0,0,0,9,3,1,0,0,0,0,0,0], # a a
+ [87,142,0,19,85,0,0,7,183,11,0,780,0,11,143,0,0,71,104,0,57,0,3,0,31,0], # a b
+ [87,2,210,1,309,0,0,306,230,0,615,39,3,1,66,0,45,73,30,421,66,0,0,0,54,0], # a c
+ [94,19,14,146,353,11,17,19,253,56,2,54,69,9,127,8,2,68,138,8,51,86,20,0,28,5], # a d
+ [8,0,1,30,0,0,3,0,0,1,0,21,5,3,7,0,0,31,31,3,3,1,0,0,0,1], # a e
+ [16,0,0,0,45,163,3,0,18,0,2,10,0,3,10,0,0,16,5,115,3,0,0,0,1,0], # a f
+ [83,0,0,2,476,1,181,10,153,0,0,26,21,88,122,7,0,71,43,3,67,0,2,0,5,0], # a g
+ [45,0,0,2,17,1,0,1,11,1,0,7,14,3,31,0,0,4,24,1,8,0,1,0,2,0], # a h
+ [9,2,20,88,2,6,38,0,4,2,7,298,54,537,0,11,2,245,107,87,1,12,2,0,2,3], # a i
+ [14,0,0,0,7,0,0,0,3,0,0,0,0,0,20,0,0,0,0,0,4,0,0,0,0,0], # a j
+ [30,1,2,2,292,6,0,11,120,0,5,9,0,5,7,1,0,6,38,2,8,0,2,0,10,0], # a k
+ [220,33,71,84,342,34,42,7,929,0,94,1149,86,8,147,63,3,14,388,188,75,60,6,0,64,2], # a l
+ [188,196,4,0,336,1,0,3,245,0,0,11,158,21,117,255,0,17,109,3,31,0,5,0,12,0], # a m
+ [273,9,568,894,273,8,477,23,490,11,220,31,4,304,127,2,28,0,670,1041,65,18,4,8,43,24], # a n
+ [0,3,1,0,0,0,0,3,8,0,2,5,1,0,0,1,0,8,3,5,2,0,0,0,0,0], # a o
+ [77,10,0,6,186,4,0,230,106,2,2,37,2,4,100,423,0,29,121,83,19,0,2,0,11,0], # a p
+ [0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,48,0,0,0,0,0], # a q
+ [368,150,242,633,334,35,143,11,772,15,199,162,227,147,139,91,19,353,262,541,8,48,17,5,211,5], # a r
+ [58,5,91,1,232,0,2,304,187,0,54,9,35,2,59,96,14,1,542,690,46,0,4,0,25,0], # a s
+ [158,10,152,0,2293,16,1,373,2559,0,6,19,18,14,402,1,0,147,161,402,168,3,5,0,16,11], # a t
+ [1,13,61,91,7,7,98,1,2,1,2,90,13,119,0,12,2,71,118,163,0,6,0,12,0,6], # a u
+ [116,0,0,1,338,0,0,0,180,0,0,4,0,0,84,0,0,1,2,0,8,6,0,0,8,0], # a v
+ [91,17,0,19,37,13,2,4,29,0,38,48,4,57,7,3,0,3,48,3,0,0,0,0,4,0], # a w
+ [12,0,0,0,37,0,0,0,58,0,0,3,0,1,13,2,0,0,0,2,1,0,5,0,3,0], # a x
+ [52,18,6,13,96,14,4,3,67,0,0,17,22,10,33,6,0,7,172,7,3,0,16,0,3,0], # a y
+ [34,0,1,1,78,0,0,1,61,0,0,1,0,0,25,1,1,0,0,4,4,1,0,0,6,35], # a z
+ ],
+ [
+ [4,64,224,36,8,7,57,8,28,0,18,231,14,209,4,13,0,294,153,173,19,4,9,1,24,6], # b a
+ [17,0,0,0,135,0,0,0,125,0,0,110,0,0,18,0,0,6,3,0,4,0,0,0,32,0], # b b
+ [4,0,0,0,1,0,0,1,0,0,0,1,0,0,14,0,0,0,0,0,3,0,0,0,0,0], # b c
+ [1,0,0,0,1,0,0,0,15,0,0,0,0,0,5,0,0,0,0,0,20,0,0,0,0,0], # b d
+ [170,2,39,189,62,27,37,36,14,0,6,223,8,103,1,0,6,482,126,101,2,12,17,2,14,8], # b e
+ [0,0,0,0,1,0,0,0,0,0,0,0,0,0,5,0,0,1,0,0,5,0,0,0,0,0], # b f
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0], # b g
+ [1,0,0,0,5,0,0,0,0,0,0,0,0,0,9,0,0,0,0,0,4,0,0,0,0,0], # b h
+ [64,17,48,48,77,13,35,0,1,0,17,258,7,199,51,8,4,75,69,176,3,12,2,0,0,6], # b i
+ [0,0,0,0,31,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,13,0,0,0,0,0], # b j
+ [0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # b k
+ [193,0,0,0,1078,0,0,0,246,1,0,0,0,0,153,0,0,0,0,0,90,1,0,0,218,0], # b l
+ [3,0,0,0,13,0,0,0,12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # b m
+ [6,0,0,0,11,0,0,0,1,0,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0], # b n
+ [163,36,3,54,15,1,28,7,26,3,2,107,33,125,174,7,0,177,50,51,111,5,51,44,42,2], # b o
+ [0,0,0,0,0,0,0,0,0,0,0,2,0,0,4,0,0,2,0,0,0,0,0,0,0,0], # b p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # b q
+ [304,0,0,0,205,0,0,0,255,0,0,0,0,1,205,0,0,0,0,0,86,0,0,0,11,1], # b r
+ [1,0,38,0,54,0,0,2,24,0,5,4,0,0,35,1,0,0,0,95,10,0,0,0,2,0], # b s
+ [11,0,0,0,13,2,0,0,6,0,0,9,0,0,8,0,0,18,3,0,5,0,0,0,0,0], # b t
+ [1,11,54,35,4,26,44,1,23,2,7,152,34,71,7,1,1,214,128,128,0,0,0,4,6,11], # b u
+ [0,0,0,0,9,0,0,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # b v
+ [6,0,0,0,5,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # b w
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # b x
+ [3,1,0,0,5,0,2,5,8,0,0,10,0,1,0,8,0,6,17,11,0,0,4,0,0,3], # b y
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # b z
+ ],
+ [
+ [0,88,38,84,10,13,17,6,17,8,22,760,126,357,4,205,0,463,239,706,64,57,6,1,11,0], # c a
+ [0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0], # c b
+ [57,0,0,0,75,0,0,12,36,0,0,29,0,0,87,0,0,15,0,0,84,0,0,0,3,0], # c c
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0,0,0,0,0,0,0,0,0,0], # c d
+ [60,14,11,212,21,25,1,5,57,0,3,141,57,337,7,107,0,245,492,37,5,1,13,0,8,1], # c e
+ [2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # c f
+ [0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0], # c g
+ [540,25,4,10,823,9,2,9,616,0,0,48,58,41,248,3,0,94,31,23,105,0,23,0,36,0], # c h
+ [234,15,13,105,155,55,7,0,0,0,0,103,15,283,121,95,0,80,134,158,8,27,0,0,0,36], # c i
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # c j
+ [49,44,2,10,466,13,5,25,230,5,7,156,15,37,35,31,0,18,314,18,15,0,22,0,37,0], # c k
+ [267,0,0,0,261,0,0,0,148,0,0,0,0,0,190,0,0,0,0,0,104,0,0,0,8,0], # c l
+ [3,0,0,0,3,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # c m
+ [2,0,0,0,2,0,0,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # c n
+ [95,28,103,54,32,21,40,30,45,0,4,282,705,1323,93,140,5,378,102,86,288,67,47,7,13,11], # c o
+ [0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # c p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,48,0,0,0,0,0], # c q
+ [330,0,0,0,312,0,0,0,277,0,0,0,0,0,266,0,0,0,0,0,177,0,0,0,40,0], # c r
+ [0,0,0,0,0,0,0,0,6,0,0,0,0,0,4,0,0,0,0,4,0,0,0,0,0,0], # c s
+ [92,0,0,0,171,8,0,0,684,0,0,19,8,12,190,0,0,78,134,0,148,0,0,0,6,0], # c t
+ [12,36,9,22,19,18,0,0,22,0,0,274,105,23,12,62,0,256,127,114,4,1,0,0,0,6], # c u
+ [0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # c v
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # c w
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # c x
+ [1,6,57,1,0,0,4,2,1,0,0,4,2,8,3,6,0,4,3,12,0,0,0,2,0,0], # c y
+ [4,0,0,0,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # c z
+ ],
+ [
+ [0,47,30,18,6,9,34,15,51,0,5,108,76,164,1,23,0,104,40,134,26,18,15,0,61,13], # d a
+ [35,0,0,0,10,0,0,0,7,0,0,10,0,0,12,0,0,13,0,0,11,0,0,0,4,0], # d b
+ [29,0,0,0,0,0,0,6,0,0,0,3,0,0,6,0,0,5,0,0,10,0,0,0,0,0], # d c
+ [18,2,0,0,128,0,0,7,103,0,0,135,0,1,13,0,0,9,2,0,5,0,0,0,24,0], # d d
+ [130,94,256,428,62,179,23,18,25,7,6,219,187,452,42,168,8,1017,583,128,9,100,28,18,0,9], # d e
+ [14,0,0,0,2,0,0,0,14,0,0,4,0,0,11,0,0,2,0,0,11,0,0,0,0,0], # d f
+ [4,0,0,0,215,0,0,0,50,0,1,2,13,0,1,0,0,1,1,0,4,0,1,0,4,0], # d g
+ [9,0,0,0,25,0,0,0,10,0,0,0,0,0,28,1,0,0,0,0,3,0,0,0,0,0], # d h
+ [216,23,283,35,253,77,79,0,1,2,7,76,54,628,104,28,0,67,973,202,26,125,2,8,0,58], # d i
+ [7,0,0,0,4,0,0,0,1,0,0,0,0,0,10,0,0,0,0,0,40,0,0,0,0,0], # d j
+ [1,0,0,0,3,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # d k
+ [15,0,0,0,261,0,0,0,117,0,0,0,0,0,23,0,0,0,0,0,4,0,0,0,149,0], # d l
+ [21,0,0,0,11,0,0,0,55,0,0,0,0,0,31,0,0,0,0,0,1,0,0,0,0,0], # d m
+ [14,0,0,0,83,0,0,0,4,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0], # d n
+ [2,6,62,18,27,7,61,1,17,0,0,84,102,117,52,16,0,128,41,23,88,16,162,14,3,15], # d o
+ [16,0,0,0,2,0,0,2,10,0,0,2,0,0,6,0,0,8,0,0,0,0,0,0,0,0], # d p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0], # d q
+ [199,0,0,0,135,0,0,0,128,0,0,0,0,0,159,0,0,0,0,0,60,0,0,0,24,0], # d r
+ [2,0,8,1,14,1,0,26,17,0,2,11,15,0,21,7,0,0,0,47,7,0,6,0,2,0], # d s
+ [1,0,0,0,3,0,0,20,2,0,0,0,0,0,3,0,0,2,1,0,0,0,0,0,0,0], # d t
+ [31,15,184,7,26,4,4,0,7,0,9,106,47,42,12,26,0,43,54,9,0,3,0,0,0,1], # d u
+ [20,0,0,0,45,0,0,0,19,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0], # d v
+ [40,0,0,0,18,0,0,0,39,0,0,0,0,0,19,0,0,2,0,0,0,0,0,0,1,0], # d w
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # d x
+ [2,5,0,0,9,2,3,0,24,0,3,7,4,22,0,0,0,0,16,0,0,0,1,0,0,0], # d y
+ [0,0,0,0,4,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0], # d z
+ ],
+ [
+ [0,62,128,384,3,39,45,6,1,0,171,214,137,200,0,71,0,491,342,385,68,84,17,0,0,8], # e a
+ [91,16,0,0,62,1,0,0,29,0,0,22,0,0,80,0,0,81,9,6,62,0,0,0,3,0], # e b
+ [144,0,26,4,166,0,0,121,173,0,163,79,0,0,283,0,1,109,9,709,110,0,0,0,10,1], # e c
+ [94,10,5,56,229,8,76,3,346,0,0,89,9,33,83,2,0,43,85,4,88,0,12,0,15,0], # e d
+ [6,24,42,200,0,20,4,13,29,4,73,106,62,171,0,158,0,176,139,135,1,27,13,8,1,37], # e e
+ [69,1,2,0,111,83,0,0,148,0,0,39,0,2,83,0,0,58,20,39,151,0,0,0,11,0], # e f
+ [179,0,0,0,80,2,48,5,117,0,0,22,10,12,73,0,0,104,10,0,59,0,1,0,8,0], # e g
+ [49,0,0,0,86,0,0,0,16,0,0,0,1,1,63,0,0,7,0,0,14,0,0,0,6,0], # e h
+ [7,1,13,23,2,11,124,0,1,2,7,28,24,184,0,6,0,28,71,44,1,42,0,0,0,7], # e i
+ [19,0,0,0,19,0,0,0,1,0,0,0,0,0,15,0,0,0,0,0,18,0,0,0,0,0], # e j
+ [8,2,0,3,39,0,0,2,31,0,3,8,0,7,2,0,0,0,15,0,0,0,0,0,3,0], # e k
+ [197,24,20,98,505,31,10,3,501,1,6,712,34,5,151,39,0,9,201,79,44,48,4,0,342,2], # e l
+ [242,189,4,0,372,1,0,0,261,0,1,18,17,28,218,190,0,0,38,1,50,0,0,0,8,0], # e m
+ [239,19,587,577,610,33,131,31,325,13,12,60,13,124,133,12,6,35,562,2078,88,38,10,0,12,22], # e n
+ [0,0,27,20,2,6,26,0,4,0,1,37,10,88,0,38,0,53,23,22,70,5,7,0,0,1], # e o
+ [117,3,3,4,122,4,0,60,118,0,1,82,2,2,97,52,0,146,56,159,56,0,7,0,6,0], # e p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,189,0,0,0,0,0], # e q
+ [749,188,272,94,871,138,159,59,1046,22,42,196,317,307,236,173,4,296,2359,393,49,215,72,2,171,8], # e r
+ [67,8,208,17,224,0,4,114,227,0,12,14,33,6,101,165,21,0,1832,2058,74,0,11,0,10,0], # e s
+ [195,20,58,2,387,12,0,163,354,0,2,19,6,7,113,6,0,218,268,353,47,0,11,0,46,9], # e t
+ [0,2,15,13,4,0,6,0,1,0,4,10,25,14,0,25,0,82,34,40,0,14,1,3,0,0], # e u
+ [139,0,0,0,357,0,0,0,208,0,0,2,0,0,90,0,0,3,5,1,3,2,0,0,4,0], # e v
+ [92,8,3,14,87,5,2,14,72,0,0,10,3,4,58,4,0,16,80,7,0,0,0,0,5,0], # e w
+ [73,0,124,0,92,0,0,44,95,0,0,1,0,0,28,243,2,0,0,182,35,0,0,1,2,0], # e x
+ [11,12,6,5,82,2,0,4,21,0,1,2,14,12,10,4,0,7,76,0,0,0,6,0,0,0], # e y
+ [2,1,0,0,35,0,0,2,15,0,0,0,0,0,5,0,0,1,0,0,4,4,0,0,2,16], # e z
+ ],
+ [
+ [0,34,121,7,3,1,13,5,66,0,9,94,34,63,0,0,0,78,53,80,33,16,9,7,2,4], # f a
+ [4,0,0,0,2,0,0,0,0,0,0,0,0,0,6,0,0,0,0,0,1,0,0,0,0,0], # f b
+ [2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # f c
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], # f d
+ [51,7,105,99,48,1,6,0,32,0,2,54,15,71,0,0,0,273,99,45,11,5,7,0,3,2], # f e
+ [29,6,0,0,206,0,0,4,178,0,0,77,2,2,32,0,0,21,70,0,23,0,0,0,14,0], # f f
+ [1,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # f g
+ [4,0,0,0,3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0], # f h
+ [29,16,232,48,294,12,74,0,0,3,0,124,0,270,4,0,0,127,101,71,0,3,0,41,0,14], # f i
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0], # f j
+ [2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # f k
+ [237,0,0,0,163,0,0,0,116,0,0,0,0,0,144,0,0,0,0,0,123,0,0,0,37,0], # f l
+ [3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # f m
+ [0,0,0,0,6,0,0,0,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0], # f n
+ [13,3,26,4,6,0,26,0,17,0,1,81,8,23,90,4,0,560,11,2,75,0,11,20,2,0], # f o
+ [0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # f p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # f q
+ [180,0,0,0,183,0,0,0,134,0,0,0,0,0,112,0,0,0,0,0,41,0,0,0,6,0], # f r
+ [1,0,0,0,3,0,0,3,2,0,2,0,0,0,0,2,0,0,0,5,0,0,0,0,0,0], # f s
+ [4,2,0,0,98,0,0,6,62,0,0,5,1,3,8,0,0,0,48,0,0,0,6,0,10,0], # f t
+ [0,0,17,12,13,0,17,0,0,3,1,418,24,80,0,0,0,91,86,27,0,0,0,0,0,15], # f u
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # f v
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # f w
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # f x
+ [0,0,0,0,0,0,0,0,84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # f y
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # f z
+ ],
+ [
+ [1,38,8,23,8,6,39,5,37,0,0,145,74,170,4,14,0,220,65,280,31,9,14,0,8,28], # g a
+ [0,0,0,0,4,0,0,0,6,0,0,0,0,0,16,0,0,0,0,0,0,0,0,0,1,0], # g b
+ [2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # g c
+ [5,0,0,0,1,0,0,0,0,0,0,0,0,0,7,0,0,1,0,0,1,0,0,0,0,0], # g d
+ [41,5,5,288,30,4,0,12,13,0,0,81,43,344,86,5,0,418,340,76,1,3,8,0,11,0], # g e
+ [0,0,0,0,2,0,0,0,8,0,0,1,0,0,2,0,0,3,0,0,5,0,0,0,0,0], # g f
+ [27,2,0,1,185,0,0,2,145,0,0,125,0,1,13,2,0,27,6,0,0,0,0,0,20,0], # g g
+ [37,19,2,1,57,3,1,4,28,4,0,15,0,9,42,1,0,1,27,490,5,0,8,0,2,0], # g h
+ [50,53,91,21,136,4,19,0,0,0,0,48,26,414,45,2,0,40,168,59,3,24,0,0,0,23], # g i
+ [3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0], # g j
+ [0,0,0,0,0,0,0,0,2,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0], # g k
+ [112,0,0,0,239,0,0,0,160,0,0,0,0,0,107,0,0,0,0,0,34,0,0,0,169,0], # g l
+ [45,0,0,0,38,0,0,0,8,0,0,0,0,0,3,0,0,0,2,0,2,0,0,0,2,0], # g m
+ [100,2,0,0,91,0,0,0,88,0,0,1,9,0,64,4,0,0,20,1,5,0,0,0,0,0], # g n
+ [29,19,3,40,25,3,27,0,29,0,0,60,5,121,55,2,0,85,42,60,33,27,8,0,5,0], # g o
+ [1,0,0,0,2,0,0,0,6,0,0,4,0,0,2,0,0,0,0,0,1,0,0,0,0,0], # g p
+ [0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # g q
+ [622,0,0,0,239,0,0,0,139,0,0,0,0,0,189,0,0,0,0,0,78,0,0,0,2,0], # g r
+ [6,1,0,0,0,0,0,13,2,0,4,0,0,0,6,4,0,0,0,30,0,0,0,0,0,0], # g s
+ [5,0,0,0,0,0,0,20,5,0,0,0,0,0,16,0,0,4,0,0,0,0,0,0,0,1], # g t
+ [92,4,2,1,130,6,1,0,126,3,0,90,21,52,11,5,0,66,57,22,0,0,0,0,6,7], # g u
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # g v
+ [13,0,0,0,5,0,0,2,2,0,0,0,0,0,6,0,0,2,0,0,0,0,0,0,1,0], # g w
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # g x
+ [3,8,0,0,0,0,0,0,0,0,0,2,12,9,0,10,0,12,0,0,0,0,2,0,0,1], # g y
+ [4,0,0,0,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # g z
+ ],
+ [
+ [2,99,49,56,12,23,45,7,103,0,35,210,161,450,3,80,0,429,115,105,60,39,55,0,28,28], # h a
+ [20,0,0,0,2,0,0,0,4,0,0,5,0,0,33,0,0,6,0,0,5,0,0,0,0,0], # h b
+ [6,0,0,0,0,0,0,4,0,0,0,4,0,0,4,0,0,2,0,0,0,0,0,0,0,0], # h c
+ [3,0,0,0,2,0,0,0,4,0,0,0,0,0,6,0,0,7,0,0,2,0,0,0,0,0], # h d
+ [408,13,64,373,157,15,4,1,80,0,4,187,119,189,53,20,4,777,518,123,13,26,35,9,10,4], # h e
+ [3,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,37,0,0,0,0,0], # h f
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,2,0,0,0,0,0], # h g
+ [2,0,0,0,3,0,0,0,6,0,0,0,0,0,13,0,0,0,0,0,0,0,0,0,0,0], # h h
+ [49,51,156,31,220,20,55,2,0,8,18,214,63,574,34,195,1,117,145,122,4,30,0,0,0,32], # h i
+ [4,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], # h j
+ [2,0,0,0,3,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0], # h k
+ [5,0,0,0,50,0,0,0,31,0,0,0,0,0,31,0,0,2,0,0,1,0,0,0,51,0], # h l
+ [54,0,0,0,58,0,0,0,12,0,0,0,0,0,8,0,0,0,6,0,4,0,0,0,0,0], # h m
+ [11,0,0,0,45,0,0,1,26,0,0,0,1,2,14,0,1,0,4,0,3,0,0,0,1,0], # h n
+ [44,44,30,46,51,7,42,5,19,0,17,269,131,194,207,100,0,337,113,107,214,27,79,0,3,0], # h o
+ [6,0,0,0,0,0,0,1,6,0,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0], # h p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0], # h q
+ [61,0,0,0,61,0,0,0,116,0,0,1,1,0,168,0,0,0,0,0,32,0,0,0,13,0], # h r
+ [7,0,1,0,1,0,0,7,3,0,2,0,0,0,6,0,0,0,0,15,0,0,0,0,0,0], # h s
+ [14,0,7,0,120,16,2,23,65,2,0,28,4,10,8,0,0,8,89,1,5,0,4,0,9,1], # h t
+ [19,20,29,14,5,22,21,1,1,0,2,20,154,94,0,3,0,81,75,43,0,1,0,1,3,0], # h u
+ [1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # h v
+ [43,0,0,0,15,0,0,7,8,0,0,0,0,0,17,0,0,0,0,0,0,0,0,0,0,0], # h w
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # h x
+ [5,7,0,54,5,0,8,0,5,0,0,15,17,1,0,80,0,6,50,11,1,0,0,7,0,0], # h y
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # h z
+ ],
+ [
+ [0,67,46,20,5,1,47,17,7,0,5,408,31,658,1,12,0,86,83,354,3,3,2,0,0,5], # i a
+ [37,72,1,0,130,0,0,0,125,0,0,195,0,3,13,0,0,41,11,0,74,0,2,0,6,0], # i b
+ [956,0,23,0,309,0,0,102,390,0,468,47,1,6,91,0,0,54,286,191,106,0,0,0,20,0], # i c
+ [112,2,0,97,578,0,63,1,177,1,0,52,2,24,33,2,0,5,88,8,28,0,16,0,12,0], # i d
+ [0,9,31,276,0,54,21,3,8,0,4,97,8,230,0,1,0,881,1899,92,15,92,34,0,1,4], # i e
+ [10,0,0,0,96,126,0,0,350,0,0,22,0,0,27,0,0,4,5,105,31,0,0,0,149,0], # i f
+ [145,0,0,0,89,1,114,501,92,0,0,17,31,216,40,2,0,74,28,2,65,0,8,0,2,4], # i g
+ [5,0,0,0,5,0,0,0,14,0,0,0,0,0,5,0,0,0,0,0,4,0,0,0,0,0], # i h
+ [2,0,0,0,0,0,0,0,0,0,0,0,0,6,0,0,0,0,2,0,0,0,0,0,0,0], # i i
+ [14,0,0,0,0,0,0,0,7,0,1,0,0,0,3,0,0,0,2,0,2,0,0,0,0,0], # i j
+ [21,0,0,2,100,0,0,9,34,0,4,0,0,1,11,0,0,0,6,0,2,0,0,0,1,0], # i k
+ [157,29,18,106,361,20,11,9,539,0,42,694,33,6,91,2,2,7,126,87,12,16,8,0,159,0], # i l
+ [185,88,2,0,244,5,0,0,213,0,0,13,159,11,63,477,0,5,57,1,42,0,3,0,3,0], # i m
+ [608,23,444,526,1191,222,8105,65,440,25,209,32,15,168,146,17,30,5,578,844,92,136,16,9,20,1], # i n
+ [4,1,23,18,2,1,26,0,3,0,1,83,10,2782,0,24,0,59,46,53,323,3,5,3,0,1], # i o
+ [69,7,3,0,107,1,0,36,45,0,2,76,10,0,24,192,0,20,110,72,22,0,6,0,3,1], # i p
+ [1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,100,0,0,0,0,0], # i q
+ [111,11,102,76,316,7,20,2,127,0,33,64,64,8,61,19,0,112,78,105,8,6,16,0,16,1], # i r
+ [181,33,319,33,405,43,36,622,262,13,54,64,388,8,139,168,20,46,295,1365,30,1,0,0,6,1], # i s
+ [417,3,117,0,479,16,0,183,737,4,0,36,10,15,144,6,0,50,155,280,174,0,4,0,526,45], # i t
+ [0,0,1,0,0,0,0,0,0,0,0,1,127,1,1,0,0,4,53,1,0,0,0,0,0,0], # i u
+ [156,0,0,0,895,0,0,0,231,0,0,0,0,0,49,0,0,0,0,0,6,9,0,0,2,0], # i v
+ [6,0,0,0,5,0,0,0,11,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0], # i w
+ [9,0,0,0,34,0,0,0,20,0,0,0,0,0,5,2,0,0,0,20,0,0,0,0,1,0], # i x
+ [9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0], # i y
+ [155,0,0,0,795,0,0,1,246,0,0,0,6,0,25,0,0,0,0,0,2,1,0,0,0,76], # i z
+ ],
+ [
+ [0,13,100,4,1,0,10,8,14,0,5,4,28,38,0,11,0,15,8,1,14,8,11,2,24,7], # j a
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # j b
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # j c
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # j d
+ [13,0,86,1,10,7,0,2,0,1,1,13,0,9,5,1,0,32,19,14,0,1,19,0,0,2], # j e
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # j f
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # j g
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # j h
+ [4,11,0,1,0,2,18,2,0,0,1,8,6,25,0,0,0,0,0,12,0,5,1,0,0,0], # j i
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # j j
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0], # j k
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # j l
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # j m
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # j n
+ [5,7,21,4,5,0,10,16,48,0,7,22,1,15,0,1,0,21,24,4,37,5,2,0,37,0], # j o
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # j p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # j q
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # j r
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0], # j s
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # j t
+ [6,9,0,64,0,0,25,0,10,3,3,20,19,58,0,1,0,49,43,7,0,8,1,6,0,0], # j u
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # j v
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # j w
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # j x
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # j y
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # j z
+ ],
+ [
+ [2,29,2,16,2,2,24,10,8,0,0,28,9,35,2,4,0,39,17,47,4,0,3,5,12,8], # k a
+ [9,0,0,0,8,0,0,0,12,0,0,2,0,0,22,0,0,3,0,0,3,0,0,0,0,0], # k b
+ [4,0,0,0,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0], # k c
+ [11,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0,2,0,0,0,0,0,0,0,0], # k d
+ [7,8,1,371,58,4,5,2,9,0,1,49,7,165,12,7,0,419,151,156,4,4,13,0,76,0], # k e
+ [5,0,0,0,0,0,0,0,10,0,0,0,0,0,5,0,0,0,0,0,12,0,0,0,0,0], # k f
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,2,0,0,0,0,0], # k g
+ [38,0,0,0,8,0,0,0,2,0,0,0,2,1,26,0,0,1,4,0,3,0,1,0,1,0], # k h
+ [14,9,18,33,191,4,1,1,1,0,2,69,22,624,3,14,0,34,39,39,1,4,3,0,1,0], # k i
+ [6,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], # k j
+ [5,0,0,0,8,0,0,0,7,0,0,0,0,7,4,0,0,0,0,0,1,0,0,0,0,0], # k k
+ [22,0,0,0,143,0,0,0,66,0,0,0,0,0,13,0,0,0,0,0,9,0,0,0,24,0], # k l
+ [35,0,0,0,7,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0], # k m
+ [21,0,0,0,64,0,1,0,37,0,0,0,0,0,71,0,0,0,0,0,8,0,0,0,0,0], # k n
+ [2,1,4,4,4,3,1,5,0,1,3,8,1,15,14,7,0,14,11,7,17,12,6,0,1,0], # k o
+ [7,0,0,0,8,0,0,0,8,0,0,2,0,0,9,0,0,1,0,0,0,0,0,0,0,0], # k p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # k q
+ [23,0,0,0,7,0,0,0,16,0,0,0,0,0,23,0,0,0,0,0,10,0,0,0,4,0], # k r
+ [10,1,6,0,4,0,2,25,5,0,3,10,9,0,9,7,0,0,0,32,3,0,1,0,1,0], # k s
+ [2,0,0,0,1,0,0,4,3,0,0,0,0,0,12,0,0,4,0,0,1,0,0,0,0,0], # k t
+ [1,2,0,3,0,0,1,1,1,0,2,17,5,6,2,20,0,7,5,4,0,0,3,0,1,3], # k u
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # k v
+ [22,0,0,0,9,0,0,1,4,0,0,0,0,0,10,0,0,1,0,0,4,0,0,0,1,0], # k w
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # k x
+ [8,0,2,7,2,0,0,0,2,6,0,11,0,0,2,0,0,7,3,0,3,0,5,0,0,0], # k y
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # k z
+ ],
+ [
+ [4,143,259,109,18,7,105,24,151,3,26,7,202,544,5,85,3,360,300,774,112,71,56,24,177,41], # l a
+ [28,0,0,0,29,0,0,0,16,0,0,6,0,0,38,0,0,8,3,0,7,1,0,0,4,0], # l b
+ [27,0,0,0,12,0,0,41,15,0,1,3,1,0,35,0,0,5,0,0,22,0,0,0,3,0], # l c
+ [13,8,5,0,161,12,0,3,68,0,0,15,1,5,34,4,0,15,67,3,7,0,6,0,2,0], # l d
+ [326,39,241,860,123,40,178,10,37,5,1,34,146,302,40,50,2,549,1163,297,20,98,34,72,102,4], # l e
+ [8,2,0,0,13,1,1,5,39,0,0,5,0,0,14,3,0,14,7,4,26,0,1,0,0,0], # l f
+ [51,0,0,0,39,0,0,0,20,0,0,0,0,0,10,0,0,6,0,0,1,0,0,0,1,0], # l g
+ [12,0,0,0,9,0,0,0,1,0,0,0,0,0,18,0,0,0,0,0,0,0,0,0,0,0], # l h
+ [223,121,378,99,482,142,228,5,0,1,52,35,177,1435,124,111,37,16,475,571,15,100,4,7,1,400], # l i
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0], # l j
+ [17,2,0,0,41,0,0,3,42,0,0,3,6,2,3,0,0,0,36,0,0,0,6,0,8,0], # l k
+ [319,31,3,14,692,21,2,12,622,2,0,0,14,9,297,14,0,6,205,4,107,1,7,0,670,0], # l l
+ [35,0,0,0,53,0,0,1,39,0,0,1,2,1,25,0,0,0,20,0,3,0,0,0,5,0], # l m
+ [6,0,0,0,73,0,0,0,4,0,0,0,0,0,2,0,0,0,2,0,5,0,0,0,0,0], # l n
+ [98,55,204,60,18,19,382,8,50,0,2,21,53,196,166,127,27,196,131,162,193,68,252,6,60,2], # l o
+ [31,0,0,0,15,4,0,41,24,0,0,6,4,0,11,0,0,5,8,11,1,0,0,0,1,0], # l p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0], # l q
+ [5,0,0,0,2,0,0,0,9,0,0,0,0,0,14,0,0,0,0,0,4,0,0,0,9,0], # l r
+ [21,3,1,0,30,1,0,19,54,0,3,0,1,0,19,9,0,0,1,46,1,0,1,0,3,0], # l s
+ [47,0,2,2,138,1,0,36,147,0,0,3,4,0,30,2,0,47,51,0,26,0,3,0,18,12], # l t
+ [24,51,87,64,79,17,37,1,26,2,13,20,151,95,24,15,0,68,173,137,0,6,0,24,1,1], # l u
+ [43,0,0,1,114,0,0,0,35,0,0,0,0,0,6,0,0,0,0,0,0,0,0,0,1,0], # l v
+ [13,0,0,0,5,0,0,0,5,0,0,0,0,0,8,0,0,2,0,0,0,0,0,0,0,0], # l w
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # l x
+ [13,7,23,7,8,4,17,14,34,0,0,6,28,34,4,21,0,9,40,29,4,0,13,2,0,19], # l y
+ [1,0,0,0,1,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0], # l z
+ ],
+ [
+ [2,24,138,74,11,4,138,34,127,17,59,235,34,723,6,19,0,436,217,507,30,8,5,30,35,23], # m a
+ [109,2,0,1,174,4,0,0,97,0,2,170,1,4,105,0,0,62,33,2,58,0,2,0,1,0], # m b
+ [3,1,15,4,5,2,5,3,8,0,6,4,2,3,3,1,1,2,0,0,0,1,0,0,0,0], # m c
+ [0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0], # m d
+ [96,13,24,321,17,4,31,3,5,1,6,180,63,1051,20,6,0,459,219,234,3,2,21,5,10,7], # m e
+ [0,0,0,0,2,0,0,0,8,0,0,6,0,0,24,0,0,0,0,0,9,0,0,0,1,0], # m f
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # m g
+ [3,0,0,0,2,0,0,0,0,0,0,0,0,0,7,0,0,0,0,0,1,0,0,0,0,0], # m h
+ [45,3,206,104,118,19,53,0,0,2,9,212,24,814,6,5,0,69,540,224,5,0,2,11,0,53], # m i
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # m j
+ [0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # m k
+ [4,0,0,0,23,0,0,0,22,0,0,0,0,0,6,0,0,0,0,0,0,0,0,0,13,0], # m l
+ [105,0,0,0,259,0,0,0,142,0,0,1,0,0,99,0,0,0,0,0,75,0,0,0,23,0], # m m
+ [29,0,0,1,34,0,0,0,43,0,0,1,0,0,5,0,0,0,6,0,1,0,0,0,1,0], # m n
+ [14,41,39,106,6,0,37,16,35,1,21,111,22,402,82,24,0,287,98,160,166,32,10,6,1,6], # m o
+ [172,5,0,2,283,2,2,54,135,0,5,243,2,3,170,3,0,117,73,92,82,0,0,0,12,0], # m p
+ [0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0], # m q
+ [5,0,0,0,2,0,0,0,1,0,0,0,0,0,19,0,0,0,0,0,0,0,0,0,1,0], # m r
+ [1,1,6,1,10,1,0,10,14,0,5,0,2,0,13,2,0,0,0,34,3,0,0,0,3,0], # m s
+ [0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0], # m t
+ [1,1,24,33,8,18,25,5,2,1,2,169,27,87,0,1,0,68,131,85,4,0,0,0,0,8], # m u
+ [0,0,0,0,6,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # m v
+ [2,0,0,0,4,0,0,0,3,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0], # m w
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # m x
+ [3,0,4,0,2,0,0,0,3,0,2,3,0,4,3,0,0,11,23,9,0,0,0,0,0,0], # m y
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # m z
+ ],
+ [
+ [1,111,110,64,30,11,78,14,71,0,23,446,89,164,2,84,0,202,103,595,57,46,13,1,7,13], # n a
+ [27,0,0,0,44,0,0,0,8,0,0,8,0,0,32,0,0,14,0,0,26,0,0,0,0,0], # n b
+ [89,0,0,0,839,0,0,357,264,0,7,84,1,0,238,0,0,59,3,121,64,0,0,0,101,0], # n c
+ [232,46,16,4,895,20,2,18,440,3,3,125,24,11,171,22,2,100,298,8,122,0,19,0,31,0], # n d
+ [97,20,98,545,92,45,75,5,25,0,3,141,57,71,37,16,14,498,1427,198,62,34,85,46,102,9], # n e
+ [57,0,0,0,94,0,0,0,93,0,0,67,0,0,91,0,0,45,0,0,61,0,0,0,0,0], # n f
+ [68,16,1,10,438,11,0,38,147,1,6,317,7,9,74,5,1,99,457,42,103,0,8,0,12,4], # n g
+ [64,0,0,0,46,0,0,0,19,0,0,0,0,1,40,0,0,0,0,0,11,0,0,0,0,0], # n h
+ [239,31,312,17,197,111,88,11,1,1,19,27,78,654,68,57,17,6,393,267,30,37,1,6,0,199], # n i
+ [10,0,0,0,14,0,0,0,2,0,0,0,0,0,22,0,0,0,0,0,42,0,0,0,0,0], # n j
+ [19,4,2,0,163,11,3,8,125,0,0,53,3,14,5,0,0,11,79,1,3,0,2,0,17,0], # n k
+ [32,0,0,0,33,0,0,0,78,0,0,0,0,0,20,0,0,0,0,0,4,0,0,0,45,0], # n l
+ [36,0,0,0,57,0,0,0,7,0,0,0,0,0,12,0,0,0,0,0,1,0,0,0,0,0], # n m
+ [94,1,0,0,312,0,0,1,205,0,2,0,1,0,93,0,0,0,7,0,29,0,0,0,48,1], # n n
+ [5,53,56,22,17,16,35,0,40,0,1,68,123,188,45,39,0,178,110,138,126,57,92,10,10,6], # n o
+ [20,0,0,0,6,0,0,1,13,0,0,24,0,0,22,0,0,32,0,0,7,0,0,0,0,0], # n p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,97,0,0,0,0,0], # n q
+ [17,0,0,0,58,0,0,0,24,0,0,0,0,0,30,0,0,0,0,0,11,0,0,0,6,0], # n r
+ [94,4,90,2,259,39,8,64,302,0,21,30,37,14,111,77,0,0,7,291,138,11,16,0,8,0], # n s
+ [444,3,2,2,939,7,4,171,776,0,0,209,17,9,172,1,0,303,601,0,86,0,12,0,29,10], # n t
+ [47,13,19,17,32,24,13,1,17,1,11,28,86,30,14,9,0,32,47,72,2,0,0,1,0,4], # n u
+ [50,0,0,0,150,0,0,0,76,0,0,0,0,0,35,0,0,0,0,0,11,0,0,0,2,0], # n v
+ [37,0,0,0,5,0,0,8,19,0,0,0,0,0,18,0,0,7,0,0,0,0,0,0,0,0], # n w
+ [0,0,0,0,8,0,0,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # n x
+ [20,2,0,1,1,0,0,1,11,0,0,4,25,0,5,1,1,0,5,5,0,1,6,2,0,0], # n y
+ [23,0,0,0,10,0,0,1,13,0,0,0,0,0,8,0,0,0,0,0,1,0,1,0,3,0], # n z
+ ],
+ [
+ [0,1,46,116,0,9,10,3,0,0,20,36,22,45,0,14,1,153,49,144,4,3,0,11,0,0], # o a
+ [52,107,2,8,85,5,2,0,82,17,0,68,0,10,36,0,0,10,116,23,15,9,4,0,6,0], # o b
+ [158,0,72,0,80,0,0,96,80,0,386,22,0,0,52,0,1,73,5,57,73,0,0,0,8,0], # o c
+ [30,5,15,71,188,5,37,10,162,0,3,39,7,3,67,6,0,11,72,5,61,0,12,0,45,2], # o d
+ [2,10,2,53,0,7,1,6,14,0,2,9,7,18,2,0,2,22,112,15,4,9,3,5,5,0], # o e
+ [16,0,0,0,47,142,0,0,50,0,0,7,0,1,18,0,0,5,17,45,8,0,0,0,1,0], # o f
+ [64,4,2,1,88,6,105,7,215,3,0,16,11,42,16,0,0,208,38,4,51,0,6,0,86,0], # o g
+ [26,0,0,0,26,0,0,1,22,0,0,3,2,13,15,0,0,1,5,0,0,0,0,0,2,0], # o h
+ [5,2,33,79,4,10,3,0,0,0,3,113,2,208,0,0,0,22,139,37,0,0,0,2,0,0], # o i
+ [6,0,0,0,14,0,0,0,2,0,0,0,0,0,8,0,0,0,0,0,0,0,0,0,0,0], # o j
+ [20,2,2,0,182,0,0,1,63,0,5,7,9,1,9,0,0,2,44,1,8,0,4,0,7,0], # o k
+ [174,13,10,219,302,26,10,6,363,0,26,385,21,4,374,6,0,2,75,56,92,55,4,0,78,1], # o l
+ [224,133,2,1,397,28,0,0,264,0,0,4,229,22,68,375,0,3,82,1,7,0,2,0,26,0], # o m
+ [478,14,213,265,608,154,255,12,564,34,24,23,19,123,191,26,21,23,1465,395,29,124,14,1,67,11], # o n
+ [0,8,19,251,18,77,10,6,17,0,203,135,123,188,0,92,0,73,89,214,0,14,0,0,0,32], # o o
+ [73,0,5,0,230,2,4,141,131,1,5,48,9,0,113,169,0,50,125,50,51,0,4,0,29,0], # o p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,42,0,0,0,0,0], # o q
+ [351,56,97,262,478,14,121,6,492,0,162,37,283,200,131,79,5,168,489,604,25,8,28,0,132,1], # o r
+ [48,8,60,0,305,1,1,50,231,0,2,14,29,1,37,80,5,0,199,353,24,1,3,0,23,1], # o s
+ [116,15,41,0,229,4,4,262,223,0,1,31,4,5,131,9,0,26,132,193,25,0,3,0,25,0], # o t
+ [10,48,69,44,11,7,149,2,20,0,2,65,3,504,0,67,2,274,1039,497,0,9,0,2,1,1], # o u
+ [68,0,0,0,722,0,1,0,87,0,0,0,0,1,19,0,0,0,6,0,8,0,0,0,3,0], # o v
+ [43,36,6,50,247,6,7,12,94,0,1,88,15,220,8,20,0,6,145,14,3,0,6,0,5,6], # o w
+ [1,2,4,0,43,2,2,4,70,0,0,0,0,1,3,0,0,0,4,4,1,1,1,0,21,1], # o x
+ [52,2,8,4,38,7,0,2,19,0,0,7,8,4,11,0,0,7,40,0,1,0,0,0,0,0], # o y
+ [11,0,0,0,33,0,0,1,20,0,0,4,0,2,16,0,0,0,0,0,1,0,0,0,5,5], # o z
+ ],
+ [
+ [2,22,142,36,11,0,50,5,78,1,4,168,12,226,0,58,10,510,157,295,34,16,26,2,50,2], # p a
+ [3,0,0,0,5,0,0,0,0,0,0,1,0,0,16,0,0,8,0,0,4,0,0,0,0,0], # p b
+ [2,0,0,0,0,0,0,6,0,0,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0], # p c
+ [9,0,0,0,0,0,0,0,1,0,0,0,0,0,6,0,0,4,0,0,0,0,0,0,0,0], # p d
+ [135,7,202,366,76,13,13,0,4,2,11,122,3,345,15,26,1,1088,148,159,4,2,23,2,5,8], # p e
+ [2,0,0,0,0,0,0,0,3,0,0,0,0,0,2,0,0,5,0,0,7,0,0,0,0,0], # p f
+ [2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,0,0,2,0,0,0,0,0], # p g
+ [140,0,0,0,190,0,0,0,201,0,0,11,0,1,229,0,0,36,28,11,8,0,0,0,91,0], # p h
+ [53,0,163,59,205,11,45,0,1,0,15,118,16,480,24,33,6,119,75,172,5,5,0,5,0,11], # p i
+ [2,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], # p j
+ [0,0,0,0,4,0,0,0,8,0,0,0,0,4,0,0,0,0,0,0,0,0,1,0,0,0], # p k
+ [429,0,0,0,332,0,0,0,255,0,0,0,0,0,138,0,0,0,0,0,113,0,0,0,36,0], # p l
+ [10,0,0,0,18,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,2,0,0,0,0,0], # p m
+ [0,0,0,0,14,0,0,0,0,0,0,0,0,0,12,0,0,0,0,0,0,0,0,0,0,0], # p n
+ [7,0,45,24,22,0,22,0,104,0,33,269,35,140,90,70,0,316,378,135,68,12,42,11,1,1], # p o
+ [46,0,0,0,343,0,0,3,200,0,0,114,1,0,101,0,0,114,4,0,9,0,0,0,25,0], # p p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # p q
+ [133,0,0,0,772,0,0,0,364,0,0,0,0,0,899,0,0,0,0,0,32,0,0,0,8,0], # p r
+ [14,1,8,0,50,1,0,17,49,0,4,0,0,0,20,0,2,0,2,35,20,0,2,0,60,0], # p s
+ [34,0,2,0,63,0,0,2,238,0,0,6,1,6,61,0,0,5,33,0,63,0,2,0,5,0], # p t
+ [0,37,15,21,6,13,16,0,0,0,4,163,23,88,0,18,0,156,62,121,0,0,0,0,0,8], # p u
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # p v
+ [11,0,0,0,1,0,0,0,3,0,0,0,0,0,1,0,0,6,0,0,0,0,0,0,0,0], # p w
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # p x
+ [2,0,5,0,0,0,5,2,10,0,0,4,0,1,3,0,0,25,1,5,0,0,2,3,0,0], # p y
+ [0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # p z
+ ],
+ [
+ [0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0], # q a
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q b
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q c
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q d
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q e
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q f
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q g
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q h
+ [0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,1,0,1,0,0,0,0,0,0,0], # q i
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q j
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q k
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q l
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q m
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q n
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q o
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q q
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q r
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q s
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q t
+ [319,0,0,0,349,0,0,1,406,0,0,0,1,0,43,0,0,0,0,0,1,0,0,0,4,0], # q u
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q v
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q w
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q x
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q y
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # q z
+ ],
+ [
+ [0,194,482,215,17,93,219,28,294,12,38,326,295,854,8,372,5,65,235,1096,49,169,100,5,106,48], # r a
+ [99,0,0,0,78,0,0,0,95,0,0,24,0,0,102,0,0,27,15,0,31,0,0,0,5,0], # r b
+ [52,0,0,0,146,0,0,254,78,0,5,32,0,0,74,0,0,11,2,6,95,0,0,0,11,0], # r c
+ [47,7,4,0,177,2,0,10,202,0,0,40,3,10,54,2,0,36,214,6,13,2,7,0,8,0], # r d
+ [721,96,550,973,363,260,202,95,98,34,15,273,249,409,84,341,43,196,1272,432,32,217,118,21,45,7], # r e
+ [24,4,0,1,60,1,0,0,31,0,0,11,0,0,33,0,0,5,8,0,41,0,0,0,1,0], # r f
+ [93,0,0,0,204,0,0,9,96,0,0,30,1,2,41,0,0,22,9,0,22,0,0,0,15,0], # r g
+ [32,0,0,0,52,0,0,0,15,0,0,0,0,0,42,0,0,0,0,0,4,0,0,0,11,0], # r h
+ [460,152,495,187,685,135,195,4,1,1,24,169,190,1187,191,176,8,0,424,408,55,147,7,12,3,258], # r i
+ [11,0,0,0,10,0,0,0,0,0,0,0,0,0,7,0,0,0,0,0,9,0,0,0,0,0], # r j
+ [22,4,0,4,110,2,0,8,71,0,0,16,6,5,4,3,0,3,88,1,3,0,6,0,6,0], # r k
+ [71,2,0,13,104,2,0,2,131,0,0,0,0,0,66,2,0,0,24,1,6,0,2,0,77,0], # r l
+ [230,2,2,0,151,5,0,9,217,0,0,13,0,1,108,2,0,2,67,1,35,0,2,0,8,0], # r m
+ [151,6,4,0,203,10,0,3,153,0,2,5,15,2,41,4,0,5,88,8,15,0,2,0,6,0], # r n
+ [140,128,204,129,30,102,150,12,74,15,47,208,189,377,247,302,8,41,329,227,388,153,235,24,30,9], # r o
+ [40,0,0,0,81,0,0,36,39,0,0,37,2,1,73,0,0,62,19,5,13,0,0,0,1,0], # r p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,31,0,0,0,0,0], # r q
+ [170,0,0,0,291,0,0,16,291,0,0,0,0,0,159,0,0,0,5,0,52,0,0,0,87,0], # r r
+ [59,3,14,2,273,1,0,99,151,0,12,5,6,8,86,38,1,1,0,124,31,1,7,0,4,0], # r s
+ [156,11,13,0,284,10,10,232,398,0,0,55,29,19,60,0,0,29,156,4,80,0,17,0,25,9], # r t
+ [18,88,133,94,49,31,39,1,71,1,1,38,162,131,3,89,0,5,258,51,0,2,2,4,1,3], # r u
+ [71,0,0,0,129,0,0,0,96,0,0,0,0,0,14,0,0,0,0,0,1,0,0,0,3,0], # r v
+ [42,0,0,0,24,0,0,9,13,0,0,0,0,0,36,0,0,12,0,0,1,0,0,0,0,0], # r w
+ [0,0,0,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # r x
+ [18,8,6,5,11,0,1,0,42,0,2,19,24,16,14,18,0,0,33,5,1,0,11,0,0,0], # r y
+ [3,0,0,0,6,0,0,1,2,0,1,1,0,0,2,0,0,0,0,0,0,0,0,0,1,0], # r z
+ ],
+ [
+ [4,62,69,56,2,40,64,4,63,1,23,233,51,229,0,58,0,110,52,132,60,56,40,8,30,1], # s a
+ [25,0,0,0,22,0,0,0,5,0,0,0,0,0,17,0,0,7,0,0,21,0,0,0,8,0], # s b
+ [306,0,0,0,175,0,0,190,113,0,0,26,0,0,337,0,0,304,3,0,131,0,0,0,7,0], # s c
+ [20,0,0,0,12,0,0,0,13,0,0,0,0,0,7,0,0,6,0,0,2,0,0,0,0,0], # s d
+ [133,25,168,394,97,19,27,16,25,1,4,223,168,307,7,63,40,381,785,128,21,40,32,44,40,0], # s e
+ [11,0,0,0,11,0,0,0,34,0,0,1,0,0,11,0,0,5,0,0,27,0,0,0,5,0], # s f
+ [1,0,0,0,1,0,0,0,6,0,0,0,0,0,10,0,0,18,0,0,17,0,0,0,0,0], # s g
+ [319,19,13,3,558,5,2,3,494,0,9,42,43,36,302,2,0,103,2,15,69,2,18,0,26,0], # s h
+ [116,96,126,195,198,59,152,2,0,0,7,123,104,575,356,24,2,29,154,255,15,181,0,13,0,73], # s i
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,8,0,0,0,0,0], # s j
+ [26,0,0,0,86,0,0,0,160,1,0,1,4,1,3,0,0,3,22,2,18,0,0,0,66,0], # s k
+ [165,0,0,0,132,0,0,0,142,0,0,0,0,0,100,0,0,0,0,0,59,0,0,0,301,0], # s l
+ [150,0,0,0,79,0,0,0,85,0,0,0,0,0,89,0,0,0,76,0,37,0,0,0,0,0], # s m
+ [79,0,0,0,166,0,0,0,56,0,0,0,0,0,108,0,0,0,0,0,28,0,0,0,1,0], # s n
+ [24,27,71,24,9,25,9,3,15,4,1,209,128,309,28,49,0,189,12,24,115,9,11,0,4,1], # s o
+ [203,2,0,0,352,0,0,62,271,0,0,103,0,1,245,0,0,139,12,0,55,0,0,0,8,0], # s p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,202,0,0,0,0,0], # s q
+ [9,0,0,0,26,0,0,0,2,0,0,0,0,0,8,0,0,0,1,0,11,0,0,0,0,0], # s r
+ [160,20,8,0,567,13,0,10,572,0,2,70,24,50,122,16,0,5,0,19,68,0,16,0,29,0], # s s
+ [808,21,10,7,1039,24,2,54,970,0,0,122,32,12,464,17,0,854,508,0,197,0,29,0,84,0], # s t
+ [51,268,66,12,26,45,23,2,43,0,6,101,99,98,3,200,0,281,72,8,0,2,1,0,0,6], # s u
+ [3,0,0,0,13,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # s v
+ [120,0,0,0,104,0,0,0,87,0,0,0,0,0,54,0,0,0,0,0,2,0,0,0,0,0], # s w
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # s x
+ [0,6,56,1,0,4,2,0,9,0,1,33,46,77,0,12,0,14,16,2,0,0,1,0,0,0], # s y
+ [0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0], # s z
+ ],
+ [
+ [1,267,130,20,11,20,135,12,240,1,75,431,116,448,5,63,0,343,122,529,37,16,21,34,16,0], # t a
+ [37,0,0,0,11,0,0,0,13,0,0,2,0,0,21,0,0,19,0,0,8,0,0,0,0,0], # t b
+ [25,0,0,0,2,0,0,381,1,0,0,9,0,0,14,0,0,9,0,0,5,0,0,0,1,0], # t c
+ [5,0,0,0,0,0,0,0,5,0,0,0,0,0,17,0,0,0,0,0,0,0,0,0,0,0], # t d
+ [159,23,97,1589,185,36,38,9,29,0,0,275,152,620,31,64,2,2240,1072,40,21,11,22,32,3,2], # t e
+ [10,0,0,0,2,0,0,0,20,0,0,4,0,0,19,0,0,0,0,0,79,0,0,0,0,0], # t f
+ [11,0,0,0,4,0,0,0,0,0,0,0,0,0,8,0,0,9,0,0,5,0,0,0,0,0], # t g
+ [122,12,1,9,674,27,1,6,271,0,1,48,26,9,241,10,4,206,153,6,75,0,39,0,52,0], # t h
+ [236,66,932,49,501,203,99,5,0,1,7,235,223,1997,2281,87,22,70,240,200,12,504,1,0,0,116], # t i
+ [6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # t j
+ [1,0,0,0,0,0,0,0,6,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0], # t k
+ [37,0,0,0,324,0,0,0,107,0,0,0,0,0,13,0,0,0,0,0,4,0,0,0,249,0], # t l
+ [51,0,0,0,80,0,0,0,5,0,0,0,0,0,18,0,0,0,0,0,2,0,0,0,0,0], # t m
+ [11,0,0,0,91,0,0,0,10,0,0,0,0,0,5,0,0,0,0,0,6,0,0,0,0,0], # t n
+ [25,18,86,22,34,15,48,0,41,4,24,96,151,388,87,130,2,872,79,50,114,10,85,28,10,7], # t o
+ [9,0,0,0,7,0,0,1,8,0,0,9,0,0,13,0,0,10,0,0,4,0,0,0,0,0], # t p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # t q
+ [1032,0,0,0,330,0,0,0,686,0,0,0,0,0,433,0,0,0,0,0,314,0,0,0,78,0], # t r
+ [7,3,12,2,18,0,0,28,28,0,15,2,24,0,12,6,0,0,0,35,11,1,10,0,7,0], # t s
+ [98,0,0,0,638,0,2,7,298,0,0,130,1,1,105,0,0,31,13,0,9,1,1,0,33,0], # t t
+ [150,50,24,78,14,18,8,0,18,0,0,56,80,90,33,38,0,487,81,68,0,1,0,4,1,1], # t u
+ [1,0,0,0,0,0,0,0,3,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0], # t v
+ [46,0,0,0,43,0,0,5,82,0,0,0,0,0,30,0,0,10,0,0,0,0,0,0,0,0], # t w
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # t x
+ [2,0,5,2,4,0,1,1,14,0,2,41,14,2,0,82,0,34,2,2,0,0,0,1,0,0], # t y
+ [6,0,0,0,31,0,1,0,18,0,1,0,1,0,6,2,0,1,2,0,0,0,0,0,5,0], # t z
+ ],
+ [
+ [1,16,18,60,1,5,15,4,17,0,11,213,4,64,0,4,0,138,24,155,3,13,7,0,12,0], # u a
+ [34,114,21,12,49,2,2,9,54,20,0,97,27,1,15,8,0,22,105,48,12,7,2,0,2,0], # u b
+ [40,0,53,0,72,0,0,109,86,0,222,10,0,0,10,0,0,14,1,189,15,0,0,0,3,0], # u c
+ [30,2,0,76,166,0,102,2,140,0,0,8,3,1,12,0,0,6,37,0,5,0,2,0,12,2], # u d
+ [28,13,1,57,31,11,2,1,9,4,0,80,3,97,1,4,0,75,169,58,9,2,0,0,2,10], # u e
+ [7,0,0,0,1,223,0,0,2,0,0,10,1,0,2,0,0,0,0,8,2,0,0,0,0,0], # u f
+ [54,5,0,0,38,0,110,195,11,0,0,17,6,10,10,0,0,0,30,0,37,0,0,0,0,0], # u g
+ [12,0,0,0,0,0,0,0,2,0,0,0,0,1,0,0,0,2,1,0,0,0,0,0,0,0], # u h
+ [11,9,36,45,34,2,9,0,0,1,1,94,0,126,4,13,0,66,134,169,2,20,0,3,1,13], # u i
+ [6,0,0,0,0,0,0,0,6,0,0,0,0,0,1,0,0,1,0,0,4,0,0,0,0,0], # u j
+ [4,0,1,0,28,0,0,2,10,0,8,2,0,0,5,0,0,4,3,2,4,0,0,0,2,0], # u k
+ [474,11,44,22,130,31,53,0,83,0,36,343,16,62,47,41,0,3,98,231,21,10,2,0,7,1], # u l
+ [79,227,6,5,120,9,0,0,96,0,0,7,132,26,23,165,3,1,132,1,20,8,0,0,1,0], # u m
+ [106,80,382,677,116,79,146,42,214,7,139,55,35,102,24,54,11,77,195,367,10,8,45,0,4,5], # u n
+ [0,0,0,4,0,1,0,0,10,0,2,0,1,2,0,0,0,31,6,25,76,0,0,0,8,0], # u o
+ [14,10,8,8,171,3,4,30,47,1,1,48,1,0,14,123,0,25,76,71,10,0,3,0,9,0], # u p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0], # u q
+ [215,86,55,64,489,39,140,2,304,0,27,69,33,180,54,61,3,150,170,120,23,37,0,0,27,1], # u r
+ [52,11,52,1,434,0,0,176,207,0,47,242,1,91,8,33,6,0,150,403,15,0,0,0,9,0], # u s
+ [88,23,42,14,245,18,12,138,266,0,0,45,21,10,90,19,0,68,162,191,28,4,16,0,4,12], # u t
+ [0,0,0,0,0,0,0,0,0,0,0,1,8,0,0,0,0,0,1,0,0,0,0,0,0,0], # u u
+ [5,0,0,0,26,0,0,0,13,0,0,0,0,0,2,0,0,6,1,0,5,0,0,0,0,0], # u v
+ [6,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], # u w
+ [0,0,0,0,12,0,0,0,4,0,0,1,0,0,3,0,0,0,0,7,12,0,0,0,0,0], # u x
+ [2,0,0,0,5,0,1,1,2,0,0,2,0,0,2,0,0,0,2,1,1,1,0,0,0,0], # u y
+ [3,2,1,0,7,0,0,3,3,0,0,0,1,3,3,0,0,0,0,0,2,0,0,0,2,47], # u z
+ ],
+ [
+ [2,33,52,26,5,0,54,4,24,3,8,201,16,147,1,18,0,87,68,151,14,0,0,0,0,1], # v a
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # v b
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # v c
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # v d
+ [22,4,7,136,10,1,18,7,24,0,1,354,20,423,0,3,0,1121,375,55,0,0,2,11,28,1], # v e
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # v f
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], # v g
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # v h
+ [103,19,94,86,98,9,37,0,0,3,5,129,0,247,64,4,0,54,205,124,3,48,0,5,0,8], # v i
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # v j
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # v k
+ [5,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0], # v l
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # v m
+ [0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # v n
+ [0,0,66,1,1,0,7,0,32,0,22,115,6,12,5,0,0,70,7,35,36,1,18,0,21,0], # v o
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # v p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # v q
+ [1,0,0,0,5,0,0,0,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0], # v r
+ [0,0,0,0,0,0,0,0,0,0,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # v s
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0], # v t
+ [0,0,0,0,2,0,0,0,0,0,0,60,1,1,1,0,0,4,2,1,0,0,0,0,0,0], # v u
+ [0,0,0,0,1,0,0,0,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,0], # v v
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # v w
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # v x
+ [0,0,0,0,0,0,0,0,6,0,0,0,0,0,0,0,0,0,1,0,0,0,2,0,0,0], # v y
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # v z
+ ],
+ [
+ [0,11,13,29,0,11,43,3,74,0,28,143,15,83,0,6,0,281,90,131,6,25,4,14,153,2], # w a
+ [13,0,0,0,11,0,0,0,5,0,0,0,0,0,24,0,0,8,0,0,0,0,0,0,0,0], # w b
+ [7,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0], # w c
+ [2,0,0,0,16,0,0,0,18,0,0,12,0,2,5,0,0,12,2,0,4,0,0,0,6,0], # w d
+ [119,12,0,127,121,4,3,2,70,0,0,151,0,27,0,4,0,166,63,15,0,1,0,0,2,1], # w e
+ [3,0,0,0,0,0,0,0,4,0,0,2,0,0,3,0,0,0,0,0,12,0,0,0,0,0], # w f
+ [2,0,0,0,0,0,0,0,6,0,0,1,0,0,0,0,0,0,0,0,2,0,0,0,0,0], # w g
+ [43,0,0,0,106,0,0,0,170,0,0,0,0,0,48,0,0,0,0,0,0,0,0,0,1,0], # w h
+ [0,0,28,27,31,20,47,0,0,0,0,109,20,358,0,14,0,32,87,110,0,13,0,1,1,4], # w i
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # w j
+ [0,0,0,0,7,0,0,0,13,0,0,0,0,1,0,0,0,0,6,0,0,0,5,0,1,0], # w k
+ [7,0,0,0,52,0,0,0,24,0,0,0,0,0,1,0,0,0,29,0,0,0,0,0,7,0], # w l
+ [9,0,0,0,7,0,0,0,2,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0], # w m
+ [1,4,1,0,40,2,4,5,37,0,0,4,0,0,2,6,0,1,70,5,2,0,3,0,5,0], # w n
+ [0,9,0,1,6,6,4,0,0,0,5,21,82,17,133,4,0,286,4,1,11,5,7,0,0,2], # w o
+ [6,0,0,0,1,0,0,1,3,0,0,6,0,0,8,0,0,0,0,0,2,0,0,0,0,0], # w p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # w q
+ [30,0,0,0,46,0,0,0,100,0,0,0,0,0,31,0,0,0,0,0,1,0,0,0,8,0], # w r
+ [1,2,5,0,16,1,0,8,14,0,2,4,2,0,4,11,0,2,2,8,4,0,4,0,3,0], # w s
+ [2,0,0,0,2,0,0,9,0,0,0,0,0,0,9,0,0,0,1,0,0,0,0,0,0,0], # w t
+ [0,0,0,0,0,0,0,1,0,0,0,1,1,1,0,2,0,6,2,0,0,0,0,0,0,0], # w u
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # w v
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,0,0,0,0,0,0,0,0,0,0,0], # w w
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # w x
+ [2,0,2,0,5,0,0,0,0,0,0,1,0,3,3,0,0,0,0,0,0,0,0,0,0,0], # w y
+ [0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0], # w z
+ ],
+ [
+ [0,3,18,1,0,0,12,0,0,0,0,5,23,10,0,0,0,1,6,18,0,1,0,0,0,0], # x a
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0], # x b
+ [11,0,0,0,34,0,0,7,18,0,0,17,0,0,14,0,0,14,0,0,13,0,0,0,0,0], # x c
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # x d
+ [0,0,23,48,0,0,2,0,1,0,0,2,21,12,0,0,0,31,104,0,0,0,0,0,0,0], # x e
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0], # x f
+ [0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # x g
+ [16,0,0,0,0,0,0,0,16,0,0,0,0,0,11,0,0,0,0,0,6,0,0,0,0,0], # x h
+ [11,6,32,19,23,5,7,0,1,0,0,10,22,55,19,0,0,2,37,14,0,3,0,0,0,0], # x i
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # x j
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # x k
+ [0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0], # x l
+ [2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # x m
+ [1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # x n
+ [0,0,2,2,0,0,0,0,0,0,0,2,3,17,0,4,0,17,0,6,0,0,0,0,0,0], # x o
+ [25,0,0,0,79,0,0,0,11,0,0,51,0,0,44,0,0,24,0,0,13,0,0,0,0,0], # x p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0], # x q
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # x r
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0], # x s
+ [10,2,0,0,58,0,0,2,26,0,0,0,0,0,17,0,0,76,3,0,12,0,0,0,1,0], # x t
+ [16,3,0,4,0,0,0,0,0,0,0,7,1,0,0,0,0,12,6,0,0,0,0,0,0,1], # x u
+ [0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # x v
+ [0,0,0,0,1,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0], # x w
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], # x x
+ [1,0,1,0,1,0,6,0,2,0,0,5,3,0,0,0,0,0,2,0,0,0,0,0,0,0], # x y
+ [0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # x z
+ ],
+ [
+ [0,9,18,6,0,0,7,5,0,0,11,35,10,67,2,4,2,49,5,10,0,0,12,0,0,0], # y a
+ [17,0,0,2,24,0,0,0,6,0,0,1,0,0,18,0,0,7,1,0,6,0,0,0,2,0], # y b
+ [12,0,0,0,23,0,0,57,2,0,1,58,0,0,26,0,0,1,0,0,1,0,0,0,0,0], # y c
+ [11,1,0,0,16,0,0,0,11,0,0,0,0,2,1,0,0,54,0,0,0,0,0,0,0,0], # y d
+ [26,6,0,91,4,3,2,0,1,0,1,30,3,10,2,3,0,76,43,9,5,4,3,0,1,0], # y e
+ [5,0,0,0,1,0,0,0,6,0,0,4,0,0,4,0,0,2,0,0,11,0,0,0,0,0], # y f
+ [8,0,0,0,8,0,1,0,9,0,0,4,3,3,11,0,0,8,0,0,2,0,0,0,0,0], # y g
+ [0,0,0,0,6,0,0,0,0,0,0,0,0,1,27,0,0,0,0,0,0,0,0,0,1,0], # y h
+ [0,0,0,1,8,0,0,0,0,0,1,0,0,349,0,5,0,0,13,0,0,0,0,0,0,0], # y i
+ [6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # y j
+ [0,0,0,0,6,0,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], # y k
+ [27,0,0,0,38,0,0,0,33,0,1,31,0,0,23,2,0,0,6,0,6,8,0,0,7,0], # y l
+ [37,17,0,0,44,0,0,0,12,0,0,0,9,16,26,46,0,0,6,0,4,0,0,0,2,0], # y m
+ [33,1,32,11,35,0,5,0,8,0,0,0,1,6,20,0,0,0,0,22,0,0,0,6,1,0], # y n
+ [0,0,0,12,0,6,8,0,0,0,9,8,3,32,0,3,0,19,3,7,30,4,4,0,0,0], # y o
+ [8,0,0,0,78,0,0,34,20,0,0,3,0,12,42,2,0,9,10,25,6,0,0,0,0,0], # y p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0], # y q
+ [34,0,0,4,15,0,1,0,43,0,0,0,0,1,32,0,0,2,4,3,6,0,0,0,2,0], # y r
+ [11,1,2,0,15,3,0,6,58,0,0,5,9,0,9,7,0,0,10,90,3,0,2,0,3,0], # y s
+ [3,0,0,0,32,0,0,42,15,0,0,0,0,0,6,0,0,0,0,1,0,0,0,0,0,0], # y t
+ [2,1,10,0,0,0,7,0,1,0,5,4,3,4,0,4,0,2,3,1,0,0,0,0,0,1], # y u
+ [1,0,0,0,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], # y v
+ [26,0,0,0,9,0,0,5,2,0,0,0,0,0,20,0,0,7,0,0,0,0,0,0,0,0], # y w
+ [0,0,0,0,5,0,0,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # y x
+ [2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0], # y y
+ [3,0,0,0,15,0,0,0,4,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0], # y z
+ ],
+ [
+ [5,9,6,1,0,0,5,1,3,0,5,7,12,31,0,7,0,45,8,125,0,1,0,0,0,3], # z a
+ [0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0], # z b
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], # z c
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # z d
+ [9,12,8,295,3,6,1,0,0,0,3,14,4,34,0,6,0,96,301,13,1,0,0,0,0,1], # z e
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # z f
+ [0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # z g
+ [0,0,0,0,4,0,0,0,4,0,0,0,0,1,7,0,0,0,0,0,2,0,0,0,1,0], # z h
+ [1,2,5,0,76,0,6,0,2,0,0,22,8,321,6,16,0,3,4,3,0,0,0,0,0,0], # z i
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # z j
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0], # z k
+ [0,0,0,0,50,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0], # z l
+ [4,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0], # z m
+ [1,0,0,0,3,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # z n
+ [4,0,0,4,1,0,0,2,12,0,0,6,5,31,16,3,0,11,8,3,2,3,0,0,0,0], # z o
+ [3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # z p
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0], # z q
+ [1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0], # z r
+ [0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0], # z s
+ [0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # z t
+ [0,2,2,0,4,0,0,0,0,0,2,2,2,2,0,0,0,6,1,0,0,0,0,0,0,0], # z u
+ [1,0,0,0,1,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0], # z v
+ [0,0,0,0,1,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0], # z w
+ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], # z x
+ [0,2,0,0,0,0,2,0,2,0,0,0,4,0,0,0,0,1,0,0,1,0,0,0,0,0], # z y
+ [18,1,0,0,27,0,0,0,42,0,0,67,0,0,2,0,0,0,0,0,0,0,2,0,10,0], # z z
+ ],
+ ]
+ return data
+
+##_hooks['en'] = _hooks['en_us'] = american_english_data
+
+#=========================================================
+#compiler
+#=========================================================
+def compile_gpw_data(
+    source="/usr/share/dict/words", alphabet=default_alphabet,
+    name=None, target="gpw_output.py"
+    ):
+    """compile gpw trigram frequency tables from a dictionary file.
+
+    The tables are written to *target* as a python module containing a
+    single ``{name}_data()`` function, in the same layout as the
+    precomputed tables earlier in this module.
+
+    :param source: path of word list to read, one word per line.
+    :param alphabet: string of permitted characters; words containing
+        any other character are skipped with a warning.
+    :param name: basename for the generated function; defaults to the
+        source file's name with ``-`` replaced by ``_``.
+    :param target: path of the python module to write.
+    """
+    #prepare constants
+    num = len(alphabet)
+    rng = range(num)
+    idx = alphabet.index #char -> position within alphabet
+
+    #prepare target array
+    tris = [ [ [0] * num for c2 in rng ] for c1 in rng ] #frequency of c3 given c1,c2
+
+    #parse dictionary file
+    source = filepath(source).canonpath
+    words = 0 #count of distinct words actually used
+    chars = 0 #total characters across used words
+    seen = set() #words already counted, so duplicates are skipped
+    with source.open() as fh:
+        for row in fh:
+            row = row.strip().lower()
+            if row.endswith("'s"):
+                #strip possessive suffix so e.g. "dog's" is counted as "dog"
+                row = row[:-2]
+            if any(c not in alphabet for c in row):
+                log.warning("ignoring word with out-of-alphabet character: %r", row)
+                continue
+            if len(row) < 4: #don't bother with the small words
+                log.debug("ignoring small word: %r", row)
+                continue
+            if row in seen:
+                log.debug("ignoring duplicate word: %r", row)
+                continue
+            #XXX: is it ok that 'ed' and 's' suffixes are biasing us?
+            seen.add(row)
+            words += 1
+            chars += len(row)
+            #slide a 3-char window across the word, tallying each trigram
+            c1 = idx(row[0])
+            c2 = idx(row[1])
+            for c3 in row[2:]:
+                c3 = idx(c3)
+                tris[c1][c2][c3] += 1
+                c1 = c2
+                c2 = c3
+
+    #generate
+    target = filepath(target).abspath
+    if name is None:
+        name = source.name.replace("-", "_")
+    def lr(lst):
+        #compact repr (no spaces) to keep the generated file smaller
+        return repr(lst).replace(" ", "")
+    with target.open("w") as th:
+        #
+        #generate header, alphabet & first
+        #
+        data = dict(
+            alphabet=alphabet,
+            words=words, #not used, but useful to have
+            chars=chars, #not used, but useful to have
+            tris_total=chars-2*words, #a word of length L yields L-2 trigrams
+        )
+        #NOTE: python2 "print >>fh" redirection syntax is used below
+        print >>th, """def {name}_data():
+    "gpw data generated from {path!r}"
+    data = {data!r}""".format(name=name, data=data, path=source)
+
+        #
+        #3rd order frequency table
+        #
+        print >>th, """    data['tris'] = ["""
+        for c1, r1 in zip(alphabet, tris):
+            print >>th, "        ["
+            for c2, r2 in zip(alphabet, r1):
+                print >>th, "            {0}, # {1} {2}".format(lr(r2),c1,c2)
+            print >>th, "        ],"
+        print >>th, """        ]"""
+
+        #
+        #tail
+        #
+        print >>th,"""    return data\n"""
+
+#=========================================================
+# eof
+#=========================================================
diff --git a/bps/security/_unix_crypt.py b/bps/security/_unix_crypt.py
new file mode 100644
index 0000000..1e281e7
--- /dev/null
+++ b/bps/security/_unix_crypt.py
@@ -0,0 +1,664 @@
+"""bps.security._unix_crypt -- unix crypt(3) implementation
+
+History
+=======
+This python implementation was created by Eli Collins <elic@astllc.org>,
+by doing a line-by-line conversion of the Java-based implementation found at
+ http://www.dynamic.net.au/christos/crypt/UnixCrypt2.txt
+and then precalculating tables and pythonizing as much as possible.
+
+The copyright & license for that source is as follows::
+
+ UnixCrypt.java 0.9 96/11/25
+ Copyright (c) 1996 Aki Yoshida. All rights reserved.
+ Permission to use, copy, modify and distribute this software
+ for non-commercial or commercial purposes and without fee is
+ hereby granted provided that this copyright notice appears in
+ all copies.
+
+ ---
+
+ Unix crypt(3C) utility
+ @version 0.9, 11/25/96
+ @author Aki Yoshida
+
+ ---
+
+ modified April 2001
+ by Iris Van den Broeke, Daniel Deville
+
+ ---
+ Unix Crypt.
+ Implements the one way cryptography used by Unix systems for
+ simple password protection.
+ @version $Id: UnixCrypt2.txt,v 1.1.1.1 2005/09/13 22:20:13 christos Exp $
+ @author Greg Wilkins (gregw)
+
+ ---
+ converted to python and modified
+ June 2009
+ by Eli Collins <elic@astllc.org>
+"""
+
+#=========================================================
+#simple constants
+#=========================================================
+
+#base64 char sequence
+#NOTE: this is the traditional crypt(3) "hash64" alphabet (./0-9/A-Z/a-z);
+#its ordering differs from standard MIME base64.
+CHARS = './0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
+b64_encode = CHARS.__getitem__ # int -> char; raises IndexError for values outside 0..63
+
+#inverse map (char->value)
+CHARIDX = dict( (c,i) for i,c in enumerate(CHARS))
+b64_decode = CHARIDX.__getitem__ # char -> int; raises KeyError for chars not in CHARS
+
+#precalculated iteration ranges
+R8 = range(8) # 0..7 ascending
+RR8 = range(7, -1, -1) # 7..0 descending
+RR4 = range(3, -1, -1) # 3..0 descending
+RR12_1 = range(11, 1, -1) # 11..2 descending
+
+#=========================================================
+# static tables
+#=========================================================
+PCXROT = IE3264 = SPE = CF6464 = None #placeholders filled in by load_tables
+
+def load_tables():
+ "delay loading data until it's needed for the first time"
+ global PCXROT, IE3264, SPE, CF6464
+
+ #---------------------------------------------------
+ # Initial key schedule permutation
+ # PC1ROT - bit reverse, then PC1, then Rotate, then PC2
+ #---------------------------------------------------
+ PC1ROT=(
+ ( 0x0000000000000000, 0x0000000000000000, 0x0000010000000000, 0x0000010000000000,
+ 0x0000000100000000, 0x0000000100000000, 0x0000010100000000, 0x0000010100000000,
+ 0x0000000000100000, 0x0000000000100000, 0x0000010000100000, 0x0000010000100000,
+ 0x0000000100100000, 0x0000000100100000, 0x0000010100100000, 0x0000010100100000, ),
+ ( 0x0000000000000000, 0x0000000080000000, 0x0000040000000000, 0x0000040080000000,
+ 0x0010000000000000, 0x0010000080000000, 0x0010040000000000, 0x0010040080000000,
+ 0x0000000800000000, 0x0000000880000000, 0x0000040800000000, 0x0000040880000000,
+ 0x0010000800000000, 0x0010000880000000, 0x0010040800000000, 0x0010040880000000, ),
+ ( 0x0000000000000000, 0x0000000000000000, 0x0000000000004000, 0x0000000000004000,
+ 0x0000000000000008, 0x0000000000000008, 0x0000000000004008, 0x0000000000004008,
+ 0x0000000000000010, 0x0000000000000010, 0x0000000000004010, 0x0000000000004010,
+ 0x0000000000000018, 0x0000000000000018, 0x0000000000004018, 0x0000000000004018, ),
+ ( 0x0000000000000000, 0x0000000200000000, 0x0001000000000000, 0x0001000200000000,
+ 0x0400000000000000, 0x0400000200000000, 0x0401000000000000, 0x0401000200000000,
+ 0x0020000000000000, 0x0020000200000000, 0x0021000000000000, 0x0021000200000000,
+ 0x0420000000000000, 0x0420000200000000, 0x0421000000000000, 0x0421000200000000, ),
+ ( 0x0000000000000000, 0x0000000000000000, 0x0000000000400000, 0x0000000000400000,
+ 0x0000000004000000, 0x0000000004000000, 0x0000000004400000, 0x0000000004400000,
+ 0x0000000000000800, 0x0000000000000800, 0x0000000000400800, 0x0000000000400800,
+ 0x0000000004000800, 0x0000000004000800, 0x0000000004400800, 0x0000000004400800, ),
+ ( 0x0000000000000000, 0x0000000000008000, 0x0040000000000000, 0x0040000000008000,
+ 0x0000004000000000, 0x0000004000008000, 0x0040004000000000, 0x0040004000008000,
+ 0x8000000000000000, 0x8000000000008000, 0x8040000000000000, 0x8040000000008000,
+ 0x8000004000000000, 0x8000004000008000, 0x8040004000000000, 0x8040004000008000, ),
+ ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000080, 0x0000000000000080,
+ 0x0000000000080000, 0x0000000000080000, 0x0000000000080080, 0x0000000000080080,
+ 0x0000000000800000, 0x0000000000800000, 0x0000000000800080, 0x0000000000800080,
+ 0x0000000000880000, 0x0000000000880000, 0x0000000000880080, 0x0000000000880080, ),
+ ( 0x0000000000000000, 0x0000000008000000, 0x0000002000000000, 0x0000002008000000,
+ 0x0000100000000000, 0x0000100008000000, 0x0000102000000000, 0x0000102008000000,
+ 0x0000200000000000, 0x0000200008000000, 0x0000202000000000, 0x0000202008000000,
+ 0x0000300000000000, 0x0000300008000000, 0x0000302000000000, 0x0000302008000000, ),
+ ( 0x0000000000000000, 0x0000000000000000, 0x0000000010000000, 0x0000000010000000,
+ 0x0000000000001000, 0x0000000000001000, 0x0000000010001000, 0x0000000010001000,
+ 0x0000000040000000, 0x0000000040000000, 0x0000000050000000, 0x0000000050000000,
+ 0x0000000040001000, 0x0000000040001000, 0x0000000050001000, 0x0000000050001000, ),
+ ( 0x0000000000000000, 0x0000001000000000, 0x0000080000000000, 0x0000081000000000,
+ 0x1000000000000000, 0x1000001000000000, 0x1000080000000000, 0x1000081000000000,
+ 0x0004000000000000, 0x0004001000000000, 0x0004080000000000, 0x0004081000000000,
+ 0x1004000000000000, 0x1004001000000000, 0x1004080000000000, 0x1004081000000000, ),
+ ( 0x0000000000000000, 0x0000000000000000, 0x0000000000040000, 0x0000000000040000,
+ 0x0000020000000000, 0x0000020000000000, 0x0000020000040000, 0x0000020000040000,
+ 0x0000000000000004, 0x0000000000000004, 0x0000000000040004, 0x0000000000040004,
+ 0x0000020000000004, 0x0000020000000004, 0x0000020000040004, 0x0000020000040004, ),
+ ( 0x0000000000000000, 0x0000400000000000, 0x0200000000000000, 0x0200400000000000,
+ 0x0080000000000000, 0x0080400000000000, 0x0280000000000000, 0x0280400000000000,
+ 0x0000008000000000, 0x0000408000000000, 0x0200008000000000, 0x0200408000000000,
+ 0x0080008000000000, 0x0080408000000000, 0x0280008000000000, 0x0280408000000000, ),
+ ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000040, 0x0000000000000040,
+ 0x0000000020000000, 0x0000000020000000, 0x0000000020000040, 0x0000000020000040,
+ 0x0000000000200000, 0x0000000000200000, 0x0000000000200040, 0x0000000000200040,
+ 0x0000000020200000, 0x0000000020200000, 0x0000000020200040, 0x0000000020200040, ),
+ ( 0x0000000000000000, 0x0002000000000000, 0x0800000000000000, 0x0802000000000000,
+ 0x0100000000000000, 0x0102000000000000, 0x0900000000000000, 0x0902000000000000,
+ 0x4000000000000000, 0x4002000000000000, 0x4800000000000000, 0x4802000000000000,
+ 0x4100000000000000, 0x4102000000000000, 0x4900000000000000, 0x4902000000000000, ),
+ ( 0x0000000000000000, 0x0000000000000000, 0x0000000000002000, 0x0000000000002000,
+ 0x0000000000000020, 0x0000000000000020, 0x0000000000002020, 0x0000000000002020,
+ 0x0000000000000400, 0x0000000000000400, 0x0000000000002400, 0x0000000000002400,
+ 0x0000000000000420, 0x0000000000000420, 0x0000000000002420, 0x0000000000002420, ),
+ ( 0x0000000000000000, 0x2000000000000000, 0x0000000400000000, 0x2000000400000000,
+ 0x0000800000000000, 0x2000800000000000, 0x0000800400000000, 0x2000800400000000,
+ 0x0008000000000000, 0x2008000000000000, 0x0008000400000000, 0x2008000400000000,
+ 0x0008800000000000, 0x2008800000000000, 0x0008800400000000, 0x2008800400000000, ),
+ )
+ #---------------------------------------------------
+ # Subsequent key schedule rotation permutations
+ # PC2ROT - PC2 inverse, then Rotate, then PC2
+ #---------------------------------------------------
+ PC2ROTA=(
+ ( 0x0000000000000000, 0x0000800000000000, 0x0800000000000000, 0x0800800000000000,
+ 0x0000004000000000, 0x0000804000000000, 0x0800004000000000, 0x0800804000000000,
+ 0x0000000400000000, 0x0000800400000000, 0x0800000400000000, 0x0800800400000000,
+ 0x0000004400000000, 0x0000804400000000, 0x0800004400000000, 0x0800804400000000, ),
+ ( 0x0000000000000000, 0x0080000000000000, 0x0000040000000000, 0x0080040000000000,
+ 0x0008000000000000, 0x0088000000000000, 0x0008040000000000, 0x0088040000000000,
+ 0x0000200000000000, 0x0080200000000000, 0x0000240000000000, 0x0080240000000000,
+ 0x0008200000000000, 0x0088200000000000, 0x0008240000000000, 0x0088240000000000, ),
+ ( 0x0000000000000000, 0x0040000000000000, 0x2000000000000000, 0x2040000000000000,
+ 0x0000008000000000, 0x0040008000000000, 0x2000008000000000, 0x2040008000000000,
+ 0x0000001000000000, 0x0040001000000000, 0x2000001000000000, 0x2040001000000000,
+ 0x0000009000000000, 0x0040009000000000, 0x2000009000000000, 0x2040009000000000, ),
+ ( 0x0000000000000000, 0x0400000000000000, 0x8000000000000000, 0x8400000000000000,
+ 0x0000002000000000, 0x0400002000000000, 0x8000002000000000, 0x8400002000000000,
+ 0x0100000000000000, 0x0500000000000000, 0x8100000000000000, 0x8500000000000000,
+ 0x0100002000000000, 0x0500002000000000, 0x8100002000000000, 0x8500002000000000, ),
+ ( 0x0000000000000000, 0x0000000000004000, 0x0000000020000000, 0x0000000020004000,
+ 0x0001000000000000, 0x0001000000004000, 0x0001000020000000, 0x0001000020004000,
+ 0x0200000000000000, 0x0200000000004000, 0x0200000020000000, 0x0200000020004000,
+ 0x0201000000000000, 0x0201000000004000, 0x0201000020000000, 0x0201000020004000, ),
+ ( 0x0000000000000000, 0x1000000000000000, 0x0004000000000000, 0x1004000000000000,
+ 0x0002000000000000, 0x1002000000000000, 0x0006000000000000, 0x1006000000000000,
+ 0x0000000800000000, 0x1000000800000000, 0x0004000800000000, 0x1004000800000000,
+ 0x0002000800000000, 0x1002000800000000, 0x0006000800000000, 0x1006000800000000, ),
+ ( 0x0000000000000000, 0x0000000000000008, 0x0000000000008000, 0x0000000000008008,
+ 0x0010000000000000, 0x0010000000000008, 0x0010000000008000, 0x0010000000008008,
+ 0x0020000000000000, 0x0020000000000008, 0x0020000000008000, 0x0020000000008008,
+ 0x0030000000000000, 0x0030000000000008, 0x0030000000008000, 0x0030000000008008, ),
+ ( 0x0000000000000000, 0x0000400000000000, 0x0000080000000000, 0x0000480000000000,
+ 0x0000100000000000, 0x0000500000000000, 0x0000180000000000, 0x0000580000000000,
+ 0x4000000000000000, 0x4000400000000000, 0x4000080000000000, 0x4000480000000000,
+ 0x4000100000000000, 0x4000500000000000, 0x4000180000000000, 0x4000580000000000, ),
+ ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000,
+ 0x0000000000080000, 0x0000000000080000, 0x0000000000080000, 0x0000000000080000,
+ 0x0000000000100000, 0x0000000000100000, 0x0000000000100000, 0x0000000000100000,
+ 0x0000000000180000, 0x0000000000180000, 0x0000000000180000, 0x0000000000180000, ),
+ ( 0x0000000000000000, 0x0000000000040000, 0x0000000000000020, 0x0000000000040020,
+ 0x0000000000000004, 0x0000000000040004, 0x0000000000000024, 0x0000000000040024,
+ 0x0000000200000000, 0x0000000200040000, 0x0000000200000020, 0x0000000200040020,
+ 0x0000000200000004, 0x0000000200040004, 0x0000000200000024, 0x0000000200040024, ),
+ ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000,
+ 0x0000000000000040, 0x0000000000000040, 0x0000000000000040, 0x0000000000000040,
+ 0x0000000000001000, 0x0000000000001000, 0x0000000000001000, 0x0000000000001000,
+ 0x0000000000001040, 0x0000000000001040, 0x0000000000001040, 0x0000000000001040, ),
+ ( 0x0000000000000000, 0x0000000000000010, 0x0000000000000400, 0x0000000000000410,
+ 0x0000000000000080, 0x0000000000000090, 0x0000000000000480, 0x0000000000000490,
+ 0x0000000040000000, 0x0000000040000010, 0x0000000040000400, 0x0000000040000410,
+ 0x0000000040000080, 0x0000000040000090, 0x0000000040000480, 0x0000000040000490, ),
+ ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000,
+ 0x0000000100000000, 0x0000000100000000, 0x0000000100000000, 0x0000000100000000,
+ 0x0000000000800000, 0x0000000000800000, 0x0000000000800000, 0x0000000000800000,
+ 0x0000000100800000, 0x0000000100800000, 0x0000000100800000, 0x0000000100800000, ),
+ ( 0x0000000000000000, 0x0000020000000000, 0x0000000080000000, 0x0000020080000000,
+ 0x0000000000400000, 0x0000020000400000, 0x0000000080400000, 0x0000020080400000,
+ 0x0000000008000000, 0x0000020008000000, 0x0000000088000000, 0x0000020088000000,
+ 0x0000000008400000, 0x0000020008400000, 0x0000000088400000, 0x0000020088400000, ),
+ ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000,
+ 0x0000000000200000, 0x0000000000200000, 0x0000000000200000, 0x0000000000200000,
+ 0x0000000004000000, 0x0000000004000000, 0x0000000004000000, 0x0000000004000000,
+ 0x0000000004200000, 0x0000000004200000, 0x0000000004200000, 0x0000000004200000, ),
+ ( 0x0000000000000000, 0x0000000000000800, 0x0000010000000000, 0x0000010000000800,
+ 0x0000000000002000, 0x0000000000002800, 0x0000010000002000, 0x0000010000002800,
+ 0x0000000010000000, 0x0000000010000800, 0x0000010010000000, 0x0000010010000800,
+ 0x0000000010002000, 0x0000000010002800, 0x0000010010002000, 0x0000010010002800, ),
+ )
+ PC2ROTB=(
+ ( 0x0000000000000000, 0x0000000800000000, 0x0000000400000000, 0x0000000c00000000,
+ 0x0000100000000000, 0x0000100800000000, 0x0000100400000000, 0x0000100c00000000,
+ 0x0010000000000000, 0x0010000800000000, 0x0010000400000000, 0x0010000c00000000,
+ 0x0010100000000000, 0x0010100800000000, 0x0010100400000000, 0x0010100c00000000, ),
+ ( 0x0000000000000000, 0x0100000000000000, 0x0001000000000000, 0x0101000000000000,
+ 0x0000001000000000, 0x0100001000000000, 0x0001001000000000, 0x0101001000000000,
+ 0x0004000000000000, 0x0104000000000000, 0x0005000000000000, 0x0105000000000000,
+ 0x0004001000000000, 0x0104001000000000, 0x0005001000000000, 0x0105001000000000, ),
+ ( 0x0000000000000000, 0x0000002000000000, 0x0000040000000000, 0x0000042000000000,
+ 0x4000000000000000, 0x4000002000000000, 0x4000040000000000, 0x4000042000000000,
+ 0x0000400000000000, 0x0000402000000000, 0x0000440000000000, 0x0000442000000000,
+ 0x4000400000000000, 0x4000402000000000, 0x4000440000000000, 0x4000442000000000, ),
+ ( 0x0000000000000000, 0x0000004000000000, 0x0000200000000000, 0x0000204000000000,
+ 0x0000080000000000, 0x0000084000000000, 0x0000280000000000, 0x0000284000000000,
+ 0x0000800000000000, 0x0000804000000000, 0x0000a00000000000, 0x0000a04000000000,
+ 0x0000880000000000, 0x0000884000000000, 0x0000a80000000000, 0x0000a84000000000, ),
+ ( 0x0000000000000000, 0x0000000000400000, 0x0000000000000020, 0x0000000000400020,
+ 0x0040000000000000, 0x0040000000400000, 0x0040000000000020, 0x0040000000400020,
+ 0x0800000000000000, 0x0800000000400000, 0x0800000000000020, 0x0800000000400020,
+ 0x0840000000000000, 0x0840000000400000, 0x0840000000000020, 0x0840000000400020, ),
+ ( 0x0000000000000000, 0x0080000000000000, 0x0000008000000000, 0x0080008000000000,
+ 0x2000000000000000, 0x2080000000000000, 0x2000008000000000, 0x2080008000000000,
+ 0x0020000000000000, 0x00a0000000000000, 0x0020008000000000, 0x00a0008000000000,
+ 0x2020000000000000, 0x20a0000000000000, 0x2020008000000000, 0x20a0008000000000, ),
+ ( 0x0000000000000000, 0x0000000004000000, 0x0000000008000000, 0x000000000c000000,
+ 0x0400000000000000, 0x0400000004000000, 0x0400000008000000, 0x040000000c000000,
+ 0x8000000000000000, 0x8000000004000000, 0x8000000008000000, 0x800000000c000000,
+ 0x8400000000000000, 0x8400000004000000, 0x8400000008000000, 0x840000000c000000, ),
+ ( 0x0000000000000000, 0x0002000000000000, 0x0200000000000000, 0x0202000000000000,
+ 0x1000000000000000, 0x1002000000000000, 0x1200000000000000, 0x1202000000000000,
+ 0x0008000000000000, 0x000a000000000000, 0x0208000000000000, 0x020a000000000000,
+ 0x1008000000000000, 0x100a000000000000, 0x1208000000000000, 0x120a000000000000, ),
+ ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000,
+ 0x0000000000001000, 0x0000000000001000, 0x0000000000001000, 0x0000000000001000,
+ 0x0000000000000010, 0x0000000000000010, 0x0000000000000010, 0x0000000000000010,
+ 0x0000000000001010, 0x0000000000001010, 0x0000000000001010, 0x0000000000001010, ),
+ ( 0x0000000000000000, 0x0000000000000040, 0x0000010000000000, 0x0000010000000040,
+ 0x0000000000200000, 0x0000000000200040, 0x0000010000200000, 0x0000010000200040,
+ 0x0000000000008000, 0x0000000000008040, 0x0000010000008000, 0x0000010000008040,
+ 0x0000000000208000, 0x0000000000208040, 0x0000010000208000, 0x0000010000208040, ),
+ ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000,
+ 0x0000000000002000, 0x0000000000002000, 0x0000000000002000, 0x0000000000002000,
+ 0x0000020000000000, 0x0000020000000000, 0x0000020000000000, 0x0000020000000000,
+ 0x0000020000002000, 0x0000020000002000, 0x0000020000002000, 0x0000020000002000, ),
+ ( 0x0000000000000000, 0x0000000000000800, 0x0000000100000000, 0x0000000100000800,
+ 0x0000000010000000, 0x0000000010000800, 0x0000000110000000, 0x0000000110000800,
+ 0x0000000000000004, 0x0000000000000804, 0x0000000100000004, 0x0000000100000804,
+ 0x0000000010000004, 0x0000000010000804, 0x0000000110000004, 0x0000000110000804, ),
+ ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000,
+ 0x0000000000000008, 0x0000000000000008, 0x0000000000000008, 0x0000000000000008,
+ 0x0000000040000000, 0x0000000040000000, 0x0000000040000000, 0x0000000040000000,
+ 0x0000000040000008, 0x0000000040000008, 0x0000000040000008, 0x0000000040000008, ),
+ ( 0x0000000000000000, 0x0000000020000000, 0x0000000200000000, 0x0000000220000000,
+ 0x0000000000000080, 0x0000000020000080, 0x0000000200000080, 0x0000000220000080,
+ 0x0000000000100000, 0x0000000020100000, 0x0000000200100000, 0x0000000220100000,
+ 0x0000000000100080, 0x0000000020100080, 0x0000000200100080, 0x0000000220100080, ),
+ ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000,
+ 0x0000000000000400, 0x0000000000000400, 0x0000000000000400, 0x0000000000000400,
+ 0x0000000000080000, 0x0000000000080000, 0x0000000000080000, 0x0000000000080000,
+ 0x0000000000080400, 0x0000000000080400, 0x0000000000080400, 0x0000000000080400, ),
+ ( 0x0000000000000000, 0x0000000000800000, 0x0000000000004000, 0x0000000000804000,
+ 0x0000000080000000, 0x0000000080800000, 0x0000000080004000, 0x0000000080804000,
+ 0x0000000000040000, 0x0000000000840000, 0x0000000000044000, 0x0000000000844000,
+ 0x0000000080040000, 0x0000000080840000, 0x0000000080044000, 0x0000000080844000, ),
+ )
+ #---------------------------------------------------
+ #PCXROT - PC1ROT, PC2ROTA, PC2ROTB listed in order
+ # of the PC1 rotation schedule, as used by des_setkey
+ #---------------------------------------------------
+ ##ROTATES = (1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1)
+ PCXROT = (
+ PC1ROT, PC2ROTA, PC2ROTB, PC2ROTB,
+ PC2ROTB, PC2ROTB, PC2ROTB, PC2ROTB,
+ PC2ROTA, PC2ROTB, PC2ROTB, PC2ROTB,
+ PC2ROTB, PC2ROTB, PC2ROTB, PC2ROTA,
+ )
+
+ #---------------------------------------------------
+    # Bit reverse, initial permutation, expansion
+ # Initial permutation/expansion table
+ #---------------------------------------------------
+ IE3264=(
+ ( 0x0000000000000000, 0x0000000000004004, 0x0400000000000040, 0x0400000000004044,
+ 0x0000000040040000, 0x0000000040044004, 0x0400000040040040, 0x0400000040044044,
+ 0x0000000000400400, 0x0000000000404404, 0x0400000000400440, 0x0400000000404444,
+ 0x0000000040440400, 0x0000000040444404, 0x0400000040440440, 0x0400000040444444, ),
+ ( 0x0000000000000000, 0x0000400400000000, 0x0000004004000000, 0x0000404404000000,
+ 0x4004000000000000, 0x4004400400000000, 0x4004004004000000, 0x4004404404000000,
+ 0x0040040000000000, 0x0040440400000000, 0x0040044004000000, 0x0040444404000000,
+ 0x4044040000000000, 0x4044440400000000, 0x4044044004000000, 0x4044444404000000, ),
+ ( 0x0000000000000000, 0x0000000000002000, 0x0000000000000020, 0x0000000000002020,
+ 0x0000000020000000, 0x0000000020002000, 0x0000000020000020, 0x0000000020002020,
+ 0x0000000000200000, 0x0000000000202000, 0x0000000000200020, 0x0000000000202020,
+ 0x0000000020200000, 0x0000000020202000, 0x0000000020200020, 0x0000000020202020, ),
+ ( 0x0000000000000000, 0x0000200000000000, 0x0000002000000000, 0x0000202000000000,
+ 0x2000000000000000, 0x2000200000000000, 0x2000002000000000, 0x2000202000000000,
+ 0x0020000000000000, 0x0020200000000000, 0x0020002000000000, 0x0020202000000000,
+ 0x2020000000000000, 0x2020200000000000, 0x2020002000000000, 0x2020202000000000, ),
+ ( 0x0000000000000000, 0x0000000000001000, 0x0000000000000010, 0x0000000000001010,
+ 0x0000000010000000, 0x0000000010001000, 0x0000000010000010, 0x0000000010001010,
+ 0x0000000000100000, 0x0000000000101000, 0x0000000000100010, 0x0000000000101010,
+ 0x0000000010100000, 0x0000000010101000, 0x0000000010100010, 0x0000000010101010, ),
+ ( 0x0000000000000000, 0x0000100000000000, 0x0000001000000000, 0x0000101000000000,
+ 0x1000000000000000, 0x1000100000000000, 0x1000001000000000, 0x1000101000000000,
+ 0x0010000000000000, 0x0010100000000000, 0x0010001000000000, 0x0010101000000000,
+ 0x1010000000000000, 0x1010100000000000, 0x1010001000000000, 0x1010101000000000, ),
+ ( 0x0000000000000000, 0x0000000000800800, 0x0000000000008008, 0x0000000000808808,
+ 0x0000008008000000, 0x0000008008800800, 0x0000008008008008, 0x0000008008808808,
+ 0x0000000080080000, 0x0000000080880800, 0x0000000080088008, 0x0000000080888808,
+ 0x0000008088080000, 0x0000008088880800, 0x0000008088088008, 0x0000008088888808, ),
+ ( 0x0000000000000000, 0x0080080000000000, 0x0000800800000000, 0x0080880800000000,
+ 0x0800000000000080, 0x0880080000000080, 0x0800800800000080, 0x0880880800000080,
+ 0x8008000000000000, 0x8088080000000000, 0x8008800800000000, 0x8088880800000000,
+ 0x8808000000000080, 0x8888080000000080, 0x8808800800000080, 0x8888880800000080, ),
+ )
+ #---------------------------------------------------
+ # Table that combines the S, P, and E operations.
+ #---------------------------------------------------
+ SPE=(
+ ( 0x0080088008200000, 0x0000008008000000, 0x0000000000200020, 0x0080088008200020,
+ 0x0000000000200000, 0x0080088008000020, 0x0000008008000020, 0x0000000000200020,
+ 0x0080088008000020, 0x0080088008200000, 0x0000008008200000, 0x0080080000000020,
+ 0x0080080000200020, 0x0000000000200000, 0x0000000000000000, 0x0000008008000020,
+ 0x0000008008000000, 0x0000000000000020, 0x0080080000200000, 0x0080088008000000,
+ 0x0080088008200020, 0x0000008008200000, 0x0080080000000020, 0x0080080000200000,
+ 0x0000000000000020, 0x0080080000000000, 0x0080088008000000, 0x0000008008200020,
+ 0x0080080000000000, 0x0080080000200020, 0x0000008008200020, 0x0000000000000000,
+ 0x0000000000000000, 0x0080088008200020, 0x0080080000200000, 0x0000008008000020,
+ 0x0080088008200000, 0x0000008008000000, 0x0080080000000020, 0x0080080000200000,
+ 0x0000008008200020, 0x0080080000000000, 0x0080088008000000, 0x0000000000200020,
+ 0x0080088008000020, 0x0000000000000020, 0x0000000000200020, 0x0000008008200000,
+ 0x0080088008200020, 0x0080088008000000, 0x0000008008200000, 0x0080080000200020,
+ 0x0000000000200000, 0x0080080000000020, 0x0000008008000020, 0x0000000000000000,
+ 0x0000008008000000, 0x0000000000200000, 0x0080080000200020, 0x0080088008200000,
+ 0x0000000000000020, 0x0000008008200020, 0x0080080000000000, 0x0080088008000020, ),
+ ( 0x1000800810004004, 0x0000000000000000, 0x0000800810000000, 0x0000000010004004,
+ 0x1000000000004004, 0x1000800800000000, 0x0000800800004004, 0x0000800810000000,
+ 0x0000800800000000, 0x1000000010004004, 0x1000000000000000, 0x0000800800004004,
+ 0x1000000010000000, 0x0000800810004004, 0x0000000010004004, 0x1000000000000000,
+ 0x0000000010000000, 0x1000800800004004, 0x1000000010004004, 0x0000800800000000,
+ 0x1000800810000000, 0x0000000000004004, 0x0000000000000000, 0x1000000010000000,
+ 0x1000800800004004, 0x1000800810000000, 0x0000800810004004, 0x1000000000004004,
+ 0x0000000000004004, 0x0000000010000000, 0x1000800800000000, 0x1000800810004004,
+ 0x1000000010000000, 0x0000800810004004, 0x0000800800004004, 0x1000800810000000,
+ 0x1000800810004004, 0x1000000010000000, 0x1000000000004004, 0x0000000000000000,
+ 0x0000000000004004, 0x1000800800000000, 0x0000000010000000, 0x1000000010004004,
+ 0x0000800800000000, 0x0000000000004004, 0x1000800810000000, 0x1000800800004004,
+ 0x0000800810004004, 0x0000800800000000, 0x0000000000000000, 0x1000000000004004,
+ 0x1000000000000000, 0x1000800810004004, 0x0000800810000000, 0x0000000010004004,
+ 0x1000000010004004, 0x0000000010000000, 0x1000800800000000, 0x0000800800004004,
+ 0x1000800800004004, 0x1000000000000000, 0x0000000010004004, 0x0000800810000000, ),
+ ( 0x0000000000400410, 0x0010004004400400, 0x0010000000000000, 0x0010000000400410,
+ 0x0000004004000010, 0x0000000000400400, 0x0010000000400410, 0x0010004004000000,
+ 0x0010000000400400, 0x0000004004000000, 0x0000004004400400, 0x0000000000000010,
+ 0x0010004004400410, 0x0010000000000010, 0x0000000000000010, 0x0000004004400410,
+ 0x0000000000000000, 0x0000004004000010, 0x0010004004400400, 0x0010000000000000,
+ 0x0010000000000010, 0x0010004004400410, 0x0000004004000000, 0x0000000000400410,
+ 0x0000004004400410, 0x0010000000400400, 0x0010004004000010, 0x0000004004400400,
+ 0x0010004004000000, 0x0000000000000000, 0x0000000000400400, 0x0010004004000010,
+ 0x0010004004400400, 0x0010000000000000, 0x0000000000000010, 0x0000004004000000,
+ 0x0010000000000010, 0x0000004004000010, 0x0000004004400400, 0x0010000000400410,
+ 0x0000000000000000, 0x0010004004400400, 0x0010004004000000, 0x0000004004400410,
+ 0x0000004004000010, 0x0000000000400400, 0x0010004004400410, 0x0000000000000010,
+ 0x0010004004000010, 0x0000000000400410, 0x0000000000400400, 0x0010004004400410,
+ 0x0000004004000000, 0x0010000000400400, 0x0010000000400410, 0x0010004004000000,
+ 0x0010000000400400, 0x0000000000000000, 0x0000004004400410, 0x0010000000000010,
+ 0x0000000000400410, 0x0010004004000010, 0x0010000000000000, 0x0000004004400400, ),
+ ( 0x0800100040040080, 0x0000100000001000, 0x0800000000000080, 0x0800100040041080,
+ 0x0000000000000000, 0x0000000040041000, 0x0800100000001080, 0x0800000040040080,
+ 0x0000100040041000, 0x0800000000001080, 0x0000000000001000, 0x0800100000000080,
+ 0x0800000000001080, 0x0800100040040080, 0x0000000040040000, 0x0000000000001000,
+ 0x0800000040041080, 0x0000100040040000, 0x0000100000000000, 0x0800000000000080,
+ 0x0000100040040000, 0x0800100000001080, 0x0000000040041000, 0x0000100000000000,
+ 0x0800100000000080, 0x0000000000000000, 0x0800000040040080, 0x0000100040041000,
+ 0x0000100000001000, 0x0800000040041080, 0x0800100040041080, 0x0000000040040000,
+ 0x0800000040041080, 0x0800100000000080, 0x0000000040040000, 0x0800000000001080,
+ 0x0000100040040000, 0x0000100000001000, 0x0800000000000080, 0x0000000040041000,
+ 0x0800100000001080, 0x0000000000000000, 0x0000100000000000, 0x0800000040040080,
+ 0x0000000000000000, 0x0800000040041080, 0x0000100040041000, 0x0000100000000000,
+ 0x0000000000001000, 0x0800100040041080, 0x0800100040040080, 0x0000000040040000,
+ 0x0800100040041080, 0x0800000000000080, 0x0000100000001000, 0x0800100040040080,
+ 0x0800000040040080, 0x0000100040040000, 0x0000000040041000, 0x0800100000001080,
+ 0x0800100000000080, 0x0000000000001000, 0x0800000000001080, 0x0000100040041000, ),
+ ( 0x0000000000800800, 0x0000001000000000, 0x0040040000000000, 0x2040041000800800,
+ 0x2000001000800800, 0x0040040000800800, 0x2040041000000000, 0x0000001000800800,
+ 0x0000001000000000, 0x2000000000000000, 0x2000000000800800, 0x0040041000000000,
+ 0x2040040000800800, 0x2000001000800800, 0x0040041000800800, 0x0000000000000000,
+ 0x0040041000000000, 0x0000000000800800, 0x2000001000000000, 0x2040040000000000,
+ 0x0040040000800800, 0x2040041000000000, 0x0000000000000000, 0x2000000000800800,
+ 0x2000000000000000, 0x2040040000800800, 0x2040041000800800, 0x2000001000000000,
+ 0x0000001000800800, 0x0040040000000000, 0x2040040000000000, 0x0040041000800800,
+ 0x0040041000800800, 0x2040040000800800, 0x2000001000000000, 0x0000001000800800,
+ 0x0000001000000000, 0x2000000000000000, 0x2000000000800800, 0x0040040000800800,
+ 0x0000000000800800, 0x0040041000000000, 0x2040041000800800, 0x0000000000000000,
+ 0x2040041000000000, 0x0000000000800800, 0x0040040000000000, 0x2000001000000000,
+ 0x2040040000800800, 0x0040040000000000, 0x0000000000000000, 0x2040041000800800,
+ 0x2000001000800800, 0x0040041000800800, 0x2040040000000000, 0x0000001000000000,
+ 0x0040041000000000, 0x2000001000800800, 0x0040040000800800, 0x2040040000000000,
+ 0x2000000000000000, 0x2040041000000000, 0x0000001000800800, 0x2000000000800800, ),
+ ( 0x4004000000008008, 0x4004000020000000, 0x0000000000000000, 0x0000200020008008,
+ 0x4004000020000000, 0x0000200000000000, 0x4004200000008008, 0x0000000020000000,
+ 0x4004200000000000, 0x4004200020008008, 0x0000200020000000, 0x0000000000008008,
+ 0x0000200000008008, 0x4004000000008008, 0x0000000020008008, 0x4004200020000000,
+ 0x0000000020000000, 0x4004200000008008, 0x4004000020008008, 0x0000000000000000,
+ 0x0000200000000000, 0x4004000000000000, 0x0000200020008008, 0x4004000020008008,
+ 0x4004200020008008, 0x0000000020008008, 0x0000000000008008, 0x4004200000000000,
+ 0x4004000000000000, 0x0000200020000000, 0x4004200020000000, 0x0000200000008008,
+ 0x4004200000000000, 0x0000000000008008, 0x0000200000008008, 0x4004200020000000,
+ 0x0000200020008008, 0x4004000020000000, 0x0000000000000000, 0x0000200000008008,
+ 0x0000000000008008, 0x0000200000000000, 0x4004000020008008, 0x0000000020000000,
+ 0x4004000020000000, 0x4004200020008008, 0x0000200020000000, 0x4004000000000000,
+ 0x4004200020008008, 0x0000200020000000, 0x0000000020000000, 0x4004200000008008,
+ 0x4004000000008008, 0x0000000020008008, 0x4004200020000000, 0x0000000000000000,
+ 0x0000200000000000, 0x4004000000008008, 0x4004200000008008, 0x0000200020008008,
+ 0x0000000020008008, 0x4004200000000000, 0x4004000000000000, 0x4004000020008008, ),
+ ( 0x0000400400000000, 0x0020000000000000, 0x0020000000100000, 0x0400000000100040,
+ 0x0420400400100040, 0x0400400400000040, 0x0020400400000000, 0x0000000000000000,
+ 0x0000000000100000, 0x0420000000100040, 0x0420000000000040, 0x0000400400100000,
+ 0x0400000000000040, 0x0020400400100000, 0x0000400400100000, 0x0420000000000040,
+ 0x0420000000100040, 0x0000400400000000, 0x0400400400000040, 0x0420400400100040,
+ 0x0000000000000000, 0x0020000000100000, 0x0400000000100040, 0x0020400400000000,
+ 0x0400400400100040, 0x0420400400000040, 0x0020400400100000, 0x0400000000000040,
+ 0x0420400400000040, 0x0400400400100040, 0x0020000000000000, 0x0000000000100000,
+ 0x0420400400000040, 0x0000400400100000, 0x0400400400100040, 0x0420000000000040,
+ 0x0000400400000000, 0x0020000000000000, 0x0000000000100000, 0x0400400400100040,
+ 0x0420000000100040, 0x0420400400000040, 0x0020400400000000, 0x0000000000000000,
+ 0x0020000000000000, 0x0400000000100040, 0x0400000000000040, 0x0020000000100000,
+ 0x0000000000000000, 0x0420000000100040, 0x0020000000100000, 0x0020400400000000,
+ 0x0420000000000040, 0x0000400400000000, 0x0420400400100040, 0x0000000000100000,
+ 0x0020400400100000, 0x0400000000000040, 0x0400400400000040, 0x0420400400100040,
+ 0x0400000000100040, 0x0020400400100000, 0x0000400400100000, 0x0400400400000040, ),
+ ( 0x8008000080082000, 0x0000002080082000, 0x8008002000000000, 0x0000000000000000,
+ 0x0000002000002000, 0x8008000080080000, 0x0000000080082000, 0x8008002080082000,
+ 0x8008000000000000, 0x0000000000002000, 0x0000002080080000, 0x8008002000000000,
+ 0x8008002080080000, 0x8008002000002000, 0x8008000000002000, 0x0000000080082000,
+ 0x0000002000000000, 0x8008002080080000, 0x8008000080080000, 0x0000002000002000,
+ 0x8008002080082000, 0x8008000000002000, 0x0000000000000000, 0x0000002080080000,
+ 0x0000000000002000, 0x0000000080080000, 0x8008002000002000, 0x8008000080082000,
+ 0x0000000080080000, 0x0000002000000000, 0x0000002080082000, 0x8008000000000000,
+ 0x0000000080080000, 0x0000002000000000, 0x8008000000002000, 0x8008002080082000,
+ 0x8008002000000000, 0x0000000000002000, 0x0000000000000000, 0x0000002080080000,
+ 0x8008000080082000, 0x8008002000002000, 0x0000002000002000, 0x8008000080080000,
+ 0x0000002080082000, 0x8008000000000000, 0x8008000080080000, 0x0000002000002000,
+ 0x8008002080082000, 0x0000000080080000, 0x0000000080082000, 0x8008000000002000,
+ 0x0000002080080000, 0x8008002000000000, 0x8008002000002000, 0x0000000080082000,
+ 0x8008000000000000, 0x0000002080082000, 0x8008002080080000, 0x0000000000000000,
+ 0x0000000000002000, 0x8008000080082000, 0x0000002000000000, 0x8008002080080000, ),
+ )
+ #---------------------------------------------------
+ # compressed/interleaved => final permutation table
+ # Compression, final permutation, bit reverse
+ #---------------------------------------------------
+ CF6464=(
+ ( 0x0000000000000000, 0x0000000000000040, 0x0000000000004000, 0x0000000000004040,
+ 0x0000000000400000, 0x0000000000400040, 0x0000000000404000, 0x0000000000404040,
+ 0x0000000040000000, 0x0000000040000040, 0x0000000040004000, 0x0000000040004040,
+ 0x0000000040400000, 0x0000000040400040, 0x0000000040404000, 0x0000000040404040, ),
+ ( 0x0000000000000000, 0x0000000000000004, 0x0000000000000400, 0x0000000000000404,
+ 0x0000000000040000, 0x0000000000040004, 0x0000000000040400, 0x0000000000040404,
+ 0x0000000004000000, 0x0000000004000004, 0x0000000004000400, 0x0000000004000404,
+ 0x0000000004040000, 0x0000000004040004, 0x0000000004040400, 0x0000000004040404, ),
+ ( 0x0000000000000000, 0x0000004000000000, 0x0000400000000000, 0x0000404000000000,
+ 0x0040000000000000, 0x0040004000000000, 0x0040400000000000, 0x0040404000000000,
+ 0x4000000000000000, 0x4000004000000000, 0x4000400000000000, 0x4000404000000000,
+ 0x4040000000000000, 0x4040004000000000, 0x4040400000000000, 0x4040404000000000, ),
+ ( 0x0000000000000000, 0x0000000400000000, 0x0000040000000000, 0x0000040400000000,
+ 0x0004000000000000, 0x0004000400000000, 0x0004040000000000, 0x0004040400000000,
+ 0x0400000000000000, 0x0400000400000000, 0x0400040000000000, 0x0400040400000000,
+ 0x0404000000000000, 0x0404000400000000, 0x0404040000000000, 0x0404040400000000, ),
+ ( 0x0000000000000000, 0x0000000000000010, 0x0000000000001000, 0x0000000000001010,
+ 0x0000000000100000, 0x0000000000100010, 0x0000000000101000, 0x0000000000101010,
+ 0x0000000010000000, 0x0000000010000010, 0x0000000010001000, 0x0000000010001010,
+ 0x0000000010100000, 0x0000000010100010, 0x0000000010101000, 0x0000000010101010, ),
+ ( 0x0000000000000000, 0x0000000000000001, 0x0000000000000100, 0x0000000000000101,
+ 0x0000000000010000, 0x0000000000010001, 0x0000000000010100, 0x0000000000010101,
+ 0x0000000001000000, 0x0000000001000001, 0x0000000001000100, 0x0000000001000101,
+ 0x0000000001010000, 0x0000000001010001, 0x0000000001010100, 0x0000000001010101, ),
+ ( 0x0000000000000000, 0x0000001000000000, 0x0000100000000000, 0x0000101000000000,
+ 0x0010000000000000, 0x0010001000000000, 0x0010100000000000, 0x0010101000000000,
+ 0x1000000000000000, 0x1000001000000000, 0x1000100000000000, 0x1000101000000000,
+ 0x1010000000000000, 0x1010001000000000, 0x1010100000000000, 0x1010101000000000, ),
+ ( 0x0000000000000000, 0x0000000100000000, 0x0000010000000000, 0x0000010100000000,
+ 0x0001000000000000, 0x0001000100000000, 0x0001010000000000, 0x0001010100000000,
+ 0x0100000000000000, 0x0100000100000000, 0x0100010000000000, 0x0100010100000000,
+ 0x0101000000000000, 0x0101000100000000, 0x0101010000000000, 0x0101010100000000, ),
+ ( 0x0000000000000000, 0x0000000000000080, 0x0000000000008000, 0x0000000000008080,
+ 0x0000000000800000, 0x0000000000800080, 0x0000000000808000, 0x0000000000808080,
+ 0x0000000080000000, 0x0000000080000080, 0x0000000080008000, 0x0000000080008080,
+ 0x0000000080800000, 0x0000000080800080, 0x0000000080808000, 0x0000000080808080, ),
+ ( 0x0000000000000000, 0x0000000000000008, 0x0000000000000800, 0x0000000000000808,
+ 0x0000000000080000, 0x0000000000080008, 0x0000000000080800, 0x0000000000080808,
+ 0x0000000008000000, 0x0000000008000008, 0x0000000008000800, 0x0000000008000808,
+ 0x0000000008080000, 0x0000000008080008, 0x0000000008080800, 0x0000000008080808, ),
+ ( 0x0000000000000000, 0x0000008000000000, 0x0000800000000000, 0x0000808000000000,
+ 0x0080000000000000, 0x0080008000000000, 0x0080800000000000, 0x0080808000000000,
+ 0x8000000000000000, 0x8000008000000000, 0x8000800000000000, 0x8000808000000000,
+ 0x8080000000000000, 0x8080008000000000, 0x8080800000000000, 0x8080808000000000, ),
+ ( 0x0000000000000000, 0x0000000800000000, 0x0000080000000000, 0x0000080800000000,
+ 0x0008000000000000, 0x0008000800000000, 0x0008080000000000, 0x0008080800000000,
+ 0x0800000000000000, 0x0800000800000000, 0x0800080000000000, 0x0800080800000000,
+ 0x0808000000000000, 0x0808000800000000, 0x0808080000000000, 0x0808080800000000, ),
+ ( 0x0000000000000000, 0x0000000000000020, 0x0000000000002000, 0x0000000000002020,
+ 0x0000000000200000, 0x0000000000200020, 0x0000000000202000, 0x0000000000202020,
+ 0x0000000020000000, 0x0000000020000020, 0x0000000020002000, 0x0000000020002020,
+ 0x0000000020200000, 0x0000000020200020, 0x0000000020202000, 0x0000000020202020, ),
+ ( 0x0000000000000000, 0x0000000000000002, 0x0000000000000200, 0x0000000000000202,
+ 0x0000000000020000, 0x0000000000020002, 0x0000000000020200, 0x0000000000020202,
+ 0x0000000002000000, 0x0000000002000002, 0x0000000002000200, 0x0000000002000202,
+ 0x0000000002020000, 0x0000000002020002, 0x0000000002020200, 0x0000000002020202, ),
+ ( 0x0000000000000000, 0x0000002000000000, 0x0000200000000000, 0x0000202000000000,
+ 0x0020000000000000, 0x0020002000000000, 0x0020200000000000, 0x0020202000000000,
+ 0x2000000000000000, 0x2000002000000000, 0x2000200000000000, 0x2000202000000000,
+ 0x2020000000000000, 0x2020002000000000, 0x2020200000000000, 0x2020202000000000, ),
+ ( 0x0000000000000000, 0x0000000200000000, 0x0000020000000000, 0x0000020200000000,
+ 0x0002000000000000, 0x0002000200000000, 0x0002020000000000, 0x0002020200000000,
+ 0x0200000000000000, 0x0200000200000000, 0x0200020000000000, 0x0200020200000000,
+ 0x0202000000000000, 0x0202000200000000, 0x0202020000000000, 0x0202020200000000, ),
+ )
+ #=========================================================
+ #eof load_data
+ #=========================================================
+
+#=========================================================
+#helpers
+#=========================================================
+
+
def perm6464(c, p):
    """Permute a 64-bit integer ``c`` through the lookup table ``p``.

    Each of the 8 bytes of ``c`` is split into two nibbles; each nibble
    indexes one 16-entry sub-table of ``p``, and all of the looked-up
    bit masks are OR'd together to form the permuted result.
    """
    result = 0
    for idx in RR8:
        lo = c & 0x0f
        hi = (c >> 4) & 0x0f
        result |= p[idx << 1][lo] | p[(idx << 1) + 1][hi]
        c >>= 8
    return result
+
def perm3264(c, p):
    """Permute a 32-bit integer ``c`` through the lookup table ``p``.

    Works like :func:`perm6464`, but only consumes the 4 bytes of a
    32-bit value: each byte is split into two nibbles which index
    adjacent sub-tables of ``p``, and the results are OR'd together.
    """
    result = 0
    for idx in RR4:
        lo = c & 0x0f
        hi = (c >> 4) & 0x0f
        result |= p[idx << 1][lo] | p[(idx << 1) + 1][hi]
        c >>= 8
    return result
+
def des_setkey(keyword):
    """Build and return the DES key schedule for the integer key ``keyword``.

    The key is fed through each rotation/permutation table in ``PCXROT``
    in turn; every intermediate value (masked with ``~0x0303030300000000``)
    becomes one entry of the returned schedule list.
    """
    schedule = []
    value = keyword
    for table in PCXROT:
        value = perm6464(value, table)
        schedule.append(value & ~0x0303030300000000)
    return schedule
+
def to_six_bit_int(num):
    """Rearrange ``num`` into the 6-bit-group layout used by the salt logic.

    Pure bit shuffling: four shifted copies of ``num`` are masked to
    disjoint 6-bit fields and OR'd together. Behavior is identical to
    the original single-expression form.
    """
    field_a = (num << 26) & 0xfc000000
    field_b = (num << 12) & 0x00fc0000
    field_c = (num >> 2) & 0x0000fc00
    field_d = (num >> 16) & 0x000000fc
    return field_a | field_b | field_c | field_d
+
def des_cipher(input, salt, num_iter, KS):
    """Returns the DES encrypted code of the given word with the specified environment.

    :arg input: 64-bit integer block to encrypt (crypt() passes 0).
    :arg salt: integer salt; expanded via :func:`to_six_bit_int` and used
        below to mask which differing bits of the two halves get mixed in.
    :arg num_iter: number of full passes over the key schedule to run
        (crypt() uses 25).
    :arg KS: key schedule list as produced by :func:`des_setkey`.
    """
    salt = to_six_bit_int(salt)
    #interleave the input block's alternating bits into the L/R working halves
    R = L = input
    L &= 0x5555555555555555
    R = (R & 0xaaaaaaaa00000000L) | ((R >> 1) & 0x0000000055555555L)
    L = ((((L << 1) | (L << 32)) & 0xffffffff00000000L) |
         ((R | (R >> 32)) & 0x00000000ffffffffL))

    #expand both halves through the initial-permutation/expansion table
    #NOTE(review): the second call reads the freshly re-assigned L (not the
    # combined pre-permutation value) -- verify against the reference C
    # implementation that this ordering is intentional.
    L = perm3264((L>>32), IE3264)
    R = perm3264((L&0xffffffff), IE3264)

    #run specified number of passes
    while num_iter >= 1:
        num_iter -= 1
        #run over each part of the schedule, two entries per loop step
        for loop_count in R8:
            kp = KS[(loop_count<<1)]
            #salt masks which bits differing between the halves are folded in
            k = ((R>>32) ^ R) & salt & 0xffffffff
            k |= (k<<32)
            B = (k ^ R ^ kp)

            #S-box/P-box lookup: eight 6-bit slices of B index the SPE tables
            L ^= (SPE[0][(B>>58)&0x3f] ^ SPE[1][(B>>50)&0x3f] ^
                  SPE[2][(B>>42)&0x3f] ^ SPE[3][(B>>34)&0x3f] ^
                  SPE[4][(B>>26)&0x3f] ^ SPE[5][(B>>18)&0x3f] ^
                  SPE[6][(B>>10)&0x3f] ^ SPE[7][(B>>2)&0x3f])

            #second half-round: same mixing, roles of L and R exchanged
            kp = KS[(loop_count<<1)+1]
            k = ((L>>32) ^ L) & salt & 0xffffffff
            k |= (k<<32)
            B = (k ^ L ^ kp)

            R ^= (SPE[0][(B>>58)&0x3f] ^ SPE[1][(B>>50)&0x3f] ^
                  SPE[2][(B>>42)&0x3f] ^ SPE[3][(B>>34)&0x3f] ^
                  SPE[4][(B>>26)&0x3f] ^ SPE[5][(B>>18)&0x3f] ^
                  SPE[6][(B>>10)&0x3f] ^ SPE[7][(B>>2)&0x3f])

        # swap L and R between passes
        L, R = R, L

    #undo the bit interleaving, then apply the final permutation table
    L = ((((L>>35) & 0x0f0f0f0fL) | (((L&0xffffffff)<<1) & 0xf0f0f0f0L))<<32 |
         (((R>>35) & 0x0f0f0f0fL) | (((R&0xffffffff)<<1) & 0xf0f0f0f0L)))

    L = perm6464(L, CF6464)

    return L
+
+#=========================================================
+#frontend
+#=========================================================
+
def crypt(key, salt):
    "encrypt string using unix-crypt (des) algorithm"
    #lazy-load the permutation tables on first use
    if PCXROT is None:
        load_tables()

    if '\x00' in key:
        #builtin linux crypt doesn't like this,
        #so we don't either
        raise ValueError, "key must not contain null characters"

    #convert key string into an integer
    #only the first 8 chars are significant: short keys are NUL-padded
    #(over-padding is harmless since the [:8] slice below truncates it)
    if len(key) < 8:
        key = key + '\x00' * 8
    key_value = 0
    for c in key[:8]:
        key_value <<= 8
        #each char is shifted left one bit -- presumably to skip the
        #per-byte parity-bit position of a DES key; TODO confirm
        key_value |= 2 * ord(c)

    #convert key int -> key schedule
    key_sched = des_setkey(key_value)

    #parse salt into int (two chars -> 12-bit value, low char first)
    if not salt:
        sa = sb = '.' #no std behavior in this case
    elif len(salt) < 2:
        sa = sb = salt #linux's behavior, probably no real standard
    else:
        sa, sb = salt[0:2]
    salt_value = (b64_decode(sb) << 6) + b64_decode(sa)
    #FIXME: ^ this will throw a KeyError if bad salt chars are used
    # whereas linux crypt does something with it

    #run data through des using input of 0
    result = des_cipher(0, salt_value, 25, key_sched)

    #b64-encode the 64-bit result as 11 hash chars, appended to the
    #2 salt chars; the last char is emitted first, then the loop fills
    #the remaining 10 slots (RR12_1 presumably iterates indices 12..2
    #in reverse -- TODO confirm against load_tables)
    out = [sa, sb] + [None] * 10 + [ b64_encode((result<<2)&0x3f) ]
    result >>= 4
    for i in RR12_1:
        out[i] = b64_encode(result&0x3f)
        result >>= 6
    return "".join(out)
+
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/security/policy.py b/bps/security/policy.py
new file mode 100644
index 0000000..bec57f2
--- /dev/null
+++ b/bps/security/policy.py
@@ -0,0 +1,1405 @@
+"""bps.security.policy -- simple framework for defining internal application security policies.
+
+.. todo::
+
+ * finish documentation for this module
+
+ * go through whole thing and mark out the funcs
+ which accept a (comma-separated) string in place of a list of roles.
+ make handling of that uniform and add tests
+ (maybe via norm_role_seq() helper)
+
+"""
+#=========================================================
+#imports
+#=========================================================
+#core
+from sys import version_info as pyver
+from collections import deque
+import inspect
+#pkg
+from bps import *
+from bps.text import split_condense, condense
+from bps.meta import isstr, isseq
+from bps.basic import intersects
+#local
+__all__ = [
+ 'Policy',
+## 'PERM',
+]
+
+#=========================================================
+#constants
+#=========================================================
class PERM:
    """class holding permission-response constants.

    This class is used to contain the constants
    which are used by :meth:`Permission.check` and the :class:`Permission`
    class' ``guard`` function to signal its response to a given permission question:

    .. attribute:: ALLOW

        Indicates permission is explicitly *allowed*.

    .. attribute:: DENY

        Indicates permission is explicitly *denied*.

    .. attribute:: PENDING

        Indicates permission is neither granted *nor* denied,
        and that the decision is still pending, and should
        be decided by the other permission instances in the policy.
    """
    ALLOW = "allow"
    DENY = "deny"
    PENDING = "pending"

    #tuple of all defined response constants
    values = (ALLOW, DENY, PENDING)
+
+#=========================================================
+#role class - used internally by Policy
+#=========================================================
class Role(BaseClass):
    """A single named role within a :class:`Policy`.

    Constructor parameters (stored as same-named attributes):

    :type name: str
    :arg name:
        [required] name of the role, used for matching.

    :type inherits: seq(str)|str|None
    :param inherits:
        Names of other roles whose permissions this role inherits,
        allowing roles to nest hierarchically. May be given as a
        sequence or a comma-separated string; stored as a frozenset.

    :type desc: str|None
    :param desc:
        Optional human-readable description of the role.

    :type title: str|None
    :param title:
        Optional display title; defaults to the capitalized ``name``.

    :type grantable: bool
    :param grantable:
        Distinguishes roles that may be granted directly to users
        (the default, ``True``) from roles that exist only to be
        inherited from. Enforcement is left to the application.

    .. note::
        Only ``name`` should be passed positionally.
    """
    #=========================================================
    #instance attrs
    #=========================================================
    name = None #role name used for matching
    title = None #display title
    desc = None #optional description
    inherits = None #frozenset of parent role names
    grantable = True #whether role may be granted directly to users

    #=========================================================
    #init
    #=========================================================
    def __init__(self, name, inherits=None, desc=None, title=None, grantable=None):
        if not name:
            raise ValueError("Role name must be specified")
        self.name = name
        self.title = title if title else name.capitalize()
        self.desc = desc
        if grantable is not None:
            self.grantable = grantable
        #normalize inherits into a frozenset of role names
        if inherits is None:
            parents = frozenset()
        elif isstr(inherits):
            parents = frozenset(split_condense(inherits))
        else:
            parents = frozenset(inherits)
        self.inherits = parents

    #=========================================================
    #helpers
    #=========================================================
    def __repr__(self):
        return '<Role 0x%x %r>' % (id(self), self.name)

    #=========================================================
    #eoc
    #=========================================================
+
+#=========================================================
+#permission class - used internally by Policy
+#=========================================================
class Permission(BaseClass):
    """Defines a single permission with a policy.

    Instances of this class have the following attributes (passed in via constructor):

    :type action: str
    :arg action:
        the action string which this permission must match

    :type klass: str|None|False|True
    :arg klass:
        The name of the class which the action is performed on.
        ``False`` means match only if klass is ``None``.
        ``None`` (the default) means match ignoring the value of klass.
        ``True`` means match only if klass is NOT ``None``.
        Any other string means match only if klass matches the string.

    :type attrs: seq(str)|None
    :arg attrs:
        If ``None`` (the default), matches any and all attributes.
        If specified, matches if the attribute specified is within the list,
        or if no attribute is specified.

    :type guard: callable|None
    :arg guard:
        Optional callable function which controls whether permission
        will be granted. If permission does not match all the above
        parameters, guard will not be called. If permission does match,
        and guard is specified, it will be invoked with the following keywords:
        "user", "action", "klass", "item", "attr", "scope".

        Inspection will be used to determine which keywords the guard accepts,
        and only those which are both defined and accepted by the guard function
        will be passed in.

        * The action is permitted if the guard returns ``True``
        * Checking will continue with other permission objects if the guard
          returns ``False``.
        * The action will be explicitly denied (no further checks made)
          if the guard returns the special singleton :data:`PERM_DENIED`.

        .. note::
            The :data:`PERM_DENIED` behavior described above is not
            currently implemented -- :meth:`check` treats any falsy
            guard return as PENDING (see the commented-out code there).

    :type desc: str|None
    :param desc:
        optional string describing what this permits in human-readable terms;
        eg, for display to the user.
        if not specified, the docstring from the guard function will be used,
        if available.

    :type deny: bool
    :param deny:
        If this is set to ``True``, when the permission matches,
        it will deny access rather than allow it.
        (Thus, if the guard returns ``True``, access is denied).

    :type priority: int
    :param priority:
        Optionally, you can specify a priority for the permission.
        Permissions with a higher priority will match before
        permissions with a lower priority.
        This is useful to issue generic permission patterns,
        and then override specific sub-matches with a higher
        priority ``deny=True`` permission.
        The default priority is 0.

    See :ref:`permission-question` for an overview of how
    these attributes should be used to represent
    real-world permission issues.

    Instances of this class should not be created directly,
    but via :meth:`Policy.permit`.

    The policy class defines only one method of note:

    .. automethod:: check
    """
    #=========================================================
    #class attrs
    #=========================================================
    #list of all possible kwds that could be sent to guard func
    all_guard_kwds = ("user", "action", "klass", "attr", "item", "scope", "perm")

    #=========================================================
    # instance attrs
    #=========================================================
    ## policy = weakref_property("_policy")
    action = None #name of action this matches
    desc = None #human readable text describing permission
    klass = None #name of klass this matches, if False it matches NO class, if None if matches all
    attrs = None #list of klass' attrs this matches, False if matches no property, or None if matches all
    guard = None #optional guard func which is checked if all other parts match
    guard_kwds = None #subset of all_guard_kwds which guard func actually accepts
    deny = False #if true, permission will DENY rather than ALLOW
    priority = 0 #relative priority; higher-priority perms should be checked first

    #=========================================================
    #init
    #=========================================================
    def __init__(self, action, klass=None,
                 attrs=None, guard=None, desc=None, deny=False,
                 priority=0):

        #store actions
        self.action = action
        #fall back to the guard's docstring for a description, if any
        self.desc = desc or (guard and guard.__doc__) or None
        ##if klass and hasattr(klass, "__name__"):
        ##    klass = klass.__name__
        self.klass = klass
        #normalize attrs: None -> match everything (leave class default);
        #empty seq -> match only "no attribute specified" (frozenset([None]))
        if attrs is not None:
            if not attrs:
                self.attrs = frozenset([None])
            else:
                self.attrs = frozenset(attrs)
        self.guard = guard
        self.deny = deny
        self.priority = priority

        #build list of kwds which guard accepts
        if guard:
            fmt = inspect.getargspec(guard)
            if fmt[2]: #accepts **kwds
                self.guard_kwds = frozenset(self.all_guard_kwds)
            else:
                self.guard_kwds = frozenset(
                    key for key in self.all_guard_kwds
                    if key in fmt[0]
                )

    #=========================================================
    #main interface
    #=========================================================
    def check(self, user, action,
              klass=None, item=None, attr=None,
              scope=None, _enable_guard=True):
        """check if user has permission to perform the specified action.

        :arg user:
            [required]
            The user object to check against.
            This is not used inside Permission for anything, but is passed
            on to the guard function (if present).

        :arg action:
            [required]
            The name of action to check if user can perform.
            This should be a string matching the name of a registered permission.

        :arg klass:
            Optionally, the name of the class which the action is being performed on.
            This will usually need to be specified, but may not in some cases
            where the action being performed deals more with global state.

        :param item:
            Optionally, the exact instance of klass which action will be performed on.
            If not specified, permission will usually want to return ``True``
            if there exists at least 1 item for which the permission is true.

        :param attr:
            Optionally, the name of the attribute w/in the class
            which the action is being performed on.
            If not specified, it should be assumed action will be performed
            on any/all attributes of the object.

        :param scope:
            Optionally, in rare cases, the action requires a second object
            which represents the scope under which
            that it's acting upon, using the first, in the manner of a direct object.
            This kwd should be used in that case.
            For example, ``dict(action='Grant', klass='Role', item=role, scope=another_user)``
            describes the user attempting to grant a role to a specified user object.

        There is no requirement within the policy code as to the types
        used in "user", "klass", "attr", "item", or "scope",
        this is left up to the application. The suggested use is that
        "user", "item", "scope" should be objects,
        and "klass", "attr" should be strings.

        :returns:
            One of the :class:`PERM` constants: ALLOW, DENY, or PENDING.

        .. note::

            Only "user", "action", and "klass", "item" should ever be provided as positional arguments to this method.
        """
        #haven't decided the policy for these border cases,
        #so they're currently forbidden..
        if attr is False or attr == "":
            raise ValueError, "invalid attr: %r" % (attr,)
        if klass is False or klass == "":
            raise ValueError, "invalid klass: %r" % (klass,)
        #XXX: what if guard wants to accept custom kwds? would be issue for all other rules that don't match

        #any mismatch below defers the decision to the other perms (PENDING)
        if action != self.action:
            return PERM.PENDING
        if self.klass is not None:
            if self.klass is False:
                #False -> only match when no klass was given
                if klass:
                    return PERM.PENDING
            elif self.klass is True:
                #True -> only match when some klass was given
                if not klass:
                    return PERM.PENDING
            elif self.klass != klass:
                return PERM.PENDING
        if self.attrs is not None and attr not in self.attrs:
            return PERM.PENDING
        if self.guard and _enable_guard:
            #assemble only the kwds the guard's signature accepts
            opts = dict()
            #TODO: could build a wrapper func inside __init__ time which takes care of this
            guard_kwds = self.guard_kwds
            if 'user' in guard_kwds:
                opts['user'] = user
            if 'action' in guard_kwds:
                opts['action'] = action
            if 'klass' in guard_kwds:
                opts['klass'] = klass
            if 'attr' in guard_kwds:
                opts['attr'] = attr
            if 'item' in guard_kwds:
                opts['item'] = item
            if 'scope' in guard_kwds:
                opts['scope'] = scope
            if 'perm' in guard_kwds:
                opts['perm'] = self
            #NOTE: not officially documented yet
            #should return one of True, False, or AccessDenied
            ##value = self.guard(**opts)
            ##if value in PERM.values:
            ##    return value
            ##if not value:
            ##    return PERM.PENDING
            if not self.guard(**opts):
                return PERM.PENDING
        #everything matched: resolve according to the deny flag
        if self.deny:
            return PERM.DENY
        else:
            return PERM.ALLOW

    def could_allow(self, action, klass=None, item=None, attr=None, scope=None):
        "check if permission could potentially be allowed (guard func skipped)"
        #NOTE: the fake-user & _enable_guard features are internal only, and subject to change.
        # use this function instead!
        result = self.check("fake-user", action, klass, item, attr, scope, _enable_guard=False)
        return (result == PERM.ALLOW)

    #=========================================================
    #helpers
    #=========================================================
    def __repr__(self):
        #only non-default fields are rendered
        out = "Permission("
        if self.action:
            out += "action=%r, " % (self.action,)
        if self.klass is not None:
            out += "klass=%r, " % (self.klass,)
        if self.attrs:
            out += "attrs=%r, " % (list(self.attrs),)
        if self.guard:
            out += "guard=%r, " % (self.guard,)
        if self.deny:
            out += "deny=True, "
        if out[-1] != "(":
            out = out[:-2] #strip trailing ", "
        return out + ")"

    def __eq__(self, other):
        #NOTE(review): priority and desc are NOT compared -- confirm this
        # is intentional (equality is by matching behavior only)
        if not hasattr(other, "check"):
            return False #not a perm object
        if self.action != other.action:
            return False
        if self.klass != other.klass:
            return False
        if self.attrs != other.attrs:
            return False
        if self.guard != other.guard:
            return False
        if self.deny != other.deny:
            return False
        return True

    def __ne__(self, other):
        return not self.__eq__(other)
    #=========================================================
    #eoc
    #=========================================================
+
+#=========================================================
+#link - used by policy to track relationship of perms & roles
+#=========================================================
class Link(BaseClass):
    """Associates a group of :class:`Permission` objects with the set of
    roles which were explicitly granted those permissions.
    """
    perm_objs = None #permissions granted by this link
    base_roles = None #roles explicitly tied to those permissions

    def __init__(self, perm_objs, base_roles):
        self.perm_objs = perm_objs
        self.base_roles = base_roles
+
+#=========================================================
+#policy - base class for managing security
+#=========================================================
+class Policy(BaseClass):
+ """This provided handling of security for a given context.
+
+ Instance of this class contain a list of roles and permission mappings,
+ and should be used as a frontend for managing permissions & roles
+ for a given application.
+
+ Main Methods
+ ============
+ The following are the main methods users of this class will need,
+ listing roughly in the order they will need them:
+
+ * :meth:`create_role` to add new roles to the policy
+ * :meth:`permit` and :meth:`permit_list` to add new
+ permissions and link them to previously created roles.
+ * :meth:`freeze` to lock out changes, valid and prepare the policy for use.
+ * :meth:`user_has_permission` to check if the user has
+ permission to perform a given action.
+
+ All other methods exposed by this class exist either
+ to support the internal operation of the framework or to allow
+ introspection of the existing policy by the application.
+
+ Role Management
+ ===============
+ The following methods can be used to examine and alter
+ the roles which the policy allows.
+
+ .. automethod:: create_role
+
+ .. automethod:: get_roles
+ .. automethod:: has_role
+
+ .. automethod:: get_user_roles
+ .. automethod:: user_has_role
+ .. automethod:: user_has_any_role
+
+ .. automethod:: get_role_obj
+ .. automethod:: get_role_objs
+
+ Role Helpers
+ ============
+ The following methods are useful for manipulating
+ sets of roles for various purposes:
+
+ .. automethod:: expand_roles
+ .. automethod:: collapse_roles
+ .. automethod:: ascend_roles
+ .. automethod:: descend_roles
+ .. automethod:: ensure_valid_roles
+
+ Permission Creation
+ ===================
+ The following methods can be used to create
+ permission rules, and link them to roles:
+
+ .. automethod:: permit
+ .. automethod:: permit_list
+
+ The following methods are used internally
+ by permit and permit_list, and generally
+ are not needed by external code:
+
+ .. automethod:: create_permission
+ .. automethod:: create_link
+
+ Permission Examination
+ ======================
+ .. automethod:: user_has_permission
+
+ The following methods are useful when examining
+ the permissions, such as for displaying the permitted
+ actions in a gui:
+
+ .. automethod:: get_user_permissions
+ .. automethod:: get_role_permissions
+ .. automethod:: get_linked_roles
+
+ Application Interface
+ =====================
+ Applications should overide the following methods
+ either a kwd to the Policy class constructor,
+ or by overriding the method via a subclass.
+
+ .. automethod:: inspect_user_roles
+
+ Policy Compilation
+ ==================
+ The following methods are related to locking
+ out changes to the policy:
+
+ .. automethod:: freeze
+ .. attribute:: frozen
+
+ This boolean attribute indicates whether
+ the policy has been frozen or not.
+
+ .. automethod:: ensure_frozen
+ .. automethod:: ensure_thawed
+
+ .. todo::
+ flag to enable case-insensitive handling of role & klass names.
+ """
+ #=========================================================
+ #class attrs
+ #=========================================================
+
+ #code should use these attrs when referencing Role or Permission classes,
+ #allowing applications to override them by subclassing Policy.
+ Role = Role
+ Permission = Permission
+ Link = Link
+
+ #=========================================================
+ #instance attrs
+ #=========================================================
+ _priority_in_use = False #set if any perm has non-zero priority
+ _roles = None #dict mapping name of role -> Role object
+ _links = None #list of link objects binding perms <-> roles
+ _roleset_cache = None #cache of roleset -> perms derived from _links
+ frozen = False #flag used to indicate policy has been frozen
+
+## rolesep = "," #separator used when parsing roles from string
+
+ #=========================================================
+ #init
+ #=========================================================
+ def __init__(self, inspect_user_roles=None):
+ self._roles = {}
+ self._links = []
+
+ #override user access options
+ if inspect_user_roles:
+ self.inspect_user_roles = inspect_user_roles
+
+ def freeze(self):
+ """Compiles and validates security policy.
+
+ This validates and initializes internal caches,
+ and should be called after all roles and permissions
+ have been added, but before policy is actually used.
+
+ Once called, no more changes can be made to the policy.
+ """
+ self.ensure_thawed()
+ self._freeze_links()
+ self.frozen = True
+
+ ##def thaw(self):
+ ## self.ensure_frozen()
+ ## self.frozen = False
+
+ def ensure_frozen(self):
+ "helper for methods that require policy is frozen"
+ if not self.frozen:
+ raise AssertionError, "this method can only be called AFTER the policy is frozen"
+ return True
+
+ def ensure_thawed(self):
+ "helper for methods that require policy is not frozen"
+ if self.frozen:
+ raise AssertionError, "this method can only be called BEFORE the policy is frozen"
+ return True
+
+ #=========================================================
+ #application interface
+ # all of these methods should be implemented by
+ # the application using the Policy object,
+ # with code specific to the application.
+ #=========================================================
+ #TODO: offer ability to translate class names -> classes,
+ # if defined, should act w/cache via weakref from inside Perm.check()
+
+ def inspect_user_roles(self, user):
+ """return names of all roles granted to a given user.
+
+ The default implementation attempts to return
+ the value of the ``roles`` attribute of any provided user object;
+ thus this function should probably be overridden with an implementation
+ which understands your application's user account system.
+ You may override it by provided a replacement via
+ ``Policy(inspect_user_roles=my_user_roles_func)``, or by subclassing Policy.
+
+ .. note::
+ Your implementation should *NOT* return roles which the user
+ implicitly inherited from other roles, it should only return
+ the roles they were explicitly granted.
+ """
+ return user.roles
+
+ #=========================================================
+ #role management
+ #=========================================================
+
+ #-------------------------------------------
+ #role objects
+ #-------------------------------------------
def create_role(self, name, *args, **kwds):
    """create a new role and attach it to this Policy.

    ``name`` and all remaining arguments are passed directly to
    :class:`Role`; see it for a list of parameters.

    .. note::
        Policy enforces the restriction that any roles inherited
        by this one *must* already be defined.
    """
    self.ensure_thawed()
    new_role = self.Role(name, *args, **kwds)
    self._validate_role_obj(new_role)
    self._roles[new_role.name] = new_role
    return new_role
+
def _validate_role_obj(self, role):
    """validate a new role's name & inheritance chain before adding it.

    :raises KeyError: if a role with the same name is already registered.
    :raises ValueError: if the role inherits from itself, or from a role
        that has not been defined (via :meth:`ensure_valid_roles`).
    """
    if self.has_role(role.name):
        # py2-only "raise X, msg" syntax replaced with the call form
        raise KeyError("A role with that name already exists: %r" % (role.name,))
    if role.inherits:
        if role.name in role.inherits:
            raise ValueError("Role %r cannot inherit from itself" % (role.name,))
        self.ensure_valid_roles(role.inherits)
+
def get_role_obj(self, role, default=Undef):
    """return the :class:`Role` instance registered under ``role``.

    Raises KeyError for unknown names unless ``default`` is supplied.
    """
    if default is Undef:
        return self._roles[role]
    return self._roles.get(role, default)
+
def get_role_objs(self, roles=None, grantable=None, rtype=set):
    """return :class:`Role` instances in the policy.

    ``roles`` limits the result to the named roles (default: all);
    ``grantable`` optionally filters on the role's grantable flag;
    ``rtype`` selects the container type (``iter`` for a lazy iterator).
    """
    if roles is None:
        source = self._roles.itervalues()
    else:
        source = (self.get_role_obj(name) for name in roles)
    if grantable is not None:
        source = (obj for obj in source if obj.grantable == grantable)
    return source if rtype is iter else rtype(source)
+
+ #-------------------------------------------
+ #role examination
+ #-------------------------------------------
def get_roles(self, grantable=None, rtype=set):
    """return the names of all roles in the policy.

    :param grantable:
        If ``True``, only grantable roles are returned;
        if ``False``, only non-grantable ones;
        ``None`` (the default) disables the filter.

    :param rtype:
        Alternate class to use for return type (eg: list, tuple),
        defaults to ``set``.
    """
    names = (
        obj.name
        for obj in self.get_role_objs(grantable=grantable, rtype=iter)
    )
    return names if rtype is iter else rtype(names)
+
def has_role(self, role, grantable=None, inherits=None):
    """check whether ``role`` is defined in the policy,
    optionally also matching its grantable flag and inheritance."""
    obj = self.get_role_obj(role, None)
    if obj is None:
        return False
    if grantable is not None and obj.grantable != grantable:
        return False
    if inherits and not self._inherits_from_role(role, inherits):
        return False
    return True
+
def _inherits_from_role(self, role, parents):
    """check whether ``role`` (transitively) inherits from any of ``parents``."""
    if isstr(parents):
        parents = set([parents])
    lookup = self._roles.__getitem__
    # depth-first walk up the inheritance chain
    pending = deque([role])
    while pending:
        current = lookup(pending.pop())
        if current.inherits:
            if intersects(current.inherits, parents):
                return True
            pending.extend(current.inherits)
    return False
+
+ #-------------------------------------------
+ #user role queries
+ #-------------------------------------------
def get_user_roles(self, user, inherited=True, rtype=set):
    """returns set of roles granted to user.

    :param user: user to return roles for

    :param inherited:
        * ``True`` (the default): returns roles user was granted or inherited.
        * ``False``: returns only roles user was granted.

    :param rtype:
        This specifies an alternative return type, defaults to ``set``.

    :returns:
        roles granted to the user; this is mainly a validating wrapper
        around :meth:`inspect_user_roles`.
    """
    granted = self.inspect_user_roles(user)
    self.ensure_valid_roles(granted, grantable=True)
    if inherited:
        return self.expand_roles(granted, rtype=rtype)
    # already the requested container type? return it as-is
    if hasattr(rtype, "__bases__") and isinstance(granted, rtype):
        return granted
    return rtype(granted)
+
def user_has_role(self, user, role, inherited=True):
    """check if user has specified role.

    :arg user: the user to check against

    :type role: str
    :arg role: The role to check.

    :param inherited:
        * ``True`` (the default): consider roles user was granted or inherited.
        * ``False``: only consider roles user was granted.

    :returns:
        ``True`` if user holds the role, else ``False``.
    """
    assert self.ensure_valid_role(role)
    return role in self.get_user_roles(user, inherited=inherited)
+
def user_has_any_role(self, user, roles, inherited=True):
    """check if user has *any* of the specified roles.

    :arg user: the user to check against

    :type roles: seq(str)
    :arg roles: the set of roles to check

    :param inherited:
        * ``True`` (the default): consider roles user was granted or inherited.
        * ``False``: only consider roles user was granted.

    :returns:
        ``True`` if the user holds at least one of the roles,
        ``False`` otherwise.
    """
    assert self.ensure_valid_roles(roles)
    held = self.get_user_roles(user, inherited=inherited)
    return intersects(held, roles)
+
+ #-------------------------------------------
+ #role list filtering
+ #-------------------------------------------
def expand_roles(self, roles, rtype=set):
    """given list of roles, expand list to include all inherited roles.

    :type roles: seq(str)
    :arg roles: sequence of role names

    :param rtype:
        Alternate class to use for return type (eg: list, tuple),
        defaults to ``set``.

    :returns:
        expanded set of role names including every inherited role.
    """
    # NOTE: equivalent to descend_roles(roles, keep=True), but slightly
    # faster since already-seen roles are skipped before lookup.
    seen = set()
    lookup = self._roles.__getitem__
    pending = deque(roles)
    while pending:
        name = pending.pop()
        if name in seen:
            continue
        seen.add(name)
        obj = lookup(name)
        if obj.inherits:
            pending.extend(obj.inherits)
    return seen if rtype is set else rtype(seen)
+
def collapse_roles(self, roles, rtype=set):
    """inverse of expand_roles: strip any role that is already implied
    (inherited) by another role in the list."""
    kept = set(roles)
    kept.difference_update(self.descend_roles(kept))
    return kept if rtype is set else rtype(kept)
+
def ascend_roles(self, roles, keep=False, rtype=set):
    """return all roles which inherit from the input set of roles"""
    # FIXME: brute force — rescans every role object per stack entry.
    # Could be rewritten around a compiled role_obj.inherited_by index
    # built when the policy is frozen (or flushed whenever roles change).
    found = set(roles) if keep else set()
    pending = deque(roles)
    while pending:
        name = pending.pop()
        for obj in self.get_role_objs(rtype=iter):
            if name in obj.inherits:
                found.add(obj.name)
                pending.append(obj.name)
    return found if rtype is set else rtype(found)
+
def descend_roles(self, roles, keep=False, rtype=set):
    """return all roles which are inherited by the input set of roles"""
    found = set(roles) if keep else set()
    lookup = self._roles.__getitem__
    pending = deque(roles)
    while pending:
        obj = lookup(pending.pop())
        if obj.inherits:
            found.update(obj.inherits)
            pending.extend(obj.inherits)
    return found if rtype is set else rtype(found)
+
+ #-------------------------------------------
+ #role list validation
+ #-------------------------------------------
def ensure_valid_roles(self, roles, grantable=None):
    """validates that all provided roles exist, raising ValueError if they don't.

    :type roles: seq(str)
    :arg roles: roles to check

    :type grantable: bool|None
    :param grantable:
        * ``True``: raises ValueError if any of the roles *aren't* grantable.
        * ``False``: raises ValueError if any of the roles *are* grantable.
        * ``None`` (the default): grantable status is not checked.

    :returns:
        ``True`` if all roles pass.
        Raises ``ValueError`` if any of them fail.

    mainly useful for sanity checks and assert statements.
    """
    if isstr(roles):
        # historical misuse: accept a bare string, but steer callers
        # toward the single-role variant
        warn("Please use ensure_valid_role() for single role strings; this may be removed in the future")
        return self.ensure_valid_role(roles, grantable=grantable)
    missing = []
    badgrant = []
    for role in roles:
        robj = self.get_role_obj(role, None)
        if robj is None:
            missing.append(role)
            continue
        if grantable is not None and robj.grantable != grantable:
            badgrant.append(role)
    # py2-only "raise X, msg" statements replaced with the call form
    if missing:
        raise ValueError("Unknown roles: %r" % (missing,))
    if badgrant:
        assert grantable is not None
        if grantable:
            raise ValueError("Ungrantable roles: %r" % (badgrant,))
        else:
            raise ValueError("Grantable roles: %r" % (badgrant,))
    return True
+
def ensure_valid_role(self, role, grantable=None):
    """validates that role exists, raising ValueError if it doesn't.

    :type role: str
    :arg role: role to check

    :type grantable: bool|None
    :param grantable:
        * ``True``: raises ValueError if the role *isn't* grantable.
        * ``False``: raises ValueError if the role *is* grantable.
        * ``None`` (the default): grantable status is not checked.

    :returns:
        ``True`` if the role passes, raises ``ValueError`` on any failure.

    mainly useful for sanity checks and assert statements.
    """
    robj = self.get_role_obj(role, None)
    if robj is None:
        # py2-only "raise X, msg" statements replaced with the call form
        raise ValueError("Unknown role: %r" % (role,))
    if grantable is not None and robj.grantable != grantable:
        if grantable:
            raise ValueError("Role is not grantable: %r" % (role,))
        else:
            raise ValueError("Role is grantable: %r" % (role,))
    return True
+
+ #=========================================================
+ #link & permission management
+ #=========================================================
def _norm_role_seq(self, roles):
    """helper which ensures ``roles`` is a sequence of roles.

    A bare string is wrapped in a single-element list;
    any other sequence is returned unchanged.
    """
    return [roles] if isstr(roles) else roles
+
+ #--------------------------------------------------------
+ #creation frontends
+ #--------------------------------------------------------
def permit(self, roles, action, klass=None, **kwds):
    """create & add new Permission object allowing the specified action.

    :type roles: str|seq of strs
    :arg roles:
        Either the name of a single role (eg ``"admin"``),
        or a sequence of roles (eg ``["admin", "manager"]``).
        The user must possess at least one of these roles
        in order for the Permission object to be queried.

    :type action: str
    :arg action:
        The string specifying the action which this permission should match.
        This is passed to the :class:`Permission` constructor.

    :type klass: str|None|False
    :arg klass:
        Optional string specifying the class which this permission should match.
        This is passed to the :class:`Permission` constructor.

    :param \*\*kwds:
        All other keywords are passed directly to the :class:`Permission` constructor.

    :returns:
        The resulting permission object,
        after having linked it to all the specified roles.
    """
    self.ensure_thawed()
    perm_obj = self.create_permission(action, klass=klass, **kwds)
    self.create_link([perm_obj], roles)
    return perm_obj
+
def permit_list(self, roles, perm_descs):
    """add multiple permissions at once.

    Equivalent to calling :meth:`permit` with the same set of ``roles``
    for each dict in ``perm_descs``.

    :type roles: str|seq of strs
    :arg roles:
        Either the name of a single role (eg ``"admin"``),
        or a sequence of roles (eg ``["admin", "manager"]``).
        The user must possess at least one of these roles
        in order for the Permission objects to be queried.

    :type perm_descs: list of dicts
    :arg perm_descs:
        sequence of dictionaries whose key/value pairs are passed
        directly to the Permission constructor.

    :returns:
        list of the permission objects that were created,
        after having linked them to all the specified roles.
    """
    self.ensure_thawed()
    assert all('roles' not in desc for desc in perm_descs), "legacy syntax"
    created = [self.create_permission(**desc) for desc in perm_descs]
    self.create_link(created, roles)
    return created
+
+ #--------------------------------------------------------
+ #creation backends
+ #--------------------------------------------------------
def create_permission(self, action, klass=None, **kwds):
    """create new Permission object, register with policy, and return it.

    :type action: str
    :arg action:
        The string specifying the action which this permission should match.
        This is passed to the :class:`Permission` constructor.

    :type klass: str|None|False
    :arg klass:
        Optional string specifying the class which this permission should match.
        This is passed to the :class:`Permission` constructor.

    :param \*\*kwds:
        All other keywords are passed directly to the :class:`Permission` constructor.

    :rtype: Permission
    :returns:
        the resulting permission object

    .. note::
        The returned permission object is not linked to any roles;
        use :meth:`link_permissions` for that.
    """
    self.ensure_thawed()
    perm_obj = self.Permission(action=action, klass=klass, **kwds)
    if perm_obj.priority != 0 and not self._priority_in_use:
        # first non-default priority seen: permission lists must be
        # priority-sorted from now on (see _get_role_permissions)
        log.debug("enabled priority sorting for permissions")
        self._priority_in_use = True
    return perm_obj
+
def create_link(self, perm_objs, roles):
    """attach permission objects to all specified roles.

    Once called, a user holding at least one of ``roles`` is permitted
    to perform any action allowed by one of the ``perm_objs``.

    :arg perm_objs:
        list of permission objects.
    :arg roles:
        Either the name of a single role (eg ``"admin"``)
        or a sequence of roles (eg ``["admin", "manager"]``).

    .. note::
        This enforces the restriction that any roles referenced
        must be defined before this function is called.
    """
    self.ensure_thawed()
    role_seq = self._norm_role_seq(roles)
    self.ensure_valid_roles(role_seq)
    self._links.append(self.Link(perm_objs, frozenset(role_seq)))
+
+ #--------------------------------------------------------
+ #link/perm examination
+ #--------------------------------------------------------
def get_user_permissions(self, user, rtype=tuple):
    """return all permission objects attached to any of the user's roles."""
    # pass inherited=False here and let get_role_permissions expand the
    # role set, so that expand_roles() also benefits from its cache
    granted = self.get_user_roles(user, inherited=False)
    return self.get_role_permissions(granted, inherited=True, rtype=rtype)
+
def get_role_permissions(self, roles, inherited=True, rtype=tuple):
    """return all permission objects attached to specified roles.

    This returns a list of all perm objs attached to any of the
    specified roles, in the order they should be checked to determine
    whether a specified action is permitted.

    :type roles: str|seq(str)
    :arg roles: role or set of roles which should be checked.

    :param inherited:
        whether function should consider roles inherited
        from specified ``roles`` when searching for permission
        (defaults to True, rarely not needed).

    :param rtype:
        Though it defaults to ``tuple``,
        you can optionally specify the return type
        of this function, allowing for optimal conversion
        from the internal representation.
        Common values are ``tuple``, ``list``, ``set``,
        and ``iter``, the last one resulting in an iterator.

    :returns:
        The list (or other structure, per ``rtype`` param)
        containing all permission objects which were linked
        to 1 or more of the specified roles.
    """
    #frozenset so the role set is hashable & usable as part of a cache key
    roles = frozenset(self._norm_role_seq(roles))
    if self.frozen:
        #if policy is frozen, cache the result before returning it
        #(cache only exists once frozen — see _freeze_links)
        key = (roles, inherited)
        cache = self._roleset_cache
        #XXX: make cache-disabling flag for testing?
        if key in cache:
            log.debug("iter_perm_objs: cache hit for %r", key)
            out = cache[key]
        else:
            log.debug("iter_perm_objs: cache miss for %r", key)
            out = cache[key] = self._get_role_permissions(roles, inherited)
    else:
        #if policy isn't frozen, perform the somewhat more expensive query each time.
        out = self._get_role_permissions(roles, inherited)
    assert isinstance(out,tuple)
    if rtype is tuple:
        return out
    else:
        return rtype(out)
+
def _get_role_permissions(self, roles, inherited):
    "return tuple of perms for a role set; backend for get_role_permissions()"
    if inherited:
        roles = self.expand_roles(roles)
    #build a membership test; set.isdisjoint is the fast path (py >= 2.6)
    if hasattr(roles, "isdisjoint"): #py >= 26
        def test(link_roles):
            return not roles.isdisjoint(link_roles)
    else:
        def test(link_roles):
            return bool(roles.intersection(link_roles))
    out = [] #master list, in order; "perm not in out" keeps entries unique
    for link in self._links:
        if test(link.base_roles):
            for perm in link.perm_objs:
                if perm not in out:
                    out.append(perm)
    if self._priority_in_use:
        #sort list based on priority,
        #but otherwise preserving original order
        posmap = dict(
            (perm,idx)
            for idx,perm in enumerate(out)
        )
        def sk(perm):
            #make higher numbers sort first,
            #and perms w/ same priority keep original order
            return -perm.priority, posmap[perm]
        out = sorted(out, key=sk)
    return tuple(out)
+
def get_linked_roles(self, perm_obj, inherited=True, limit_roles=None, rtype=set):
    """return set of all roles which are linked to a given perm_obj.

    ``inherited`` also includes roles that inherit a linked role;
    ``limit_roles`` intersects the result with the given role set.
    """
    # brute-force scan of every link; only used during introspection,
    # so it doesn't need to be particularly time-efficient.
    found = set()
    for link in self._links:
        if perm_obj in link.perm_objs:
            found.update(link.base_roles)
            if inherited:
                found.update(self.ascend_roles(link.base_roles))
    if limit_roles is not None:
        found.intersection_update(limit_roles)
    return found if rtype is set else rtype(found)
+
+ #--------------------------------------------------------
+ #finalize links
+ #--------------------------------------------------------
+ def _freeze_links(self):
+ """finalize links policy."""
+ #cache so we don't have to re-scan for the common pairs
+ #this is main point of finalize(), since we'd have to purge
+ #cache whenever something changed otherwise
+ self._roleset_cache = {}
+
+ #TODO: we could fill in link.expanded_roles now that role tree is frozen,
+ #so that get_linked_roles() could run faster.
+
+ #=========================================================
+ #permission checking
+ #=========================================================
def user_has_permission(self, user, action,
                        klass=None, item=None, **kwds):
    """check if user has permission for a specific action.

    Walks every :class:`Permission` instance linked to any role the
    user holds; the first one to make a definitive statement about the
    :ref:`permission question <permission-question>` decides the result
    (``True`` or ``False``). If none match, a falsey value is returned.
    """
    #XXX: rename/provide alias named "check()" to match Permission?
    perm_objs = self.get_user_permissions(user, rtype=iter)
    return self._check_permissions(perm_objs, user, action, klass, item, **kwds)
+
def _check_permissions(self, perm_objs, user, action,
                       klass=None, item=None, **kwds):
    """check query against a list of permission objects.

    :param perm_objs:
        sequence or iterator of permission objects, checked in order

    :returns:
        ``True`` if permitted, ``False`` if denied, ``None`` if neither
        permitted nor denied (should usually be treated like Denied).
    """
    for perm_obj in perm_objs:
        verdict = perm_obj.check(user, action, klass, item, **kwds)
        if verdict == PERM.ALLOW:
            return True
        if verdict == PERM.DENY:
            return False
        assert verdict == PERM.PENDING
    return None
+
def could_allow(self, action, klass=None, item=None, **kwds):
    """check if policy could even *potentially* allow a permission to pass for some role.

    Ignores all guards and every role restriction: returns ``True``
    if any permission attached to any link could grant the query.
    """
    # NOTE: bypasses the internal roleset cache, since a straight scan
    # of everything is faster here.
    # XXX: we could cache the results of *this* func
    checked = set()
    for link in self._links:
        for perm in link.perm_objs:
            if perm in checked:
                continue
            if perm.could_allow(action, klass, item, **kwds):
                return True
            checked.add(perm)
    return False
+
+ #=========================================================
+ #eoc
+ #=========================================================
+
+#=========================================================
+#helpers
+#=========================================================
class UserPermSummary(BaseClass):
    """This is a helper which tries to summarize the permissions a user has,
    ordered in a manner suitable for handing to a pretty-printer.

    .. warning::

        This is an experimental class, not listed in the main documentation,
        which may be removed / altered without warning.

    It's implemented as a class which can be iterated over to
    yield ``(perm_obj,roles)`` pairs, where
    each ``perm_obj`` is one which the user has been granted,
    and ``roles`` are all the roles which the user has which link to that permission
    """
    #=========================================================
    #attrs
    #=========================================================

    #filled in by constructor
    policy = None   #the Policy instance being summarized
    user = None     #the user whose permissions are summarized

    #optionally overridden by constructor
    ordered_actions = ()    #actions listed first, in this order, when sorting
    ordered_roles = ()      #roles listed first, in this order, when sorting
    sort_perms = True       #whether prepare() sorts user_perms via sk_perm_obj

    #filled in by prepare()
    all_roles = None            #every role in the policy, sorted
    user_roles = None           #granted + inherited roles, sorted
    granted_user_roles = None   #explicitly granted roles only, sorted
    inherited_user_roles = None #user_roles minus granted_user_roles, sorted
    user_perms = None           #perm objects attached to the user's roles

    #=========================================================
    #init
    #=========================================================
    def __init__(self, policy, user,
                 ordered_actions=None, ordered_roles=None,
                 sort_perms=None,
                 ):
        self.policy = policy
        self.user = user
        if ordered_actions is None:
            ordered_actions = self.ordered_actions
        self.ordered_actions = list(ordered_actions)
        if ordered_roles is None:
            ordered_roles = self.ordered_roles
        self.ordered_roles = list(ordered_roles)
        if sort_perms is not None:
            self.sort_perms = sort_perms
        self.prepare()

    def prepare(self):
        "prepare all lists for the iterator and program to access"
        policy, user = self.policy, self.user
        self.all_roles = sorted(policy.get_roles(rtype=iter), key=self.sk_role)
        self.user_roles = sorted(policy.get_user_roles(user, rtype=iter), key=self.sk_role)
        self.granted_user_roles = sorted(policy.get_user_roles(user, rtype=iter, inherited=False), key=self.sk_role)
        self.inherited_user_roles = sorted(set(self.user_roles).difference(self.granted_user_roles), key=self.sk_role)
        if self.sort_perms:
            self.user_perms = sorted(policy.get_user_permissions(user, rtype=iter), key=self.sk_perm_obj)
        else:
            self.user_perms = policy.get_user_permissions(user, rtype=list)

    def get_linked_roles(self, perm_obj):
        "returns all user's granted roles which gave them this permission, returned pre-sorted"
        itr = self.policy.get_linked_roles(perm_obj, inherited=False, rtype=iter, limit_roles=self.user_roles)
        return sorted(itr, key=self.sk_role)

    #=========================================================
    #sort key functions
    #=========================================================
    def sk_string(self, value):
        "func to generate sort key for ordering strings (eg action and klass)"
        if isstr(value):
            #normalized (condensed, lowercased) form first, raw value as tiebreaker
            return condense(value, " -+.").lower(), value
        else:
            return value

    def sk_role(self, role):
        "func to generate sort key for ordering roles"
        #TODO: could probably factor out index code into sk_from_index(source,value)
        #TODO: default sort after ordered roles should be via ordering of reverse hierarchy, tallest first
        #roles in ordered_roles sort first (by position); the rest sort after, by name
        try:
            return self.ordered_roles.index(role), self.sk_string(role)
        except ValueError:
            return len(self.ordered_roles), self.sk_string(role)

    def sk_granted_role(self, role):
        "func to generate sort key for ordering roles, with addition that granted roles are listed first"
        try:
            return self.granted_user_roles.index(role), self.sk_role(role)
        except ValueError:
            return len(self.granted_user_roles), self.sk_role(role)

    def sk_perm_obj(self, perm_obj):
        "func to generate sort key for ordering permissions"
        #sort by klass, then by position in ordered_actions, then action, then linked roles
        try:
            ci = self.ordered_actions.index(perm_obj.action)
        except ValueError:
            ci = len(self.ordered_actions)
        return self.sk_string(perm_obj.klass), ci, self.sk_string(perm_obj.action), self.get_linked_roles(perm_obj)

    #=========================================================
    #eoc
    #=========================================================
+
+#=========================================================
+#eoc
+#=========================================================
diff --git a/bps/security/pwgen.py b/bps/security/pwgen.py
new file mode 100644
index 0000000..ec050b1
--- /dev/null
+++ b/bps/security/pwgen.py
@@ -0,0 +1,453 @@
+"""bps.security.pwhash - password hashing tools"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+import inspect
+import re
+import hashlib
+import time
+import os
+#site
+#libs
+from bps import *
+#pkg
+from bps.rng import srandom
+from bps.security._gpw_data import get_gpw_data as _get_gpw_data
+#local
+__all__ = [
+ #frontend
+ 'generate_secret',
+
+ #base classes
+ 'PasswordGenerator',
+ 'PhoneticGenerator',
+
+ #algorithms
+ 'RandomGenerator',
+ 'CvcGenerator',
+ 'GpwGenerator',
+
+]
+
+#=========================================================
+#base generation classes
+#=========================================================
class PasswordGenerator(BaseClass):
    """abstract base class for the password generators in this module.

    Instances are callable (returning one password, or ``count`` of them)
    and iterable; subclasses implement :meth:`next`.
    """
    size = 16 #default password size
    padding = 0 #padding being added (used by phonetic algs)

    def __init__(self, size=None, **kwds):
        self.__super.__init__(**kwds)
        if size is not None:
            self.size = size

    def get_size(self):
        "pick the size of the next secret, minus any digit padding"
        size = self.size
        if isinstance(size, tuple):
            #size may be a (start, end) range; pick a length at random
            start, end = size
            return srandom.randrange(start, end)-self.padding
        else:
            return size-self.padding

    def get_size_range(self):
        "return (min, max) secret size, excluding digit padding"
        if isinstance(self.size, tuple):
            min_size, max_size = self.size
        else:
            min_size = max_size = self.size
        return min_size-self.padding, max_size-self.padding

    def __call__(self, count=None):
        srandom.reseed() #shake the prng before generating passwords
        if count == "iter": #mild hack to return an iterator
            return self
        elif count is None:
            return self.next()
        else:
            next = self.next
            return [ next() for i in xrange(count) ]

    def __iter__(self):
        srandom.reseed() #shake the prng before generating passwords
        return self

    @abstractmethod
    def next(self):
        "generate and return a new password"
+
class PhoneticGenerator(PasswordGenerator):
    """base class for phonetic generators, adding optional digit padding."""
    size = (10, 13) #default password size for phonetic generators
    numeric_head = 0 #number of random digits prepended to each secret
    numeric_tail = 0 #number of random digits appended to each secret

    def __init__(self, numeric_head=None, numeric_tail=None, **kwds):
        self.__super.__init__(**kwds)
        # NOTE: explicit None checks; the original "numeric_head > 0"
        # relied on the py2-only behaviour that ``None > 0`` is False
        # (a TypeError under py3).
        if numeric_head is not None and numeric_head > 0:
            self.numeric_head = numeric_head
        if numeric_tail is not None and numeric_tail > 0:
            self.numeric_tail = numeric_tail
        # padding is subtracted from the requested size by get_size()
        self.padding = self.numeric_head + self.numeric_tail

    def pad_secret(self, secret):
        "prepend/append the configured number of random digits"
        if self.numeric_head:
            secret = self.gen_digits(self.numeric_head) + secret
        if self.numeric_tail:
            secret += self.gen_digits(self.numeric_tail)
        return secret

    def gen_digits(self, size):
        "return *size* random digits as a zero-padded string"
        return ("%0" + str(size) + "d") % srandom.randrange(0, 10**size)
+
+
+#=========================================================
+#hex & alphanumeric dialect helpers
+#=========================================================
#character pools used to build the named charsets below
_alphanum_chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
_other_chars = "!@#$%^&*()_+-=[]{}\\|/?\"';:,.<>"
class RandomGenerator(PasswordGenerator):
    "generator for random passwords drawn uniformly from an alphabet"
    #named character sets selectable via the ``charset`` keyword
    charsets = dict(
        hex="0123456789abcdef",
        alphanum=_alphanum_chars,
        ascii=_alphanum_chars + _other_chars,
    )
    def __init__(self, charset=None, alphabet=None, **kwds):
        self.__super.__init__(**kwds)
        #an explicit alphabet wins; otherwise look up the named charset
        #(NOTE: omitting both keywords raises AttributeError on charset.lower())
        if alphabet:
            self.alphabet = alphabet
        else:
            self.alphabet = self.charsets[charset.lower()]

    def next(self):
        "generate and return a new password"
        size = self.get_size()
        return ''.join(srandom.choice(self.alphabet) for x in xrange(size))
+
+#=========================================================
+#cvc dialect helpers
+#=========================================================
class CvcGenerator(PhoneticGenerator):
    """phonetic generator which builds secrets from consonant/vowel syllables."""
    #FIXME: like to support non-english groups here too
    #syllable pattern -> relative weight ("c"=consonant, "v"=vowel, "d"=doubled consonant)
    patterns = dict(cv=25, cvv=10) ##, dv=4, dvv=1)
    consonants = "bcdfghjklmnprstvwxyz"
    doubled = "bdfglmprst"
    start_vowel = .05  #probability of starting with a bare vowel
    end_cons = .4      #probability of appending a trailing consonant
    vowels = "aeiou"

    def next(self):
        "generate and return a new password"
        #NOTE: we ignore size, and work it out ourselves
        min_size, max_size = self.get_size_range()
        #pick the actual target length within the allowed range
        #(NOTE: the result is stored back into min_size)
        min_size = srandom.randrange(min_size, max_size+1)
        out = ""
        if srandom.random() < self.start_vowel:
            out += srandom.choice(self.vowels)
        while True:
            #pick next syllable pattern, weighted by self.patterns
            choice = srandom.weighted_choice(self.patterns)
            if not out and choice.startswith("d"):
                #never start the secret with a doubled consonant
                continue
            i = 0
            m = len(choice)-1
            while i < len(choice):
                s = choice[i]
                if s == "c":
                    buf = srandom.choice(self.consonants)
                    #make sure u follows q
                    if buf == "q":
                        if i < m and choice[i+1] == "v": #check for quV
                            buf += "u"
                            if i+1 < m and choice[i+2] == "v": #prevent quVV
                                i += 1
                        else: #else don't put q and end of syllable
                            continue
                elif s == "d":
                    buf = srandom.choice(self.doubled) * 2
                else:
                    buf = srandom.choice(self.vowels)
                out += buf
                i += 1
            if len(out) >= min_size:
                #target length reached: trim, maybe add a final consonant
                out = out[:max_size]
                if srandom.random() < self.end_cons:
                    c = srandom.choice(self.consonants)
                    if len(out) == max_size:
                        out = out[:-1] + c
                    else:
                        out += c
                return self.pad_secret(out)
+
+#=========================================================
+#gpw dialect helpers
+#this algorithm (and probabilty table) taken from www.multicians.org/thvv/gpw.html
+#=========================================================
+
class GpwGenerator(PhoneticGenerator):
    """phonetic generator driven by trigraph frequency tables (GPW algorithm).

    Algorithm and probability tables taken from
    www.multicians.org/thvv/gpw.html
    """
    def __init__(self, language=None, **kwds):
        self.__super.__init__(**kwds)
        data = _get_gpw_data(language)
        self.alphabet = data['alphabet']     #characters indexed by the tri tables
        self.tris = data['tris']             #tris[a][b][c] = weight of trigraph a,b,c
        self.tris_total = data['tris_total'] #sum of all weights, used by pick_start()

    def next(self):
        "generate and return a new password"
        size = self.get_size()
        while True:
            secret = self.next_word(size)
            #next_word() returns None when the random walk dead-ends;
            #just retry until it succeeds
            if secret is not None:
                break
        return self.pad_secret(secret)

    def pick_start(self):
        "pick a random starting trigraph, weighted by frequency"
        pik = 1+srandom.randrange(0, self.tris_total)
        cur = 0
        for c1, r1 in enumerate(self.tris):
            for c2, r2 in enumerate(r1):
                for c3, w in enumerate(r2):
                    cur += w
                    if cur >= pik:
                        return c1, c2, c3
        # py2-only "raise X, msg" syntax replaced with the call form
        raise RuntimeError("sigma < sum of weights!")

    def next_word(self, size):
        """random-walk the trigraph tables to build a word of ``size`` chars.

        :returns:
            the generated word, or ``None`` if the walk reached a
            two-char prefix with no recorded successors.
        """
        alphabet = self.alphabet
        tris = self.tris

        #pick random starting point, weighted by char occurrence
        c1, c2, c3 = self.pick_start()
        out = alphabet[c1]+alphabet[c2]+alphabet[c3]
        num = 3

        #do random walk
        while num < size:
            #find current weight row
            row = tris[c1][c2]

            #pick random point in weight range
            total = sum(row)
            if total == 0:
                #no chars follow this sequence, give up and try again
                return None
            pik = 1+srandom.randrange(0, total)

            #use first char > that weighted choice
            cur = 0
            for c3, weight in enumerate(row):
                cur += weight
                if cur >= pik:
                    break
            else:
                raise RuntimeError("pick out of bounds")

            #add c3 and advance
            out += alphabet[c3]
            num += 1
            c1 = c2
            c2 = c3
        return out
+
+#=========================================================
+#frontend
+#=========================================================
+
#dictionary which maps algorithm names to driver class,
#used by generate_secret
_gs_algs = dict(
    random=RandomGenerator,
    cvc=CvcGenerator,
    gpw=GpwGenerator,
    )

#set name of generate secret's default algorithm
#(NOTE: this names a preset in _gs_presets, not an entry in _gs_algs)
_gs_default = "alphanum"

#dict containing all presets for generate_secret;
#each value holds the kwds passed to the selected algorithm class
_gs_presets = dict(
    #global presets
    human=dict(alg="cvc", size=(10, 13)),
    strong=dict(alg="random", charset="ascii", size=16),

    #hex presets
    hex=dict(alg="random", charset="hex"),
    alphanum=dict(alg="random", charset="alphanum"),
    ascii=dict(alg="random", charset="ascii"),
)
+
+def generate_secret(alg=None, count=None, **kwds):
+ """Generate a random password.
+
+ *count* lets you generate multiple passwords at once.
+ If count is not specified, a single string is returned.
+ Otherwise, a list of *count* passwords will be returned.
+
+ *alg* lets you select the algorithm used when generating the password.
+ This value may be either the name of a preset, or the name of an actual algorithm
+ (see the list of presets and algorithms below).
+
+ Any additional keywords will be passed to the algorithm implementation.
+
+ Examples
+ ========
+ The follow are some usages examples (your results may vary,
+ depending on the random number state)::
+
+ >>> from bps.security.pwgen import generate_secret
+
+ >>> # generate a bunch of passwords using the gpw algorithm
+ >>> generate_secret(count=10, alg="gpw")
+ ['preatioc',
+ 'mirenencet',
+ 'blackessse',
+ 'shantesita',
+ 'sonsimena',
+ 'mestongesho',
+ 'amilitterl',
+ 'lonisantr',
+ 'onsesenone',
+ 'astensult']
+
+ >>> # generate a single alphanumeric password
+ >>> generate_secret(alg="alphanum")
+ 'l9u09f3N8Squ23q2'
+
+ >>> # generate a single password using default algorithm
+ >>> generate_secret()
+ 'bablistre'
+
+ >>> #generate a strong password
+ >>> generate_secret("strong")
+ "g_)'sP?Z'Zhi]6hL"
+
+ Presets
+ =======
+ This function defines a number of presets, which can be passed
+ in as the *alg* string, and will load the most appropriate underlying algorithm,
+ as well as various presets:
+
+ ``alphanum`` (the default)
+ Generates a random sequence of mixed-case letters, and numbers.
+ This was chosen as the default because it's reasonably strong
+ for average purposes, to lessen the security risk if users
+ just call ``generate_secret()`` without options.
+ If you want a more memorable (and therefore weaker) password,
+ you have to explicitly chose another preset. As a matter
+ of balance, this is not the *strongest* algorithm available,
+ just one that's reasonably strong for most uses.
+
+ ``hex``
+ Generates a random sequence of 16 hexidecimal digits.
+
+ ``ascii``
+ This algorithm uses a wide range of 92 ascii characters (letters, numbers, punctuation).
+
+ ``human``
+ This generates a medium-strength phonetic password,
+ which should be (relatively) easy to remember,
+ yet reasonably unlikely to be guessed.
+
+ Currently this uses the ``cvc`` algorithm with a size of 9-12,
+ but if stronger-yet-memorable algorithm is found,
+ this preset will be changed to refer to that algorithm instead.
+
+ ``strong``
+ This generates a password that's as strong enough to be unguessable.
+
+ This is currently an alias for the ``ascii`` preset, which creates a
+ 16 character password made up of all possible ascii characters.
+ The odds of this password being guessed / generated again are 1 in 2e31,
+ making it reasonably strong for most purposes.
+ As computers get more powerful, this preset's nature may be upped
+ as needed to keep it strong.
+
+ Algorithms
+ ==========
+ The following algorithms are available:
+
+ ``random``
+ This generates a random password from a specified alphabet.
+ You can specify a *charset* of ``hex``, ``alphanum``, or ``ascii``,
+ or specifiy an *alphabet* string directly.
+
+ ``cvc``
+ This implements a simple phonetic algorithm which generates
+ sequences of letters using some basic english syllable patterns.
+ While not as frequently pronouncable as the ``gpw`` algorithm's results,
+ this algorithm has a much larger effective key space,
+ and so it a much better choice for phonetic password generation.
+
+ ``gpw``
+ This is a complex phonetic algorithm, which attempts to generate
+ pronouncable words via a markov walk using a compiled dictionary
+ of 3-order letter frequencies. This is a python implementation of
+ Tom Van Vleck's phonetic password algorithm, found at http://www.multicians.org/thvv/gpw.html.
+
+ .. warning::
+ While more memorable, it's probablistic nature severely constrains
+ the effective keyspace, so this should not be used where
+ strong passwords are needed, especially if the attacker
+ knows you have used this algorithm and can brute force their attack.
+
+ For size=10, this will generate a duplicate 1 out of 20,000 times.
+
+ This algorithm accepts a *language* keyword, letting you specify
+ the dictionary to load from. The following languages are implemented:
+
+ en_US (the default)
+ uses a dictionary built from Ubuntu's american-english word list.
+ en_UK
+ uses a dictionary built from Ubuntu's british-english word list.
+ gpw
+ uses the original dictionary from Tom Van Vleck's implementation.
+
+ .. note::
+
+ Due to keyspace issues, the en_US and en_UK tables have been removed for now.
+
+ Additional Keywords
+ ===================
+ The following keywords are recognized by all algorithms:
+
+ size
+ This lets you explicitly set the size of the generated password.
+ If the size is not specified, a algorithm-dependant default is used.
+ If size is a tuple of two integers, a random size is used with the
+ specified range.
+
+ reshake
+ If true (the default), the random number generator will be
+
+ The following keywords are recognized by the phonetic algorithms (cvc,gpw):
+
+ numeric_head
+ If set to a positive integer, that many digits will be added to the start
+ of the generated password.
+
+ numeric_tail
+ If set to a positive integer, that many digits will be added to the end
+ of the generated password.
+ """
+
+ if 'dialect' in kwds:
+ alg = kwds.pop("dialect")
+ warnings.warn("generate_password(): 'dialect' kwd is deprecated, use 'alg' instead")
+
+ #load preset
+ if not alg or alg == "default":
+ alg = _gs_default
+ if alg in _gs_presets:
+ preset = _gs_presets[alg]
+ alg = preset['alg']
+ for k in preset:
+ if k not in kwds:
+ kwds[k] = preset[k]
+
+ #create generator
+ if alg in _gs_algs:
+ cls = _gs_algs[alg]
+ else:
+ raise ValueError, "unknown algorithm or preset: %r" % (alg,)
+ gen = cls(**kwds)
+ return gen(count)
+
+#=========================================================
+# eof
+#=========================================================
diff --git a/bps/security/pwhash.py b/bps/security/pwhash.py
new file mode 100644
index 0000000..877fd99
--- /dev/null
+++ b/bps/security/pwhash.py
@@ -0,0 +1,1693 @@
+"""bps.security.pwhash - implementation of various password hashing functions"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+import inspect
+import re
+import hashlib
+import time
+import os
+#site
+#libs
+from bps import *
+from bps.basic import enum_slice
+from bps.rng import srandom
+from bps.meta import abstractmethod, isseq
+from bps.types import stub
+from bps.misc import class_property
+
+try:
+ #try stdlib module, which is only present under posix
+ from crypt import crypt as unix_crypt
+ #XXX: might want to reconcile our implementation's behavior
+ # with the posix behavior so error types/messages and limitations
+ # are reliable
+except ImportError:
+ from bps.security._unix_crypt import crypt as unix_crypt
+
+try:
+ #try importing py-bcrypt, it's much faster
+ import bcrypt
+except ImportError:
+ #fall back to our slow pure-python implementation
+ import bps.security._bcrypt as bcrypt
+
+#pkg
+#local
+__all__ = [
+ #crypt algorithms
+ 'CryptAlgorithm',
+ 'UnixCrypt',
+ 'Md5Crypt',
+ 'Sha256Crypt',
+ 'Sha512Crypt',
+ 'BCrypt',
+
+ #crypt context
+ 'CryptContext',
+ 'default_context',
+ 'linux_context',
+ 'bsd_context',
+
+ #quick helpers
+ 'identify_secret',
+ 'encrypt_secret',
+ 'verify_secret',
+
+]
+
+#=========================================================
+#common helper funcs for passwords
+#=========================================================
+#charmap for "hash64" encoding
+#most unix hash algorithms use this mapping (though bcrypt puts its numerals at the end)
+CHARS = "./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
+#reverse lookup: hash64 char -> 6-bit value, used when decoding
+CHARIDX = dict( (c,i) for i,c in enumerate(CHARS))
+
+def _enc64(value, offset=0, num=False):
+    """encode 3 bytes of *value* (starting at *offset*) as 4 hash64 chars.
+
+    :param value: byte string, or sequence of ints when *num* is True
+    :param num: if True, elements are already ints and ord() is skipped
+    """
+    if num:
+        x, y, z = value[offset], value[offset+1], value[offset+2]
+    else:
+        x, y, z = ord(value[offset]), ord(value[offset+1]), ord(value[offset+2])
+    #xxxxxx xxyyyy yyyyzz zzzzzz
+    #aaaaaa bbbbbb cccccc dddddd
+    a = (x >> 2) # x [8..3]
+    b = ((x & 0x3) << 4) + (y>>4) # x[2..1] + y [8..5]
+    c = ((y & 0xf) << 2) + (z>>6) #y[4..1] + z[8..7]
+    d = z & 0x3f
+    return CHARS[a] + CHARS[b] + CHARS[c] + CHARS[d]
+
+def _dec64(value, offset=0, num=False):
+    """decode 4 hash64 chars of *value* (starting at *offset*) into 3 bytes.
+
+    inverse of :func:`_enc64`; returns a 3-char string, or a tuple of
+    3 ints when *num* is True.
+    """
+    a, b, c, d = CHARIDX[value[offset]], CHARIDX[value[offset+1]], \
+        CHARIDX[value[offset+2]], CHARIDX[value[offset+3]]
+    #aaaaaabb bbbbcccc ccdddddd
+    #xxxxxxxx yyyyyyyy zzzzzzzz
+    x = (a<<2) + (b >> 4) #a[6..1] + b[6..5]
+    y = ((b & 0xf) << 4) + (c >> 2) #b[4..1] + c[6..3]
+    z = ((c & 0x3) << 6) + d #c[2..1] + d[6..1]
+    if num:
+        return x, y, z
+    return chr(x) + chr(y) + chr(z)
+
+def h64_encode(value, pad=False, num=False):
+    """encode string of bytes into hash64 format
+
+    :param value: byte string, or sequence of ints when *num* is True
+    :param pad:
+        if True, output is padded with '=' chars to a multiple of 4,
+        base64-style; otherwise the unused trailing chars are dropped.
+    :param num: treat *value* as a sequence of ints instead of chars
+    """
+    if num:
+        value = list(value)
+    #pad value to align w/ 3 byte chunks
+    x = len(value) % 3
+    if x == 2:
+        if num:
+            value += [0]
+        else:
+            value += "\x00"
+        p = 1
+    elif x == 1:
+        if num:
+            value += [0, 0]
+        else:
+            value += "\x00\x00"
+        p = 2
+    else:
+        p = 0
+    assert len(value) % 3 == 0
+    out = "".join( _enc64(value, offset, num=num) for offset in xrange(0, len(value), 3))
+    assert len(out) % 4 == 0
+    if p:
+        #p chars of output only encode the zero padding -- pad or drop them
+        if pad:
+            out = out[:-p] + "=" * p
+        else:
+            out = out[:-p]
+    return out
+
+def h64_decode(value, pad=False, num=False):
+    """decode string of bytes from hash64 format
+
+    :param value: hash64-encoded string, optionally '='-padded
+    :param pad:
+        if True, input length must already be a multiple of 4
+        (ie encoded with ``pad=True``); otherwise a ValueError is raised.
+    :param num: if True, return a list of ints instead of a byte string
+    """
+    if value.endswith("="):
+        #'=' padding present: replace it with '.' (decodes to 0 bits)
+        #and remember how many garbage bytes to strip afterward
+        assert len(value) % 4 == 0, value
+        if value.endswith('=='):
+            p = 2
+            value = value[:-2] + '..'
+        else:
+            p = 1
+            value = value[:-1] + '.'
+    else:
+        #else add padding if needed
+        x = len(value) % 4
+        if x == 0:
+            p = 0
+        elif pad:
+            raise ValueError, "size must be multiple of 4"
+        elif x == 3:
+            p = 1
+            value += "."
+        elif x == 2:
+            p = 2
+            value += ".."
+        elif x == 1:
+            p = 3
+            value += "..."
+    assert len(value) % 4 == 0, value
+    if num:
+        out = []
+        for offset in xrange(0, len(value), 4):
+            out.extend(_dec64(value, offset, num=True))
+    else:
+        out = "".join( _dec64(value, offset) for offset in xrange(0, len(value), 4))
+    assert len(out) % 3 == 0
+    if p: #strip out garbage chars
+        out = out[:-p]
+    return out
+
+def _enc64b(a, b, c, n=4):
+    """std hash64 bit encoding: pack chars a,b,c into a 24-bit value and
+    emit the *n* least-significant 6-bit groups, lowest group first."""
+    v = (ord(a) << 16) + (ord(b) << 8) + ord(c)
+    return "".join(
+        CHARS[(v >> (i*6)) & 0x3F]
+        for i in range(n)
+    )
+
+def _enc64b1(buffer, a):
+    "do 64bit encode of single element of a buffer (8 bits -> 2 chars)"
+    return _enc64b('\x00', '\x00', buffer[a], 2)
+
+def _enc64b2(buffer, a, b):
+    "do 64bit encode of 2 elements of a buffer (16 bits -> 3 chars)"
+    return _enc64b('\x00', buffer[a], buffer[b], 3)
+
+def _enc64b3(buffer, a, b, c):
+    "do 64bit encode of 3 elements of a buffer (24 bits -> 4 chars)"
+    return _enc64b(buffer[a], buffer[b], buffer[c], 4)
+
+def h64_gen_salt(size, pad=False):
+    """generate hash64 salt of arbitrary length
+
+    :param size: number of salt chars to generate
+    :param pad: if True, append '=' chars so length is a multiple of 4
+    """
+    #reseed so each salt draws on fresh entropy
+    srandom.reseed()
+    out = ''.join(
+        srandom.choice(CHARS)
+        for idx in xrange(size)
+    )
+    if pad and size % 4:
+        out += "=" * (-size % 4)
+    return out
+
+class UnixHash(stub):
+    "helper used by various hash algorithm utilities"
+    #record fields (plain attributes; see __init__)
+    alg = None      #algorithm identifier (eg '1' for md5-crypt)
+    salt = None     #salt portion of the hash
+    chk = None      #checksum portion of the hash, or None
+    rounds = None   #rounds value, for algorithms which support it
+    source = None   #original hash string this record was parsed from, if any
+
+    def __init__(self, alg, salt, chk=None, **kwds):
+        self.alg = alg
+        self.salt = salt
+        self.chk = chk
+        self.__super.__init__(**kwds)
+
+#==========================================================
+#base interface for all the crypt algorithm implementations
+#==========================================================
+class CryptAlgorithm(BaseClass):
+    """base class for holding information about password algorithm.
+
+    The following should be filled out for all crypt algorithm subclasses.
+    Additional methods, attributes, and features may vary.
+
+    Informational Attributes
+    ========================
+    .. attribute:: name
+
+        This should be a globally unique name to identify
+        the hash algorithm with.
+
+    .. attribute:: salt_bits
+
+        This is a purely informational attribute
+        listing how many bits are in the salt your algorithm uses.
+        (defaults to ``None`` if information is not available).
+
+    .. attribute:: hash_bits
+
+        This is a purely informational attribute
+        listing how many bits are in the checksum part of your algorithm's hash.
+        (defaults to ``None`` if information is not available).
+
+    .. note::
+
+        Note that all the bit counts should measure
+        the number of bits of entropy, not the number of bits
+        a given encoding takes up.
+
+    .. attribute:: has_salt
+
+        This is a virtual attribute,
+        calculated based on the value of the salt_bits attribute.
+        It returns ``True`` if the algorithm contains any salt bits,
+        else ``False``.
+
+    .. attribute:: secret_chars
+
+        Number of characters in secret which are used.
+        If ``None`` (the default), all chars are used.
+        BCrypt, for example, only uses the first 55 chars.
+
+    .. attribute:: has_rounds
+
+        This is a purely informational attribute
+        listing whether the algorithm can be scaled
+        by increasing the number of rounds it contains.
+        It is not required (defaults to False).
+
+    .. attribute:: has_named_rounds
+
+        If this flag is true, then the algorithm's
+        encrypt method supports a ``rounds`` keyword
+        which (at the very least) accepts the following
+        strings as possible values:
+
+            * ``fast`` -- number of rounds will be selected
+                to provide adequate security for most user accounts.
+                This is retuned periodically to take around .25 seconds.
+
+            * ``medium`` -- number of rounds will be selected
+                to provide adequate security for most root/administrative accounts
+                This is retuned periodically to take around .75 seconds.
+
+            * ``slow`` -- number of rounds will be selected
+                to require a large amount of calculation time.
+                This is retuned periodically to take around 1.5 seconds.
+
+        .. note::
+            Last retuning of the default round sizes was done
+            on 2009-07-06 using a 2ghz system.
+
+    Common Methods
+    ==============
+    .. automethod:: identify
+
+    .. automethod:: encrypt
+
+    .. automethod:: verify
+
+    Implementing a new crypt algorithm
+    ==================================
+    Subclass this class, and implement :meth:`identify`
+    and :meth:`encrypt` so that they implement your
+    algorithm according to its documentation
+    and the specifications of the methods themselves.
+    You must also specify :attr:`name`.
+    Optionally, you may override :meth:`verify`
+    and set various informational attributes.
+
+    .. note::
+        It is recommended to use ``from bps.rng import srandom``
+        as your random number generator, since it should (hopefully)
+        be the strongest rng BPS can find on your system.
+        To help this, you should call ``srandom.reseed()``
+        before generating your salt.
+
+    """
+
+    #=========================================================
+    #informational attrs
+    #=========================================================
+    name = None #globally unique name to identify algorithm
+    salt_bits = None #number of bits in salt
+    hash_bits = None #number of bits in hash
+    secret_chars = None #number of chars in secret that are used. None if all chars used.
+    has_rounds = False #whether alg can be scaled (via rounds, etc) as computers get more powerful
+    has_named_rounds = False #supports round aliases ("fast"/"medium"/"slow")
+
+    @class_property
+    def has_salt(self):
+        #virtual attr: True/False if salt_bits known, else None
+        if self.salt_bits is None:
+            return None
+        return self.salt_bits > 0
+
+    #=========================================================
+    #class config
+    #=========================================================
+    #keywords which will be set by constructor
+    init_attrs = ("name", "salt_bits", "hash_bits", "has_rounds",
+        "identify", "encrypt", "verify",
+        )
+
+    #=========================================================
+    #init & internal methods
+    #=========================================================
+    def __init__(self, **kwds):
+        #load in kwds, letting options be overridden on a per-instance basis
+        for key in self.init_attrs:
+            if key in kwds:
+                setattr(self, key, kwds.pop(key))
+        self.__super.__init__(**kwds)
+        self._validate()
+
+    def _validate(self):
+        #make sure instance has everything defined
+        if not self.name:
+            raise ValueError, "no name specified"
+
+    def __repr__(self):
+        c = self.__class__
+        return '<%s.%s object, name=%r>' % (c.__module__, c.__name__, self.name)
+
+## def __repr__(self):
+##     c = self.__class__
+##     tail = ''
+##     for key in ("name",):
+##         if key in self.__dict__:
+##             tail += "%s=%r, " % (key, getattr(self, key))
+##     if tail:
+##         tail = tail[:-2]
+##     return "%s.%s(%s)" % (c.__module__,c.__name__, tail)
+
+    #=========================================================
+    #subclass-provided methods
+    #=========================================================
+
+    @abstractmethod
+    def identify(self, hash):
+        """identify if a hash string belongs to this algorithm.
+
+        :arg hash:
+            the hash string to check
+        :returns:
+            ``True`` if provided hash string is handled by
+            this class, otherwise ``False``.
+
+        .. note::
+            For some of the simplest algorithms (eg plaintext),
+            there is no globally unambiguous way to identify
+            a given hash. In this case, identify() should
+            at the very least be able to distinguish
+            its hashes from the other algorithms
+            in use within a given context.
+        """
+
+    @abstractmethod
+    def encrypt(self, secret, hash=None, keep_salt=False):
+        """encrypt secret, returning resulting hash string.
+
+        :arg secret:
+            A string containing the secret to encode.
+            Unicode behavior is specified on a per-hash basis,
+            but the common case is to encode into utf-8
+            before processing.
+
+        :arg hash:
+            Optional hash string, containing a salt and other
+            configuration parameters (rounds, etc). If a salt is not specified,
+            a new salt should be generated with default configuration
+            parameters set.
+
+        :type keep_salt: bool
+        :param keep_salt:
+            *This option is rarely needed by end users,
+            you can safely ignore it if you are not writing a hash algorithm.*
+
+            By default (``keep_salt=False``), a new salt will
+            be generated for each call to encrypt, for added security.
+            If a salt string is provided, only the configuration
+            parameters (number of rounds, etc) should be preserved.
+
+            However, it is sometimes useful to preserve the original salt
+            bytes, instead of generating new ones (such as when verifying
+            the hash of an existing password). In that case,
+            set ``keep_salt=True``. Note that most end-users will want
+            to call ``self.verify(secret,hash)`` instead of using this flag.
+
+        .. note::
+            Various password algorithms may accept additional keyword
+            arguments, usually to override default configuration parameters.
+            For example, most variable-round algorithms will have a *rounds* keyword.
+            Such details vary on a per-algorithm basis, consult their encrypt method
+            for details.
+
+        :returns:
+            The encoded hash string, with any chrome and identifiers.
+            All values returned by this function should
+            pass ``identify(hash) -> True``
+            and ``verify(secret,hash) -> True``.
+
+        Usage Example::
+
+            >>> from bps.security.pwhash import Md5Crypt
+            >>> crypt = Md5Crypt()
+            >>> #encrypt a secret, creating a new hash
+            >>> hash = crypt.encrypt("it's a secret")
+            >>> hash
+            '$1$2xYRz6ta$IWpg/auAdyc8.CyZ0K6QK/'
+            >>> #verify our secret
+            >>> crypt.verify("fluffy bunnies", hash)
+            False
+            >>> crypt.verify("it's a secret", hash)
+            True
+            >>> #encrypting again should generate a new salt,
+            >>> #even if we pass in the old one
+            >>> crypt.encrypt("it's a secret", hash)
+            '$1$ZS9HCWrt$dRT5Q5R9YRoc5/SLA.WkD/'
+            >>> _ == hash
+            False
+        """
+
+    def verify(self, secret, hash):
+        """verify a secret against an existing hash.
+
+        This checks if a secret matches against the one stored
+        inside the specified hash. By default this uses :meth:`encrypt`
+        to re-crypt the secret, and compares it to the provided hash;
+        though some algorithms may implement this in a more efficient manner.
+
+        :param secret:
+            A string containing the secret to check.
+        :param hash:
+            A string containing the hash to check against.
+
+        :returns:
+            ``True`` if the secret matches, otherwise ``False``.
+
+        See :meth:`encrypt` for a usage example.
+        """
+        #NOTE: this implementation works most of the time,
+        # but if hash algorithm is funky, or input hash
+        # is not in the proper normalized form that encrypt returns,
+        # there will be false negatives.
+        if hash is None:
+            return False
+        return hash == self.encrypt(secret, hash, keep_salt=True)
+
+## def decrypt(self, hash):
+##     """decrypt hash, recovering original password.
+##
+##     Most (good) password algorithms will not be recoverable.
+##     For those, this will raise a NotImplementedError.
+##     For the few which are weak enough, or can be recovered
+##     with the aid of external information such as a private key,
+##     this method should be overridden to provide an implementation.
+##
+##     Subclasses may add arbitrary options (external keys, etc)
+##     to aid with decryption.
+##
+##     If decrypt is implemented, but does not succeed in the end,
+##     it should raise a ValueError.
+##     """
+##     raise NotImplementedError, "this algorithm does not support decryption"
+
+    #=========================================================
+    #eoc
+    #=========================================================
+
+def is_crypt_alg(obj):
+    """check if obj follows the CryptAlgorithm protocol (duck-typed)"""
+    #NOTE: this isn't an exhaustive check of all required attrs,
+    #just a quick check of the most uniquely identifying ones
+    return all(hasattr(obj, name) for name in (
+        "name", "verify", "encrypt", "identify",
+        ))
+
+#=========================================================
+#sql database hashes
+#=========================================================
+class Mysql10Crypt(CryptAlgorithm):
+    """This implements Mysql's OLD_PASSWORD algorithm, used prior to version 4.1.
+
+    See :class:`Mysql41Crypt` for the new algorithm that was put in place in version 4.1
+
+    This function is known to be very insecure,
+    and should only be used to verify existing password hashes.
+
+    """
+    name = "mysql-1.0-crypt"
+    salt_bits = 0
+    #NOTE(review): output is 16 hex chars = 64 encoded bits (62 bits after
+    #the 0x7fffffff masks below); 16*16 looks like it used 16 bits/char
+    #instead of 4 -- confirm intent before relying on this value.
+    hash_bits = 16*16
+
+    #hash is exactly 16 hex digits, case-insensitive
+    _pat = re.compile(r"^[0-9a-f]{16}$", re.I)
+
+    def identify(self, hash):
+        if hash is None:
+            return False
+        return self._pat.match(hash) is not None
+
+    def encrypt(self, secret, hash=None, keep_salt=False):
+        #unsalted -- *hash* and *keep_salt* are accepted for interface
+        #compatibility but ignored
+        nr1 = 1345345333
+        nr2 = 0x12345671
+        add = 7
+        for c in secret:
+            #mysql skips spaces & tabs in the secret
+            if c in ' \t':
+                continue
+            tmp = ord(c)
+            nr1 ^= ((((nr1 & 63)+add)*tmp) + (nr1 << 8)) & 0xffffffff
+            nr2 = (nr2+((nr2 << 8) ^ nr1)) & 0xffffffff
+            add = (add+tmp) & 0xffffffff
+        #top bit of each word is discarded, matching mysql's output
+        return "%08x%08x" % (nr1 & 0x7fffffff, nr2 & 0x7fffffff)
+
+    def verify(self, secret, hash):
+        #case-insensitive compare, since encrypt() emits lowercase hex
+        if hash is None:
+            return False
+        return hash.lower() == self.encrypt(secret)
+
+class Mysql41Crypt(CryptAlgorithm):
+    """This implements Mysql's new PASSWORD algorithm, introduced in version 4.1.
+
+    This function is unsalted, and therefore not very secure against rainbow attacks.
+    It should only be used when dealing with mysql passwords,
+    for all other purposes, you should use a salted hash function.
+
+    Description taken from http://dev.mysql.com/doc/refman/6.0/en/password-hashing.html
+    """
+    name = "mysql-4.1-crypt"
+    salt_bits = 0
+    #NOTE(review): checksum is a SHA-1 digest = 160 bits (40 hex chars);
+    #16*40 = 640 looks inflated by 4x -- confirm intent.
+    hash_bits = 16*40
+
+    #hash is '*' followed by 40 hex digits, case-insensitive
+    _pat = re.compile(r"^\*[0-9A-F]{40}$", re.I)
+
+    def identify(self, hash):
+        if hash is None:
+            return False
+        return self._pat.match(hash) is not None
+
+    def encrypt(self, secret, hash=None, keep_salt=False):
+        #unsalted double-sha1; *hash* and *keep_salt* accepted for
+        #interface compatibility but ignored
+        return '*' + hashlib.sha1(hashlib.sha1(secret).digest()).hexdigest().upper()
+
+    def verify(self, secret, hash):
+        #case-insensitive compare, since encrypt() emits uppercase hex
+        if hash is None:
+            return False
+        return hash.upper() == self.encrypt(secret)
+
+class PostgresMd5Crypt(CryptAlgorithm):
+    """This implements the md5-based hash algorithm used by Postgres to store
+    passwords in the pg_shadow table.
+
+    This algorithm shouldn't be used for any purpose besides Postgres interaction,
+    it's a weak unsalted algorithm which could easily be attacked with a rainbow table.
+
+    .. warning::
+        This algorithm is slightly different from most of the others,
+        in that both encrypt() and verify() require you pass in
+        the name of the user account via the required 'user' keyword,
+        since postgres uses this in place of a salt :(
+
+    Usage Example::
+
+        >>> from bps.security import pwhash
+        >>> crypt = pwhash.PostgresMd5Crypt()
+        >>> crypt.encrypt("mypass", user="postgres")
+        'md55fba2ea04fd36069d2574ea71c8efe9d'
+        >>> crypt.verify("mypass", 'md55fba2ea04fd36069d2574ea71c8efe9d', user="postgres")
+        True
+    """
+    name = "postgres-md5-crypt"
+    salt_bits = 0
+    #NOTE(review): checksum is an MD5 digest = 128 bits (32 hex chars);
+    #16*32 = 512 looks inflated by 4x -- confirm intent.
+    hash_bits = 16*32
+
+    #hash is 'md5' followed by 32 lowercase hex digits
+    _pat = re.compile(r"^md5[0-9a-f]{32}$")
+
+    def identify(self, hash):
+        if hash is None:
+            return False
+        return self._pat.match(hash) is not None
+
+    def encrypt(self, secret, hash=None, keep_salt=False, user=None):
+        #*secret* may also be a (secret, user) tuple, in which case
+        #the 'user' kwd must be omitted
+        if isinstance(secret, tuple):
+            if user:
+                raise TypeError, "user specified in secret & in kwd"
+            secret, user = secret
+        if not user:
+            raise ValueError, "user keyword must be specified for this algorithm"
+        #the username acts as the "salt"
+        return "md5" + hashlib.md5(secret + user).hexdigest().lower()
+
+    def verify(self, secret, hash, user=None):
+        if hash is None:
+            return False
+        return hash == self.encrypt(secret, user=user)
+
+#=========================================================
+#old unix crypt
+#=========================================================
+class UnixCrypt(CryptAlgorithm):
+    """Old Unix-Crypt Algorithm, as originally used on unix before md5-crypt arrived.
+    This implementation uses the builtin ``crypt`` module when available,
+    but contains a pure-python fallback so that this algorithm can always be used.
+    """
+    name = "unix-crypt"
+    salt_bits = 6*2     #2 hash64 chars
+    hash_bits = 6*11    #11 hash64 chars
+    has_rounds = False
+    secret_chars = 8    #des-crypt only uses the first 8 chars of the secret
+
+    #hash is 2 salt chars + 11 checksum chars, all hash64
+    _pat = re.compile(r"""
+        ^
+        (?P<salt>[./a-z0-9]{2})
+        (?P<hash>[./a-z0-9]{11})
+        $""", re.X|re.I)
+
+    def identify(self, hash):
+        if hash is None:
+            return False
+        return self._pat.match(hash) is not None
+
+    def encrypt(self, secret, hash=None, keep_salt=False):
+        #reuse old salt only when explicitly asked to (verification);
+        #otherwise always generate a fresh 2-char salt
+        if hash and keep_salt:
+            salt = hash[:2]
+        else:
+            salt = h64_gen_salt(2)
+        #unix_crypt is either stdlib crypt.crypt or the pure-python fallback
+        return unix_crypt(secret, salt)
+
+    #default verify used
+
+#=========================================================
+#id 1 -- md5
+#=========================================================
+
+#TODO: never seen it, but read references to a Sun-specific
+# md5-crypt which supports rounds, format supposedly something like
+# "$md5,rounds=XXX$salt$chk" , could add support under SunMd5Crypt()
+
+class Md5Crypt(CryptAlgorithm):
+    """This provides the MD5-crypt algorithm, used in many 1990's era unix systems.
+    It should be byte compatible with unix shadow hashes beginning with ``$1$``.
+    """
+    name = 'md5-crypt'
+    salt_bits = 48  #8 hash64 chars
+    #NOTE(review): checksum encodes a full 128-bit md5 digest (22 hash64
+    #chars); 96 may undercount -- confirm intent.
+    hash_bits = 96
+    has_rounds = False
+
+    def _md5_crypt_raw(self, secret, salt):
+        """run the md5-crypt core, returning a UnixHash record.
+
+        *salt* should be an 8-char hash64 string, or falsy to generate one.
+        """
+        #init salt
+        if not salt:
+            salt = h64_gen_salt(8)
+        assert len(salt) == 8
+
+        h = hashlib.md5()
+        #NOTE(review): 'digestsize' (no underscore) is a py2-only alias of
+        #'digest_size'; py3 hashlib objects only expose the latter.
+        assert h.digestsize == 16
+        h.update(secret)
+        h.update(salt)
+        h.update(secret)
+        tmp_digest = h.digest()
+
+        h = hashlib.md5()
+        h.update(secret)
+        h.update("$1$")
+        h.update(salt)
+
+        #mix in tmp_digest, repeated to the length of the secret
+        idx = len(secret)
+        while idx > 0:
+            h.update(tmp_digest[0:min(16, idx)])
+            idx -= 16
+
+        #for each bit of len(secret): add NUL (bit set) or 1st secret char
+        idx = len(secret)
+        while idx > 0:
+            if idx & 1:
+                h.update('\x00')
+            else:
+                h.update(secret[0])
+            idx >>= 1
+
+        #fixed 1000-round stretching loop, per the md5-crypt spec
+        hash = h.digest()
+        for idx in xrange(1000):
+            assert len(hash) == 16
+            h = hashlib.md5()
+            if idx & 1:
+                h.update(secret)
+            else:
+                h.update(hash)
+            if idx % 3:
+                h.update(salt)
+            if idx % 7:
+                h.update(secret)
+            if idx & 1:
+                h.update(hash)
+            else:
+                h.update(secret)
+            hash = h.digest()
+
+        #encode digest bytes in md5-crypt's permuted order
+        out = ''.join(
+            _enc64b3(hash,
+                idx,
+                idx+6,
+                idx+12 if idx < 4 else 5,
+                )
+            for idx in xrange(5)
+            ) + _enc64b1(hash, 11)
+        return UnixHash('1', salt, out)
+
+    #hash is "$1$" + salt + optional "$" + checksum, all hash64
+    _pat = re.compile(r"""
+        ^
+        \$(?P<alg>1)
+        \$(?P<salt>[A-Za-z0-9./]+)
+        (\$(?P<chk>[A-Za-z0-9./]+))?
+        $
+        """, re.X)
+
+    def identify(self, hash):
+        "identify md5-crypt hash"
+        if hash is None:
+            return False
+        return self._pat.match(hash) is not None
+
+    def parse(self, hash):
+        "parse an md5-crypt hash into a UnixHash record"
+        m = self._pat.match(hash)
+        if not m:
+            raise ValueError, "invalid md5 salt"
+        return UnixHash(m.group("alg"), m.group("salt"), m.group("chk"))
+
+    def encrypt(self, secret, salt=None, keep_salt=False):
+        """encrypt an md5-crypt hash
+
+        .. note::
+            NOTE(review): the second parameter is named *salt* here, while
+            the base class documents it as *hash* -- keyword callers using
+            ``hash=`` will break; consider reconciling.
+        """
+        real_salt = None
+        if salt:
+            rec = self.parse(salt)
+            if keep_salt:
+                real_salt = rec.salt
+        rec = self._md5_crypt_raw(secret, real_salt)
+        return "$1$%s$%s" % (rec.salt, rec.chk)
+
+    def verify(self, secret, hash):
+        "verify an md5-crypt hash"
+        if hash is None:
+            return False
+        rec = self.parse(hash)
+        other = self._md5_crypt_raw(secret, rec.salt)
+        return other.chk == rec.chk
+
+#=========================================================
+#ids 5,6 -- sha
+#algorithm defined on this page:
+# http://people.redhat.com/drepper/SHA-crypt.txt
+#=========================================================
+class _ShaCrypt(CryptAlgorithm):
+    "this is the base class used by SHA-256 & SHA-512. don't use directly."
+    #=========================================================
+    #algorithm info
+    #=========================================================
+    #hash_bits, name filled in for subclass
+    salt_bits = 96  #16 hash64 chars
+    has_rounds = True
+    has_named_rounds = True
+
+    #tuning the round aliases
+    rounds_per_second = 156000 #last tuned 2009-7-6 on a 2gz system
+    fast_rounds = int(rounds_per_second * .25)
+    medium_rounds = int(rounds_per_second * .75)
+    slow_rounds = int(rounds_per_second * 1.5)
+
+    #=========================================================
+    #internals required from subclass
+    #=========================================================
+    _key = None #alg id (5, 6) of specific sha alg
+    _hash = None #callable to use for hashing
+    _chunk_size = None #bytes at a time to input secret (digest size)
+    _hash_size = None #chars in encoded hash
+    _pat = None #regexp for sha variant
+
+    @abstractmethod
+    def _encode(self, result):
+        "encode raw digest bytes into h64 style (256/512-specific permutation)"
+
+ #=========================================================
+ #core sha crypt algorithm
+ #=========================================================
+    def _sha_crypt_raw(self, rounds, salt, secret):
+        """perform sha crypt, returning just the checksum
+
+        Implements the SHA-crypt algorithm (Drepper's SHA-crypt.txt spec,
+        linked in the section header above).
+
+        :param rounds: requested rounds; -1 selects the spec default (5000)
+        :param salt: salt string, or None to generate one; truncated to 16 chars
+        :param secret: the password string
+        :returns:
+            UnixHash record; ``rounds`` field preserves the *requested*
+            value (still -1 in default mode, see _sha_crypt).
+        """
+        #setup alg-specific parameters
+        hash = self._hash
+        chunk_size = self._chunk_size
+
+        #init salt
+        if salt is None:
+            salt = h64_gen_salt(16)
+        elif len(salt) > 16:
+            salt = salt[:16] #spec says to use up to first chars 16 only
+
+        #init rounds: -1 means "default mode" (5000, not written in hash);
+        #explicit values are clamped to the spec's 1000..999999999 range
+        if rounds == -1:
+            real_rounds = 5000
+        else:
+            if rounds < 1000:
+                rounds = 1000
+            if rounds > 999999999:
+                rounds = 999999999
+            real_rounds = rounds
+
+        #repeat *source* digest to the byte-length of *size_ref*
+        def extend(source, size_ref):
+            size = len(size_ref)
+            return source * int(size/chunk_size) + source[:size % chunk_size]
+
+        #calc digest B
+        b = hash()
+        b.update(secret)
+        b.update(salt)
+        b.update(secret)
+        b_result = b.digest()
+        b_extend = extend(b_result, secret)
+
+        #begin digest A
+        a = hash()
+        a.update(secret)
+        a.update(salt)
+        a.update(b_extend)
+
+        #for each bit in slen, add B or SECRET
+        value = len(secret)
+        while value > 0:
+            if value % 2:
+                a.update(b_result)
+            else:
+                a.update(secret)
+            value >>= 1
+
+        #finish A
+        a_result = a.digest()
+
+        #calc DP -- hash of secret repeated len(secret) times, aka 'P'
+        dp = hash()
+        dp.update(secret * len(secret))
+        dp_result = extend(dp.digest(), secret)
+
+        #calc DS -- salt hashed 16+first-byte-of-A times
+        ds = hash()
+        for i in xrange(0, 16+ord(a_result[0])):
+            ds.update(salt)
+        ds_result = extend(ds.digest(), salt) #aka 'S'
+
+        #calc digest C -- the per-round stretching loop from the spec
+        last_result = a_result
+        for i in xrange(0, real_rounds):
+            c = hash()
+            if i % 2:
+                c.update(dp_result)
+            else:
+                c.update(last_result)
+            if i % 3:
+                c.update(ds_result)
+            if i % 7:
+                c.update(dp_result)
+            if i % 2:
+                c.update(last_result)
+            else:
+                c.update(dp_result)
+            last_result = c.digest()
+
+        #encode result using 256/512 specific func
+        out = self._encode(last_result)
+        assert len(out) == self._hash_size, "wrong length: %r" % (out,)
+        return UnixHash(self._key, salt, out, rounds=rounds)
+
+    def _sha_crypt(self, rounds, salt, secret):
+        """run sha-crypt and render the full "$5$"/"$6$" hash string.
+
+        The "rounds=" field is omitted when the default (-1) was requested,
+        matching the spec's canonical form.
+        """
+        rec = self._sha_crypt_raw(rounds, salt, secret)
+        if rec.rounds == -1:
+            return "$%s$%s$%s" % (rec.alg, rec.salt, rec.chk)
+        else:
+            return "$%s$rounds=%d$%s$%s" % (rec.alg, rec.rounds, rec.salt, rec.chk)
+
+ #=========================================================
+ #frontend helpers
+ #=========================================================
+ def identify(self, hash):
+ "identify bcrypt hash"
+ if hash is None:
+ return False
+ return self._pat.match(hash) is not None
+
+ def parse(self, hash):
+ "parse bcrypt hash"
+ m = self._pat.match(hash)
+ if not m:
+ raise ValueError, "invalid sha hash/salt"
+ alg, rounds, salt, chk = m.group("alg", "rounds", "salt", "chk")
+ if rounds is None:
+ rounds = -1 #indicate we're using the default mode
+ else:
+ rounds = int(rounds)
+ assert alg == self._key
+ return UnixHash(alg, salt, chk, rounds=rounds, source=hash)
+
+ def encrypt(self, secret, hash=None, rounds=None, keep_salt=False):
+ """encrypt using sha256/512-crypt.
+
+ In addition to the normal options that :meth:`CryptAlgorithm.encrypt` takes,
+ this function also accepts the following:
+
+ :param rounds:
+ Optionally specify the number of rounds to use.
+ This can be one of "fast", "medium", "slow",
+ or an integer in the range 1000...999999999.
+
+ See :attr:`CryptAlgorithm.has_named_rounds` for details
+ on the meaning of "fast", "medium" and "slow".
+ """
+ salt = None
+ if hash:
+ rec = self.parse(hash)
+ if keep_salt:
+ salt = rec.salt
+ if rounds is None:
+ rounds = rec.rounds
+ rounds = self._norm_rounds(rounds)
+ return self._sha_crypt(rounds, salt, secret)
+
+ def _norm_rounds(self, rounds):
+ if isinstance(rounds, int):
+ return rounds
+ elif rounds == "fast" or rounds is None:
+ return self.fast_rounds
+ elif rounds == "slow":
+ return self.slow_rounds
+ else:
+ if rounds != "medium":
+ log.warning("unknown rounds alias %r, using 'medium'", rounds)
+ return self.medium_rounds
+
+ def verify(self, secret, hash):
+ if hash is None:
+ return False
+ rec = self.parse(hash)
+ other = self._sha_crypt_raw(rec.rounds, rec.salt, secret)
+ return other.chk == rec.chk
+
+ #=========================================================
+ #eoc
+ #=========================================================
+
class Sha256Crypt(_ShaCrypt):
    """This class implements the SHA-256 Crypt Algorithm,
    according to the specification at `<http://people.redhat.com/drepper/SHA-crypt.txt>`_.
    It should be byte-compatible with unix shadow hashes beginning with ``$5$``.

    See Sha512Crypt for usage examples and details.
    """
    #=========================================================
    #algorithm info
    #=========================================================
    name='sha256-crypt'
    hash_bits = 256

    #=========================================================
    #internals
    #=========================================================
    _hash = hashlib.sha256 #digest constructor used by the _ShaCrypt core
    _key = '5' #identifying char inside the "$5$..." prefix
    _chunk_size = 32 #sha256 digest size in bytes
    _hash_size = 43 #encoded checksum length: ceil(256 bits / 6 bits-per-char)

    def _encode(self, result):
        """encode the 32-byte digest into the 43-char hash64 checksum.

        Bytes are consumed in 3-byte groups taken in a permuted order
        (presumably mirroring the sha-crypt spec's base64 step -- confirm
        against the spec before touching the index rotation below).
        """
        out = ''
        a, b, c = [0, 10, 20]
        while a < 30:
            out += _enc64b3(result, a, b, c)
            #rotate the three byte-indexes for the next group
            a, b, c = c+1, a+1, b+1
        assert a == 30, "loop to far: %r" % (a,)
        #final two bytes (30, 31) are encoded separately
        out += _enc64b2(result, 31, 30)
        return out

    #=========================================================
    #frontend
    #=========================================================
    #matches "$5$[rounds=N$]salt[$checksum]"; the checksum group is
    #optional so bare salt/config strings can be parsed too
    _pat = re.compile(r"""
        ^
        \$(?P<alg>5)
        (\$rounds=(?P<rounds>\d+))?
        \$(?P<salt>[A-Za-z0-9./]+)
        (\$(?P<chk>[A-Za-z0-9./]+))?
        $
        """, re.X)

    #=========================================================
    #eof
    #=========================================================
+
class Sha512Crypt(_ShaCrypt):
    """This class implements the SHA-512 Crypt Algorithm,
    according to the specification at `<http://people.redhat.com/drepper/SHA-crypt.txt>`_.
    It should be byte-compatible with unix shadow hashes beginning with ``$6$``.

    This implementation is based on a pure-python translation
    of the original specification.

    .. note::
        This is *not* just the raw SHA-512 hash of the password,
        which is sometimes incorrectly referred to as sha512-crypt.
        This is a variable-round descendant of md5-crypt,
        and is comparable in strength to bcrypt.

    Usage Example::

        >>> from bps.security.pwhash import Sha512Crypt
        >>> crypt = Sha512Crypt()
        >>> #to encrypt a new secret with this algorithm
        >>> hash = crypt.encrypt("forget me not")
        >>> hash
        '$6$rounds=11949$KkBupsnnII6YXqgT$O8qAEcEgDyJlMC4UB3buST8vE1PsPPABA.0lQIUARTNnlLPZyBRVXAvqqynVByGRLTRMIorkcR0bsVQS5i3Xw1'
        >>> #to verify an existing secret
        >>> crypt.verify("forget me not", hash)
        True
        >>> crypt.verify("i forgot it", hash)
        False

    .. automethod:: encrypt
    """
    #=========================================================
    #algorithm info
    #=========================================================

    name='sha512-crypt'
    hash_bits = 512

    #=========================================================
    #internals
    #=========================================================
    _hash = hashlib.sha512 #digest constructor used by the _ShaCrypt core
    _key = '6' #identifying char inside the "$6$..." prefix
    _chunk_size = 64 #sha512 digest size in bytes
    _hash_size = 86 #encoded checksum length: ceil(512 bits / 6 bits-per-char)

    def _encode(self, result):
        """encode the 64-byte digest into the 86-char hash64 checksum.

        Bytes are consumed in 3-byte groups taken in a permuted order
        (presumably mirroring the sha-crypt spec's base64 step -- confirm
        against the spec before touching the index rotation below).
        """
        out = ''
        a, b, c = [0, 21, 42]
        while c < 63:
            out += _enc64b3(result, a, b, c)
            #rotate the three byte-indexes for the next group
            a, b, c = b+1, c+1, a+1
        assert c == 63, "loop to far: %r" % (c,)
        #final byte (63) is encoded separately
        out += _enc64b1(result, 63)
        return out

    #=========================================================
    #frontend
    #=========================================================

    #matches "$6$[rounds=N$]salt[$checksum]"; the checksum group is
    #optional so bare salt/config strings can be parsed too
    _pat = re.compile(r"""
        ^
        \$(?P<alg>6)
        (\$rounds=(?P<rounds>\d+))?
        \$(?P<salt>[A-Za-z0-9./]+)
        (\$(?P<chk>[A-Za-z0-9./]+))?
        $
        """, re.X)

    #=========================================================
    #eof
    #=========================================================
+
+#=========================================================
+#OpenBSD's BCrypt
+#=========================================================
+class BCrypt(CryptAlgorithm):
+ """Implementation of OpenBSD's BCrypt algorithm.
+
+ BPS will use the py-bcrypt package if it is available,
+ otherwise it will fall back to a slower pure-python implementation
+ that is builtin.
+
+ .. automethod:: encrypt
+ """
+ #=========================================================
+ #algorithm info
+ #=========================================================
+ name = "bcrypt"
+ salt_bits = 128
+ hash_bits = 192
+ secret_chars = 55
+ has_rounds = True
+ has_named_rounds = True
+
+ #current recommended default rounds for blowfish
+ # last updated 2009-7-6 on a 2ghz system
+ fast_rounds = 11 # ~0.25s
+ medium_rounds = 13 # ~0.82s
+ slow_rounds = 14 # ~ 1.58s
+
+ #=========================================================
+ #frontend
+ #=========================================================
+ _pat = re.compile(r"""
+ ^
+ \$(?P<alg>2[a]?)
+ \$(?P<rounds>\d+)
+ \$(?P<salt>[A-Za-z0-9./]{22})
+ (?P<chk>[A-Za-z0-9./]{31})?
+ $
+ """, re.X)
+
+ def identify(self, hash):
+ "identify bcrypt hash"
+ if hash is None:
+ return False
+ return self._pat.match(hash) is not None
+
+ def parse(self, hash):
+ "parse bcrypt hash"
+ m = self._pat.match(hash)
+ if not m:
+ raise ValueError, "invalid bcrypt hash/salt"
+ alg, rounds, salt, chk = m.group("alg", "rounds", "salt", "chk")
+ return UnixHash(alg, salt, chk, rounds=int(rounds), source=hash)
+
+ def encrypt(self, secret, hash=None, keep_salt=False, rounds=None):
+ """encrypt using bcrypt.
+
+ In addition to the normal options that :meth:`CryptAlgorithm.encrypt` takes,
+ this function also accepts the following:
+
+ :param rounds:
+ Optionally specify the number of rounds to use
+ (technically, bcrypt will actually use ``2**rounds``).
+ This can be one of "fast", "medium", "slow",
+ or an integer in the range 4..31.
+
+ See :attr:`CryptAlgorithm.has_named_rounds` for details
+ on the meaning of "fast", "medium" and "slow".
+ """
+ #validate salt
+ if hash:
+ rec = self.parse(hash)
+ if rounds is None:
+ rounds = rec.rounds
+ #generate new salt
+ if hash and keep_salt:
+ salt = hash
+ else:
+ rounds = self._norm_rounds(rounds)
+ salt = bcrypt.gensalt(rounds)
+ #encrypt secret
+ return bcrypt.hashpw(secret, salt)
+
+ def _norm_rounds(self, rounds):
+ if isinstance(rounds, int):
+ return rounds
+ elif rounds == "fast" or rounds is None:
+ return self.fast_rounds
+ elif rounds == "slow":
+ return self.slow_rounds
+ else:
+ if rounds != "medium":
+ log.warning("unknown rounds alias %r, using 'medium'", rounds)
+ return self.medium_rounds
+
+ def verify(self, secret, hash):
+ "verify bcrypt hash"
+ return bcrypt.hashpw(secret, hash) == hash
+
+ #=========================================================
+ #eoc
+ #=========================================================
+
+#=========================================================
+#
+#=========================================================
+class CryptContext(list):
+ """Helper for encrypting passwords using different algorithms.
+
+ Different storage contexts (eg: linux shadow files vs openbsd shadow files)
+ may use different sets and subsets of the available algorithms.
+ This class encapsulates such distinctions: it represents an ordered
+ list of algorithms, each with a unique name. It contains methods
+ to verify against existing algorithms in the context,
+ and still encrypt using new algorithms as they are added.
+
+ Because of all of this, it's basically just a list object.
+ However, it contains some dictionary-like features
+ such as looking up algorithms by name, and it's restriction
+ that no two algorithms in a list share the same name
+ causes it to act more like an "ordered set" than a list.
+
+ In general use, none of this matters.
+ The typical use case is as follows::
+
+ >>> from bps.security import pwhash
+ >>> #create a new context that only understands Md5Crypt & BCrypt
+ >>> myctx = pwhash.CryptContext([ pwhash.Md5Crypt, pwhash.BCrypt ])
+
+ >>> #the last one in the list will be used as the default for encrypting...
+ >>> hash1 = myctx.encrypt("too many secrets")
+ >>> hash1
+ '$2a$11$RvViwGZL./LkWfdGKTrgeO4khL/PDXKe0TayeVObQdoew7TFwhNFy'
+
+ >>> #choose algorithm explicitly
+ >>> hash2 = myctx.encrypt("too many secrets", alg="md5-crypt")
+ >>> hash2
+ '$1$E1g0/BY.$gS9XZ4W2Ea.U7jMueBRVA.'
+
+ >>> #verification will autodetect the right hash
+ >>> myctx.verify("too many secrets", hash1)
+ True
+ >>> myctx.verify("too many secrets", hash2)
+ True
+ >>> myctx.verify("too many socks", hash2)
+ False
+
+ >>> #you can also have it identify the algorithm in use
+ >>> myctx.identify(hash1)
+ 'bcrypt'
+ >>> #or just return the CryptAlgorithm instance directly
+ >>> myctx.identify(hash1, resolve=True)
+ <bps.security.pwhash.BCrypt object, name="bcrypt">
+
+ >>> #you can get a list of algs...
+ >>> myctx.keys()
+ [ 'md5-crypt', 'bcrypt' ]
+
+ >>> #and get the CryptAlgorithm object by name
+ >>> bc = myctx['bcrypt']
+ >>> bc
+ <bps.security.pwhash.BCrypt object, name="bcrypt">
+ """
+ #=========================================================
+ #init
+ #=========================================================
+ def __init__(self, source=None):
+ list.__init__(self)
+ if source:
+ self.extend(source)
+
+ #=========================================================
+ #wrapped list methods
+ #=========================================================
+
+ #---------------------------------------------------------
+ #misc
+ #---------------------------------------------------------
+ def __repr__(self):
+ return "%s(%s)" % (self.__class__.__name__, list.__repr__(self))
+
+ #---------------------------------------------------------
+ #readers
+ #---------------------------------------------------------
+ def keys(self):
+ "return list of names of all algorithms in context"
+ return [ alg.name for alg in self ]
+
+ def get(self, name, default=None):
+ return self.resolve(name) or default
+
+ def __getitem__(self, value):
+ "look up algorithm by index or by name"
+ if isinstance(value, str):
+ #look up by string
+ return self.must_resolve(value)
+ else:
+ #look up by index
+ return list.__getitem__(self, value)
+
+ def __contains__(self, value):
+ "check for algorithm's presence by name or instance"
+ return self.index(value) > -1
+
+ def index(self, value, start=None, stop=None):
+ """find location of algorithm by name or instance"""
+ if isinstance(value, str):
+ #hunt for element by alg name
+ for idx, crypt in enum_slice(self, start, stop):
+ if crypt.name == value:
+ return idx
+ return -1
+## elif isinstance(value, type):
+## #hunt for element by alg class
+## for idx, crypt in enum_slice(self, start, stop):
+## if isinstance(crypt, value):
+## return idx
+## return -1
+ else:
+ #else should be an alg instance
+ for idx, crypt in enum_slice(self, start, stop):
+ if crypt == value:
+ return idx
+ return -1
+
+ #---------------------------------------------------------
+ #adding
+ #---------------------------------------------------------
+ #XXX: prevent duplicates?
+
+ def _norm_alg(self, value):
+ "makes sure all elements of list are CryptAlgorithm instances"
+ if not is_crypt_alg(value):
+ raise ValueError, "value must be CryptAlgorithm class or instance: %r" % (value,)
+ if isinstance(value, type):
+ value = value()
+ if not value.name:
+ raise ValueError, "algorithm instance lacks name: %r" % (value,)
+ return value
+
+ def __setitem__(self, idx, value):
+ "override algorithm at specified location"
+ if idx < 0:
+ idx += len(self)
+ value = self._norm_alg(value)
+ old = self.index(value.name)
+ if old > -1 and old != idx:
+ raise KeyError, "algorithm named %r already present in context" % (value.name,)
+ list.__setitem__(self, idx, value)
+
+ def append(self, value):
+ "add another algorithm to end of list"
+ value = self._norm_alg(value)
+ if value.name in self:
+ raise KeyError, "algorithm named %r already present in context" % (value.name,)
+ list.append(self, value)
+
+ def insert(self, idx, value):
+ value = self._norm_alg(value)
+ if value.name in self:
+ raise KeyError, "algorithm named %r already present in context" % (value.name,)
+ list.insert(self, idx, value)
+
+ #---------------------------------------------------------
+ #composition
+ #---------------------------------------------------------
+ def __add__(self, other):
+ c = CryptContext()
+ c.extend(self)
+ c.extend(other)
+ return c
+
+ def __iadd__(self, other):
+ self.extend(other)
+ return self
+
+ def extend(self, values, include=None, exclude=None):
+ "add more algorithms from another list, optionally filtering by name"
+ if include:
+ values = (e for e in values if e.name in include)
+ if exclude:
+ values = (e for e in values if e.name not in exclude)
+ for value in values:
+ self.append(value)
+
+ #---------------------------------------------------------
+ #removing
+ #---------------------------------------------------------
+ def remove(self, value):
+ if isinstance(value, str):
+ value = self[value]
+ list.remove(self, value)
+
+ def discard(self, value):
+ if isinstance(value, str):
+ try:
+ self.remove(value)
+ return True
+ except KeyError:
+ return False
+ else:
+ try:
+ self.remove(value)
+ return True
+ except ValueError:
+ return False
+
+ #=========================================================
+ #CryptAlgorithm workalikes
+ #=========================================================
+ #TODO: recode default to be explicitly settable, not just using first one.
+ #TODO: simplify interface as much as possible.
+
+ def resolve(self, name=None, default=None):
+ """given an algorithm name, return CryptAlgorithm instance which manages it.
+ if no match is found, returns None.
+
+ resolve() without arguments will return default algorithm
+ """
+ if name is None:
+ #return default algorithm
+ if self:
+ return self[-1]
+ elif isseq(name):
+ #pick last hit from list of names
+ for elem in reversed(self):
+ if elem.name in name:
+ return elem
+ else:
+ #pick name
+ for elem in reversed(self):
+ if elem.name == name:
+ return elem
+ return default
+
+ def must_resolve(self, name):
+ "helper which raises error if alg can't be found"
+ crypt = self.resolve(name)
+ if crypt is None:
+ raise KeyError, "algorithm not found: %r" % (name,)
+ else:
+ return crypt
+
+ def identify(self, hash, resolve=False):
+ """Attempt to identify which algorithm hash belongs to w/in this context.
+
+ :arg hash:
+ The hash string to test.
+ :param resolve:
+ If ``True``, the actual algorithm object is returned.
+ If ``False`` (the default), only the name of the algorithm is returned.
+
+ All registered algorithms will be checked in from last to first,
+ and whichever one claims the hash first will be returned.
+
+ :returns:
+ The first algorithm instance that identifies the hash,
+ or ``None`` if none of the algorithms claims the hash.
+ """
+ if hash is None:
+ return None
+ for alg in reversed(self):
+ if alg.identify(hash):
+ if resolve:
+ return alg
+ else:
+ return alg.name
+ return None
+
+ def must_identify(self, hash, **kwds):
+ "helper which raises error if hash can't be identified"
+ alg = self.identify(hash, **kwds)
+ if alg is None:
+ raise ValueError, "hash could not be identified"
+ else:
+ return alg
+
+ def encrypt(self, secret, hash=None, alg=None, **kwds):
+ """encrypt secret, returning resulting hash.
+
+ :arg secret:
+ String containing the secret to encrypt
+
+ :arg hash:
+ Optional hash string previously returned by encrypt (or compatible source).
+ If specified, this string will be used to provide default
+ value for the salt, rounds, or other algorithm-specific options.
+
+ :param alg:
+ Optionally specify the name of the algorithm to use.
+ If no algorithm is specified, an attempt is made
+ to guess from the hash string. If no hash string
+ is specified, the last algorithm in the list is used.
+
+ :param **kwds:
+ All other keyword options are passed to the algorithm's encrypt method.
+ The two most common ones are "keep_salt" and "rounds".
+
+ :returns:
+ The secret as encoded by the specified algorithm and options.
+ """
+ if not self:
+ raise ValueError, "no algorithms registered"
+ if alg:
+ crypt = self.must_resolve(alg)
+ elif hash:
+ crypt = self.must_identify(hash, resolve=True)
+ else:
+ crypt = self[-1]
+ return crypt.encrypt(secret, hash, **kwds)
+
+ def verify(self, secret, hash, alg=None, **kwds):
+ """verify secret against specified hash
+
+ :arg secret:
+ the secret to encrypt
+ :arg hash:
+ hash string to compare to
+ :param alg:
+ optionally specify which algorithm(s) should be considered.
+ """
+ if not self:
+ raise ValueError, "no algorithms registered"
+ if hash is None: #for convience, so apps can pass in user_account.hash field w/o worrying if it was set
+ return False
+ if alg:
+ crypt = self.must_resolve(alg)
+ else:
+ crypt = self.must_identify(hash, resolve=True)
+ #NOTE: passing additional keywords for algorithms such as PostgresMd5Crypt
+ return crypt.verify(secret, hash, **kwds)
+
+ #=========================================================
+ #eof
+ #=========================================================
+
def is_crypt_context(obj):
    """Return True if *obj* exposes the core CryptContext interface."""
    #duck-type probe of the most distinctive attributes only --
    #deliberately not an exhaustive check of every required attr
    for attr in ("resolve", "verify", "encrypt", "identify"):
        if not hasattr(obj, attr):
            return False
    return True
+
+#=========================================================
+#build up the standard context objects
+#=========================================================
+
+#default context for quick use.. recognizes all known algorithms,
+# currently uses SHA-512 as default
+default_context = CryptContext([ UnixCrypt, Md5Crypt, BCrypt, Sha256Crypt, Sha512Crypt ])
+
def identify(hash, resolve=False):
    """Determine which password hash algorithm produced *hash*.

    This is a convenience wrapper around
    ``pwhash.default_context.identify(hash)``.

    :arg hash:
        the hash string to examine.
    :param resolve:
        when ``True``, return the matching :class:`CryptAlgorithm`
        instance rather than just its name.

    Recognized algorithms, with their usual unix shadow-file prefixes:

    * ``"unix-crypt"`` -- the historic unix-crypt algorithm
    * ``"md5-crypt"`` -- the md5-crypt algorithm, prefix ``$1$``
    * ``"bcrypt"`` -- the openbsd blowfish-crypt algorithm,
      prefixes ``$2$`` / ``$2a$``
    * ``"sha256-crypt"`` -- the 256-bit sha-crypt algorithm, prefix ``$5$``
    * ``"sha512-crypt"`` -- the 512-bit sha-crypt algorithm, prefix ``$6$``

    :returns:
        the algorithm's name (or instance, per *resolve*),
        or ``None`` if the hash could not be identified.
    """
    return default_context.identify(hash, resolve=resolve)
+
def encrypt(secret, hash=None, alg=None, **kwds):
    """Encrypt *secret* using a password hash algorithm.

    This is a convenience wrapper around ``pwhash.default_context.encrypt()``.

    :type secret: str
    :arg secret:
        the secret to encrypt.

    :type hash: str|None
    :arg hash:
        optional existing hash string, used to provide default values
        for the salt, rounds, and other algorithm-specific options.
        When omitted, algorithm-chosen defaults apply.

    :type alg: str|None
    :param alg:
        optional name of the algorithm to use (see :func:`identify`
        for the list of names). If omitted, the algorithm is guessed
        from *hash*; with no hash either, sha512-crypt is used.

    Remaining keywords are forwarded to the chosen algorithm's encrypt
    method. The common ones:

    :type keep_salt: bool
    :param keep_salt:
        accepted by all builtin algorithms. Normally a fresh salt is
        generated on every call; set this to ``True`` (together with a
        prior *hash*) to reuse that hash's salt instead.

        .. note::
            This is mainly useful when verifying an existing hash
            (see :func:`verify`); otherwise avoid it, since reusing
            a salt needlessly weakens security.

    :type rounds: int
    :param rounds:
        for sha256-crypt and sha512-crypt, the number of rounds to run;
        for bcrypt, the log-base-2 of the number of rounds.
        All three also accept the aliases "fast", "medium" and "slow",
        which select preset round counts.

    :returns:
        the secret as encoded by the chosen algorithm and options.
    """
    return default_context.encrypt(secret, hash=hash, alg=alg, **kwds)
+
def verify(secret, hash, alg=None):
    """Verify a secret against an existing hash.

    This is a convenience wrapper around ``pwhash.default_context.verify()``.
    It checks whether *secret* matches the one stored inside *hash*;
    by default the secret is re-crypted (see :func:`encrypt`) and compared
    against the provided hash, though some algorithms implement this
    more efficiently.

    :type secret: str
    :arg secret:
        the secret to check.

    :type hash: str
    :param hash:
        the hash string to check against.

    :type alg: str|None
    :param alg:
        optional name of the algorithm to use (see :func:`identify`
        for the list of names). If omitted, the algorithm is guessed
        from *hash*; if it can't be identified, ValueError is raised.

    :returns:
        ``True`` if the secret matches, otherwise ``False``.
    """
    return default_context.verify(secret, hash, alg=alg)
+
#some general os-context helpers (these may not match your os policy exactly)
#NOTE: the last algorithm in each list is that context's default scheme
linux_context = CryptContext([ UnixCrypt, Md5Crypt, Sha256Crypt, Sha512Crypt ])
bsd_context = CryptContext([ UnixCrypt, Md5Crypt, BCrypt ])

#some sql db context helpers
mysql40_context = CryptContext([Mysql10Crypt])
mysql_context = CryptContext([Mysql10Crypt, Mysql41Crypt])
postgres_context = CryptContext([PostgresMd5Crypt])

#=========================================================
#deprecated function names
#=========================================================
#aliases kept for backwards compatibility; relocated_function presumably
# wraps the target with a deprecation notice -- see bps.warndep to confirm
from bps.warndep import relocated_function
identify_secret = relocated_function("identify_secret", identify)
encrypt_secret = relocated_function("encrypt_secret", encrypt)
verify_secret = relocated_function("verify_secret", verify)
+
+#=========================================================
+# eof
+#=========================================================
diff --git a/bps/stream.py b/bps/stream.py
new file mode 100644
index 0000000..88fea4d
--- /dev/null
+++ b/bps/stream.py
@@ -0,0 +1,578 @@
+"""bps.stream -- stream and buffer related functions"""
+#=========================================================
+#imports
+#=========================================================
+#core
+import os
+#local
+__all__ = [
+ #nb read
+ 'nb_read',
+ 'nb_readline_iter',
+ 'nb_readline_list',
+
+ #guesser
+ 'get_input_type', 'BT',
+]
+
+#=========================================================
+#misc
+#=========================================================
+def get_stream_size(stream, abs=False):
+ """return size of stream.
+
+ :param stream:
+ This must be a *seekable* stream object.
+ This function will return the size of the stream
+ by seeking to the end, recording the information,
+ and then restoring the original location in the stream.
+
+ :param abs:
+ If ``True``, the absolute size of the stream is reported.
+ If ``False`` (the default), the number of remaining bytes
+ relative to the current position is reported.
+
+ :returns:
+ Number of bytes in stream as an integer.
+ """
+ pos = stream.tell()
+ try:
+ stream.seek(0, 2) #seek to end
+ if abs:
+ return stream.tell()
+ else:
+ return stream.tell()-pos
+ finally:
+ stream.seek(pos)
+
+##def iter_stream_records(stream, size=None, count=None, notail=False, map=None):
+## """read a series of fixed-size records from stream,
+## returning them via an iterator.
+##
+## :param stream:
+## the stream to read from.
+## :param size:
+## size of the record in bytes.
+## :param count:
+## [Optional]
+## Exact number of records that should be read.
+## Stops when that many have been read.
+## If stream runs out before *count* records have been read, raises ValueError.
+## If count is not specified, will read to end of stream.
+##
+## :param notail:
+## If *count* is specified and this is ``True``,
+## a ValueError will be raised if any data is left in the stream
+## after reading off max records.
+##
+## :param map:
+## Optional mapping function to apply.
+## """
+## cur = 0
+## while True:
+## chunk = buffer.read(size)
+## if not chunk:
+## if count is not None:
+## raise ValueError, "too few records!"
+## return
+## if len(chunk) < size:
+## raise ValueError, "chunk unexpectedly too small"
+## assert len(chunk) == size
+## if map:
+## yield map(chunk)
+## else:
+## yield chunk
+## cur += 1
+## if count is not None and cur == count:
+## if notail:
+## #should we leave it in stream if possible?
+## #makes behavior unpredictable.
+#### if hasattr(buffer, "seek"):
+#### pos = buffer.tell()
+#### chunk = buffer.read()
+#### buffer.seek(pos, 0)
+#### else:
+## chunk = buffer.read()
+## if chunk:
+## raise ValueError, "unexpected data at end of buffer: %r" % chunk
+## return
+
+##def unpack_from_stream(fmt, stream):
+## """helper for quickly unpacking chunks from stream using struct module.
+##
+## :param fmt:
+## valid :mod:`struct` format string
+## :param stream:
+## stream to read from
+##
+## :returns:
+## unpacked array
+## """
+## size = struct.calcsize(fmt)
+## chunk = stream.read(size)
+## return struct.unpack(fmt, chunk)
+
+#=========================================================
+#
+#=========================================================
+#NOTE: this probably works, just not used or tested yet.
+##class unbuffered(object):
+## "wrapper around stream object which disables buffering"
+## buffered = False #flag so we can be called on something that's already unbuffered
+## raw_stream = None #stream we're wrapping
+##
+## def __new__(cls, stream):
+## if not getattr(stream, "buffered", True):
+## return stream
+## if not hasattr(stream, "flush"):
+## warn("can't disable buffering for stream: %r" % (stream,))
+## return stream
+## self = object.__init__(cls)
+## self.raw_stream = stream
+## if hasattr(stream, "writelines"):
+## self.writelines = self.__writelines
+## return self
+##
+## def write(self, arg):
+## retval = self.raw_stream.write(arg)
+## self.raw_stream.flush()
+## return retval
+##
+## def __writelines(self, arg):
+## retval = self.raw_stream.writelines(arg)
+## self.raw_stream.flush()
+## return retval
+##
+## def __getattr__(self, attr):
+## return getattr(self.raw_stream, attr)
+##
+## #TODO: needs dir() wrapper, maybe repr/str wrappers to, check Proxy object for source.
+
+#=========================================================
+#non-blocking pipe reader
+#=========================================================
+#NOTE: low level nb read code adapted from http://code.activestate.com/recipes/440554/
+
+def nb_read(stream, maxsize=-1):
+ """read bytes from a stream in non-blocking fashion.
+
+ This attempts to perform nonblocking read on a stream,
+ and do this uniformly across operating systems.
+
+ .. note::
+ Under windows, PeekNamedPipe is used,
+ which required the pywin32 package.
+ For other OSes, :mod:`fcntrl` is used.
+
+ :arg stream:
+ The stream to read from.
+ Currently only file() handles are supported,
+ but this may be enhanced in the future.
+
+ :param maxsize:
+ If ``-1``, the maximum available characters will be read.
+ Otherwise, up to *maxsize* characters will be returned.
+
+ :Returns:
+ String containing characters, with at length of at most *maxsize*.
+ The empty string is returned if no data is available.
+ """
+ if maxsize == 0:
+ return ''
+ if maxsize < -1:
+ raise ValueError, "maxsize must be -1, or > 0"
+ #TODO: check for none-filetype streams
+ return _nb_read(stream, maxsize)
+
+#---------------------------------------------------
+#windows version
+#---------------------------------------------------
+if os.name == "nt":
+ from win32file import ReadFile
+ from win32pipe import PeekNamedPipe
+ from msvcrt import get_osfhandle
+ import errno
+
+ def _nb_read(pipe, maxsize):
+ try:
+ x = get_osfhandle(pipe.fileno())
+ (read, avail, msg) = PeekNamedPipe(x, 0)
+ if maxsize != -1 and maxsize < avail:
+ avail = maxsize
+ if avail > 0:
+ (errnum, read) = ReadFile(x, avail, None)
+ return read
+ else:
+ return ''
+ except ValueError:
+ return '' #pipe is closed
+ except (subprocess.pywintypes.error, Exception), err:
+ if err[0] in (109, errno.ESHUTDOWN):
+ return '' #pipe is closed
+ raise
+
+#---------------------------------------------------
+#posix version
+#---------------------------------------------------
+else:
+ import fcntl
+ import select
+
+ def _nb_read(pipe, maxsize):
+ flags = fcntl.fcntl(pipe, fcntl.F_GETFL)
+ if not pipe.closed:
+ fcntl.fcntl(pipe, fcntl.F_SETFL, flags| os.O_NONBLOCK)
+ try:
+ if not select.select([pipe], [], [], 0)[0]:
+ return ''
+ if maxsize == -1:
+ return pipe.read()
+ else:
+ return pipe.read(maxsize)
+ finally:
+ if not pipe.closed:
+ fcntl.fcntl(pipe, fcntl.F_SETFL, flags)
+
+#---------------------------------------------------
+#TODO: other OSes
+#---------------------------------------------------
+
+#=========================================================
+#nb_read helpers
+#=========================================================
+def nb_readline_iter(stream, chop=False, chunk_size=256):
+    """generator which does non-blocking readline of stream,
+    taking care of assembling and returning only full lines.
+
+    :arg stream:
+        stream to read
+    :param chop:
+        whether to strip linefeeds from end
+    :param chunk_size:
+        how much to read at a time
+
+    :returns:
+        Calling this returns a generator bound to the stream,
+        which maintains an internal cache of bytes read.
+
+        When iterated over, it will read in all data available from the pipe.
+        It will then yield ``None`` if no complete line is available,
+        otherwise it will yield the next available line as a string.
+        If the stream closes, the last line returned may not have a linefeed at the end.
+
+    .. seealso::
+        :class:`nb_readline_list` which wraps this function
+    """
+    buf = '' #cache of bytes read but not yet assembled into a full line
+    while True:
+        #check for next line in buffer
+        idx = buf.find("\n")+1 #FIXME: replace w/ bps.mime.utils's find_eol()
+        if idx > 0:
+            #found a complete line: split it off, optionally chop the
+            #trailing linefeed, and hand it out before looking for more
+            out, buf = buf[:idx], buf[idx:]
+            if chop:
+                out = out.rstrip()
+            yield out
+            continue
+        #try to populate the buffer
+        chunk = nb_read(stream, chunk_size)
+        if chunk:
+            buf += chunk
+            continue
+        #send last bit if stream is closed
+        if stream.closed:
+            if buf:
+                yield buf #final partial line, may lack a trailing linefeed
+            return
+        #else yield exhausted signal
+        yield None
+
+class nb_readline_list(list):
+    """
+    List subclass which is designed to accumulate
+    parsed lines read (via :func:`nb_read`) from a stream.
+    Call the ``flush()`` method to load any more lines
+    that are available in the stream.
+
+    :param stream:
+        The stream to read from.
+
+    :param chop:
+        whether to strip linefeeds from end.
+
+    Usage example::
+
+        >>> from bps.stream import nb_readline_list
+        >>> fh = file("/home/elic/myfile.txt")
+        >>> lines = nb_readline_list(fh)
+        >>> lines
+        []
+        >>> lines.flush() #flush will append any more lines available on fh
+        True
+        >>> lines
+        ['line1\\n', 'line2\\n' ]
+        >>> # ... do stuff such as popping existing lines ...
+        >>> lines.pop(0)
+        'line1\\n'
+        >>> # Now assume file was being written to concurrently,
+        >>> # next flush will append any more lines
+        >>> lines.flush()
+        True
+        >>> lines
+        [ 'line2\\n', 'line3\\n' ]
+
+    """
+    def __init__(self, stream, **kwds):
+        #bound nb_readline_iter generator feeding this list;
+        #set to None once the stream reports itself closed (see flush)
+        self.reader = nb_readline_iter(stream, **kwds)
+
+    def flush(self):
+        "flush any pending lines from stream into buffer. returns False if stream is closed"
+        if not self.reader: #stream closed during previous call
+            return False
+        for line in self.reader:
+            if line is None: #no more data for now
+                return True
+            self.append(line)
+        else: #stream closed itself (generator exhausted without break)
+            self.reader = None
+            return False
+
+#=========================================================
+#buffer funcs
+#=========================================================
+
+#XXX: rename to ByteSourceType?
+class BT:
+    """The common byte-source types.
+
+    This class defines three constants,
+    which represent the possible sources for a string of bytes.
+    This is mainly useful for functions which take in / return
+    bytes in various formats. The following constants
+    provide a useful standard for referring to these:
+
+    ============= ============ =======================================
+    Constant Name String Value Meaning
+    ------------- ------------ ---------------------------------------
+    ``BT.RAW``    ``"raw"``    The source is a raw string of bytes.
+    ``BT.STREAM`` ``"stream"`` The source is a file handle or
+                               other stream-like object from which
+                               bytes can be read.
+    ``BT.PATH``   ``"path"``   The source is string which points
+                               to a path on the local filesystem.
+    ============= ============ =======================================
+
+    The constant ``BT.values`` is also available,
+    which is a list of all possible values.
+
+    .. seealso::
+        * :func:`get_input_type` which can autodetect the various byte-source types.
+    """
+    RAW = "raw"
+    STREAM = "stream"
+    PATH = "path"
+    values = (RAW, STREAM, PATH) #tuple of all recognized byte-source types
+
+    VALUES = values #deprecated alias
+
+BAD_CHARS = "\x00\x09\r\n" #chars assumed never to appear in a filepath: NUL, TAB, CR, LF
+def get_input_type(source, source_type=None, max_path=512, bad_chars=None):
+    """This function detects whether the provided object
+    is a filepath, buffer, or a raw string. This allows many functions
+    to take in a data source without having to specify multiple variants
+    of the function to handle the different datatypes.
+
+    While buffers are easy to detect, the distinction between filepath & string
+    relies on a fuzzier set of criteria: it makes the assumption that any filepath
+    will never contain certain characters (null, cr, lf), while source data
+    almost certainly will (if this is untrue for a particular domain of source data,
+    this function will not be very much help).
+
+    :Parameters:
+        source
+            The source object to test
+        max_path
+            Maximum length that we should expect for a filepath.
+        bad_chars
+            String of characters that we should never expect to see
+            in a filepath. Setting this to ``"\x00"`` may allow
+            certain rare paths to be detected that would otherwise be skipped.
+            By default, this list includes NUL, TAB, CR, and LF.
+        source_type
+            Limits types to be considered. For example, if it is known
+            that the source must be either a filepath or buffer,
+            set this value to ``['path','stream']``, and the 'string'
+            option will be not considered. By default, all possibilities
+            (``['path', 'stream', 'raw']``) will be considered.
+
+    :Returns:
+        Returns one of the following strings:
+
+        'path'
+            This source represents a path to a file.
+            :class:`bps.fs.FilePath` objects will be detected with
+            100% reliability based on their attributes.
+            Otherwise, this option is guessed based on the string's contents.
+
+        'stream'
+            This source is a stream (file, StringIO, etc).
+            These are detected with 100% reliability based on their attributes.
+
+        'raw'
+            This source is a string containing raw data.
+            This will be used as the fallback choice.
+
+        ``None``
+            returned if ``source`` is None.
+
+        Otherwise a TypeError will be raised.
+
+        If the source is determined for certain,
+        but does not match one of the source types
+        allowed by the *source_type* keyword,
+        a ValueError will be raised.
+
+    .. todo::
+        Document the BT enumerated type here.
+
+    Usage Example::
+
+        >>> from bps.stream import get_input_type
+        >>> get_input_type(r"c:\Documents and Settings\Administrator")
+        'path'
+        >>> get_input_type(r"a\nb\nc")
+        'raw'
+        >>> from cStringIO import StringIO
+        >>> buf = StringIO()
+        >>> get_input_type(buf)
+        'stream'
+
+    """
+    if source is None:
+        return None
+    #a plain-string source_type means the caller already knows the
+    #answer; it is passed back through unchanged
+    if isinstance(source_type, str):
+        return source_type
+    elif source_type is None:
+        source_type = BT.values
+    if bad_chars is None:
+        bad_chars = BAD_CHARS
+
+    #check for filepath objects [reliable]
+    if hasattr(source, "normpath"):
+        if BT.PATH not in source_type:
+            raise ValueError, "source appears to be a file path!"
+        return BT.PATH
+
+    #check for buffer [reliable]
+    if hasattr(source, "read"):
+        if BT.STREAM not in source_type:
+            raise ValueError, "source appears to be a stream!"
+        return BT.STREAM
+
+    #check size [this is just a guess, but reasonable]
+    #empty strings & over-long strings are assumed not to be paths
+    if (len(source) == 0 or len(source) > max_path) and BT.RAW in source_type:
+        return BT.RAW
+
+    #look for chars we'll never see in a path [default set is pretty certain; and if bad_chars="\x00", near dead certain]
+    if any(char in source for char in bad_chars):
+        if BT.RAW not in source_type:
+            raise ValueError, "source appears to be a raw string!"
+        return BT.RAW
+
+    #assume it's a path [this is just a guess],
+    #since it appears to have all the right properties,
+    #and only small single-line non-binary strings could get here.
+    if BT.PATH in source_type:
+        return BT.PATH
+    elif BT.RAW in source_type: #path-type wasn't on the list, assume a string
+        return BT.RAW
+    else:
+        raise ValueError, "source appears to be a raw string or file path!"
+
+#=========================================================
+#get_input_type wrappers
+#=========================================================
+
+#useful but unused
+##def get_input_buffer(source, **kwds):
+## """helper using guess_input_type() which always returns a buffer, whether passed file, string, or buffer"""
+## #XXX: what if we want to open in text mode?
+## # we'll need to decode / adapt the buffer as well!
+## type = get_input_type(source, **kwds)
+## if type == BT.PATH:
+## return file(source, "rb")
+## elif type == BT.RAW:
+## return StringIO(source)
+## else:
+## assert type == BT.STREAM
+## return source
+
+#useful but unused
+##def open_input_buffer(source, **kwds):
+## "context-manager version of get_input_buffer"
+## type = guess_input_type(source, **kwds)
+## if type == BT.PATH:
+## return file(source, "rb")
+## elif type == BT.RAW:
+## buffer = StringIO(source)
+## else:
+## assert type == BT.STREAM
+## buffer = source
+## @contextmanager
+## def noop():
+## yield buffer
+## return noop()
+
+#might be useful, but untested, and may be incomplete
+##class autoflush(object):
+## """creates wrapped version of stream which auto-flushes after writes.
+##
+## Usage Example::
+##
+## >>> from bps.stream import autoflush
+## >>> f = file("out.txt","w")
+## >>> f2 = autoflush(f)
+## >>> f2.write("test\n") #will be automatically flushed.
+## """
+##
+## def __init__(self, stream):
+## self.__dict__['stream'] = stream
+##
+## if hasattr(stream, "write"):
+## def write(*a, **k):
+## ret = stream.write(*a, **k)
+## stream.flush()
+## return ret
+## self.__dict__['write'] = write
+##
+## if hasattr(stream, "writeln"):
+## def writeln(self, *a, **k):
+## ret = stream.writeln(*a, **k)
+## stream.flush()
+## return ret
+## self.__dict__['writeln'] = writeln
+##
+## def __getattr__(self, attr):
+## "proxy all attribute reads to the proxy target"
+## return getattr(self.stream, attr)
+##
+## def __setattr__(self, attr, value):
+## "proxy all attribute writes to the proxy target"
+## setattr(self.stream, attr, value)
+##
+## def __delattr__(self, attr):
+## "proxy all attribute deletes to the proxy target"
+## delattr(self.stream, attr)
+##
+## def __dir__(self):
+## "reports list of all of proxy object's attrs as well as target object's attributes (if any)"
+## attrs = set(dir(self.__class__))
+## attrs.update(self.__dict__)
+## attrs.update(dir(self.stream))
+## return sorted(attrs)
+##
+## def __repr__(self):
+## return "<autoflush: %r>" % (self.stream,)
+
+#=========================================================
+#EOC
+#=========================================================
diff --git a/bps/tests/__init__.py b/bps/tests/__init__.py
new file mode 100644
index 0000000..7cbcb8f
--- /dev/null
+++ b/bps/tests/__init__.py
@@ -0,0 +1,5 @@
+"""bps.tests -- unittests for BPS
+
+This package contains the unittests for BPS.
+The files named "test_xxx" roughly correspond to "bps.xxx" module tests.
+"""
diff --git a/bps/tests/_logs_parse_config_sample1.ini b/bps/tests/_logs_parse_config_sample1.ini
new file mode 100644
index 0000000..7571124
--- /dev/null
+++ b/bps/tests/_logs_parse_config_sample1.ini
@@ -0,0 +1,10 @@
+#sample logging config file used by test_logs_parse_config
+
+[logging:options]
+capture_warnings = True
+warning_fmt = %(category)s:\n\t message: %(message)s\n\tfilename: %(filename)s\n\t lineno: %(lineno)s
+reset_loggers = True
+not_an_option = ignored options should be ignored
+
+[logging:levels]
+<root> = WARNING
diff --git a/bps/tests/test_basic.py b/bps/tests/test_basic.py
new file mode 100644
index 0000000..8acdd19
--- /dev/null
+++ b/bps/tests/test_basic.py
@@ -0,0 +1,319 @@
+"""tests for bps.text -- (c) Assurance Technologies 2003-2009"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+from array import array
+import os.path
+#site
+#pkg
+from bps.basic import *
+from bps.meta import Params as ak
+#module
+from bps.tests.utils import TestCase
+#=========================================================
+#
+#=========================================================
+from bps.unstable import smart_list_iter
+class SmartListIterTest(TestCase):
+    """unittests for bps.unstable.smart_list_iter --
+    an iterator which supports mutating the underlying list mid-iteration."""
+
+    def test_basic(self):
+        #iterating without mutation should visit every element in order
+        #and leave the list itself untouched
+        source=[5,6,100,2,3,40,8]
+        target=list(source)
+        out = []
+        itr = smart_list_iter(target)
+        for elem in itr:
+            out.append(elem)
+        self.assertEquals(out, source)
+        self.assertEquals(target, source)
+
+    def test_enum(self):
+        #enum=True should yield (index, value) pairs like enumerate()
+        source=[5,6,100,2,3,40,8]
+        target=list(source)
+        out = []
+        itr = smart_list_iter(target, enum=True)
+        for elem in itr:
+            out.append(elem)
+        self.assertEquals(out, list(enumerate(source)))
+        self.assertEquals(target, source)
+
+    def test_delete(self):
+        #itr.delete() removes the current element without disturbing
+        #the iteration sequence (all elements are still yielded)
+        source=[5,6,100,2,3,40,8]
+        target=list(source)
+        out = []
+        itr = smart_list_iter(target)
+        for elem in itr:
+            out.append(elem)
+            if elem > 30:
+                itr.delete()
+        self.assertEquals(out, source)
+        self.assertEquals(target, [5, 6, 2, 3, 8])
+
+    def test_double_delete(self):
+        #calling delete() twice removes the current element and its
+        #successor; the deleted successor is never yielded
+        source=[5,6,100,2,3,40,8]
+        target=list(source)
+        out = []
+        itr = smart_list_iter(target)
+        for elem in itr:
+            out.append(elem)
+            if elem > 30:
+                itr.delete()
+                itr.delete()
+        self.assertEquals(out, [5, 6, 100, 3, 40])
+        self.assertEquals(target, [5, 6, 3])
+
+    def test_insert(self):
+        #insert() supports both absolute positions and positions
+        #relative to the iteration cursor (relative=True)
+        source=[5,6,100,2,3,40,8]
+        target=list(source)
+        out = []
+        itr = smart_list_iter(target)
+        for elem in itr:
+            out.append(elem)
+            if elem == 6:
+                itr.delete()
+                itr.insert(0, 666)
+            elif elem == 100:
+                itr.delete()
+                itr.insert(0, 111, relative=True)
+            elif elem == 2:
+                itr.insert(2, 222)
+                itr.insert(2, 2221, relative=True)
+            elif elem == 3:
+                itr.insert(1, 333, relative=True)
+        self.assertEquals(out, [5, 6, 100, 111, 2, 3, 333, 2221, 40, 8])
+        self.assertEquals(target, [666, 5, 222, 111, 2, 3, 333, 2221, 40, 8])
+
+    def test_eol_delete(self):
+        #a second delete() at the end of the list must raise IndexError
+        source=[5,6,100,2,3,40,8]
+        target=list(source)
+        out = []
+        itr = smart_list_iter(target)
+        for elem in itr:
+            out.append(elem)
+            if elem == 8:
+                itr.delete()
+                self.assertRaises(IndexError, itr.delete)
+        self.assertEquals(out, source)
+        self.assertEquals(target, [5, 6, 100, 2, 3, 40])
+
+    #TODO: test various other insert/pop code
+    #TODO: test next_pos, __length__
+
+from bps.unstable import filter_in_place
+
+class FilterInPlaceTest(TestCase):
+    """unittests for bps.unstable.filter_in_place --
+    like the builtin filter(), but mutates the container in place."""
+
+    def test_list(self):
+        def ff(elem):
+            return elem in (3,9,7)
+
+        #test empty
+        a = []
+        filter_in_place(ff, a)
+        self.assertEquals(a,[])
+
+        #test all removed
+        a = [1,5,13,11]
+        filter_in_place(ff, a)
+        self.assertEquals(a,[])
+
+        #test none removed
+        a = [3,7,9]
+        filter_in_place(ff, a)
+        self.assertEquals(a,[3,7,9])
+
+        #test some removed
+        a = [1,3,5,7,9,11]
+        filter_in_place(ff, a)
+        self.assertEquals(a,[3,7,9])
+
+        #test some removed + invert
+        a = [1,3,5,7,9,11]
+        filter_in_place(ff, a, invert=True)
+        self.assertEquals(a,[1,5,11])
+
+    def test_array(self):
+        #'c' typecode (py2 char) arrays should filter just like lists
+        def ff(elem):
+            return elem in ("3","9","7")
+
+        #test empty
+        a = array('c')
+        filter_in_place(ff, a)
+        self.assertEquals(a.tostring(),'')
+
+        #test all removed
+        a = array('c','158')
+        filter_in_place(ff, a)
+        self.assertEquals(a.tostring(),'')
+
+        #test none removed
+        a = array('c','379')
+        filter_in_place(ff, a)
+        self.assertEquals(a.tostring(),'379')
+
+        #test some removed
+        a = array('c','135987')
+        filter_in_place(ff, a)
+        self.assertEquals(a.tostring(),'397')
+
+        #test some removed + invert
+        a = array('c','135987')
+        filter_in_place(ff, a, invert=True)
+        self.assertEquals(a.tostring(),'158')
+
+    def test_set(self):
+        def ff(elem):
+            return elem in (3,9,7)
+
+        #test empty
+        a = set()
+        filter_in_place(ff, a)
+        self.assertEquals(a,set())
+
+        #test all removed
+        a = set([1,5,13,11])
+        filter_in_place(ff, a)
+        self.assertEquals(a,set())
+
+        #test none removed
+        a = set([3,7,9])
+        filter_in_place(ff, a)
+        self.assertEquals(a,set([3,7,9]))
+
+        #test some removed
+        a = set([1,3,5,7,9,11])
+        filter_in_place(ff, a)
+        self.assertEquals(a,set([3,7,9]))
+
+        #test some removed + invert
+        a = set([1,3,5,7,9,11])
+        filter_in_place(ff, a, invert=True)
+        self.assertEquals(a,set([1,5,11]))
+
+    def test_dict(self):
+        #for dicts, the filter function is applied to the keys
+        def ff(elem):
+            return elem in (3,9,7)
+
+        #test empty
+        a = {}
+        filter_in_place(ff, a)
+        self.assertEquals(a,{})
+
+        #test all removed
+        a = {1:2, 5:6, 13:8, 11:12}
+        filter_in_place(ff, a)
+        self.assertEquals(a,{})
+
+        #test none removed
+        a = {3:2, 7:6, 9:8}
+        filter_in_place(ff, a)
+        self.assertEquals(a,{3:2, 7:6, 9:8})
+
+        #test some removed
+        a = {1:2, 5:6, 13:8, 11:12, 3:2, 7:6, 9:8}
+        filter_in_place(ff, a)
+        self.assertEquals(a,{3:2, 7:6, 9:8})
+
+        #test some removed + invert
+        a = {1:2, 5:6, 13:8, 11:12, 3:2, 7:6, 9:8}
+        filter_in_place(ff, a, invert=True)
+        self.assertEquals(a,{1:2, 5:6, 13:8, 11:12})
+
+    def test_bad_types(self):
+        #immutable or non-container inputs must be rejected w/ TypeError
+        def ff(elem):
+            return elem in (3,9,7)
+        a = [1,3,6]
+        self.assertRaises(TypeError, filter_in_place, ff, iter(a))
+        self.assertRaises(TypeError, filter_in_place, ff, (1,3,5,7))
+        self.assertRaises(TypeError, filter_in_place, ff, "1357")
+        self.assertRaises(TypeError, filter_in_place, ff, frozenset((1,3,5,7)))
+
+class MiscTest(TestCase):
+    """tests for assorted bps.basic helpers (intersects, enum_slice)"""
+
+    #rows are (seq_a, seq_b, whether-they-share-an-element)
+    intersect_cases = [
+        ([],[],False),
+        ([],[1],False),
+        ([1],[2],False),
+        ([1,3,5],[2,4,6],False),
+        ([1,3,7],[2,3,1],True),
+        ([1,2,3],[3,2,1],True),
+        ([1,3,3],[2,3,4],True),
+    ]
+    #container types every case is tried under, on both sides
+    intersect_classes = [ list, tuple, set, frozenset ]
+
+    def test_intersects(self):
+        "test intersects() helper"
+        from bps.basic import intersects
+        #cross-product: every case under every pair of container types
+        for a,b,real in self.intersect_cases:
+            for ac in self.intersect_classes:
+                ao = ac(a)
+                for bc in self.intersect_classes:
+                    bo = bc(b)
+                    result = intersects(ao,bo)
+                    self.assertEquals(result, real, "intersects(%r, %r):" % (ao,bo))
+
+    def test_enum_slice(self):
+        #enum_slice(seq, [start, [stop, [step]]]) should behave like
+        #enumerate() restricted to the given slice, preserving the
+        #original indices of the yielded elements
+        from bps.basic import enum_slice
+        def func(*a, **k):
+            return list(enum_slice(*a, **k))
+        self.check_function_results(func, [
+            #without arguments
+            ak([],''),
+            ak([(0, 'a'), (1, 'b'), (2, 'c'), (3, 'd'), (4, 'e'), (5, 'f')],
+               'abcdef'),
+
+            #with stop
+            ak([],
+               'abcdef', 0),
+            ak([(0, 'a')],
+               'abcdef', 1),
+            ak([(0, 'a'), (1, 'b')],
+               'abcdef', 2),
+            ak([(0, 'a'), (1, 'b'), (2, 'c'), (3, 'd')],
+               'abcdef', -2),
+
+            #with start+no stop
+            ak([(0, 'a'), (1, 'b'), (2, 'c'), (3, 'd'), (4, 'e'), (5, 'f')],
+               'abcdef', None, None),
+            ak([(0, 'a'), (1, 'b'), (2, 'c'), (3, 'd'), (4, 'e'), (5, 'f')],
+               'abcdef', 0, None),
+            ak([(1, 'b'), (2, 'c'), (3, 'd'), (4, 'e'), (5, 'f')],
+               'abcdef', 1, None),
+            ak([(2, 'c'), (3, 'd'), (4, 'e'), (5, 'f')],
+               'abcdef', 2, None),
+            ak([(4, 'e'), (5, 'f')],
+               'abcdef', -2, None),
+
+            #with start+stop
+            ak([(0, 'a'), (1, 'b'), (2, 'c'), (3, 'd'), (4, 'e')],
+               'abcdef', 0, -1),
+            ak([(1, 'b'), (2, 'c'), (3, 'd')],
+               'abcdef', 1, 4),
+            ak([(2, 'c'), (3, 'd'), (4, 'e')],
+               'abcdef', 2, 5),
+            ak([(2, 'c'), (3, 'd')],
+               'abcdef', -4, 4),
+
+            #with postive step
+            ak([(1, 'b'), (3, 'd'), (5, 'f')],
+               'abcdef', 1, None, 2),
+
+            #with negative step
+            ak([],
+               'abcdef', 2, -1, -1),
+            ak([],
+               'abcdef', 2, 3, -1),
+            ak([(2, 'c'), (1, 'b'), (0, 'a')],
+               'abcdef', 2, None, -1),
+            ak([(4, 'e'), (3, 'd'), (2, 'c'), (1, 'b'), (0, 'a')],
+               'abcdef', 4, None, -1),
+            ak([(4, 'e'), (3, 'd'), (2, 'c'), (1, 'b')],
+               'abcdef', 4, 0, -1),
+            ak([(4, 'e'), (2, 'c'), (0, 'a')],
+               'abcdef', 4, None, -2),
+            ak([(4, 'e'), (2, 'c')],
+               'abcdef', 4, 0, -2),
+        ])
+
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/tests/test_fs.py b/bps/tests/test_fs.py
new file mode 100755
index 0000000..85120b3
--- /dev/null
+++ b/bps/tests/test_fs.py
@@ -0,0 +1,1321 @@
+"""bps3.fs unittest script"""
+
+#TODO: test filesystem property methods
+#TODO: test file manipulation methods
+#TODO: test dir manipulation methods
+#TODO: test symlink manipulation methods
+#TODO: test expand() method & shortcut functions
+
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+import time
+import os.path
+import stat
+from functools import partial
+SEP = os.path.sep
+PAR = os.path.pardir
+CUR = os.path.curdir
+#package
+from bps.error import types as errors
+from bps.fs import filepath, FilePath, is_filepath, os_has_symlinks, parse_mode_mask, repr_mode_mask, chmod
+from bps.tests.utils import TestCase, get_tmp_path, ak
+#local
+if SEP == "/":
+ def local(path):
+ return path
+else:
+ def local(path):
+ return path.replace("/", SEP)
+
+#=========================================================
+#filepath function
+#=========================================================
+class ConstructorTest(TestCase):
+    "test filepath() constructor"
+    def test_none(self):
+        "test None behavior"
+        #filepath(None) passes None straight through
+        self.assertIs(filepath(None), None)
+
+    #rows are (arg1, [arg2, ...], expected_result_string)
+    various_data = [
+        #arg1 [ arg2 ... ] result
+
+        #single element
+        ("", ""),
+        ("a", "a"),
+        (CUR, CUR),
+        (PAR, PAR),
+
+        #multiple element
+        ("a", "a", "a" + SEP + "a"),
+        ("a", "b", "c", "a" + SEP + "b" + SEP + "c"),
+
+        #elements containing seps
+        ("a" + SEP + "b", "c", "a" + SEP + "b" + SEP + "c"),
+        ("a" + SEP + "b" + SEP, "c", "a" + SEP + "b" + SEP + "c"),
+
+    ]
+
+    def test_various(self):
+        "test assorted inputs"
+        #result must be a FilePath instance matching the expected string
+        for row in self.various_data:
+            path = filepath(*row[:-1])
+            self.assert_(isinstance(path, FilePath))
+            self.assert_(is_filepath(path))
+            self.assertEqual(path, row[-1])
+
+    def test_duplicate(self):
+        "test filepath caching"
+        #identical arguments should return the identical (cached) object
+        for row in self.various_data:
+            self.assertIs(filepath(*row[:-1]), filepath(*row[:-1]))
+
+#=========================================================
+#test path composition
+#=========================================================
+
+#we need somewhere to use as a base for the abspath tests
+cwd = os.path.abspath(os.getcwd())
+if os.name == "nt":
+    #windows: normcase gives the canonical form; abase is a drive-rooted path
+    cwdcan = os.path.normcase(cwd)
+    abase = "c:\\dev"
+else:
+    #posix: realpath resolves symlinks; abase is any absolute path
+    cwdcan = os.path.realpath(cwd)
+    abase = "/testing"
+#sanity-check our assumptions about the working directory
+assert os.path.isdir(cwd)
+assert not cwd.endswith(SEP)
+
+class PathTest(TestCase):
+    "test filesystem-independant FilePath methods"
+
+    #=========================================================
+    #test filepath component attrs
+    #=========================================================
+    #each row maps filepath(*source) to its expected component attrs
+    component_data = [
+        # path = dir + name = root + ext
+        # name = base + ext
+
+        #paths w/o dir part
+        dict(source=["a"], dir="",
+             name="a", base="a", ext="", root="a"),
+        dict(source=["a.jpg"], dir="",
+             name="a.jpg", base="a", ext=".jpg", root="a"),
+        dict(source=[".private"], dir="",
+             name=".private", base=".private", ext="", root=".private"),
+        dict(source=[".private.png"],
+             dir="", name=".private.png", base=".private", ext=".png", root=".private"),
+
+        #paths w/ dir part
+        dict(source=["aaa","bbb",".private.png"],
+             path="aaa" + SEP + "bbb" + SEP + ".private.png",
+             dir="aaa" + SEP + "bbb",
+             name=".private.png",
+             base="aaa" + SEP + "bbb" + SEP + ".private",
+             ext=".png",
+             root=".private"),
+    ]
+
+    def test_components(self):
+        "test parsing of path components"
+        self._run_attr_test(self.component_data)
+
+    #=========================================================
+    #test normpath & parentpath
+    #=========================================================
+    np_data = [
+        dict(source=['aaa', 'bbb', '.private.png'],
+             parentpath="aaa" + SEP + "bbb",
+             normpath="aaa" + SEP + "bbb" + SEP + ".private.png",
+             ),
+
+        dict(source=[PAR, 'aaa', 'bbb'],
+             parentpath=PAR + SEP + 'aaa',
+             normpath=PAR + SEP + 'aaa' + SEP + 'bbb'
+             ),
+
+        dict(source=['aaa', PAR, 'bbb'],
+             parentpath=CUR,
+             normpath='bbb',
+             ),
+
+        dict(source=[''], #empty path - poorly defined what this is.
+             #can't open it as a file anywhere.
+             #windows treats it as a dir(can list, norm=CUR) ,
+             #unix leaves it alone (norm='', no list or open).
+             #until overriding reason is found to force one behavior,
+             #just going w/ os-specific here
+             parentpath=CUR,
+             normpath=CUR,
+             dir='', name='', ext='', root='', base='',
+             ),
+
+        dict(source=[CUR], #cur dir - should return parent dir
+             parentpath=PAR,
+             normpath=CUR,
+             dir='', name=CUR, ext='', root=CUR, base=CUR,
+             ),
+
+        dict(source=[PAR], #par dir - should return parent / parent
+             parentpath=PAR + SEP + PAR,
+             normpath=PAR,
+             dir='', name=PAR, ext='', root=PAR, base=PAR,
+             ),
+
+        dict(source=[PAR, 'xxx'], #par dir - should return parent / parent
+             parentpath=PAR,
+             normpath=PAR + SEP + 'xxx',
+             dir=PAR, name='xxx', ext='', root='xxx', base=PAR + SEP + 'xxx',
+             ),
+    ]
+
+    def test_np(self):
+        "test normpath & parentpath"
+        self._run_attr_test(self.np_data)
+
+    def test_contained_in_path(self):
+        "test filepath.contained_in_path()"
+        source = filepath("x", "y")
+        target = source / "z"
+        #a child is contained in its parent, strictly or not
+        self.assert_(target.contained_in_path(source))
+        self.assert_(target.contained_in_path(source, strict=True))
+
+        #a path contains itself only in non-strict mode
+        self.assert_(target.contained_in_path(target))
+        self.assert_(not target.contained_in_path(target, strict=True))
+
+        #a parent is never contained in its child
+        self.assert_(not source.contained_in_path(target))
+        self.assert_(not source.contained_in_path(target, strict=True))
+
+    #=========================================================
+    #test derived path attrs
+    #=========================================================
+    derived_data = [
+        dict(source=["a"],
+             normpath="a",
+             abspath=cwd + SEP + "a",
+             parentpath=CUR,
+             canonpath=cwdcan + SEP + "a",
+             ),
+
+        #CUR/PAR combinations
+        dict(source="", normpath=CUR, parentpath=CUR, abspath=cwd), #XXX: this treats "" like a file. is that right?
+        dict(source=[CUR], normpath=CUR, parentpath=PAR),
+        dict(source=[PAR], normpath=PAR, parentpath=PAR + SEP + PAR),
+        dict(source=[PAR, CUR], normpath=PAR, parentpath=PAR + SEP + PAR),
+    ]
+    def test_derived(self):
+        "test derived paths"
+        os.chdir(cwd) #so abspath will work
+        self._run_attr_test(self.derived_data)
+
+    #rows are (path, start, expected getabspath(start) result)
+    gap_data = [
+        ("a", None, cwd + SEP + "a"),
+        ("a", cwd, cwd + SEP + "a"),
+        ("a", "xxx", cwd + SEP + 'xxx' + SEP + "a"),
+        (abase, None, abase),
+        (abase, "xxx", abase), #already-absolute paths ignore start
+    ]
+
+    def test_getabspath(self):
+        for source, start, value in self.gap_data:
+            result = filepath(source).getabspath(start)
+            self.assertEqual(result, value, "source=%r start=%r:" % (source, start))
+
+    def test_getrelpath(self):
+        "test filepath.getrelpath()"
+        #rows are (path, start, expected relative path), as segment lists
+        for path, start, result in [
+            (['a', 'b'], ['a'], ['b']),
+            (['a', 'b'], ['a', 'd'], [PAR, 'b']),
+            (['a', 'b', 'c'], ['a', 'd', 'e'], [PAR, PAR, 'b', 'c']),
+            (['a', 'b', 'c'], ['x', 'y', 'z'], [PAR, PAR, PAR, 'a', 'b', 'c']),
+            (['a', 'b', 'c'], ['x', 'y', 'z', 'q'], [PAR, PAR, PAR, PAR, 'a', 'b', 'c']),
+        ]:
+
+            path = SEP.join(path)
+            start = SEP.join(start)
+            result = SEP.join(result)
+            out = filepath(path).getrelpath(start)
+            #NOTE(review): debug print left in; py2 print statement
+            print [path, start, out, result]
+            self.assertEqual(out, result, "path=%r start=%r:"% (path, start))
+
+    def test_samepath(self):
+        #TODO: write real test for samepath
+        #for now, just make sure these calls don't raise
+        filepath(cwd).samepath(cwd)
+        filepath(cwd).samepath(cwd + SEP + "xxx")
+
+    #=========================================================
+    #expand tests
+    #=========================================================
+    def test_expand(self):
+        #TODO: write real test for expand
+
+        path = filepath(cwd)
+        #just make sure this doesn't throw errors for now
+        new = path.expand(all=True)
+
+    #=========================================================
+    #test string & joining operators
+    #=========================================================
+    def test_stringop(self):
+        "test string operations"
+        ax = "ax"
+        ab = "a" + SEP + "b"
+        abx = "a" + SEP + "bx"
+
+        #test __add__
+        #string concat should preserve the FilePath type
+        self.assert_(isinstance(filepath("a") + "x", FilePath))
+        self.assertEqual(filepath("a") + "x", ax)
+
+        self.assertEqual(filepath("a") / "b" + "x", abx)
+
+        #test __radd__
+        self.assert_(isinstance("x" + filepath("a"), FilePath))
+        self.assertEqual("x" + filepath("a"), "xa")
+
+    def test_joiners(self):
+        "test div and joinXXX operations"
+        ab = "a" + SEP + "b"
+        abc = "a" + SEP + "b" + SEP + "c"
+
+        #test __div__
+        self.assertEqual(filepath("a") / "b", ab)
+        self.assertEqual(filepath("a") / ("b", "c"), abc)
+
+        #test __div__ w/ strings & filepaths mixed
+        self.assertEqual(filepath("a") / filepath("b"), ab)
+        self.assertEqual(filepath("a") / (filepath("b"), "c"), abc)
+
+        #test chained div
+        self.assertEqual(filepath("a") / filepath("b") / filepath("c"), abc)
+        self.assertEqual(filepath("a") / "b" / filepath("c"), abc)
+
+        #test joinsep .. since it's called by __div__, don't need much separate testing.
+        self.assertEqual(filepath("a").joinsep("b"), ab)
+        self.assertEqual(filepath("a").joinsep("b", "c"), abc)
+
+    #=========================================================
+    #helpers
+    #=========================================================
+
+    def _run_attr_test(self, data):
+        "helper for test_derived / test_components etc"
+        #each row: build filepath(*source), then compare every other
+        #key in the row against the attribute of the same name
+        for row in data:
+            source = row['source']
+            if isinstance(source, (list, tuple)):
+                path = filepath(*source)
+            else:
+                path = filepath(source)
+            for key in row:
+                if key == "path":
+                    self.assertEqual(path, row[key])
+                elif key != "source":
+                    value = getattr(path, key)
+                    real = row[key]
+                    self.assertEqual(value, real, "source=%r attr=%r ... got=%r expected=%r" % (source, key, value, real))
+
+    #=========================================================
+    #eoc
+    #=========================================================
+
+#=========================================================
+#fs interaction
+#=========================================================
+class _InteractionTest(TestCase):
+ "base class for fs interaction tests"
+ #=========================================================
+ #setup/teardown - create a tmp path for each test to use if needed
+ #=========================================================
+    def setUp(self):
+        #paths handed out by create_path(), cleaned up in tearDown
+        self._paths = []
+
+    def tearDown(self):
+        #discard() removes each path if it still exists
+        for path in self._paths:
+            path.discard()
+
+    #=========================================================
+    #path creation funcs
+    #=========================================================
+    def create_path(self):
+        """create new path whose parent dir exists, but it doesn't.
+        automatically cleaned up after test completes"""
+        path = get_tmp_path()
+        assert not path.lexists
+        assert path.parentpath.isdir
+        self._paths.append(path)
+        return path
+
+    def create_noparent(self):
+        "return path whose parent doesn't exist"
+        #base is missing, so base/"notapath" has no existing parent dir
+        base = self.create_path()
+        self.assert_(base.ismissing)
+        path = base / "notapath"
+        self.assert_(path.ismissing)
+        return path
+
+    def create_missing(self):
+        "return path whose parentdir exists, but path doesn't"
+        path = self.create_path()
+        self.assert_(path.parentpath.isdir)
+        return path
+
+    def create_file(self, content="qwerty"):
+        "return path which is a file"
+        #set() writes the content, get() reads it back
+        path = self.create_path()
+        path.set(content)
+        self.assert_(path.isfile and path.get() == content)
+        return path
+
+    def create_parentfile(self):
+        "return path whose parent is a file"
+        base = self.create_file()
+        path = base / "notpath"
+        self.assert_(path.ismissing)
+        return path
+
+    def create_dir(self):
+        "create path which is a dir"
+        path = self.create_path()
+        path.mkdir()
+        self.assert_(path.isdir)
+        return path
+
+ #=========================================================
+ #preset creation & testing funcs (used by move/copy)
+ #=========================================================
+    def create_file_style1(self):
+        #canonical test file used by the move/copy tests
+        return self.create_file("qwerty")
+
+    def check_file_style1(self, path):
+        self.check_file(path, "qwerty", 1)
+
+    def create_dir_style1(self):
+        "create path which is a dir and has content (1 file and 1 dir)"
+        #layout created (links only when the os supports symlinks):
+        # path/
+        #     test.file: qwerty
+        #     test.dir/
+        #         test.txt: hello world
+        #         test.link> test.txt
+        #         broken.link> ../notafile.notthere
+        path = self.create_path()
+        path.mkdir()
+        self.assert_(path.isdir)
+        (path / "test.file").set("qwerty")
+        (path / "test.dir").mkdir()
+        (path / "test.dir" / "test.txt").set("hello world")
+        if os_has_symlinks:
+            (path / "test.dir" / "test.link").mklink("../test.file")
+            (path / "test.dir" / "broken.link").mklink("../notafile.notthere")
+        return path
+
+    def check_dir_style1(self, path, copy_symlinks=True):
+        #verify a copy/move of a style1 dir; copy_symlinks selects whether
+        #links were preserved as links or dereferenced into plain files
+        self.check_dir(path, ['test.file', 'test.dir'])
+        self.check_file(path / "test.file", 'qwerty')
+        if os_has_symlinks:
+            if copy_symlinks:
+                self.check_dir(path / 'test.dir', ['test.txt', 'test.link', 'broken.link'])
+                self.check_file_link(path / 'test.dir' / 'test.link', '../test.file', 'qwerty')
+                self.check_link(path / "test.dir" / "broken.link", "../notafile.notthere", broken=True)
+            else:
+                self.check_dir(path / 'test.dir', ['test.txt', 'test.link'])
+                self.check_file(path / 'test.dir' / 'test.link', 'qwerty')
+        else:
+            self.check_dir(path / 'test.dir', ['test.txt'])
+        self.check_file(path / 'test.dir' / 'test.txt', 'hello world')
+
+ #=========================================================
+ #check funcs - verify properties are reported correctly
+ # for various types of files
+ #=========================================================
+
+ #-----------------------------------------------
+ #missing
+ #-----------------------------------------------
    def check_missing(self, path, link=False):
        """check missing path properties.

        :param link: if True, *path* is expected to be a broken symlink
                     (lexists/islink true, target missing).
        """
        self.assertEqual(path.filetype, "missing")
        self.assertEqual(path.lfiletype, "link" if link else "missing")
        self.assertEqual(path.exists, False)
        self.assertEqual(path.lexists, link)
        self.assertEqual(path.isdir, False)
        self.assertEqual(path.isfile, False)
        self.assertEqual(path.islink, link)
        self.assertEqual(path.ismissing, True)
        self.assertEqual(path.ismount, False)
        # all stat-derived attrs must raise for a missing path
        self.assertAttrRaises(errors.MissingPathError, path, "atime")
        self.assertAttrRaises(errors.MissingPathError, path, "ctime")
        self.assertAttrRaises(errors.MissingPathError, path, "mtime")
        self.assertAttrRaises(errors.MissingPathError, path, "size")
        self.assertAttrRaises(errors.MissingPathError, path, "linecount")
        self.assertAttrRaises(errors.MissingPathError, path, "dircount")
+
+ #-----------------------------------------------
+ #dir
+ #-----------------------------------------------
    def check_empty_dir(self, path, **kwds):
        "check that *path* is a directory with no contents"
        self.check_dir(path, [], **kwds)
+
    def check_dir(self, path, content=None, created=None, link=False):
        """check various dir-readers, given directory path and (correct) contents.

        :param content: expected list of child names, or None to skip content checks
        :param created: optional (tick_start, tick_stop) bounds for ctime/mtime
        :param link: if True, path is expected to be a symlink to a dir
        """

        #check filetypes
        self.assertEqual(path.filetype, "dir")
        self.assertEqual(path.lfiletype, "link" if link else "dir")
        self.assertEqual(path.exists, True)
        self.assertEqual(path.lexists, True)
        self.assertEqual(path.isdir, True)
        self.assertEqual(path.isfile, False)
        self.assertEqual(path.islink, link)
        self.assertEqual(path.ismissing, False)
        self.assertEqual(path.ismount, False)

        #check times
        path.atime #just make sure it works, can't be sure it'll be honored
        path.ctime
        path.mtime
        if created:
            tick_start, tick_stop = created
            self.assert_(tick_start <= path.ctime <= tick_stop)
            self.assert_(tick_start <= path.mtime <= tick_stop)

        #check relative listdir
        if content is not None:
            self.assertElementsEqual(path.listdir(), content)
            self.assertElementsEqual(list(path.iterdir()), content)

        #check full listdir (entries prefixed with path itself)
        if content is not None:
            content = [ path / elem for elem in content ]
            self.assertElementsEqual(path.listdir(full=True), content)
            self.assertElementsEqual(list(path.iterdir(full=True)), content)

        #check other
        path.size #what should this report for various OSes?
        if content is not None:
            self.assertEqual(path.dircount, len(content))
        else:
            self.assert_(path.dircount >= 0)
        path.linecount #NOTE: in future, should raise ExpectedFileError
+
+ #-----------------------------------------------
+ #file
+ #-----------------------------------------------
    def check_file(self, path, content=None, lines=None, created=None, link=False):
        """check file properties.

        :param content: expected raw content, or None to skip content checks
        :param lines: expected line count, or None to only sanity-check it
        :param created: optional (tick_start, tick_stop) bounds for ctime/mtime
        :param link: if True, path is expected to be a symlink to a file
        """

        #check filetypes
        self.assertEqual(path.filetype, "file")
        self.assertEqual(path.lfiletype, "link" if link else "file")
        self.assertEqual(path.exists, True)
        self.assertEqual(path.lexists, True)
        self.assertEqual(path.isdir, False)
        self.assertEqual(path.isfile, True)
        self.assertEqual(path.islink, link)
        self.assertEqual(path.ismissing, False)
        self.assertEqual(path.ismount, False)

        #check times
        path.atime #just make sure it works, can't be sure it'll be honored
        path.ctime
        path.mtime
        if created:
            tick_start, tick_stop = created
            self.assert_(tick_start <= path.ctime <= tick_stop)
            self.assert_(tick_start <= path.mtime <= tick_stop)

        #check other
        if content is None:
            self.assert_(path.size >= 0)
        else:
            self.assertEqual(path.size, len(content))
        self.assertAttrRaises(errors.ExpectedDirError, path, "dircount")
        if lines is not None:
            self.assertEqual(path.linecount, lines)
        else:
            self.assert_(path.linecount >= 0)

        #check content round-trips
        if content is not None:
            self.assertEqual(path.get(), content)
+
+ #-----------------------------------------------
+ #links
+ #-----------------------------------------------
    def check_link(self, path, target=None, broken=None):
        """check symlink properties.

        :param target: expected link target string, or None to skip
        :param broken: True -> expect dangling link; False -> expect live link;
                       None -> don't check either way
        """
        self.assert_(path.lexists)
        self.assertEqual(path.lfiletype, "link")
        if broken:
            # dangling link: link itself exists, but resolves to nothing
            self.assertEqual(path.filetype, "missing")
            self.assert_(path.ismissing)
        elif broken is False:
            self.assertNotEqual(path.filetype, "missing")
            self.assert_(path.exists)

        if broken is False:
            # stat attrs should work through the link
            path.atime
            path.ctime
            path.mtime
            if path.isdir:
                path.dircount
            elif path.isfile:
                path.linecount

        if target is not None:
            self.assertEqual(path.ltarget, target)
+
    def check_file_link(self, path, target, content=None, broken=None):
        "check path is a symlink pointing at *target*, resolving to a file with *content*"
        self.check_link(path, target, broken=broken)
        self.check_file(path, content, link=True)
+
+ #=========================================================
+ #eoc
+ #=========================================================
+
+#TODO: test filepath.walk() - esp followlinks behavior (since it's not available under py25)
+#TODO: test filepath.mode, filepath.modestr
+
class _MoveCopyTest(_InteractionTest):
    "common tests used by both move_to and copy_to"
    copy = False          #: subclasses set True to exercise copy_to() instead of move_to()
    copy_symlinks = True  #: when False, copies pass followlinks=True (symlinks dereferenced)

    def gf(self, path):
        "return the bound method under test (move_to or copy_to) for *path*"
        if self.copy:
            if not self.copy_symlinks:
                return partial(path.copy_to, followlinks=True)
            else:
                return path.copy_to
        else:
            return path.move_to

    def check_output_dir_style1(self, *a, **k):
        "check_dir_style1 wrapper accounting for symlink-dereferencing copies"
        if not self.copy_symlinks:
            k['copy_symlinks'] = False
        return self.check_dir_style1(*a, **k)

    #=========================================================
    #test boundary cases
    #=========================================================
    def test_bad_mode(self):
        "test invalid mode value"
        source = self.create_file()
        target = self.create_path()
        self.assertRaises(ValueError, self.gf(source), target, mode="not a mode")


    #=========================================================
    #test source/target errors common to all modes
    #=========================================================
    def test_source_missing(self):
        "test handling of missing source, for all target types and move modes"
        source = self.create_missing()
        for target in (self.create_noparent(), self.create_missing(), self.create_file(), self.create_dir()):
            for mode in ("exact", "child", "smart"):
                self.assertRaises(errors.MissingPathError, self.gf(source), target, mode=mode)

    def test_target_noparent(self):
        "test handling of target's parent dir being missing, for all source types and move modes"
        target = self.create_noparent()
        for source in (self.create_file(), self.create_dir(), self.create_dir_style1()):
            for mode in ("exact", "child", "smart"):
                self.assertRaises(errors.MissingPathError, self.gf(source), target, mode=mode)

    def test_target_file(self):
        "test handling of file located as target, for all source types and move modes (except child)"
        for mode in ("exact", "smart"):
            for source in (self.create_file(), self.create_dir(), self.create_dir_style1()):
                target = self.create_file()
                #move should raise error
                self.assertRaises(errors.PathExistsError, self.gf(source), target, mode=mode,
                                  __msg__="source=%r mode=%r:" % (source.filetype, mode))
                #but this one should be successful (force=True overwrites target)
                st = source.filetype
                self.gf(source)(target, mode=mode, force=True)
                if self.copy:
                    #copy leaves source intact
                    self.assertEqual(source.filetype, st)
                else:
                    #move removes source
                    self.assertEqual(source.filetype, "missing")
                self.assertEqual(target.filetype, st)

    def test_child_target_file(self):
        "test handling of file located as target, for child mode"
        for source in (self.create_file(), self.create_dir(), self.create_dir_style1()):
            target = self.create_file()
            #child mode requires target to be a dir, so a file target is an error
            self.assertRaises(errors.ExpectedDirError, self.gf(source), target, mode="child",
                              __msg__="source=%r:" % (source.filetype, ))

    #=========================================================
    #exact mode - target is the literal destination path
    #=========================================================
    #source: file, (full) dir
    #target: missing, dir

    def test_exact_file_missing(self):
        #exact mode, file source, missing target -> plain rename/copy
        source = self.create_file_style1()
        target = self.create_missing()

        self.gf(source)(target)
        if self.copy:
            self.check_file_style1(source)
        else:
            self.check_missing(source)
        self.check_file_style1(target)

    def test_exact_file_dir(self):
        #exact mode, file source, dir target -> error unless force=True
        source = self.create_file_style1()
        target = self.create_dir()

        self.assertRaises(errors.PathExistsError, self.gf(source), target)
        self.check_file_style1(source)
        self.check_dir(target, [])

        self.gf(source)(target, force=True)
        if self.copy:
            self.check_file_style1(source)
        else:
            self.check_missing(source)
        self.check_file_style1(target)

    def test_exact_dir_missing(self):
        #exact mode, dir source, missing target -> plain rename/copy
        source = self.create_dir_style1()
        target = self.create_missing()

        self.gf(source)(target)
        if self.copy:
            self.check_dir_style1(source)
        else:
            self.check_missing(source)
        self.check_output_dir_style1(target)

    def test_exact_dir_dir(self):
        #exact mode, dir source, non-empty dir target -> error unless force=True
        source = self.create_dir_style1()
        target = self.create_dir()
        temp = target / 'xxx'
        temp.set("yyy")

        self.assertRaises(errors.PathExistsError, self.gf(source), target)
        self.check_dir_style1(source)
        self.check_dir(target, ['xxx'])

        self.gf(source)(target, force=True)
        if self.copy:
            self.check_dir_style1(source)
            #forced copy merges: pre-existing file survives; remove it so the style1 check passes
            self.check_file(temp, 'yyy'); temp.remove()
        else:
            self.check_missing(source)
        self.check_output_dir_style1(target)

    #=========================================================
    #child mode - source is placed *inside* target dir
    #=========================================================
    #source: file, (full) dir
    #target: missing, dir, dir with source in the way

    def test_child_file_missing(self):
        #child mode requires target dir to exist
        source = self.create_file_style1()
        target = self.create_missing()

        self.assertRaises(errors.MissingPathError, self.gf(source), target, mode="child")
        self.check_file_style1(source)
        self.check_missing(target)

    def test_child_file_dir(self):
        source = self.create_file_style1()
        target = self.create_dir()
        result = target / source.name

        self.gf(source)(target, mode="child")
        if self.copy:
            self.check_file_style1(source)
        else:
            self.check_missing(source)
        self.check_file_style1(result)

    def test_child_file_dir_occupied_file(self):
        #a file already occupies target/source.name
        source = self.create_file_style1()
        target = self.create_dir()
        result = target / source.name
        result.set("xxx")

        self.assertRaises(errors.PathExistsError, self.gf(source), target, mode="child")
        self.check_file_style1(source)
        self.check_dir(target, [source.name])
        self.check_file(result, 'xxx')

        self.gf(source)(target, mode="child", force=True)
        if self.copy:
            self.check_file_style1(source)
        else:
            self.check_missing(source)
        self.check_file_style1(result)

    def test_child_file_dir_occupied_dir(self):
        #a non-empty dir already occupies target/source.name
        source = self.create_file_style1()
        target = self.create_dir()
        result = target / source.name
        result.mkdir()
        temp = result / 'xxx'
        temp.set('yyy')

        self.assertRaises(errors.PathExistsError, self.gf(source), target, mode="child")
        self.check_file_style1(source)
        self.check_dir(target, [source.name])
        self.check_dir(result, ['xxx'])
        self.check_file(temp, 'yyy')

        self.gf(source)(target, mode="child", force=True)
        if self.copy:
            self.check_file_style1(source)
        else:
            self.check_missing(source)
        self.check_file_style1(result)

    def test_child_dir_missing(self):
        source = self.create_dir_style1()
        target = self.create_missing()

        self.assertRaises(errors.MissingPathError, self.gf(source), target, mode="child")
        self.check_dir_style1(source)
        self.check_missing(target)

    def test_child_dir_dir(self):
        source = self.create_dir_style1()
        target = self.create_dir()
        result = target / source.name

        self.gf(source)(target, mode="child")
        if self.copy:
            self.check_dir_style1(source)
        else:
            self.check_missing(source)
        self.check_output_dir_style1(result)

    def test_child_dir_dir_occupied_file(self):
        source = self.create_dir_style1()
        target = self.create_dir()
        result = target / source.name
        result.set('xxx')

        self.assertRaises(errors.PathExistsError, self.gf(source), target, mode="child")
        self.check_dir_style1(source)
        self.check_dir(target, [source.name])
        self.check_file(result, 'xxx')

        self.gf(source)(target, mode="child", force=True)
        if self.copy:
            self.check_dir_style1(source)
        else:
            self.check_missing(source)
        self.check_output_dir_style1(result)

    def test_child_dir_dir_occupied_dir(self):
        source = self.create_dir_style1()
        target = self.create_dir()
        result = target / source.name
        result.mkdir()
        temp = result / 'xxx'
        temp.set('yyy')

        self.assertRaises(errors.PathExistsError, self.gf(source), target, mode="child")
        self.check_dir_style1(source)
        self.check_dir(target, [source.name])
        self.check_dir(result, ['xxx'])
        self.check_file(temp, 'yyy')

        self.gf(source)(target, mode="child", force=True)
        if self.copy:
            self.check_dir_style1(source)
            #forced copy merges: pre-existing file survives; remove it so the style1 check passes
            self.check_file(temp, 'yyy'); temp.remove()
        else:
            self.check_missing(source)
        self.check_output_dir_style1(result)

    #=========================================================
    #smart mode - behaves like "exact" for missing target, "child" for dir target
    #=========================================================
    #source: file, (full) dir
    #target: missing, dir, dir with source in the way

    def test_smart_file_missing(self):
        source = self.create_file_style1()
        target = self.create_missing()

        self.gf(source)(target, mode="smart")
        if self.copy:
            self.check_file_style1(source)
        else:
            self.check_missing(source)
        self.check_file_style1(target)

    def test_smart_file_dir(self):
        source = self.create_file_style1()
        target = self.create_dir()
        result = target / source.name

        self.gf(source)(target, mode="smart")
        if self.copy:
            self.check_file_style1(source)
        else:
            self.check_missing(source)
        self.check_file_style1(result)

    def test_smart_file_dir_occupied_file(self):
        source = self.create_file_style1()
        target = self.create_dir()
        result = target / source.name
        result.set("xxx")

        self.assertRaises(errors.PathExistsError, self.gf(source), target, mode="smart")
        self.check_file_style1(source)
        self.check_dir(target, [source.name])
        self.check_file(result, 'xxx')

        self.gf(source)(target, mode="smart", force=True)
        if self.copy:
            self.check_file_style1(source)
        else:
            self.check_missing(source)
        self.check_file_style1(result)

    def test_smart_file_dir_occupied_dir(self):
        source = self.create_file_style1()
        target = self.create_dir()
        result = target / source.name
        result.mkdir()
        temp = result / 'xxx'
        temp.set('yyy')

        self.assertRaises(errors.PathExistsError, self.gf(source), target, mode="smart")
        self.check_file_style1(source)
        self.check_dir(target, [source.name])
        self.check_dir(result, ['xxx'])
        self.check_file(temp, 'yyy')

        self.gf(source)(target, mode="smart", force=True)
        if self.copy:
            self.check_file_style1(source)
        else:
            self.check_missing(source)
        self.check_file_style1(result)

    def test_smart_dir_missing(self):
        source = self.create_dir_style1()
        target = self.create_missing()

        self.gf(source)(target, mode="smart")
        if self.copy:
            self.check_dir_style1(source)
        else:
            self.check_missing(source)
        self.check_output_dir_style1(target)

    def test_smart_dir_dir(self):
        source = self.create_dir_style1()
        target = self.create_dir()
        result = target / source.name

        self.gf(source)(target, mode="smart")
        if self.copy:
            self.check_dir_style1(source)
        else:
            self.check_missing(source)
        self.check_output_dir_style1(result)

    def test_smart_dir_dir_occupied_file(self):
        source = self.create_dir_style1()
        target = self.create_dir()
        result = target / source.name
        result.set('xxx')

        self.assertRaises(errors.PathExistsError, self.gf(source), target, mode="smart")
        self.check_dir_style1(source)
        self.check_dir(target, [source.name])
        self.check_file(result, 'xxx')

        self.gf(source)(target, mode="smart", force=True)
        if self.copy:
            self.check_dir_style1(source)
        else:
            self.check_missing(source)
        self.check_output_dir_style1(result)

    def test_smart_dir_dir_occupied_dir(self):
        source = self.create_dir_style1()
        target = self.create_dir()
        result = target / source.name
        result.mkdir()
        temp = result / 'xxx'
        temp.set('yyy')

        self.assertRaises(errors.PathExistsError, self.gf(source), target, mode="smart")
        self.check_dir_style1(source)
        self.check_dir(target, [source.name])
        self.check_dir(result, ['xxx'])
        self.check_file(temp, 'yyy')

        self.gf(source)(target, mode="smart", force=True)
        if self.copy:
            self.check_dir_style1(source)
            #forced copy merges: pre-existing file survives; remove it so the style1 check passes
            self.check_file(temp, 'yyy'); temp.remove()
        else:
            self.check_missing(source)
        self.check_output_dir_style1(result)

    #=========================================================
    #eoc
    #=========================================================
+
class MoveTest(_MoveCopyTest):
    """test PathType.move_to():

    for each mode, behavior should be checked for all combinations
    of source over the range of values:
        missing - source missing
        dir - source is a dir
        file - source is a file
    and for target over the range of values:
        pfile - target is missing, parent is not a dir
        noparent - target & parent dir missing
        missing - target missing, parent dir exists
        file - target is file
        dir - target is dir

    exact mode:
                noparent,   missing     file        dir
                pfile

    missing     error       error       error       error

    file        error       success     error       error

    dir         error       success     error       error

    child mode:
                noparent,   missing     file        dir
                pfile

    missing     error       error       error       error

    file        error       error       error       success

    dir         error       error       error       success

    smart mode:
                noparent,   missing     file        dir
                pfile

    missing     error       error       error       error

    file        error       success     error       success

    dir         error       success     error       success

    Thus source missing, target noparent, and target file should always raise an error.
    The others are dependent on the mode.
    Each test is thus named "test_{mode}_{source type}_{target type}"
    """
    _prefix = "PathType.move_to()"  #: label used in test descriptions
    copy = False

    def test_target_in_source(self):
        "test target in source detection"
        #moving a dir into itself (or onto itself) must be rejected up front
        source = self.create_dir()
        target = source / "z"
        self.assertRaises(ValueError, self.gf(source), target)
        self.assertRaises(ValueError, self.gf(source), source)
+
class CopyTest(_MoveCopyTest):
    """test PathType.copy_to() with symlink dereferencing.

    _MoveCopyTest provides the shared move/copy matrix; the copy=True flag
    switches it to copy_to(), and copy_symlinks=False makes gf() pass
    followlinks=True so symlinks are copied as regular files.
    """
    _prefix = "path.copy_to()"
    copy = True
    copy_symlinks = False

    def test_target_in_source(self):
        "test target in source"
        #copying a dir onto itself is an error...
        source = self.create_dir()
        self.assertRaises(ValueError, self.gf(source), source)

        #...but (unlike move) copying into a child path is allowed
        target = source / "z"
        self.gf(source)(target)
        self.check_dir(source, ['z'])
        self.check_dir(target, [])

    def test_force_recursion(self):
        #make sure force flag is preserved during recursion (bug fixed r7192)
        source = self.create_dir_style2()
        target = self.create_dir_style2a()
        source.copy_to(target, force=True)
        self.check_dir_style2(source)
        #target content ('yyy') must have been overwritten by source's ('xxx')
        self.check_dir_style2(target)

    def create_dir_style2(self, content='xxx'):
        "dir structure used by test_force_recursion to detect deep failure of force kwd"
        source = self.create_dir()
        stemp = source / 'test.dir' / 'really'
        stemp.mkdir(parents=True)
        stemp2 = stemp / 'test.txt'
        stemp2.set(content)
        return source

    def check_dir_style2(self, path, content='xxx'):
        "verify the deeply-nested tree built by create_dir_style2()"
        self.check_dir(path, ['test.dir'])
        self.check_dir(path / 'test.dir', ['really'])
        self.check_dir(path / 'test.dir' / 'really', ['test.txt'])
        self.check_file(path / 'test.dir' / 'really' / 'test.txt', content)

    def create_dir_style2a(self):
        "style2 tree with different content, used as a pre-existing copy target"
        return self.create_dir_style2(content='yyy')

    def check_dir_style2a(self, path):
        "verify the tree built by create_dir_style2a()"
        self.check_dir_style2(path, content='yyy')
+
class CopySymTest(_MoveCopyTest):
    """test PathType.copy_to() preserving symlinks.

    Inherits copy_symlinks=True from _MoveCopyTest, so links are copied
    as links (including broken ones) rather than dereferenced.
    """
    _prefix = "path.copy_to(symlinks=True)"
    copy = True
+
+#=========================================================
+#scripts
+#=========================================================
class ScriptTest(_InteractionTest):
    "composite fs interaction tests"

    def test_00_script(self):
        "run through some file creation tests"
        #NOTE: this is one long sequential scenario; each section builds
        # on the state left behind by the previous one.
        base = get_tmp_path()
        self.check_missing(base)
        self._paths.append(base)  #register for cleanup by _InteractionTest

        #--------------------------------------------------------------
        #make a directory
        #--------------------------------------------------------------
        assert base.parentpath.isdir, "parent directory missing"
        tick_start = int(time.time())
        base.mkdir()
        tick_stop = max(tick_start+1, int(time.time()))

        #check fs properties of a directory
        self.check_empty_dir(base, created=[tick_start, tick_stop])
        orig_mtime = base.mtime
        time.sleep(1) #wait a second, so file's mtime will be different

        #--------------------------------------------------------------
        #make a file
        #--------------------------------------------------------------
        fname = "imafile.txt"
        fpath = base / fname
        self.assertEqual(fpath, base + SEP + fname)
        self.assertEqual(fpath.filetype, "missing")
        tick_start = int(time.time())
        with fpath.open(mode="wb") as fh:
            fh.write("123\x00")
        tick_stop = max(tick_start+1, int(time.time()))

        #check we can read it
        self.check_file(fpath, "123\x00", 1, created=[tick_start, tick_stop])

        #check dir properties
        self.assert_(base.mtime > orig_mtime) #xxx: only true if os honors dir_mtime
        self.check_dir(base, [fname])

        #--------------------------------------------------------------
        #truncate a file
        #--------------------------------------------------------------
        fpath.clear()
        self.check_file(fpath, '')

        #--------------------------------------------------------------
        #test get/set - text mode should normalize newlines, binary shouldn't
        #--------------------------------------------------------------
        fpath.set("a\r\nb\nc")
        self.assertEqual(fpath.get(text=True), "a\nb\nc")
        self.assertEqual(fpath.get(), "a\r\nb\nc")

        #--------------------------------------------------------------
        #test remove - second remove of a missing path must raise
        #--------------------------------------------------------------
        self.assert_(fpath.exists)
        fpath.remove()
        self.check_missing(fpath)
        self.assertRaises(errors.MissingPathError, fpath.remove)

        #TODO: test removing a symlink to a dir removes just the link

        #test discard w/o path (discard is remove-if-exists, so no error)
        fpath.discard()

        #test discard w/ path
        fpath.set("")
        self.assert_(fpath.exists)
        fpath.discard()

        #--------------------------------------------------------------
        #test dir's clear - empties the dir without removing it
        #--------------------------------------------------------------
        fpath.set("")
        self.check_dir(base, [fname])
        base.clear()
        self.check_dir(base, [])
        base.clear()  #clearing an already-empty dir is a no-op

        #--------------------------------------------------------------
        #remove base dir
        #--------------------------------------------------------------
        base.remove()
        self.assertEqual(base.filetype, "missing")
        self.assertRaises(errors.MissingPathError, base.remove)
        base.mkdir()
        self.assertEqual(base.filetype, "dir")
        base.discard()
        self.assertEqual(base.filetype, "missing")
        base.discard()  #discard of missing path is a no-op
+
+#=========================================================
+#
+#=========================================================
class ModeTest(TestCase):
    """test chmod support (parse_mode_mask / repr_mode_mask)"""
    #TODO: chmod testing
    #   mode, mode+dirmode, mode+filemode
    #   followlinks
    #   called on file, called on dir


    def test_parse_mode_mask(self):
        #invalid inputs: out-of-range ints/tuples and malformed symbolic strings
        for value in [
            -1,
            1+07777,
            (1+07777, 0),
            (0, 1+07777),
            (457, 3639),
            "x", "u",
            "v=",
            "u=q", "u=g",
            "a+r,o",
        ]:
            self.assertRaises(ValueError, parse_mode_mask, value, __msg__="%r:" % (value, ))

        #valid inputs -> (bits to set, bits to preserve) pairs
        for input, bits, preserve in [
            #integers
            ((0, 0), 0, 0),
            ((0, 123), 0, 123),
            (456, 456, 0),

            #simple
            ("", 0, 07777),
            ("a+", 0, 07777),
            ("a-", 0, 07777),

            ("a=", 0, 0),
            ("u=,g=,o=", 0, 0),
            ("ugo=", 0, 0),

            #random ones
            ("u+r,g=w,o=t", 784, 2240),
            ("u+tr,g=w,o=t", 784, 2240),
        ]:
            result = parse_mode_mask(input)
            self.assertEqual(result, (bits, preserve), "%r:" % (input, ))

    def test_repr_mode_mask(self):
        #each ak() pair is (expected canonical repr, input mask string)
        self.check_function_results(repr_mode_mask, [
            ak("u=w,g=rw", "ug=w,g+r"),
            ak("u=rw,g=r", "ug=rw,g-wx"),
            ak("ug+r,o+r-x", "a+r,o-x"),
            ak("ug=rwx,o=r", "u=rwx,g=rwx,o=r"),
            ak("a=w", "u=w,g=w,o=rwx,a-rx"),
            ak("", ""),
            ak("a=", "a="),

            #octal formatting only possible when the mask is absolute
            ak("0444", "a=r", octal="prefer"),
            ak("0444", "a=r", octal="always"),
            ak("a+r", "a+r", octal="prefer"),

            #real cases
            ak("u=rw,g=r,o=", "u=rw,g=r,o="),
            ak("ug+x", "ug+x"),
            ak("ug=rw,o=", "ug=rw,o="),
            ak("u+x,g+xs", "u+x,g+xs"),
        ])
        #a relative mask can't be rendered in octal form
        self.assertRaises(ValueError, repr_mode_mask, "a+r", octal="always")
+
+##
+## chmod(target, dict(all=all_mode, file=file_mode, dir=dir_mode), recursive=True)
+##
+## #config - app_group can read, root is real owner
+## #NOTE: could make sure parent dirs of home have o+rx
+## prepare(
+## target=[cfg.config_dir, cfg.home_dir],
+## user=root_user, group=app_group,
+## all_mode="u=rw,g=r,o=", dir_mode="ug+x",
+## )
+##
+## #state_dir, cache_dir - app user only!
+## paths = [cfg.state_dir, cfg.run_dir, cfg.cache_dir, log_dir]
+## prepare(paths, app_user, app_group, all_mode="ug=rw,o=", dir_mode="ug+x")
+##
+## #mail dir - owned by app user, but let share_group rw it as well (used by external apps)
+## prepare(cfg.mail_dir, app_user, share_group, all_mode="ug=rw,o=", dir_mode="ug+x,g+s")
+
class ChangeModeTest(_InteractionTest):
    """test recursive chmod() against the style1 directory layout.

    layout used (see create_dir_style1):
        path/
            test.file: qwerty
            test.dir/
                test.txt: hello world
                test.link> test.txt
                broken.link> ../notafile.notthere
    """
    #FIX: this suite was originally declared with ``def`` instead of ``class``,
    # which turned the whole test case into a never-invoked function, so none
    # of these tests ever ran.

    def assert_mode(self, path, mode):
        "assert path's symbolic mode string equals *mode*"
        self.assertEqual(path.modestr, mode)

    def reset_mode(self, path):
        "recursively strip all permission bits under *path*"
        chmod(path, "a=", recursive=True)

    def assert_clear_style1(self, path, reset=False):
        "assert every entry of the style1 tree has mode 'a=' (optionally resetting first)"
        if reset:
            #FIX: originally called self.clear_mode(), which doesn't exist
            self.reset_mode(path)
        am = self.assert_mode
        am(path, "a=")
        am(path / 'test.file', "a=")
        am(path / 'test.dir', 'a=')
        am(path / 'test.dir' / 'test.txt', 'a=')
        if os_has_symlinks:
            am(path / 'test.dir' / 'test.link', 'a=')

    def assert_style1(self, path, dm, fm):
        "assert dirs in the style1 tree have mode *dm* and files mode *fm*"
        am = self.assert_mode
        am(path, dm)
        am(path / 'test.file', fm)
        am(path / 'test.dir', dm)
        am(path / 'test.dir' / 'test.txt', fm)
        if os_has_symlinks:
            am(path / 'test.dir' / 'test.link', fm)

    def test_script1(self):
        am = self.assert_mode

        #create dir to test, with all permission bits cleared
        path = self.create_dir_style1()
        self.assert_clear_style1(path, True)

        #call chmod with some weird params, but no recursion
        #(only the top-level dir should change)
        chmod(path, mode="ug+r,u+w", dirmode="+x", filemode="o+r")
        dm = "u=rwx,g=rx,o=rx"
        fm = "u=rw,g=r,o=rx"
        self.assert_mode(path, dm)
        path.mode = "a="
        self.assert_clear_style1(path)

        #call chmod with some weird params and recursion
        self.assert_clear_style1(path, True)
        chmod(path, mode="ug+r,u+w", dirmode="+x", filemode="o+r", recursive=True)
        self.assert_style1(path, dm, fm)

        #test removal of bits (and addition in the same mask)
        chmod(path, mode="og-x+w", recursive=True)
        dm = "u=rwx,go=rw"
        fm = "u=rw,go=rw"
        self.assert_style1(path, dm, fm)
+
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/tests/test_host.py b/bps/tests/test_host.py
new file mode 100644
index 0000000..9195f86
--- /dev/null
+++ b/bps/tests/test_host.py
@@ -0,0 +1,142 @@
+"""bps3.hosts unittest script.
+
+.. warning::
+ In order to test the host detection properly,
+ this module does some heavy monkeypatching to the ``os`` module.
+ If something goes wrong, it may leave the ``os`` module seriously
+ messed up for the rest of the process' lifetime.
+ By seriously, I mean ``os.name`` reporting ``nt`` when under ``posix`` :)
+
+FIXME: should rewrite this to just run an os-specific set of checks
+for whatever os it's being run under.
+"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+import os.path
+import sys
+from unittest import TestCase
+from logging import getLogger
+from warnings import warn
+import warnings
+#site
+#pkg
+from bps.fs import filepath
+from bps.tests.utils import get_tmp_path, catch_warnings
+#module
+log = getLogger(__name__)
+
+#=========================================================
+#
+#=========================================================
class EnvPathTest(TestCase):
    "test of bps3.host.get_env_path()"
    def setUp(self):
        #force a fresh import of bps.host so backend detection re-runs
        unload_host()
        self.tmp_dir = get_tmp_path()
        self.backup = {}  #original module attrs, restored by tearDown

    def tearDown(self):
        #undo whatever patch_modules() did during the test
        unpatch_modules(self.backup)

    if os.name == "nt":
        def test_nt(self):
            #===============
            #FIXME: patching os module is a really horrible way to make this test happen.
            # should just implement tests which run only on the correct os,
            # and fake the dir structure, etc... that way we don't have to worry about fileops going wrong.
            #===============

            #create nt environ in temp dir
            home = self.tmp_dir / "Documents and Settings" / "User"
            home.ensuredirs()
            (home / "Desktop").ensuredirs()
            (home / "My Documents").ensuredirs()
            config = home / "Application Data"
            config.ensuredirs()
            env = dict(
                USERPROFILE=str(home),
                PATH="c:\\Windows",
                APPDATA=str(config),
                PATHEXT=".wys;.wyg",
            )
            def winver():
                #fake sys.getwindowsversion(): platform id 2 = NT family
                return (0, 0, 0, 2, 0)
            patch_modules(self.backup, **{
                "os": dict(name="nt", environ=env),
                "os.path": dict(pathsep=";", sep="\\"),
                "sys": dict(getwindowsversion=winver),
            })
            self.assertEqual(os.name, "nt")

            #import host, check the windows backend was selected
            from bps import host
            from bps.host.windows import WindowsBackend
            self.assertTrue(isinstance(host._backend, WindowsBackend))
            self.assertEqual(host._backend.profile, "nt")
            self.assertEqual(host.exe_exts, ('.wys', '.wyg'))

            #check env paths
            paths = host.get_env_path("all_paths")
            self.assertEqual(paths.home_dir, home)
            self.assertEqual(paths.state_dir, config)
            self.assertEqual(paths.desktop_dir, home / "Desktop")
            self.assertEqual(paths.docs_dir, home / "My Documents")
            self.assertEqual(paths.start_dir, home / "Desktop")

            #check app paths
            paths = host.get_app_path("xxx", "all_paths")
            self.assertEqual(paths.state_dir, config / "xxx")
            self.assertEqual(paths.cache_dir, config / "xxx" / "cache")
            self.assertEqual(paths.lock_file, config / "xxx" / "xxx.pid")

            #check app paths (nested app name)
            paths = host.get_app_path("xxx/yyy", "all_paths")
            self.assertEqual(paths.state_dir, config / "xxx" / "yyy")
            self.assertEqual(paths.cache_dir, config / "xxx" / "yyy" / "cache")
            self.assertEqual(paths.lock_file, config / "xxx" / "yyy" / "yyy.pid")

            #test legacy funcs
##                message=r"bps\.host: function 'get(Resource|State)Path' is deprecated, use .*",
##                module=r"bps\.tests\.test_host",
            with catch_warnings(record=True) as wmsgs:
                self.assertEqual(host.getResourcePath("home"), home)
                self.assertEqual(host.getResourcePath("desktop"), home / "Desktop")
                self.assertEqual(host.getResourcePath("docs"), home / "My Documents")
                self.assertEqual(host.getResourcePath("start"), home / "Desktop")
                self.assertEqual(host.getStatePath("xxx/yyy.txt"), config / "xxx" / "yyy.txt")
                #TODO: verify we get the right warning msgs back.
                #should be 4 deprecation warnings for getResourcePath,
                # and 1 for getStatePath
                self.assertEqual(len(wmsgs), 5, str(", ".join(str(w) for w in wmsgs)))
+
+#=========================================================
+#helpers
+#=========================================================
def unload_host():
    "drop any cached bps.host module, so the next import re-runs backend detection"
    sys.modules.pop('bps.host', None)
+
def patch_modules(backup, **kwds):
    """monkeypatch attributes onto already-importable modules.

    :param backup:
        dict of ``{module name: {attr: original value}}``; originals are
        recorded here (once per attr) so unpatch_modules() can restore them.
        Attrs that didn't exist are recorded as ``None`` -- NOTE(review):
        restore will then set them to None rather than deleting them.
    :param kwds:
        maps module name -> dict of attrs to set on that module.
    """
    for name, attrs in kwds.iteritems():
        #fromlist forces __import__ to return the submodule itself (e.g. os.path)
        mod = __import__(name, fromlist=['dummy'])
        if name in backup:
            orig = backup[name]
        else:
            orig = backup[name] = {}
        for k, v in attrs.iteritems():
            if k not in orig:
                #only record the first original value, so repeated patches
                #within one test still restore the true original
                orig[k] = getattr(mod, k, None)
            setattr(mod, k, v)
+
def unpatch_modules(backup):
    """restore module attributes recorded by patch_modules().

    :param backup: dict of ``{module name: {attr: original value}}``.
    """
    for name, orig in backup.iteritems():
        mod = __import__(name, fromlist=['dummy'])
        for k, v in orig.iteritems():
            setattr(mod, k, v)
+
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/tests/test_logs.py b/bps/tests/test_logs.py
new file mode 100644
index 0000000..133dfc7
--- /dev/null
+++ b/bps/tests/test_logs.py
@@ -0,0 +1,309 @@
+"""
+bps.logs unittest script -- (c) Assurance Technologies 2003-2006
+defines unit tests for bps's logging package
+"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+import os
+import sys
+import logging
+from cStringIO import StringIO
+from warnings import warn
+import warnings
+#site
+#pkg
+from bps.fs import filepath
+from bps.logs import config as lc, handlers as lh, capture as cp, config_logging, add_handler
+from bps.logs.loggers import get_logger, parse_level_name, get_level_name
+from bps.logs.handlers import WatchedFileHandler, purge_handlers
+from bps.logs import proxy_logger
+from bps.error import types as errors
+#lib
+from bps.tests.utils import ak, get_tmp_path as get_tmp_file, TestCase, capture_logger
+#module
+
+LS = os.linesep
+
+#=========================================================
+#utilities
+#=========================================================
class HelperTest(TestCase):
    "tests for the bps.logs.loggers helper functions"

    def test_get_logger(self):
        "get_logger() should resolve root aliases, dotted names, and Logger objects"
        gl = logging.getLogger

        #test root - None, "" and "<root>" are all accepted aliases for the root logger
        root = gl()
        self.assertIs(get_logger(), root)
        self.assertIs(get_logger(None), root)
        self.assertIs(get_logger(""), root)
        self.assertIs(get_logger("<root>"), root)

        #test this mod
        self.assertIs(get_logger("bps"), gl("bps"))

        #test logger resolution - an existing Logger instance is passed through unchanged
        l = gl("xxx")
        self.assertIs(get_logger(l), l)

    def test_parse_level_name(self):
        "parse_level_name() should accept names (any case), digit strings, and ints"
        #test a few of the levels
        #NOTE: ak(expected, *args) pairs an expected result with the call args
        self.check_function_results(parse_level_name, [
            ak(0, "NOTSET"),
            ak(0, "notset"),
            ak(0, "0"),
            ak(0, 0),

            ak(30, "WARNING"),
            ak(30, "warning"),
            ak(30, "Warn"),
            ak(30, "30"),
            ak(30, 30),

            #levels with no registered name should still parse numerically
            ak(99, "99"),
            ak(99, 99),
        ])

        self.assertRaises(ValueError, parse_level_name, "NotALevel")

    def test_get_level_name(self):
        "get_level_name() should normalize any accepted level spec to its canonical name"
        self.check_function_results(get_level_name, [
            ak("NOTSET", "NOTSET"),
            ak("NOTSET", "notset"),
            ak("NOTSET", "0"),
            ak("NOTSET", 0),

            ak("WARNING", "WARNING"),
            ak("WARNING", "warning"),
            ak("WARNING", "Warn"),
            ak("WARNING", "30"),
            ak("WARNING", 30),

            #unnamed levels come back as their digit string
            ak("99", "99"),
            ak("99", 99),
        ])

        self.assertRaises(ValueError, get_level_name, "NotALevel")
+
+#=========================================================
+#handler tests (startup_msg etc)
+#=========================================================
+
+#---------------------------------------------------
+#watched file handler
+#---------------------------------------------------
class WatchedFileHandlerTest(TestCase):
    "test WatchedFileHandler"
    def setUp(self):
        #get logger, ensure it's free of handlers, and that it will log debug msgs
        self.log = get_logger(__name__ + ".LogsTest.test_file_handler")
        purge_handlers(self.log)
        self.log.setLevel(1)
        #get a tmp file path to work with
        self.path = get_tmp_file()

    def tearDown(self):
        #close=True so the handler releases the tmp file before cleanup
        purge_handlers(self.log, close=True)

    def test_logging(self):
        "handler should flush every record and reopen after truncate/delete/move"
        #NOTE: only because we're reading a file create in 'w' mode
        # do we have to care about os.linesep (aka global var LS).
        # the rest of the logging system uses \n.

        #make sure file is created after handler
        h = lh.WatchedFileHandler(self.path)

        #NOTE(review): .exists / .ismissing read like bps.fs filepath properties,
        # not methods - confirm against bps.fs before relying on this elsewhere
        self.assertTrue(self.path.exists, "log path missing")
        self.assertEquals(self.path.get(), "", "log path not empty")

        self.log.addHandler(h)

        #make sure lines are flushed immediately
        self.log.debug("AAA")
        self.assertEqual(self.path.get(), "AAA" + LS)

        self.log.debug("BBB\n")
        self.assertEqual(self.path.get(), "AAA" + LS + "BBB" + LS + LS)

        #try truncating file - handler should notice and reopen its stream
        s = h.stream
        self.path.set("")
        self.log.debug("CCC\n")
        self.assertNotEqual(h.stream, s, "handler didn't reopen stream")
        self.assertEqual(self.path.get(), "CCC" + LS + LS)

        #try deleting file
        if os.name == "nt":
            #under windows, handler's lock prevents removal
            self.assertRaises(errors.PathInUseError, self.path.remove)
            h.stream.close()
        self.path.remove()
        s = h.stream
        self.log.debug("QQQ\n")
        self.assertNotEqual(h.stream, s, "handler didn't reopen stream")
        self.assertEqual(self.path.get(), "QQQ" + LS + LS)

        #try moving file
        p2 = get_tmp_file()
        assert p2.ismissing
        if os.name == "nt":
            #under windows, handler's lock prevents moving it
            self.assertRaises(errors.PathInUseError, self.path.move_to, p2)
            h.stream.close()
            assert p2.ismissing #todo: perm glitch in move_to's try/except of os.rename allowing p2 to exist after failed move
        self.path.move_to(p2)
        self.assertTrue(self.path.ismissing, "path not moved")
        self.assertEqual(p2.get(), "QQQ" + LS + LS, "move mismatch")

        #check for reopen - new writes must land at the original path, not p2
        s = h.stream
        self.log.debug("SSS")
        self.assertNotEqual(h.stream, s, "handler didn't reopen stream")
        self.assertEqual(p2.get(), "QQQ" + LS + LS, "old path touched")
        self.assertEqual(self.path.get(), "SSS" + LS, "new path not written")

    #NOTE: the shared/solo handler tests below are disabled pending implementation
##    def test_single_shared(self):
##        h = lh.WatchFileHandler(self.path)
##        self.log.addHandler(h)
##
##    def test_single_solo(self):
##        h = lh.WatchFileHandler(self.path, shared=False)
##        self.log.addHandler(h)

##    def test_double_shared(self):
##        h1 = lh.WatchFileHandler(self.path)
##        self.log.addHandler(h1)
##
##        h2 = lh.WatchFileHandler(self.path)
##        self.log.addHandler(h2)
##
##    def test_double_solo(self):
##        h1 = lh.WatchFileHandler(self.path, shared=False)
##        self.log.addHandler(h1)
##
##        h2 = lh.WatchFileHandler(self.path, shared=False)
##        self.log.addHandler(h2)
+
+#=========================================================
+#formatter tests
+#=========================================================
+
+#=========================================================
+#proxy tests
+#=========================================================
#module-level loggers exercised by ProxyLoggerTest:
# proxy_logger.log replaces the module global it was accessed through with a
# real logger on first use (asserted by test_02_alog below); multilog does not.
mod_log = get_logger(__name__)
log = alog = proxy_logger.log
mlog = proxy_logger.multilog

class ProxyLoggerTest(TestCase):
    "test proxy_logger's self-replacing 'log' proxy and the 'multilog' proxy"
    _prefix = "bps.logs.proxy_logger"

    def setUp(self):
        #restore the proxy - a previous test may have let it replace itself
        global log
        log = proxy_logger.log

    def test_00_vars(self):
        "verify initial state is correct"

        #test globals
        g = globals()
        self.assertIs(g['mod_log'], get_logger(__name__))
        self.assertIs(g['log'], proxy_logger.log)
        self.assertIs(g['alog'], proxy_logger.log)
        self.assertIs(g['mlog'], proxy_logger.multilog)

        #test base logger
        with capture_logger(__name__) as logbuf:
            mod_log.warning("this is a test")
        self.assertEqual(logbuf.getvalue(), "bps.tests.test_logs: WARNING: this is a test\n")

        #test nothing changed - using the plain logger must not touch the proxies
        self.assertIs(g['mod_log'], get_logger(__name__))
        self.assertIs(g['log'], proxy_logger.log)
        self.assertIs(g['alog'], proxy_logger.log)
        self.assertIs(g['mlog'], proxy_logger.multilog)

    def test_01_alog(self):
        "test accessing log under alias"

        #test globals
        g = globals()
        g['log'] = None #just to check w/o target to replace
        self.assertIs(g['mod_log'], get_logger(__name__))
        self.assertIs(g['log'], None)
        self.assertIs(g['alog'], proxy_logger.log)
        self.assertIs(g['mlog'], proxy_logger.multilog)

        #test base logger
        with capture_logger(__name__) as logbuf:
            alog.warning("this is a test")
        self.assertEqual(logbuf.getvalue(), "bps.tests.test_logs: WARNING: this is a test\n")

        #test nothing changed
        self.assertIs(g['mod_log'], get_logger(__name__))
        self.assertIs(g['log'], None) #'log' was cleared above and must remain untouched
        self.assertIs(g['alog'], proxy_logger.log)
        self.assertIs(g['mlog'], proxy_logger.multilog)

        #put log back
        g['log'] = proxy_logger.log

        #test base logger
        with capture_logger(__name__) as logbuf:
            alog.warning("this is a test")
        self.assertEqual(logbuf.getvalue(), "bps.tests.test_logs: WARNING: this is a test\n")

        #test log replaced itself
        self.assertIs(g['mod_log'], get_logger(__name__))
        self.assertIs(g['log'], get_logger(__name__)) #should have still replaced itself
        self.assertIs(g['alog'], proxy_logger.log)
        self.assertIs(g['mlog'], proxy_logger.multilog)

    def test_02_alog(self):
        "test accessing log with replacement behavior"
        #NOTE(review): method name says 'alog' but this exercises 'log' directly

        #test globals
        g = globals()
        self.assertIs(g['mod_log'], get_logger(__name__))
        self.assertIs(g['log'], proxy_logger.log)
        self.assertIs(g['alog'], proxy_logger.log)
        self.assertIs(g['mlog'], proxy_logger.multilog)

        #test base logger
        with capture_logger(__name__) as logbuf:
            log.warning("this is a test")
        self.assertEqual(logbuf.getvalue(), "bps.tests.test_logs: WARNING: this is a test\n")

        #test 'log' replaced itself
        self.assertIs(g['mod_log'], get_logger(__name__))
        self.assertIs(g['log'], get_logger(__name__)) #should have replaced itself
        self.assertIs(g['alog'], proxy_logger.log)
        self.assertIs(g['mlog'], proxy_logger.multilog)

    def test_03_mlog(self):
        "test accessing multilog"

        #test globals
        g = globals()
        self.assertIs(g['mod_log'], get_logger(__name__))
        self.assertIs(g['log'], proxy_logger.log)
        self.assertIs(g['alog'], proxy_logger.log)
        self.assertIs(g['mlog'], proxy_logger.multilog)

        #test base logger
        with capture_logger(__name__) as logbuf:
            mlog.warning("this is a test")
        self.assertEqual(logbuf.getvalue(), "bps.tests.test_logs: WARNING: this is a test\n")

        #test nothing changed - unlike 'log', mlog must not replace any global
        self.assertIs(g['mod_log'], get_logger(__name__))
        self.assertIs(g['log'], proxy_logger.log) #mlog access should leave 'log' untouched
        self.assertIs(g['alog'], proxy_logger.log)
        self.assertIs(g['mlog'], proxy_logger.multilog)
+
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/tests/test_logs_apply_config.py b/bps/tests/test_logs_apply_config.py
new file mode 100644
index 0000000..5619a91
--- /dev/null
+++ b/bps/tests/test_logs_apply_config.py
@@ -0,0 +1,190 @@
+"""
+bps.logs.config unittest script -- (c) Assurance Technologies 2003-2006
+defines unit tests for the execution half of bps's logging config handling
+"""
+
+#=========================================================
+#imports
+#=========================================================
+#core
+import logging
+import warnings
+import sys
+import time
+from cStringIO import StringIO
+#pkg
+from bps.error import types as errors
+from bps.logs import config_logging
+from bps.logs.loggers import get_logger
+from bps.logs.config import LoggingConfig
+from bps.logs.handlers import purge_handlers
+from bps.meta import Params as ak
+#lib
+from bps.tests.utils import TestCase, get_tmp_path
+#local
+
+#=========================================================
+#
+#=========================================================
+
+#=========================================================
+#
+#=========================================================
class ConfigTest(TestCase):
    "tests for config_logging()'s runtime (application) behavior"

    def tearDown(self):
        #make sure to release any captures that were set
        config_logging(capture_stderr=False, capture_stdout=False, capture_warnings=False)

    def test_missing_file(self):
        "test config_logging() handles missing path"
        log_path = get_tmp_path()
        #default error policy reports failure by returning False...
        self.assert_(config_logging(log_path) is False)
        #...while errors="raise" escalates to an exception instead
        self.assertRaises(errors.MissingPathError, config_logging, log_path, errors="raise")

    def test_placeholder(self):
        "test reset_loggers doesn't choke on placeholders"
        #NOTE: this was an observed bug...
        # we reset once, add a logger which ensures a placeholder ('xxx') is created,
        # and then make sure reset_loggers doesn't choke on the placeholder.
        log_path = get_tmp_path() #NOTE(review): unused in this test - likely leftover
        config_logging(
            reset_loggers=True,
            levels="xxx.yyy=DEBUG",
        )
        config_logging(
            reset_loggers=True,
        )

    def test_sample1(self):
        "kwd-style config w/ FileHandler + stderr capture should produce the expected file"
        #configure logging system
        log_path = get_tmp_path()
        config_logging(
            level="WARNING",
            levels="bps.tests.test_logs_fake=DEBUG; bps.tests.test_logs=INFO",
            default_handler=dict(
                klass="FileHandler",
                args=(log_path, ),
                formatter="std-file",
                startup_msg=True,
            ),
            capture_stderr=True,
        )

##        h = logging.StreamHandler(self.log_buffer)
##        h.formatter = logging.Formatter("[%(name)s %(levelname)s] %(message)s")

        #log some messages
        #NOTE(review): if the wall-clock second ticks over between here and the
        # log calls, the timestamp comparison below can flake - confirm acceptable
        cur = time.strftime("%Y-%m-%d %H:%M:%S")
        get_logger().warning("test message")
        get_logger("myapp").info("can't see me")
        get_logger("myapp").warning("can see me")
        get_logger("bps.tests.test_logs").debug("shouldn't display")
        get_logger("bps.tests.test_logs").info("should display")
        sys.stderr.write("hello stderr!\n")
        sys.stderr.flush()

        #detach the handler so the file is flushed & closed before we read it
        root = get_logger()
        for h in root.handlers[:]:
            if getattr(h, "baseFilename", None) == log_path:
                root.removeHandler(h)
                h.flush()
                h.close()

        #now check what was written
        self.assertEqual(log_path.get(), """\
[%(cur)s INF ] --- LOGGING STARTED %(cur)s ---
[%(cur)s WRN root] test message
[%(cur)s WRN myapp] can see me
[%(cur)s INF bps.tests.test_logs] should display
[%(cur)s RAW sys.stderr] unmanaged logging output:
 hello stderr!
 \n \n\n""" % dict(cur=cur))

    def test_sample2(self):
        "outputs={name: dict(handlers=...)} style config should route to the custom handler"
        name = __name__ + ".test_logger"
        log = get_logger(name)
        log.setLevel(99)
        buffer = StringIO()
        #NOTE: this makes sure outputs:handlers works
        config_logging(
            levels={name:"WARNING"},
            outputs={name: dict(handlers=['custom'], propagate=False)},
            handlers=dict(custom=dict(klass='StreamHandler', args=(buffer,), formatter='custom')),
            formatters=dict(custom=dict(format="%(name)s: %(levelname)s: %(message)s")),
        )
        log.warning("test")
        self.assertEqual(buffer.getvalue(), name + ": WARNING: test\n")

    def test_sample3(self):
        "loggers={name: dict(...)} style config should behave the same as sample2"
        name = __name__ + ".test_logger"
        log = get_logger(name)
        log.setLevel(99)
        #NOTE: this makes sure loggers:outputs works
        buffer = StringIO()
        config_logging(
            loggers={name: dict(level="WARNING", outputs=['custom'], propagate=False)},
            handlers=dict(custom=dict(klass='StreamHandler', args=(buffer,), formatter='custom')),
            formatters=dict(custom=dict(format="%(name)s: %(levelname)s: %(message)s")),
        )
        log.warning("test")
        self.assertEqual(buffer.getvalue(), name + ": WARNING: test\n")
+
+#=========================================================
+#eof
+#=========================================================
+
+"""
+
+config logging use cases found in the wild...
+=============================================
+
+medicred
+--------
+logs.config_logging(path)
+ medicred.cfg
+ debug.cfg
+
+logging.cfg files found in the wild...
+======================================
+[logging:levels]
+##<root> = DEBUG
+<root> = WARNING
+##thumbs = DEBUG
+##thumbs.common = DEBUG
+thumbs.zoom = DEBUG
+
+---------------
+[logging:options]
+capture_warnings = True
+warning_fmt = %(category)s:\n\t message: %(message)s\n\tfilename: %(filename)s\n\t lineno: %(lineno)s
+
+[logging:levels]
+<root> = WARNING
+------------
+[medicred:debug]
+site=cgmcn
+
+[logging:levels]
+<root> = WARNING
+imports = DEBUG
+bps3.base = DEBUG
+##sqlalchemy.engine = INFO
+##gwrap.windows.stack = DEBUG
+
+##automigrate = DEBUG
+medicred.migration = DEBUG
+##uif.list_control = DEBUG
+##gwrap.simple_dialogs.select_dialogs = DEBUG
+uif.mailclient = DEBUG
+
+gwrap.misc.simple_list_model = DEBUG
+
+medicred.client.report_dialog = DEBUG
+reporting = DEBUG
+
+medicred.build_client = INFO
+medicred.build_backend = INFO
+
+"""
diff --git a/bps/tests/test_logs_capture.py b/bps/tests/test_logs_capture.py
new file mode 100644
index 0000000..9dbe412
--- /dev/null
+++ b/bps/tests/test_logs_capture.py
@@ -0,0 +1,306 @@
+"""
+bps.logs.capture unittest script -- (c) Assurance Technologies 2003-2006
+defines unit tests for bps's stdio redirection package
+"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+import os
+import sys
+from logging import getLogger
+import logging
+from cStringIO import StringIO
+from warnings import warn
+import warnings
+#site
+#pkg
+from bps.fs import filepath
+from bps.logs import config as lc, handlers as lh, capture as cp, config_logging, add_handler
+from bps.logs.handlers import WatchedFileHandler, purge_handlers
+from bps.error import types as errors
+#lib
+from bps.tests.utils import get_tmp_path as get_tmp_file, TestCase
+#module
+log = getLogger(__name__)
+
+#=========================================================
+#capture tests
+#=========================================================
class _StdCaptureTest(TestCase):
    "shared tests for capturing a sys stdio stream; subclasses set ``name``"
    #name of the sys attribute under test ("stdout" or "stderr"); set by subclass
    name = None

    def setUp(self):
        #make sure capturing was never on
        orig_stream, cp_proxy, cp_orig = self.get3()
        if cp_orig:
            raise RuntimeError, "stream capturing has been enabled by test harness"
        if cp_proxy:
            #code doesn't want this cleared normally, but we do
            setattr(cp, "_proxy_" + self.name, None)

        #setup the logger & a buffer for it
        self.log = getLogger("sys." + self.name)
        self.log.propagate = 0
        purge_handlers(self.log)
        self.log.setLevel(1)
        self.log_buffer = StringIO()
        h = logging.StreamHandler(self.log_buffer)
        h.formatter = logging.Formatter("[%(name)s %(levelname)s] %(message)s")
        #expected prefixes for captured-raw vs direct-debug records
        self.raw_prefix = "[sys.%s RAW] " % self.name
        self.debug_prefix = "[sys.%s DEBUG] " % self.name
        self.log.addHandler(h)

        #intercept sys stream so writes land in a buffer we can inspect
        self.orig_stream = self.get()
        self.buffer = StringIO()
        self.set(self.buffer)

    def tearDown(self):
        purge_handlers(self.log)
        self.set(self.orig_stream)

    def get(self):
        "return the current sys.<name> stream"
        return getattr(sys, self.name)

    def get3(self):
        "return (current stream, capture module's proxy, capture module's saved original)"
        return self.get(), getattr(cp, "_proxy_" + self.name), getattr(cp, "_orig_" + self.name)

    def set(self, value):
        "replace sys.<name> with *value*"
        setattr(sys, self.name, value)

    def test_logger(self):
        "test that basic logger setup works"
        self.check_empty()
        self.log.debug("TESTING")
        self.check_and_empty("", self.debug_prefix + "TESTING\n")

    def test_capture_release(self):
        "test capture/release cycle with some writing"
        #test capture & write
        self.capture(True)
        self.wt()

        #test release & write
        self.release()
        self.wt2()

        #test re-capturing & write
        self.capture(False)
        self.wt()

        #release
        self.release()

    def capture(self, first):
        "enable capturing"
        #check it's not being captured
        self.assert_(not getattr(cp, "check_" + self.name)())

        #verify buffers
        c, p, o = self.get3()
        self.assertEqual(c, self.buffer)
        if first:
            self.assertEqual(p, None)
        else:
            #the proxy is kept around from the previous capture cycle
            self.assertTrue(isinstance(p, cp.StreamWrapper))
            self.assertEqual(p.name, "sys." + self.name)
        self.assertEqual(o, None)

        #enable capturing
        config_logging(**{"capture_" + self.name: True})

        #check it's being captured
        self.assert_(getattr(cp, "check_" + self.name)())

        #recheck streams - sys stream is now the proxy, our buffer saved in cp
        c, p, o = self.get3()
        self.assertEqual(c, p)
        self.assertTrue(isinstance(p, cp.StreamWrapper))
        self.assertEqual(o, self.buffer)

    def release(self):
        "release capturing"
        #check it's being captured
        self.assert_(getattr(cp, "check_" + self.name)())

        #check streams
        c, p, o = self.get3()
        self.assertEqual(c, p)
        self.assertTrue(isinstance(p, cp.StreamWrapper))
        self.assertEqual(o, self.buffer)

        #release streams
        config_logging(**{"capture_" + self.name: False})

        #check release
        self.assert_(not getattr(cp, "check_" + self.name)())

        #check buffers - original restored, proxy retained, saved slot cleared
        c, p, o = self.get3()
        self.assertEqual(c, self.buffer)
        self.assertTrue(isinstance(p, cp.StreamWrapper))
        self.assertEqual(o, None)

    def wt(self):
        "test that writes are being captured"
        buf = self.get() #stdout, using proxy
        self.check_empty()

        #try a flushed write...
        buf.write("ABCDEF\n")
        cp.flush_buffers()
        self.check_and_empty("", self.raw_prefix + "unmanaged logging output:\nABCDEF\n\n\n")

        #try a flush forced by logging
        buf.write("QRSTVUVE\n")
        self.log.debug("XXX")
        self.check_and_empty("", self.raw_prefix +
            "unmanaged logging output:\nQRSTVUVE\n\n\n" + self.debug_prefix + "XXX\n")

    def wt2(self):
        "test that we're writing to buffer, not being captured"
        self.check_empty()
        self.get().write("ABC\n")
        self.check_and_empty("ABC\n", "")

    def check_and_empty(self, buffer, log_buffer):
        "assert both buffers hold exactly the given content, then reset them"
        self.assertEqual(self.buffer.getvalue(), buffer)
        if buffer:
            self.buffer.reset(); self.buffer.truncate()

        self.assertEqual(self.log_buffer.getvalue(), log_buffer)
        if log_buffer:
            self.log_buffer.reset(); self.log_buffer.truncate()

        self.check_empty()

    def check_empty(self):
        "assert both buffers are empty"
        self.assertEqual(self.buffer.getvalue(), "")
        self.assertEqual(self.log_buffer.getvalue(), "")
+
class StdOutCaptureTest(_StdCaptureTest):
    "run the shared capture tests against sys.stdout"
    _prefix = "capture stdout"
    name = "stdout"
+
class StdErrCaptureTest(_StdCaptureTest):
    "run the shared capture tests against sys.stderr"
    _prefix = "capture stderr"
    name = "stderr"
+
class WarningCaptureTest(TestCase):
    "test redirecting the warnings module's output into the logging system"
    _prefix = "capture warnings"

    def setUp(self):
        #make sure the harness hasn't already enabled warning capture
        if cp._orig_showwarning:
            raise RuntimeError, "capture warnings enabled"
        self.orig_stderr = sys.stderr
        self.orig_format = warnings.formatwarning
        #install a predictable format so uncaptured warnings are easy to match
        def fmt(message, category, filename, lineno, line=None):
            filename = filepath(filename)
            return "[%s %s] %s" % (filename.root, category.__name__, message)
        warnings.formatwarning = fmt

        sys.stderr = self.err_buffer = StringIO()

        #setup the logger & a buffer for it
        self.log_buffer = StringIO()
        self.log = getLogger("sys.warnings")
        self.log.setLevel(1)
        add_handler(self.log.name,
            klass='StreamHandler',
            args=(self.log_buffer,),
            formatter=dict(fmt="[%(name)s %(levelname)s] %(message)s"),
            propagate=False, add=False,
        )
        #NOTE: add_handler() replaces the manual setup kept below for reference
##        self.log.propagate = 0
##        purge_handlers(self.log)
##        h = logging.StreamHandler(self.log_buffer)
##        h.formatter = logging.Formatter("[%(name)s %(levelname)s] %(message)s")
##        self.log.addHandler(h)

        self.warning_path = filepath(__file__).abspath

    def tearDown(self):
        #restore stderr / warning hooks and release any active capture
        sys.stderr = self.orig_stderr
        warnings.formatwarning = self.orig_format
        cp.release_warnings()
        purge_handlers(self.log)

    def test_logger(self):
        "test basic logger behavior"
        self.check_empty()
        self.log.debug("TESTING")
        self.check_and_empty("", "[sys.warnings DEBUG] TESTING\n")

    def test_capture_release(self):
        "warnings should hit stderr normally, and the logger while captured"
        modname = __name__.rsplit(".", 1)[1]
        wp = "[" + modname + " UserWarning] " #prefix when written to stderr
        lp = "[sys.warnings WARNING] UserWarning:\n\tmessage: " #prefix when logged

        #check before capture
        self.check_empty()
        warn("XXX YYY")
        self.check_and_empty(wp + "XXX YYY", "")

        #check w/ capture
        self.capture()
        warn("ABDDEF")
        self.check_and_empty("", lp + "ABDDEF\n")

        #check after release
        self.release()
        warn("QRSIUT")
        self.check_and_empty(wp + "QRSIUT", "")

        #check re-capture
        self.capture()
        warn("ASIDUAASDADS")
        self.check_and_empty("",lp + "ASIDUAASDADS\n")

        #check re-release
        self.release()
        warn("XXXXXXX")
        self.check_and_empty(wp + "XXXXXXX", "")

    def capture(self):
        "enable warning capture via config_logging() and verify hooks installed"
        self.assert_(not cp.check_warnings())
        self.check_empty()
        self.assertEqual(cp._orig_showwarning, None)
        config_logging(
            capture_warnings=True,
            warning_target="sys.warnings", #override normal redirection so we see it
            warning_fmt = "%(category)s:\n\tmessage: %(message)s" #override normal format so we can test for it
        )
        self.assert_(cp.check_warnings())
        self.assertNotEqual(cp._orig_showwarning, None)
        self.check_empty()

    def release(self):
        "disable warning capture and verify hooks removed"
        self.assert_(cp.check_warnings())
        self.check_empty()
        self.assertNotEqual(cp._orig_showwarning, None)
        config_logging(
            capture_warnings=False,
        )
        self.assertEqual(cp._orig_showwarning, None)
        self.check_empty()
        self.assert_(not cp.check_warnings())

    def check_and_empty(self, err_buffer, buffer):
        "assert both buffers hold exactly the given content, then reset them"
        self.assertEqual(self.log_buffer.getvalue(), buffer, "capture buffer:")
        if buffer:
            self.log_buffer.reset(); self.log_buffer.truncate()

        self.assertEqual(self.err_buffer.getvalue(), err_buffer, "stderr buffer:")
        if err_buffer:
            self.err_buffer.reset(); self.err_buffer.truncate()
        self.check_empty()

    def check_empty(self):
        "assert both buffers are empty"
        self.assertEqual(self.log_buffer.getvalue(), "", "capture buffer:")
        self.assertEqual(self.err_buffer.getvalue(), "", "stderr buffer:")
+
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/tests/test_logs_parse_config.py b/bps/tests/test_logs_parse_config.py
new file mode 100644
index 0000000..e6e4725
--- /dev/null
+++ b/bps/tests/test_logs_parse_config.py
@@ -0,0 +1,537 @@
+"""
+bps.logs.config unittest script -- (c) Assurance Technologies 2003-2006
+defines unit tests for the parsing half of bps's logging config handling
+"""
+from __future__ import with_statement
+#=========================================================
+#imports
+#=========================================================
+import logging
+import warnings
+import sys
+from bps.fs import filepath
+from bps.logs import config
+from bps.meta import Params
+ak = Params
+from bps.logs.config import LoggingConfig
+from bps.tests.utils import TestCase, catch_warnings, capture_logger
+from bps.error.types import MissingPathError, InputError
+
+#=========================================================
+#
+#=========================================================
class HelperTest(TestCase):
    "tests for bps.logs.config's string-parsing helpers"

    def test_parse_output_value(self):
        "parse_output_value() should split handler lists and |key=value options"
        #NOTE: ak(expected, *args) pairs an expected result with the call args
        self.check_function_results(config.parse_output_value, [
            ak(dict(outputs=[]), ""),
            ak(dict(outputs=[]), ",|"),
            ak(dict(outputs=['a', 'b']), "a,b"),
            ak(dict(outputs=['a', 'b'], add=True), "a,b|add=True"),
            ak(dict(outputs=['a', 'b'], propagate=True), "a,b|propagate=True"),
            ak(dict(outputs=['a', 'b'], propagate=False), "a,b|propagate=False"),
        ])

    def test_parse_dict_string(self):
        "parse_dict_string() should split k=v pairs on the given separator"
        self.check_function_results(config.parse_dict_string, [
            ak(dict(a="1", b="2"), "a=1;b=2", ";"),
            ak(dict(a="1", d="2"), "a=1 #blah\nd=2\n\n\n#blah","\n", strip_comments=True),
            ak(dict(a="1", b="2"), "a=1,b=2", ","),
            ak(dict(a="1", b="2 #comment"), " a=1 , b = 2 #comment", ","),
            ak(dict(a="1", b="2; 3=4"), "a= 1,b = 2; 3=4", ","),
        ])
        #without strip_comments, the bare "#blah" element should be rejected
        self.assertRaises(ValueError, config.parse_dict_string, "a=1 #blah\nd=2\n\n\n#blah","\n")
        #ValueError: unexpected element in string
        # caused by "#blah" element

    def test_splitcomma(self):
        "splitcomma() should drop empty elements and strip whitespace"
        self.check_function_results(config.splitcomma, [
            ak(['a', 'b'], "a,b"),
            ak(['a', 'b'], "a, b"),
            ak(['a', 'b'], " a, b"),
            ak(['a', 'b'], "a,, b ,,, , , , "),
            ak(['a', 'b'], "a,, \n b ,,, , , , "),
            ak(['a', 'b', ';'], "a,, \n b ,,, ; , , , "),
        ])
+
+#=========================================================
+#parsing tests
+#=========================================================
+class ParseConfigTest(TestCase):
+
+ #=========================================================
+ #test failures
+ #=========================================================
+ def test_missing_file(self):
+ "test parse_config()'s missing file behavior"
+
+ #test file-not-found w/ errors='log'
+ path = self.sample1_path + "_file_does_not_exist.ini"
+ with capture_logger("bps.logs.config") as logbuf:
+ result = config.parse_config(path)
+ self.assertIs(result, None)
+ self.assertEqual(logbuf.getvalue(), """\
+bps.logs.config: ERROR: config file not found: filename=%r
+""" % path)
+
+ #test file-not-found w/ errors='raise'
+ self.assertRaises(MissingPathError, config.parse_config, path, errors="raise")
+ #error: no such file or directory
+
+ def test_empty_data(self):
+
+ with capture_logger("bps.logs.config") as logbuf:
+ result = config.parse_config("\n")
+ self.assertIs(result, None)
+ self.assertEqual(logbuf.getvalue(), """\
+bps.logs.config: WARNING: couldn't determine logging config format: stype='raw'
+""")
+
+ self.assertRaises(InputError, config.parse_config, "", errors="raise")
+
+ #=========================================================
+ #samples
+ #=========================================================
+ sample1 = r"""[logging:options]
+capture_warnings = True
+warning_fmt = %(category)s:\n\t message: %(message)s\n\tfilename: %(filename)s\n\t lineno: %(lineno)s
+reset_loggers = True
+not_an_option = ignored options should be ignored
+
+[logging:levels]
+<root> = WARNING
+"""
+ sample1_path = filepath(__file__, "..", "_logs_parse_config_sample1.ini").abspath
+
+ def test_sample1(self, data=None):
+ """test small compact-ini sample"""
+ if data is None:
+ data = self.sample1
+
+ #check it parses correctly (and emits warning about not_an_option)
+ with catch_warnings(record=True) as log:
+ warnings.filterwarnings("always")
+ c = config.parse_config(data)
+ self.assertEquals(len(log), 1)
+ self.assertWarningEquals(log[0],
+ message="unknown logging:options key encountered: 'not_an_option'",
+ filename=__file__,
+ )
+
+ self.assertConfigEquals(c, dict(
+ loggers={"":dict(level=30)},
+ options=dict(
+ capture_warnings=True,
+ reset_loggers=True,
+ warning_fmt='%(category)s:\n\t message: %(message)s\n\tfilename: %(filename)s\n\t lineno: %(lineno)s'
+ ),
+ ))
+
+ #examine w/ readers
+ self.assert_(c.get_option("capture_warnings"))
+ self.assert_(c.get_option("reset_loggers"))
+ self.assert_(c.get_option("reset_handlers"))
+
+ #change it
+ c.set_level('x', "DEBUG")
+ c.set_level('y', "NotSET")
+ del c.options['reset_loggers']
+
+ #try reparsing
+ c2 = config.parse_config(c)
+
+ #should be diff objects
+ assert c2 is not c
+ assert c2.loggers is not c.loggers
+
+ #but values should have parsed properly
+ self.assertConfigEquals(c2, dict(
+ loggers={"":dict(level=30), "x":dict(level=10), "y":dict(level=0)},
+ options=dict(
+ capture_warnings=True,
+ warning_fmt='%(category)s:\n\t message: %(message)s\n\tfilename: %(filename)s\n\t lineno: %(lineno)s'
+ ),
+ ))
+
+ #examine w/ readers
+ self.assert_(c.get_option("capture_warnings"))
+ self.assert_(not c.get_option("reset_loggers"))
+ self.assert_(not c.get_option("reset_handlers"))
+
+ def test_sample1a(self):
+ self.test_sample1(self.sample1_path)
+
+ #------------------------------------------------
+ #
+ #------------------------------------------------
+ def test_sample2(self):
+ "test large compact-ini sample"
+ data = r"""
+
+[logging:options]
+capture_stdout = false
+capture_warnings = true
+warning_fmt = %(category)s: %(message)s
+
+[logging:levels]
+<root> = INFO
+myapp = DEBUG
+pylons = WARNING
+
+[logging:output]
+<root> = console | add=True
+
+[logging:outputs]
+myapp = syslog | propagate=False
+mylib = syslog
+
+[logging:handler:console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+startup_msg = True
+
+[logging:handler:syslog]
+class=handlers.SysLogHandler
+level=ERROR
+formatter=generic
+args=(('localhost', handlers.SYSLOG_UDP_PORT), handlers.SysLogHandler.LOG_USER)
+
+[logging:handler:syslog2]
+class=handlers.SysLogHandler
+level=ERROR
+formatter=generic
+args=(('localhost', handlers.SYSLOG_UDP_PORT), level=handlers.SysLogHandler.LOG_USER)
+
+[logging:formatter:generic]
+format = %(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S #simple date format
+
+"""
+ #check it parses correctly
+ with catch_warnings(record=True) as log:
+ warnings.filterwarnings("always")
+ c = config.parse_config(data)
+ self.assertEquals(len(log), 1)
+ self.assertWarningEquals(log[0],
+ message=r"'logging:output' is deprecated, use 'logging:outputs' instead",
+ filename=__file__,
+ )
+
+ self.assertConfigEquals(c, dict(
+ loggers={
+ "": dict(level=20,
+ outputs=['console'],
+ add=True,
+ propagate=True,
+ ),
+ "myapp": dict(
+ level=10,
+ outputs=['syslog'],
+ propagate=False,
+ ),
+ 'pylons': dict(level=30),
+ 'mylib': dict(
+ outputs=['syslog'],
+ propagate=True,
+ ),
+ },
+ formatters={
+ "generic": dict(
+ format="%(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s",
+ datefmt="%H:%M:%S #simple date format",
+ ),
+ },
+ handlers={
+ "console": dict(
+ klass=logging.StreamHandler,
+ args=Params(sys.stderr),
+ level="NOTSET",
+ formatter="generic",
+ startup_msg=True,
+ ),
+ "syslog": dict(
+ klass=logging.handlers.SysLogHandler,
+ level="ERROR",
+ formatter="generic",
+ args=Params(('localhost', logging.handlers.SYSLOG_UDP_PORT), logging.handlers.SysLogHandler.LOG_USER),
+ ),
+ "syslog2": dict(
+ klass=logging.handlers.SysLogHandler,
+ level="ERROR",
+ formatter="generic",
+ args=Params(('localhost', logging.handlers.SYSLOG_UDP_PORT),
+ level=logging.handlers.SysLogHandler.LOG_USER
+ ),
+ ),
+ },
+ options=dict(
+ capture_warnings=True,
+ capture_stdout=False,
+ warning_fmt='%(category)s: %(message)s',
+ ),
+ ))
+
+ #------------------------------------------------
+ #
+ #------------------------------------------------
    def test_sample3(self):
        "test large stdlib-ini sample"
        # sample in the stdlib logging.config fileConfig() INI format
        # ([loggers]/[handlers]/[formatters] key lists plus logger_*/
        # handler_*/formatter_* sections); parse_config() must translate
        # it into the native LoggingConfig layout checked below.
        data = r"""

[loggers]
keys=root,my_app,mylib,pylons

[handlers]
keys=console,syslog

[formatters]
keys=generic

[logger_root]
level = INFO
handlers = console

[logger_my_app]
level = DEBUG
qualname=myapp
handlers = syslog
propagate = 0

[logger_mylib]
handlers = syslog
qualname = mylib

[logger_pylons]
level = WARNING
qualname = pylons

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
startup_msg = True

[handler_syslog]
class=handlers.SysLogHandler
level=ERROR
formatter=generic
args=(('localhost', handlers.SYSLOG_UDP_PORT), handlers.SysLogHandler.LOG_USER)

[formatter_generic]
format = %(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S #simple date format

"""
        #check it parses correctly
        c = config.parse_config(data)
        # expectations: symbolic levels resolved to ints (INFO=20,
        # DEBUG=10, WARNING=30), "handlers" lists become "outputs",
        # handler class strings resolved to actual classes, args strings
        # evaluated into Params, and (per this fixture) stdlib-style
        # configs imply disable_existing_loggers=True.
        # NOTE the expected datefmt keeps the trailing "#simple date
        # format" text -- inline comments are not stripped from values.
        self.assertConfigEquals(c, dict(
            loggers={
                "": dict(level=20,
                    outputs=['console'],
                ),
                "myapp": dict(
                    level=10,
                    outputs=['syslog'],
                    propagate=False,
                ),
                'pylons': dict(level=30, outputs=[], propagate=True),
                'mylib': dict(
                    outputs=['syslog'],
                    propagate=True,
                ),
            },
            formatters={
                "generic": dict(
                    format="%(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s",
                    datefmt="%H:%M:%S #simple date format",
                ),
            },
            handlers={
                "console": dict(
                    klass=logging.StreamHandler,
                    args=Params(sys.stderr),
                    level="NOTSET",
                    formatter="generic",
                ),
                "syslog": dict(
                    klass=logging.handlers.SysLogHandler,
                    level="ERROR",
                    formatter="generic",
                    args=Params(('localhost', logging.handlers.SYSLOG_UDP_PORT), logging.handlers.SysLogHandler.LOG_USER),
                ),
            },
            options=dict(disable_existing_loggers=True),
        ))
+
+ #------------------------------------------------
+ #
+ #------------------------------------------------
+ def test_sample4(self):
+ data = dict(
+ levels="<root>=WARNING; myapp=DEBUG",
+ capture_warnings=True,
+ warning_fmt='%(category)s:\n\t message: %(message)s\n\tfilename: %(filename)s\n\t lineno: %(lineno)s',
+ default_handler="console",
+ )
+
+ #check it parses correctly
+ c = config.parse_config(**data)
+ self.assertConfigEquals(c, dict(
+ loggers={"":dict(level=30, outputs=['console']), "myapp": dict(level=10)},
+ options=dict(
+ capture_warnings=True,
+ warning_fmt='%(category)s:\n\t message: %(message)s\n\tfilename: %(filename)s\n\t lineno: %(lineno)s',
+ ),
+ ))
+
+ #------------------------------------------------
+ #
+ #------------------------------------------------
    def test_sample5(self):
        # kwd-style config exercising: inline comments inside the levels
        # string, an anonymous default handler given as a dict, handler
        # specs with string class names / string args, and two formatters.
        data = dict(
            levels="<root>=WARNING #my comment \n #another comment \n myapp=DEBUG",
            capture_warnings=True,
            warning_fmt='%(category)s:\n\t message: %(message)s\n\tfilename: %(filename)s\n\t lineno: %(lineno)s',
            default_handler=dict(klass=logging.StreamHandler),
            handlers=dict(
                console=dict(
                    klass='StreamHandler',
                    args='sys.stderr,',
                    level="NOTSET",
                    formatter="generic",
                    startup_msg=True,
                ),
## console="""
##class = StreamHandler
##args = (sys.stderr,)
##level = NOTSET
##formatter = generic
##startup_msg = True
##""",
                syslog=dict(
                    klass="handlers.SysLogHandler",
                    level="ERROR",
                    formatter="generic",
                    args=(('localhost', logging.handlers.SYSLOG_UDP_PORT), logging.handlers.SysLogHandler.LOG_USER),
                ),
            ),
            formatters=dict(
                generic=dict(
                    format="%(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s",
                    datefmt="%H:%M:%S",
                ),
## generic="""
##format = %(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
##datefmt = %H:%M:%S
##""",
                alt=dict(
                    fmt = "%(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s",
                    datefmt = "%H:%M:%S",
                )
            )
        )

        #check it parses correctly
        # the anonymous default handler should be registered under
        # LoggingConfig.DEFAULT_HANDLER_NAME, and the root logger's
        # outputs should point at it.
        c = config.parse_config(**data)
        self.assertConfigEquals(c, dict(
            loggers={
                "":dict(level=30, outputs=[LoggingConfig.DEFAULT_HANDLER_NAME]),
                "myapp":dict(level=10),
            },
            options=dict(
                capture_warnings=True,
                warning_fmt='%(category)s:\n\t message: %(message)s\n\tfilename: %(filename)s\n\t lineno: %(lineno)s',
            ),
            handlers={
                LoggingConfig.DEFAULT_HANDLER_NAME: {"klass": logging.StreamHandler},
                "console": dict(
                    klass=logging.StreamHandler,
                    args=Params(sys.stderr,),
                    level="NOTSET",
                    formatter="generic",
                    startup_msg=True,
                ),
                "syslog": dict(
                    klass=logging.handlers.SysLogHandler,
                    level="ERROR",
                    formatter="generic",
                    args=Params(('localhost', logging.handlers.SYSLOG_UDP_PORT), logging.handlers.SysLogHandler.LOG_USER),
                ),
            },
            formatters={
                "generic": dict(
                    format="%(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s",
                    datefmt="%H:%M:%S",
                ),
                "alt": dict(
                    fmt="%(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s",
                    datefmt="%H:%M:%S",
                ),
            },
        ))
+
+ #------------------------------------------------
+ #
+ #------------------------------------------------
+ def test_sample6(self):
+ name = __name__ + ".test_logger"
+ buffer = sys.stderr
+ c = config.parse_config(
+ levels={name:"WARNING"},
+ outputs={name: dict(handlers=['custom'], propagate=False)},
+ handlers=dict(custom=dict(klass='StreamHandler', args=(buffer,), formatter='custom')),
+ formatters=dict(custom=dict(format="%(name)s: %(levelname)s: %(message)s")),
+ )
+
+ self.assertConfigEquals(c, dict(
+ loggers={
+ name: dict(
+ level=30,
+ outputs=['custom'],
+ propagate=False,
+ )
+ },
+ handlers=dict(
+ custom=dict(
+ klass=logging.StreamHandler,
+ args=Params(buffer),
+ formatter='custom',
+ ),
+ ),
+ formatters=dict(
+ custom=dict(
+ format="%(name)s: %(levelname)s: %(message)s"
+ ),
+ ),
+ ))
+
+ #=========================================================
+ #helpers
+ #=========================================================
+ def assertConfigEquals(self, config, test):
+ assert isinstance(config, LoggingConfig)
+ for k in ("options", "loggers", "formatters", "handlers"):
+ msg = k + ":"
+ real = getattr(config, k)
+ correct = test.setdefault(k, {})
+ if real == correct:
+ continue
+ if set(real.keys()) != set(correct.keys()):
+ self.assertEqual(real, correct, msg) #force error
+ for sk in real:
+ left = real.get(sk)
+ right = correct.get(sk)
+ if left != right:
+ self.assertEqual(left, right, "%s[%r]:" % (k, sk, ))
+ raise RuntimeError
+
+ #=========================================================
+ #eoc
+ #=========================================================
+
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/tests/test_meta.py b/bps/tests/test_meta.py
new file mode 100644
index 0000000..68b6b6a
--- /dev/null
+++ b/bps/tests/test_meta.py
@@ -0,0 +1,678 @@
+"""bps.meta unittest script"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+import sys
+import time
+import os.path
+#package
+from bps import meta
+ak = Params = meta.Params
+#local
+from bps.tests.utils import TestCase
+#=========================================================
+#
+#=========================================================
+##
+## #interfaces
+## 'isseq', 'isnum', 'isstr',
+##
+## #introspection & monkeypatching
+## 'is_overridden',
+## 'find_attribute',
+#### 'get_module',
+#### 'get_module_exports',
+## 'instrument_super',
+##
+## #other decorators
+## 'abstract_method', 'abstract_property', 'AbstractMethodError',
+## 'decorate_per_instance',
+
class MonkeypatchTest(TestCase):
    """tests for meta.monkeypatch() against classes, instances, and attrs.

    ``target`` is a one-element list used as a mutable cell so the
    patched functions can record their side effects for inspection.
    """

    def test_monkeypatch_class(self):
        "test monkeypatch against a class"
        #create test class
        target = [0]
        class Testum(object):
            def abc(self, x):
                target[0] = x
                return x

            xyz = abc

        t = Testum()

        #reset target
        target[0] = 1
        self.assertEqual(target[0], 1)

        #test base method
        self.assertEqual(t.abc(5), 5)
        self.assertEqual(target[0], 5)

        #reset target
        target[0] = 1
        self.assertEqual(target[0], 1)

        #patch the class -- decorator installs this func as Testum.abc,
        #replacing the original; existing instances see the new method
        @meta.monkeypatch(Testum)
        def abc(self, x):
            target[0] = 2*x
            return 3*x

        #check patch worked
        self.assertEqual(t.abc(10), 30)
        self.assertEqual(target[0], 20)

        #reset target
        target[0] = 1
        self.assertEqual(target[0], 1)

        #try patch with wrapping -- wrap=True passes the original method
        #as the first argument, so orig(self, 100) stores 100 and returns
        #100; 30*100 == 3000
        @meta.monkeypatch(Testum, wrap=True)
        def xyz(orig, self, x):
            return 30*orig(self, 10*x)

        #check patch worked
        self.assertEqual(t.xyz(10), 3000)
        self.assertEqual(target[0], 100)

    def test_monkeypatch_object(self):
        "test monkeypatch against an object"
        #create test object
        target = [0]
        class Testum(object):
            def abc(self, x):
                target[0] = x
                return x

            xyz = abc

            v = 1

        t = Testum()
        s = Testum()

        #reset target
        target[0] = 1
        self.assertEqual(target[0], 1)

        #test base method
        self.assertEqual(t.abc(5), 5)
        self.assertEqual(target[0], 5)

        #reset target
        target[0] = 1
        self.assertEqual(target[0], 1)

        #patch the class
        #NOTE: instance-level patches take no 'self' -- the function is
        #bound to the single instance 't' only
        @meta.monkeypatch(t)
        def abc(x):
            target[0] = 2*x
            return 3*x

        #check patch worked
        self.assertEqual(t.abc(10), 30)
        self.assertEqual(target[0], 20)

        #reset target
        target[0] = 1
        self.assertEqual(target[0], 1)

        #try patch with wrapping
        @meta.monkeypatch(t, wrap=True)
        def xyz(orig, x):
            return 30*orig(10*x)

        #check patch worked
        self.assertEqual(t.xyz(10), 3000)
        self.assertEqual(target[0], 100)

        #-------------------------
        #check patching didn't affect class or other instances

        #reset target
        target[0] = 1
        self.assertEqual(target[0], 1)

        #test base method
        self.assertEqual(s.abc(5), 5)
        self.assertEqual(target[0], 5)

    def test_monkeypatch_attr(self):
        "test monkeypatch of plain (non-callable) attr in non-decorator mode"
        #check patching a attribute (non-decorator mode)
        class Testum(object):
            v = 1
        t = Testum()
        s = Testum()
        self.assertEqual(t.v, 1)
        #explicit attr name; value 2 is installed on instance 't' only
        meta.monkeypatch(t, 'v')(2)
        self.assertEqual(t.v, 2)
        self.assertEqual(s.v, 1)

    def test_monkeypatch_clobber(self):
        "test clobber=False refuses to overwrite existing attrs"
        class Testum(object):
            def f(self):
                pass
        f = Testum.__dict__['f']

        def g():
            pass

        #'g' doesn't exist on Testum yet, so clobber=False allows it
        meta.monkeypatch(Testum, clobber=False)(g)
        self.assertIs(Testum.__dict__['g'], g)

        #'f' already exists -> must raise, leaving the original in place
        self.assertRaises(AttributeError, meta.monkeypatch(Testum, attr="f", clobber=False), g)
        self.assertIs(Testum.__dict__['f'], f)
+
class MonkeypatchMixinTest(TestCase):
    "tests for meta.monkeypatch_mixin()"

    def test_separate_class(self):
        "test monkeypatch of external mixin"

        # A --+--> B
        #     |
        # M --/
        #
        # M should be shadowed by A and B

        class Base(object):
            x = 'a' #shadowed by Child
            y = 'a' #not shadowed by Child

        class Child(Base):
            x = 'b'

        #sanity-check the starting MRO
        self.assertEqual(Child.x, 'b')
        self.assertEqual(Child.y, 'a')
        self.assertEqual(getattr(Child, 'm', None), None)

        class Mixin(object):
            x = 'm' #shadowed by Base & Child
            y = 'm' #shadowed by Base
            m = 'm' #only provided by the mixin

        #decorator must hand back the mixin class itself
        self.assertIs(meta.monkeypatch_mixin(Child)(Mixin), Mixin)

        #mixin attrs show through only where nothing else defines them
        self.assertEqual(Child.x, 'b')
        self.assertEqual(Child.y, 'a')
        self.assertEqual(Child.m, 'm')

    def test_separate_class_first(self):
        "test monkeypatch of external mixin"

        # M --+--> B
        #     |
        # A --/
        #
        # M should shadow A, be shadowed by B

        class Base(object):
            x = 'a' #shadowed by Mixin & Child
            y = 'a' #shadowed by Mixin

        class Child(Base):
            x = 'b'

        self.assertEqual(Child.x, 'b')
        self.assertEqual(Child.y, 'a')
        self.assertEqual(getattr(Child, 'm', None), None)

        class Mixin(object):
            x = 'm' #still shadowed by Child itself
            y = 'm' #now shadows Base
            m = 'm'

        #first=True inserts the mixin ahead of the existing bases
        self.assertIs(meta.monkeypatch_mixin(Child, first=True)(Mixin), Mixin)

        self.assertEqual(Child.x, 'b')
        self.assertEqual(Child.y, 'm')
        self.assertEqual(Child.m, 'm')

    def test_subclass(self):
        "if patching subclass, should be noop"

        # A --> M --> B

        class Base(object):
            x = 'a' #shadowed by Child
            y = 'a' #not shadowed

        class Mixin(Base):
            x = 'm'
            y = 'm'
            m = 'm'

        class Child(Mixin):
            x = 'b'

        before = (Child.x, Child.y, Child.m)
        self.assertEqual(before, ('b', 'm', 'm'))

        #mixin is already in the MRO -> decorator is a no-op
        self.assertIs(meta.monkeypatch_mixin(Child)(Mixin), Mixin)

        self.assertEqual((Child.x, Child.y, Child.m), before)

    def test_wrong_subclass(self):
        "patching parent should be error"

        # A --> B --> M

        class Base(object):
            x = 'a' #shadowed by Child
            y = 'a' #not shadowed

        class Child(Base):
            x = 'b'

        self.assertEqual(Child.x, 'b')
        self.assertEqual(Child.y, 'a')
        self.assertEqual(getattr(Child, 'm', None), None)

        class Mixin(Child):
            x = 'm'
            y = 'm'
            m = 'm'

        #mixing a subclass back into its own parent must be rejected
        self.assertRaises(TypeError, meta.monkeypatch_mixin(Child), Mixin)
+
class ParamsTest(TestCase):
    "test bps.meta.Params class"
    #NOTE: 'ak' (module-level alias for Params) is used to build the
    #input/output rows for check_function_results() below.

    def test_misc(self):
        "repr() should render constructor-style"
        self.assertEqual(repr(Params(1, 2, x=1)), "Params(1, 2, x=1)")

    def test_constructor(self):
        "test Params()"
        #test constructor -- positional args land in .args (a list),
        #keywords in .kwds (a dict)
        p1 = Params()
        self.assertEqual(p1.args, [])
        self.assertEqual(p1.kwds, {})

        p2 = Params(1, 2)
        self.assertEqual(p2.args, [1, 2])
        self.assertEqual(p2.kwds, {})

        p3 = Params(1, 2, x=1, y=2)
        self.assertEqual(p3.args, [1, 2])
        self.assertEqual(p3.kwds, dict(x=1, y=2))

        p4 = Params(x=1, y=2)
        self.assertEqual(p4.args, [])
        self.assertEqual(p4.kwds, dict(x=1, y=2))

    def test_clone(self):
        "clone() should copy args/kwds, not share them"
        p1 = Params(1,2, x=1, y=2)

        p2 = p1.clone()
        self.assertIsNot(p2.args, p1.args)
        self.assertEquals(p1.args, [1,2])
        self.assertEquals(p2.args, [1,2])

        self.assertIsNot(p2.kwds, p1.kwds)
        self.assertEquals(p1.kwds, dict(x=1,y=2))
        self.assertEquals(p2.kwds, dict(x=1,y=2))

    def test_clone_mutate(self):
        "clone(*a, **k) should append args / merge kwds into the copy only"
        p1 = Params(1,2, x=1, y=2)

        p2 = p1.clone(3,y=3,z=4)
        self.assertIsNot(p2.args, p1.args)
        self.assertEquals(p1.args, [1,2])
        self.assertEquals(p2.args, [1,2,3])

        self.assertIsNot(p2.kwds, p1.kwds)
        self.assertEquals(p1.kwds, dict(x=1,y=2))
        self.assertEquals(p2.kwds, dict(x=1,y=3,z=4))

    def test_clear(self):
        "clear() should empty args/kwds in place (same objects)"
        p1 = Params(1, 2, x=1, y=2)

        a = p1.args
        k = p1.kwds
        self.assertEqual(a, [1,2])
        self.assertEqual(k, dict(x=1,y=2))

        p1.clear()
        #identity preserved -- outstanding references see the cleared state
        self.assertIs(p1.args, a)
        self.assertIs(p1.kwds, k)
        self.assertEqual(a, [])
        self.assertEqual(k, dict())

    def test_eq(self):
        "equality should compare args & kwds by value"
        p1 = Params(1, 2, x=1, y=2)

        p2 = Params(1, 2, x=1, y=2)
        self.assertIsNot(p2, p1)
        self.assertEqual(p2, p1)

        #each of these differs from p1 (and each other) in one element
        p3 = Params(1, 2, x=1)
        p4 = Params(1, 2, x=1, y=3)
        p5 = Params(1, x=1, y=2)
        p6 = Params(1, 3, x=1, y=2)
        choices = [p1, p3, p4, p5, p6]
        for c1 in choices:
            for c2 in choices:
                if c1 is c2:
                    self.assertEqual(c1, c2)
                else:
                    self.assertNotEqual(c1, c2)

        #comparison against a non-Params value
        self.assertNotEqual(None, p1)

    def test_parse(self):
        "test Params.parse() constructor"
        #each row is ak(expected_result, input_string, **parse_kwds);
        #'scope' supplies names referenced inside the parsed string
        self.check_function_results(Params.parse, [
            ak(Params(), ""),
            ak(Params(1, 2), "1,2"),
            ak(Params(1, 2), "(1,2)"),
            ak(Params((1, 2)), "(1,2),"),
            ak(Params((1, 5), 5), "(1,y),y", scope=dict(y=5)),
            ak(Params(1, 2), "(x,2)", scope=dict(x=1)),
            ak(Params((1, 2), z=3), "(1,2),z=3"),
            ak(Params((1, 2), z=3), "(1,2),z=3,"),
            ak(Params(a=1, b=2), "a=1,b=2"),
            ak(Params(1, a=2, b=3), "1,a=2,b=3"),
        ])

    #TODO: test evil scope behavior, see if we can lock it down somehow.
    #except restricted-python seems to be a frequently attempted pipedream,
    #so fixing it would probably require waiting.
    #we _could_ add a "safe" flag to parse,
    #which simply prevents any syntaxes (eg complex exprs) that we can't lock down.

    def test_render(self):
        "render()/str() should emit a call-style argument string"
        results = [
            ak("", Params()),
            ak("1, 2", Params(1, 2)),
            ak("(1, 2)", Params((1, 2))),
            ak("(1, 5), 5", Params((1, 5), 5)),
            ak("(1, 2), z=3", Params((1, 2), z=3)),
            ak("a=1, b=2", Params(a=1, b=2)),
            ak("1, a=2, b=3", Params(1, a=2, b=3)),
        ]
        self.check_function_results(lambda x: x.render(), results)
        self.check_function_results(lambda x: str(x), results)

    def test_render_offset(self):
        "render(1) should skip the first positional arg"
        results = [
            ak("2", Params(1, 2)),
            ak("", Params((1, 2))),
            ak("5", Params((1, 5), 5)),
            ak("z=3", Params((1, 2), z=3)),
            ak("a=2, b=3", Params(1, a=2, b=3)),

            #NOTE: it hasn't been decided as to whether offsets
            # which goes past end of positional args should be allowed to implicitly return empty tuple,
            # or raise an error (the former behavior is what we currently have)
            ak("", Params()),
            ak("a=1, b=2", Params(a=1, b=2)),
        ]
        self.check_function_results(lambda x: x.render(1), results)

    def test_render_class(self):
        "render_class() should prefix args with the dotted class path"
        class Test(object):
            pass
        cls = meta.SingleSuperProperty
        obj = cls(Test)

        p = Params()
        self.assertEquals(p.render_class(cls),"bps.meta.SingleSuperProperty()")

        #passing an instance should render the same as its class
        p = Params(1,2,a='a')
        self.assertEquals(p.render_class(cls),"bps.meta.SingleSuperProperty(1, 2, a='a')")
        self.assertEquals(p.render_class(obj),"bps.meta.SingleSuperProperty(1, 2, a='a')")


    # check x.args is list, and can be edited directly
    # check x.kwds is dict, and can be edited directly
    # check x.normalize
    # check x[int] and x[str]

    def test_append(self):
        "append() should extend args / update kwds in place"
        p = Params(1,2,3,a='a',b='b')

        p.append()
        self.assertEqual(p.args,[1,2,3])
        self.assertEqual(p.kwds,dict(a='a',b='b'))

        p.append(b='bb',c='c')
        self.assertEqual(p.args,[1,2,3])
        self.assertEqual(p.kwds,dict(a='a',b='bb',c='c'))

        p.append(4,c='cc')
        self.assertEqual(p.args,[1,2,3,4])
        self.assertEqual(p.kwds,dict(a='a',b='bb',c='cc'))

        #duplicate positional values are preserved, not deduped
        p.append(5,1,3)
        self.assertEqual(p.args,[1,2,3,4,5,1,3])
        self.assertEqual(p.kwds,dict(a='a',b='bb',c='cc'))

        #None is a legal keyword value
        p.append(c='c2',d=None)
        self.assertEqual(p.args,[1,2,3,4,5,1,3])
        self.assertEqual(p.kwds,dict(a='a',b='bb',c='c2',d=None))

    def test_append_modified(self):
        "append_modified() should skip kwds equal to the default (None)"
        p = Params(1,2,3,a='a',b='b')

        p.append_modified({})
        self.assertEqual(p.args,[1,2,3])
        self.assertEqual(p.kwds,dict(a='a',b='b'))

        #d=None matches the implicit default -> dropped; c=1 kept
        p.append_modified(dict(c=1,d=None))
        self.assertEqual(p.args,[1,2,3])
        self.assertEqual(p.kwds,dict(a='a',b='b',c=1))

        #with default=2: c=2 is dropped, d=None now differs -> kept;
        #existing c=1 stays untouched
        p.append_modified(dict(c=2,d=None), default=2)
        self.assertEqual(p.args,[1,2,3])
        self.assertEqual(p.kwds,dict(a='a',b='b',c=1,d=None))

    def test_insert(self):
        "insert(pos, ...) should splice args at pos / update kwds"
        p = Params(1,2,3,a='a',b='b')

        p.insert(2)
        self.assertEqual(p.args,[1,2,3])
        self.assertEqual(p.kwds,dict(a='a',b='b'))

        #keywords ignore the position argument
        p.insert(2,b='bb',c='c')
        self.assertEqual(p.args,[1,2,3])
        self.assertEqual(p.kwds,dict(a='a',b='bb',c='c'))

        p.insert(2,4,c='cc')
        self.assertEqual(p.args,[1,2,4,3])
        self.assertEqual(p.kwds,dict(a='a',b='bb',c='cc'))

        p.insert(4,5,1,3)
        self.assertEqual(p.args,[1,2,4,3,5,1,3])
        self.assertEqual(p.kwds,dict(a='a',b='bb',c='cc'))
+
class MiscTest(TestCase):
    "tests for bps.meta module-lookup & signature-introspection helpers"

    def test_lookup_module_files(self):
        "test lookup_module() against known py files"
        lookup_module = meta.lookup_module
        #os (stdlib) and meta (this package) are both plain .py modules;
        #NOTE: removed an unused 'import bps' -- meta already comes from bps
        for module in (os, meta):
            path = module.__file__
            self.assertEquals(lookup_module(path), module)
            self.assertEquals(lookup_module(path, name=True), module.__name__)

    def test_lookup_module_packages(self):
        "test lookup_module() against known packages"
        lookup_module = meta.lookup_module
        import bps as module
        path = module.__file__
        #both the package's __init__ file and its directory should resolve
        #(renamed local from 'dir', which shadowed the builtin)
        pkg_dir = os.path.dirname(path)
        self.assertEquals(lookup_module(path), module)
        self.assertEquals(lookup_module(path, name=True), module.__name__)
        self.assertEquals(lookup_module(pkg_dir), module)
        self.assertEquals(lookup_module(pkg_dir, name=True), module.__name__)

    if os.name in ("posix", "nt"):
        def test_lookup_module_compiled(self):
            "test lookup_module() against a compiled extension"
            lookup_module = meta.lookup_module

            if os.name == "nt":
                name = "select" #known to be a .pyd under nt
            else:
                assert os.name == "posix"
                name = "audioop" #known to be a .so under linux

            #test module isn't already in use, allowing us to remove it at will
            #TODO: could remember & restore state
            self.assert_(name not in sys.modules)

            #import module & test compiled-module handling
            module = __import__(name)
            path = module.__file__
            self.assert_(os.path.splitext(path)[1] in meta._cmod_exts)
            self.assertEquals(lookup_module(path), module)
            self.assertEquals(lookup_module(path, name=True), name)

            #now test no detection if module not loaded
            del sys.modules[name]
            self.assertEquals(lookup_module(path), None)
            self.assertEquals(lookup_module(path, name=True), None)

    def test_func_accepts_key(self):
        "test func_accepts_key() against plain funcs, **kwds funcs, classes"
        def check(f, k, r=True):
            self.assertEqual(meta.func_accepts_key(f, k), r)

        #check positional-arg func
        def f(a, b):
            pass
        check(f, 'a')
        check(f, 'b')
        check(f, 'c', False)
        check(f, ['a', 'b'])
        check(f, ['a', 'c'], False)


        #check func with keyword defaults
        def f(a=None, b=None):
            pass
        check(f, 'a')
        check(f, 'b')
        check(f, 'c', False)
        check(f, ['a', 'b'])
        check(f, ['a', 'c'], False)

        #check **kwds func -- should accept any key
        def f(**k):
            pass
        check(f, 'a')
        check(f, 'b')
        check(f, 'c')
        check(f, ['a', 'b'])
        check(f, ['a', 'c'])

        #check class -- should inspect __init__, skipping 'self'
        class f(object):
            def __init__(self, a, b=None):
                pass
        check(f, "self", False) #first arg shouldn't count
        check(f, 'a')
        check(f, 'b')
        check(f, 'c', False)
        check(f, ['a', 'b'])
        check(f, ['a', 'c'], False)
+
class FallbackMethodTest(TestCase):
    "tests class_property, fallback_method, fallback_property"
    #NOTE: these tests rely on Python 2 bound-method attributes
    #(im_self / im_class / im_func) and old-style classes.

    def test_class_property(self):
        class Test:

            @meta.class_property
            def test(*a, **k):
                return 1, a, k
        #accessed via class or instance, the wrapped func receives the
        #class (not the instance) as its sole positional argument
        self.assertEquals(Test.test, (1, (Test,),{}))
        test = Test()
        self.assertEquals(test.test, (1, (Test,),{}))

    def test_fallback_property(self):
        class Test:

            @meta.fallback_property
            def test(*a, **k):
                return 1, a, k
        #receives (instance-or-None, class): None when accessed via class
        self.assertEquals(Test.test, (1, (None, Test),{}))
        test = Test()
        self.assertEquals(test.test, (1, (test, Test),{}))

    #only defined when the runtime still has old-style classes
    if meta._classobj:
        def test_classobj(self):
            class Test:

                @meta.fallback_method
                def test(*a, **k):
                    return a,k

            self.assert_(isinstance(Test, meta._classobj))

            func = Test.__dict__['test'].im_func

            #unbound access: im_self is None, underlying func preserved
            a = Test.test
            self.assertIs(a.im_self, None)
            self.assertIs(a.im_class, Test)
            self.assertIs(a.im_func, func)

            #old-style classes produce a fresh method object per access
            self.assertIsNot(Test.test, a)

            #calls receive (None, Test) prepended to the caller's args
            self.assertEquals(a(), ((None, Test),{}))
            self.assertEquals(a(1,2), ((None, Test,1,2),{}))
            self.assertEquals(a(1,2,x=1), ((None, Test,1,2),{'x':1}))

            self._check_instance(Test)

    def test_class(self):
        class Test(object):

            @meta.fallback_method
            def test(*a, **k):
                return a,k

        func = Test.__dict__['test'].im_func

        a = Test.test
        self.assertIs(a.im_self, None)
        self.assertIs(a.im_class, Test)
        self.assertIs(a.im_func, func)

        #new-style class access is cached -- same method object each time
        self.assertIs(Test.test, a)

        self.assertEquals(a(), ((None, Test),{}))
        self.assertEquals(a(1,2), ((None, Test,1,2),{}))
        self.assertEquals(a(1,2,x=1), ((None, Test,1,2),{'x':1}))

        self._check_instance(Test)

    def _check_instance(self, Test):
        "shared checks for instance-level access of a fallback_method"
        func = Test.__dict__['test'].im_func
        test = Test()
        a = test.test

        self.assertIs(a.im_self, test)
        self.assertIs(a.im_class, Test)
        self.assertIs(a.im_func, func)

        #bound method is cached on the instance itself
        self.assertIs(test.test, a)
        self.assertIs(test.__dict__['test'], a)

        #calls receive (instance, class) prepended
        self.assertEquals(a(), ((test, Test),{}))
        self.assertEquals(a(1,2), ((test, Test,1,2),{}))
        self.assertEquals(a(1,2,x=1), ((test, Test,1,2),{'x':1}))
+
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/tests/test_misc.py b/bps/tests/test_misc.py
new file mode 100644
index 0000000..8bf550f
--- /dev/null
+++ b/bps/tests/test_misc.py
@@ -0,0 +1,179 @@
+"""tests for bps.misc -- (c) Assurance Technologies 2009"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+#site
+#pkg
+from bps import misc
+#module
+from bps.tests.utils import TestCase
+#=========================================================
+#
+#=========================================================
class PropertyTest(TestCase):
    "test various property constructors"

    def test_indirect_property(self):
        "indirect_property should dispatch through the *named* getter/setter"
        class Test(object):

            test = misc.indirect_property("tget", "tset")

            x = 1
            def tget(self):
                return self.x
            def tset(self, value):
                self.x = value

        class Test2(Test):
            def tget(self):
                return self.x*2

            def tset(self, value):
                self.x = -value

        #test direct works
        t = Test()
        self.assertEqual(t.x, 1)
        self.assertEqual(t.test, 1)
        t.test = 3
        self.assertEqual(t.x, 3)
        self.assertEqual(t.test, 3)

        #test subclass override works (dispatch is late-bound by name)
        t2 = Test2()
        self.assertEqual(t2.x, 1)
        self.assertEqual(t2.test, 2)
        t2.test = 3
        self.assertEqual(t2.x, -3)
        self.assertEqual(t2.test, -6)

        #test instance override works
        t3 = Test2()
        t3.tget = lambda : t3.x+.5
        self.assertEqual(t3.x, 1)
        self.assertEqual(t3.test, 1.5)

    def test_constructor_property(self):
        "constructor_property should lazily build & cache its value"
        class Test(object):
            test = misc.constructor_property(dict)

        #test class view
        self.assertIsInstance(Test.test, misc.constructor_property)

        #check initial construction
        t = Test()
        d = t.test
        self.assertIsInstance(d, dict)

        #check we get same attr next time
        self.assertIs(t.test, d)

        #check overwrite works
        e = [2]
        t.test = e
        self.assertIs(t.test, e)

        #check delete causes re-creation
        del t.test
        recreated = t.test
        self.assertIsInstance(recreated, dict)
        self.assertIsNot(d, recreated)

    def test_constructor_property_passref(self):
        "passref=True should pass the owning instance to the factory"
        #FIX: the original rebound the factory name ('f') with the
        #recreated value below, shadowing it; locals renamed for clarity.
        def make_ref(obj):
            return [obj]
        class Test(object):
            test = misc.constructor_property(make_ref, passref=True)

        #test class view
        self.assertIsInstance(Test.test, misc.constructor_property)

        #check initial construction -- factory received the instance
        t = Test()
        d = t.test
        self.assertIsInstance(d, list)
        self.assertEquals(d, [t])

        #check we get same attr next time
        self.assertIs(t.test, d)

        #check overwrite works
        e = [2]
        t.test = e
        self.assertIs(t.test, e)

        #check delete causes re-creation (equal value, new object)
        del t.test
        recreated = t.test
        self.assertIsInstance(recreated, list)
        self.assertIsNot(recreated, d)
        self.assertEquals(recreated, d)

    def test_class_property(self):
        "class_property should always bind to the class, never the instance"
        class Test(object):
            x = 1

            @misc.class_property
            def test(self):
                return self, self.x

        #make sure it works as class property
        self.assertEquals(Test.x, 1)
        self.assertEquals(Test.test, (Test, 1))

        #make sure it doesn't return instance
        t = Test()
        self.assertEquals(t.x, 1)
        self.assertEquals(t.test, (Test, 1))

        #make sure it reads from the class, not instance
        t.x = 2
        self.assertEquals(t.x, 2)
        self.assertEquals(t.test, (Test, 1))
+
+#=========================================================
+#
+#=========================================================
+
class ParseAgentTest(TestCase):
    "smoke-tests for misc.parse_agent_string()"

    def test_parse_agent_string(self):
        "every known agent string should parse without raising"
        for s in self.agents:
            #TODO: should compare to return values,
            #not just make sure we _can_ parse this
            misc.parse_agent_string(s)

    #this is just a random assortment of agent strings
    # that should be parseable
    #FIX: the original list was missing two commas (after the first
    # Firefox entry and after the Ask Jeeves entry), so implicit string
    # concatenation silently fused adjacent entries into bogus agents.
    agents = [
        #firefox
        "Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.0.11) Gecko/2009060309 Ubuntu/9.04 (jaunty) Firefox/3.0.11",
        'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.12) Gecko/20050915 Firefox/1.0.7',

        #msie
        'Mozilla/4.0 (compatible; MSIE 6.0; AOL 9.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322)',
        'Mozilla/4.0 (compatible; MSIE 5.5; Windows 98)',
        'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; FunWebProducts; .NET CLR 1.1.4322)',
        'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; SLCC1; .NET CLR 2.0.50727; Media Center PC 5.0; .NET CLR 3.0.04506)',

        #konq
        'Mozilla/5.0 (compatible; Konqueror/3.4; Linux) KHTML/3.4.1 (like Gecko)',

        #safari
        'Mozilla/5.0 (Macintosh; U; Intel Mac OS X; en) AppleWebKit/417.9 (KHTML, like Gecko) Safari/417.8',

        #TODO: chrome

        #java
        'Mozilla/4.0 (Windows XP 5.1) Java/1.6.0_07',

        #various bots
        'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)',
        'Mozilla/5.0 (compatible; Yahoo! Slurp; http://help.yahoo.com/help/us/ysearch/slurp)',
        'Mozilla/5.0 (compatible; Ask Jeeves/Teoma; +http://about.ask.com/en/docs/about/webmasters.shtml)',
        'msnbot/0.9 (+http://search.msn.com/msnbot.htm)',
        'XmlRssTimingBot/2.03 (libwww-perl/5.800)',
    ]
diff --git a/bps/tests/test_numeric.py b/bps/tests/test_numeric.py
new file mode 100644
index 0000000..9e0c5c1
--- /dev/null
+++ b/bps/tests/test_numeric.py
@@ -0,0 +1,771 @@
+"""
+test_bps3 -- bps3 unittest script -- (c) Assurance Technologies 2003-2006
+
+defines unit tests for bps lib.
+
+NOTE: this module should import bps3 modules
+relative to sys.path, not locally, since it may be run in another location.
+"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+import sys
+from itertools import islice
+from decimal import Decimal
+#site
+#pkg
+from bps import numeric as num
+from bps.rng import random
+from bps.meta import Params as ak
+#module
+from bps.tests.utils import TestCase
+native = sys.byteorder
+#=========================================================
+#number theory funcs
+#=========================================================
+class NumberTheoryTest(TestCase):
+    "test number-theory helpers in bps.numeric (factors, gcd, lcm)"
+    #TODO: test gcd, lcm
+
+    #known composite numbers used to sanity-check factors()
+    composites = [
+        4, 8, 12, 7*6, 108, 641**5 * 431**2 * 449
+    ]
+    #expected factors() output: value -> list of (prime, exponent) pairs
+    #(values < 2 yield an empty factor list)
+    factors = {
+        -1: [],
+        0: [],
+        1: [],
+        2: [(2, 1)],
+        8: [(2, 3)],
+        -10: [(2, 1), (5, 1)],
+    }
+
+    def test_factors(self):
+        "run factors() against simple & border test cases"
+        for value, factors in self.factors.iteritems():
+            self.assertEqual(num.factors(value), factors)
+        #a prime should factor into itself to the 1st power
+        for value in PrimeTest.primes:
+            f = num.factors(value)
+            self.assert_(f == [(value, 1)], "prime %r has wrong factors: %r" % (value, f))
+        #a composite must yield either multiple primes,
+        #or a single smaller prime raised to a power > 1
+        for value in self.composites:
+            f = num.factors(value)
+            self.assert_(len(f) > 1 or (f[0][0] < value and f[0][1] > 1),
+                "composite %r has wrong factors: %r" % (value, f))
+
+    def test_random_factors(self):
+        "run factors() against randomly generated composites"
+        #build a random factorization out of known primes,
+        #multiply it out, and make sure factors() recovers it exactly
+        primes = PrimeTest.primes
+        for r in xrange(25):
+            c = random.randrange(1, 9)
+            out = set()
+            while len(out) < c:
+                out.add(random.choice(primes))
+            f = [
+                (p, random.randrange(1, 7))
+                for p in sorted(out)
+            ]
+            n = 1
+            for p, e in f:
+                n *= (p**e)
+            result = num.factors(n)
+            self.assertEqual(result, f)
+
+    def test_gcd(self):
+        "test gcd() against known (result, a, b) cases"
+        self.check_function_results(num.gcd, [
+            #test zero behavior
+            (0, 0, 0),
+            (100, 100, 0),
+            (100, 0, 100),
+
+            #test 1 behavior
+            (1, 1, 10),
+            (1, 1, 2),
+            (1, 1, 3),
+
+            #test prime behavior
+            (1, 5, 7),
+
+            #test various composites
+            (5, 10, 15),
+
+            #test negatives -- result stays positive
+            (2, 10, 4),
+            (2, -10, 4),
+            (2, 10, -4),
+            (2, -10, -4),
+        ])
+
+    def test_lcm(self):
+        "test lcm() against known (result, a, b) cases"
+        self.check_function_results(num.lcm, [
+            (40, 10, 8),
+            (120, 15, 40),
+            (45, 15, 45),
+        ])
+
+#=========================================================
+#primality funcs
+#=========================================================
+class PrimeTest(TestCase):
+    "test primality/factorization"
+    #the first 256 primes (that's 192 more than are stored internally in
+    # bps.numeric._small_primes, so that we test both modes of operation)
+    primes = [
+        #prime 0
+        2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,
+        59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,
+        137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,
+        227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,
+
+        #^ end of bps.numeric._small_primes
+
+        #prime 64
+        313,317,331,337,347,349,353,359,367,373,379,383,389,397,401,409,
+        419,421,431,433,439,443,449,457,461,463,467,479,487,491,499,503,
+        509,521,523,541,547,557,563,569,571,577,587,593,599,601,607,613,
+        617,619,631,641,643,647,653,659,661,673,677,683,691,701,709,719,
+
+        #prime 128
+        727, 733, 739, 743, 751, 757, 761, 769, 773, 787, 797, 809, 811, 821, 823, 827,
+        829, 839, 853, 857, 859, 863, 877, 881, 883, 887, 907, 911, 919, 929, 937, 941,
+        947, 953, 967, 971, 977, 983, 991, 997, 1009, 1013, 1019, 1021, 1031, 1033, 1039, 1049,
+        1051, 1061, 1063, 1069, 1087, 1091, 1093, 1097, 1103, 1109, 1117, 1123, 1129, 1151, 1153, 1163,
+
+        #prime 192
+        1171, 1181, 1187, 1193, 1201, 1213, 1217, 1223, 1229, 1231, 1237, 1249, 1259, 1277, 1279, 1283,
+        1289, 1291, 1297, 1301, 1303, 1307, 1319, 1321, 1327, 1361, 1367, 1373, 1381, 1399, 1409, 1423,
+        1427, 1429, 1433, 1439, 1447, 1451, 1453, 1459, 1471, 1481, 1483, 1487, 1489, 1493, 1499, 1511,
+        1523, 1531, 1543, 1549, 1553, 1559, 1567, 1571, 1579, 1583, 1597, 1601, 1607, 1609, 1613, 1619,
+
+        #^ prime 255
+    ]
+
+    #TODO: fill in some large known primes to exercise the high end
+    big_primes = [
+
+
+    ]
+
+    def test_is_prime(self):
+        "is_prime() should agree w/ the prime table for all small values"
+        primes = set(self.primes)
+        for value in xrange(-10, max(primes)):
+            self.assertEqual(num.is_prime(value), value in primes)
+
+    def test_is_mr_prime(self):
+        "is_mr_prime() should agree w/ the prime table as well"
+        primes = set(self.primes)
+        for value in xrange(-10, max(primes)):
+            self.assertEqual(num.is_mr_prime(value), value in primes)
+
+    def test_iter_primes(self):
+        "test iter_primes() start/stop/count behavior"
+        #test the list matches
+        primes = list(islice(num.iter_primes(), len(self.primes)))
+        self.assertEqual(primes, self.primes)
+
+        #what if we start real low
+        primes = list(islice(num.iter_primes(-100), len(self.primes)))
+        self.assertEqual(primes, self.primes)
+
+        #in middle of small primes
+        primes = list(islice(num.iter_primes(53), len(self.primes)-15))
+        self.assertEqual(primes, self.primes[15:])
+
+        #at end of small primes
+        primes = list(islice(num.iter_primes(310), len(self.primes)-63))
+        self.assertEqual(primes, self.primes[63:])
+
+        #at end of small primes 2
+        primes = list(islice(num.iter_primes(311), len(self.primes)-63))
+        self.assertEqual(primes, self.primes[63:])
+
+        #at end of small primes 3
+        primes = list(islice(num.iter_primes(313), len(self.primes)-64))
+        self.assertEqual(primes, self.primes[64:])
+
+        #test 'stop' kwd
+        primes = list(num.iter_primes(313, 419))
+        self.assertEqual(primes, self.primes[64:64+16])
+
+        primes = list(num.iter_primes(312, 419))
+        self.assertEqual(primes, self.primes[64:64+16])
+
+        #test 'count' kwd
+        primes = list(num.iter_primes(313, count=16))
+        self.assertEqual(primes, self.primes[64:64+16])
+
+        #test stop < count
+        primes = list(num.iter_primes(313, 419, count=32))
+        self.assertEqual(primes, self.primes[64:64+16])
+
+        #test stop > count
+        primes = list(num.iter_primes(313, 900, count=16))
+        self.assertEqual(primes, self.primes[64:64+16])
+
+    def test_np_prime(self):
+        "test next_prime & prev_prime"
+        #before first prime
+        for value in xrange(-10, 2):
+            self.assertEqual(num.next_prime(value), 2)
+            self.assertEqual(num.prev_prime(value), None)
+
+        #test 2
+        self.assertEqual(num.next_prime(2), 3)
+        self.assertEqual(num.prev_prime(2), None)
+
+        #over the whole prime table (avoiding 2 & the last one)
+        for idx, value in enumerate(self.primes[1:-1]):
+            idx += 1
+            assert self.primes[idx] == value
+
+            #prev
+            prev = self.primes[idx-1]
+            self.assertEqual(num.prev_prime(value+2), value)
+            self.assertEqual(num.prev_prime(value+1), value)
+            self.assertEqual(num.prev_prime(value), prev, "value: %r" % value)
+            if prev == value-1:
+                #2 & 3 are the only adjacent primes
+                assert value == 3
+                self.assertEqual(num.prev_prime(value-1), None)
+                self.assertEqual(num.prev_prime(value-2), None)
+            elif prev == value-2:
+                #twin primes -- value-2 is itself prime
+                self.assertEqual(num.prev_prime(value-1), prev)
+                self.assert_(num.prev_prime(value-2) < prev)
+            else:
+                self.assertEqual(num.prev_prime(value-1), prev)
+                self.assertEqual(num.prev_prime(value-2), prev)
+
+            #next
+            next = self.primes[idx+1]
+            if value == 3:
+                self.assertEqual(num.next_prime(value-2), 2)
+            else:
+                self.assertEqual(num.next_prime(value-2), value)
+            self.assertEqual(num.next_prime(value-1), value)
+            self.assertEqual(num.next_prime(value), next)
+            self.assertEqual(num.next_prime(value+1), next)
+            if next == value+2:
+                #twin primes -- value+2 is itself prime
+                self.assert_(num.next_prime(value+2) > next, "v=%r nv+2=%r n=%r" % (value, num.next_prime(value+2), next))
+            else:
+                self.assertEqual(num.next_prime(value+2), next)
+
+#=========================================================
+#bit string funcs
+#=========================================================
+class BytesTest(TestCase):
+
+ def test_list_to_bytes(self):
+ self.check_function_results(num.list_to_bytes, [
+ #standard big endian
+ ak('\x00', [0], 1),
+ ak('\x01', [1], 1),
+ ak('\x00\x01', [1], 2),
+ ak('\x00\x01', [0, 1], 2),
+ ak('\x00\x00\x01', [1], 3),
+ ak('\x00\x00\x00\x00', [0], 4),
+ ak('\x00\x00\x00\x01', [1], 4),
+ ak('\x00\x00\x00\xff', [255], 4),
+ ak('\x00\x00\x01\xff', [1, 255], 4),
+ ak('\x04\x03\x02\x01', [4, 3, 2, 1], 4),
+
+ #standard little endian
+ ak('\x00', [0], 1, order="little"),
+ ak('\x01', [1], 1, order="little"),
+ ak('\x01\x00', [1], 2, order="little"),
+ ak('\x01\x00', [0, 1], 2, order="little"),
+ ak('\x01\x00\x00', [1], 3, order="little"),
+ ak('\x00\x00\x00\x00', [0], 4, order="little"),
+ ak('\x01\x00\x00\x00', [1], 4, order="little"),
+ ak('\xff\x00\x00\x00', [255], 4, order="little"),
+ ak('\xff\x01\x00\x00', [1, 255], 4, order="little"),
+ ak('\x01\x02\x03\x04', [4, 3, 2, 1], 4, order="little"),
+
+ ])
+
+ #check bytes size check
+ self.assertRaises(ValueError, num.list_to_bytes, [])
+ self.assertRaises(ValueError, num.list_to_bytes, [0, 0], bytes=1)
+
+ #check bytes bound check
+ self.assertRaises(ValueError, num.list_to_bytes, [256], bytes=1)
+
+ #quick check native mode works right
+ if native == "little":
+ self.assertEqual(num.list_to_bytes([1], 3, order="native"), '\x01\x00\x00')
+ else:
+ self.assertEqual(num.list_to_bytes([1], 3, order="native"), '\x00\x00\x01')
+
+ def test_bytes_to_list(self):
+ self.check_function_results(num.bytes_to_list, [
+
+ #standard big endian
+ ak([1], '\x01'),
+ ak([0, 1], '\x00\x01'),
+ ak([0, 0, 1], '\x00\x00\x01'),
+ ak([0, 0, 0, 0],'\x00\x00\x00\x00'),
+ ak([0, 0, 0, 1],'\x00\x00\x00\x01'),
+ ak([0, 0, 0, 255],'\x00\x00\x00\xff'),
+ ak([0, 0, 1, 0],'\x00\x00\x01\x00'),
+ ak([4, 3, 2, 1],'\x04\x03\x02\x01'),
+
+ #standard little endian
+ ak([1], '\x01', order="little"),
+ ak([0, 1], '\x01\x00', order="little"),
+ ak([0, 0, 1], '\x01\x00\x00', order="little"),
+ ak([0, 0, 0, 0], '\x00\x00\x00\x00', order="little"),
+ ak([0, 0, 0, 1], '\x01\x00\x00\x00', order="little"),
+ ak([0, 0, 0, 255], '\xff\x00\x00\x00', order="little"),
+ ak([0, 0, 1, 0], '\x00\x01\x00\x00', order="little"),
+ ak([4, 3, 2, 1],'\x01\x02\x03\x04', order="little"),
+
+ ])
+
+ #quick check native mode works right
+ if native == "little":
+ self.assertEqual(num.bytes_to_list('\x01\x00\x00', order="native"), [0, 0, 1])
+ else:
+ self.assertEqual(num.bytes_to_list('\x00\x00\x01', order="native"), [0, 0, 1])
+
+ def test_int_to_bytes(self):
+ self.check_function_results(num.int_to_bytes, [
+ #standard big endian
+ ak('\x00', 0, 1),
+ ak('\x01', 1, 1),
+ ak('\x00\x01', 1, 2),
+ ak('\x00\x00\x01', 1, 3),
+ ak('\x00\x00\x00\x00', 0, 4),
+ ak('\x00\x00\x00\x01', 1, 4),
+ ak('\x00\x00\x00\xff', 255, 4),
+ ak('\x00\x00\x01\x00', 256, 4),
+ ak('\x04\x03\x02\x01', 0x04030201, 4),
+
+ #standard little endian
+ ak('\x00', 0, 1, order="little"),
+ ak('\x01', 1, 1, order="little"),
+ ak('\x01\x00', 1, 2, order="little"),
+ ak('\x01\x00\x00', 1, 3, order="little"),
+ ak('\x00\x00\x00\x00', 0, 4, order="little"),
+ ak('\x01\x00\x00\x00', 1, 4, order="little"),
+ ak('\xff\x00\x00\x00', 255, 4, order="little"),
+ ak('\x00\x01\x00\x00', 256, 4, order="little"),
+ ak('\x01\x02\x03\x04', 0x04030201, 4, order="little"),
+
+ ])
+
+ #check bytes bound check
+ self.assertRaises(ValueError, num.int_to_bytes, 256, bytes=1)
+
+ #check upper bound check
+ self.assertRaises(ValueError, num.int_to_bytes, 129, upper=128)
+
+ #check bytes/upper check
+ self.assertRaises(ValueError, num.int_to_bytes, 1, bytes=1, upper=512)
+
+ #quick check native mode works right
+ if native == "little":
+ self.assertEqual(num.int_to_bytes(1, 3, order="native"), '\x01\x00\x00')
+ else:
+ self.assertEqual(num.int_to_bytes(1, 3, order="native"), '\x00\x00\x01')
+
+ def test_bytes_to_int(self):
+ self.check_function_results(num.bytes_to_int, [
+ #standard big endian
+ ak(1, '\x01'),
+ ak(1, '\x00\x01'),
+ ak(1, '\x00\x00\x01'),
+ ak(0,'\x00\x00\x00\x00'),
+ ak(1,'\x00\x00\x00\x01'),
+ ak(255,'\x00\x00\x00\xff'),
+ ak(256,'\x00\x00\x01\x00'),
+ ak(0x04030201,'\x04\x03\x02\x01'),
+
+ #standard little endian
+ ak(1, '\x01', order="little"),
+ ak(1, '\x01\x00', order="little"),
+ ak(1, '\x01\x00\x00', order="little"),
+ ak(0, '\x00\x00\x00\x00', order="little"),
+ ak(1, '\x01\x00\x00\x00', order="little"),
+ ak(255, '\xff\x00\x00\x00', order="little"),
+ ak(256, '\x00\x01\x00\x00', order="little"),
+ ak(0x04030201,'\x01\x02\x03\x04', order="little"),
+
+ ])
+
+ #quick check native mode works right
+ if native == "little":
+ self.assertEqual(num.bytes_to_int('\x01\x00\x00', order="native"), 1)
+ else:
+ self.assertEqual(num.int_to_bytes('\x00\x00\x01', order="native"), 1)
+
+ def test_xor_bytes(self):
+ self.check_function_results(num.xor_bytes, [
+ #result, left, right
+ ak('\x00\x00\x00\x00', '\x00\x00\x00\x00', '\x00\x00\x00\x00'),
+ ak('\x00\x00\xff\x00', '\xff\x00\xff\x00', '\xff\x00\x00\x00'),
+ ak('\x00\x00\x00\x00', '\xff\x00\xff\x00', '\xff\x00\xff\x00'),
+ ak('\x00\x44\x03\x02', '\x08\x04\x02\x03', '\x08\x40\x01\x01')
+ ])
+ self.check_bs_func(num.xor_bytes, lambda a, b: a^b)
+
+ def test_align_bytes(self):
+ #since xor_bytes/and/etc all use the same alignment func,
+ #we're just quickly testing xor_bytes
+
+ self.assertRaises(ValueError, num.xor_bytes, '\x00', '\x00\x21')
+ self.assertRaises(ValueError, num.xor_bytes, '\x00', '\x00\x21', order=None)
+
+ self.assertEqual(num.xor_bytes('\x01', '\x00\x21', order="big"), '\x00\x20')
+ #^ same as '\x00\x21' \x00\x01'
+ self.assertEqual(num.xor_bytes('\x01', '\x00\x21', order="little"), '\x01\x21')
+ #^ same as '\x00\x21' \x01\x00'
+
+ def test_and_bytes(self):
+ self.check_function_results(num.and_bytes, [
+ #result, left, right
+ ak('\x00\x00\x00\x00', '\x00\x00\x00\x00', '\x00\x00\x00\x00'),
+ ak('\xff\x00\x00\x00', '\xff\x00\xff\x00', '\xff\x00\x00\x00'),
+ ak('\xff\x00\xff\x00', '\xff\x00\xff\x00', '\xff\x00\xff\x00'),
+ ak('\x08\x00\x00\x01', '\x08\x04\x02\x03', '\x08\x40\x01\x01')
+ ])
+ self.check_bs_func(num.and_bytes, lambda a, b: a&b)
+
+ def test_or_bytes(self):
+ self.check_function_results(num.or_bytes, [
+ #result, left, right
+ ak('\x00\x00\x00\x00', '\x00\x00\x00\x00', '\x00\x00\x00\x00'),
+ ak('\xff\x00\xff\x00', '\xff\x00\xff\x00', '\xff\x00\x00\x00'),
+ ak('\xff\x00\xff\x00', '\xff\x00\xff\x00', '\xff\x00\xff\x00'),
+ ak('\x08\x44\x03\x03', '\x08\x04\x02\x03', '\x08\x40\x01\x01')
+ ])
+ self.check_bs_func(num.or_bytes, lambda a, b: a|b)
+
+ def test_binop_bytes(self):
+ #note: this also checks some python invariants, just to be safe,
+ #as well as some internal bits of bs_op.
+ #under the guise of that, we test using bs_op to perform NAND
+ assert -1 % 256 == 255
+ assert 255 % 256 == 255
+ def nand(a, b):
+ assert a % 256 == a
+ assert b % 256 == b
+ c = 256 + ~ (a & b)
+ assert c % 256 == c
+ return c
+ self.check_function_results(num.binop_bytes, [
+ #result, left, right
+ ak('\xff\xff\xff\xff', '\x00\x00\x00\x00', '\x00\x00\x00\x00', nand),
+ ak('\x00\xff\xff\xff', '\xff\x00\xff\x00', '\xff\x00\x00\x00', nand),
+ ak('\x00\xff\x00\xff', '\xff\x00\xff\x00', '\xff\x00\xff\x00', nand),
+ ak('\xf7\xff\xff\xfe', '\x08\x04\x02\x03', '\x08\x40\x01\x01', nand)
+ ])
+
+ def test_invert_bytes(self):
+ self.check_function_results(num.invert_bytes, [
+ ak('\x00\xff', '\xff\x00'),
+ ak('\x84\x21\x00', '\x7b\xde\xff'),
+ ])
+
+ def check_bs_func(self, func, op):
+ "check bool operation over random bytes"
+ for r in xrange(1000):
+ al = random.randrange(1, 9)
+ bl = random.randrange(1, 9)
+ a = random.getrandbytes(al)
+ b = random.getrandbytes(bl)
+
+ #do big-endian
+ av = num.bytes_to_int(a)
+ bv = num.bytes_to_int(b)
+ cv = op(av, bv)
+ c = num.int_to_bytes(cv, max(al, bl))
+ self.assertEqual(func(a, b, order="big"), c)
+
+ #do little-endian
+ av = num.bytes_to_int(a, order="little")
+ bv = num.bytes_to_int(b, order="little")
+ cv = op(av, bv)
+ c = num.int_to_bytes(cv, max(al, bl), order="little")
+ self.assertEqual(func(a, b, order="little"), c)
+
+#=========================================================
+#roman numeral funcs
+#=========================================================
+class RomanTest(TestCase):
+    "test roman numeral <-> int conversions"
+    #   I V X  L  C   D   M
+    #   1 5 10 50 100 500 1000
+
+    roman_pairs = [
+        #pairs tested forward and backward
+        ('i', 1), ('ii', 2), ('iii', 3), ('iv', 4),
+        ('v', 5), ('vi', 6), ('vii', 7), ('viii', 8),
+        ('ix', 9), ('x', 10), ('xi', 11), ('xii', 12),
+        ('xiii', 13), ('xiv', 14), ('xv', 15),
+        ('xlv', 45), ('xlvi', 46), ('xlvii', 47), ('xlviii', 48),
+        ('xlix', 49), ('l', 50), ('li', 51),
+        ('xcix', 99), ('c', 100), ('ci', 101),
+        ('cxcix', 199), ('cc', 200), ('cci', 201),
+        ('cccxcix', 399), ('cd', 400), ('cdi', 401),
+        ('cmxcix', 999), ('m', 1000), ('mi', 1001),
+        ('mcmxcviii', 1998), ('mcmxcix', 1999), ('mm', 2000), ('mmi', 2001),
+        ('mmmcmxcix', 3999),
+    ]
+
+    ns_roman_pairs = [
+        #non-standard roman -- not allowed in strict mode
+
+        #duplicate elements
+        ('xxxxxx', 60),
+        ('mmmmm', 5000),
+
+        #duplicate stanzas
+        ('iviv', 8),
+        ('ivivx', 2),
+
+        #over-large subtraction stanzas
+        ('iiiiv', 1),
+        ('vix', 4),
+    ]
+
+    invalid_roman = [
+        #grammatically incorrect (encodes a negative w/in a subtraction stanza)
+        "vvx", "iiiiiv", "iviviix",
+        #wrong literals
+        "", "axcv",
+    ]
+
+    def test_int_to_roman(self):
+        "test int_to_roman() known values, bounds & dialects"
+        for roman, arabic in self.roman_pairs:
+            self.assertEqual(num.int_to_roman(arabic), roman.upper())
+        #out-of-range values should be rejected
+        self.assertRaises(ValueError, num.int_to_roman, -1)
+        self.assertRaises(ValueError, num.int_to_roman, 0)
+        self.assertRaises(ValueError, num.int_to_roman, 4001)
+
+        #test dialects
+        self.assertEqual(num.int_to_roman(99), "xcix".upper())
+        self.assertEqual(num.int_to_roman(99, dialect="standard"), "xcix".upper())
+        self.assertEqual(num.int_to_roman(99, dialect="additive"), "lxxxxviiii".upper())
+
+        #test some invariants...
+        for i in xrange(1, 3999):
+            r = num.int_to_roman(i)
+            #the non-powers of ten (V, L, D) should never occur twice in a row
+            for c in "VLD":
+                self.assert_((c*2) not in r)
+            #the powers of ten (I, X, C, M) should never occur >3 times in a row
+            for c in "IXCM":
+                self.assert_((c*4) not in r)
+
+    def test_roman_to_int(self):
+        "test roman_to_int() known values, round-trips & strict mode"
+        #test std pairs
+        for roman, arabic in self.roman_pairs:
+            self.assertEqual(num.roman_to_int(roman), arabic)
+
+        #run all numbers through and back
+        for i in xrange(1, 3999):
+            self.assertEqual(num.roman_to_int(num.int_to_roman(i)), i)
+
+        #check for some non-standard but correct ones
+        for roman, arabic in self.ns_roman_pairs:
+            self.assertEqual(num.roman_to_int(roman), arabic)
+            self.assertRaises(ValueError, num.roman_to_int, roman, strict=True)
+
+        #check invalid romans
+        for roman in self.invalid_roman:
+            self.assertRaises(ValueError, num.roman_to_int, roman)
+
+#=========================================================
+#base conversions
+#=========================================================
+
+# int_to_base
+# int_from_base -- int
+# float_to_base
+# float_from_base
+ # test float_to_base(1<<BPF,2) - was throwing error
+
+#=========================================================
+#misc
+#=========================================================
+
+class MiscTest(TestCase):
+    "test assorted numeric helpers (sdivmod, splitfrac, int_to_base, limit, avgsd, digits)"
+
+    def test_sdivmod(self):
+        "test sdivmod() -- divmod variant whose remainder keeps the dividend's sign"
+        sdivmod = num.sdivmod
+
+        def ts(x, y, cd, cr):
+            #check sdivmod(x, y) == (cd, cr), and remainder keeps input's type
+            rd, rr = sdivmod(x, y)
+            if isinstance(x, Decimal):
+                self.assertIsInstance(rr, Decimal)
+            elif isinstance(x, float):
+                self.assertIsInstance(rr, float)
+            self.assertEquals(rd, cd)
+            self.assertEquals(rr, cr)
+
+        ts(12, 5, 2, 2)
+        ts(-12, 5, -2, -2)
+
+        ts(12.5, 5, 2, 2.5)
+        ts(-12.5, 5, -2, -2.5)
+
+        ts(Decimal("12.5"), 5, 2, Decimal("2.5"))
+        ts(Decimal("-12.5"), 5, -2, Decimal("-2.5"))
+
+    def test_splitfrac(self):
+        "test splitfrac() -- splits value into (integer, fraction) parts"
+        def ts(v, ci, cf):
+            #check splitfrac(v) == (ci, cf), with the expected result types
+            ri, rf = num.splitfrac(v)
+            if isinstance(v, long):
+                self.assertIsInstance(ri, long)
+                self.assertIsInstance(rf, int) #could make this a long for symmetry, but it's not worth it
+            elif isinstance(v, int):
+                self.assertIsInstance(ri, int)
+                self.assertIsInstance(rf, int)
+            elif isinstance(v, float):
+                self.assertIsInstance(ri, int)
+                self.assertIsInstance(rf, float)
+            elif isinstance(v, Decimal):
+                self.assertIsInstance(ri, int)
+                self.assertIsInstance(rf, Decimal)
+            else:
+                raise TypeError
+            self.assertEquals(ri, ci)
+            self.assertEquals(rf, cf)
+
+        #float w/ frac portion
+        ts(-10.75, -10, -.75)
+        ts(-1.25, -1, -.25)
+        ts(0.25, 0, .25)
+        ts(1.25, 1, .25)
+        ts(10.25, 10, .25)
+
+        #float w/o frac portion
+        ts(-12.0, -12, 0)
+        ts(-1.0, -1, 0)
+        ts(0.0, 0, 0)
+        ts(1.0, 1, 0)
+        ts(12.0, 12, 0)
+
+        #decimal w/ frac portion
+        ts(Decimal("-10.1"), -10, Decimal("-.1"))
+        ts(Decimal("-1.1"), -1, Decimal("-.1"))
+        ts(Decimal("-.1"), 0, Decimal("-.1"))
+        ts(Decimal(".1"), 0, Decimal(".1"))
+        ts(Decimal("1.1"), 1, Decimal(".1"))
+        ts(Decimal("12.1"), 12, Decimal(".1"))
+
+        #decimal w/o frac portion
+        ts(Decimal("-10.0"), -10, Decimal("0"))
+        ts(Decimal("-1.0"), -1, Decimal("0"))
+        ts(Decimal("0.0"), 0, Decimal("0"))
+        ts(Decimal("1.0"), 1, Decimal("0"))
+        ts(Decimal("12.0"), 12, Decimal("0"))
+
+        #ints
+        ts(-10, -10, 0)
+        ts(0, 0, 0)
+        ts(1, 1, 0)
+        ts(10, 10, 0)
+
+        #longs
+        ts(1L, 1L, 0)
+
+        #TODO: Decimal support
+
+    def test_int_to_base(self):
+        "test int_to_base() known values, pad kwd & base bounds"
+        self.check_function_results(num.int_to_base, [
+            #check base 2
+            ak('0', 0, 2),
+            ak('11', 3, 2),
+            ak('1000', 8, 2),
+
+            #check 10 in various bases
+            ak('1010', 10, 2),
+            ak('101', 10, 3),
+            ak('10', 10, 10),
+
+            #check 16 in various bases
+            ak('16', 16, 10),
+            ak('f', 15, 16),
+            ak('10', 16, 16),
+            ak('23', 35, 16),
+
+            #check 35 in various bases
+            ak('35', 35, 10),
+            ak('z', 35, 36),
+            ak('10', 36, 36),
+            ak('zz', 1295, 36),
+
+            #check negatives
+            ak('-110', -(4+2), 2),
+            ak('-1f', -0x1F, 16),
+
+            #check pad kwd
+            ak('1010', 8+2, 2, pad=0),
+            ak('1010', 8+2, 2, pad=4),
+            ak('01010', 8+2, 2, pad=5),
+            ak('00001101', 8+4+1, 2, pad=8),
+            ak('00101101', 32+8+4+1, 2, pad=8),
+
+            #check pad + negative
+            ak('-00001101', -(8+4+1), 2, pad=8),
+        ])
+        #bases outside 2..36 should be rejected
+        self.assertRaises(ValueError, num.int_to_base, 0, -1)
+        self.assertRaises(ValueError, num.int_to_base, 0, 0)
+        self.assertRaises(ValueError, num.int_to_base, 0, 1)
+        self.assertRaises(ValueError, num.int_to_base, 0, 37)
+        self.assertRaises(ValueError, num.int_to_base, 0, 1000)
+        #random round-trip check against python's own int() parser
+        for r in xrange(1000):
+            b = random.randrange(2, 37)
+            n = random.randrange(-2**32, 2**32+1)
+            v = num.int_to_base(n, b)
+            self.assertEqual(int(v, b), n)
+
+    def test_limit(self):
+        "test limit() clamps value into [lower, upper]"
+        self.check_function_results(num.limit, [
+            # result, value, lower, upper
+            ak(5, 5, 0, 10),
+            ak(0, -5, 0, 10),
+            ak(10, 15, 0, 10),
+
+            ak(0, -1.5, 0, 1),
+            ak(.5, .5, 0, 1),
+            ak(1, 1.5, 0, 1),
+
+            ak(5, -10, 5, 5),
+        ])
+        #check it won't let lower > upper
+        self.assertRaises(ValueError, num.limit, 0, 10, 5)
+
+    def test_avgsd(self):
+        "test avgsd() returns expected (average, standard deviation) pair"
+        self.assertEqual(num.avgsd([0, 1, 1, 2]), (1.0, 0.70710678118654757))
+
+    def test_digits(self):
+        "test digits() counts digits of value in the given base (default 10)"
+        self.check_function_results(num.digits, [
+            # result, value, [digits]
+            ak(2, 99),
+
+            ak(2, 99, 10),
+            ak(3, 100, 10),
+            ak(3, 7, 2),
+            ak(4, 8, 2),
+            ak(2, 255, 16),
+
+            ak(2, -99, 10),
+            ak(1, 0, 10),
+
+        ])
+
+##    seqsum_cases = [
+##        (
+##            ak([179,50,74,51], [0,126,41,4], [93,99,109]),
+##            [272, 275, 224, 55],
+##        ),
+##        (
+##            ak(1, [179,50,74,51], 1, [0,126,41,4], 1, [93,99,109]),
+##            [272, 275, 224, 55],
+##        ),
+##        (
+##            ak((1, [179,50,74,51]), (1, [0,126,41,4]), (1, [93,99,109])),
+##            [272, 275, 224, 55],
+##        ),
+##    ]
+##    def test_seqsum(self):
+##        for i,o in self.seqsum_cases:
+##            r = seqsum(*i.args, **i.kwds)
+##            self.assertEqual(r, o, "case %r: got %r, expected %r" % (i, r, o))
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/tests/test_security_bcrypt.py b/bps/tests/test_security_bcrypt.py
new file mode 100755
index 0000000..414824d
--- /dev/null
+++ b/bps/tests/test_security_bcrypt.py
@@ -0,0 +1,210 @@
+"""bps.security._bcrypt unitests
+
+The BaseTest class was adapted from the jBcrypt unitests,
+released under the following license:
+
+ // Permission to use, copy, modify, and distribute this software for any
+ // purpose with or without fee is hereby granted, provided that the above
+ // copyright notice and this permission notice appear in all copies.
+ //
+ // THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ // WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ // MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ // ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ // WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ // ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ // OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""
+#=========================================================
+#imports
+#=========================================================
+#site
+try:
+ import bcrypt as pybcrypt
+except ImportError:
+ pybcrypt = None
+#pkg
+from bps.tests.utils import TestCase, enable_suite
+from bps.security import _bcrypt as slow_bcrypt
+
+#=========================================================
+#test suite
+#=========================================================
+class UtilTest(TestCase):
+ "test slow_bcrypt's utilities"
+
+ def test_encode64(self):
+ encode = slow_bcrypt.encode_base64
+ self.check_function_results(encode, [
+ ('', ''),
+ ('..', '\x00'),
+ ('...', '\x00\x00'),
+ ('....', '\x00\x00\x00'),
+ ('9u', '\xff'),
+ ('996', '\xff\xff'),
+ ('9999', '\xff\xff\xff'),
+ ])
+
+ def test_decode64(self):
+ decode = slow_bcrypt.decode_base64
+ self.check_function_results(decode, [
+ ('', ''),
+ ('\x00', '..'),
+ ('\x00\x00', '...'),
+ ('\x00\x00\x00', '....'),
+ ('\xff', '9u', ),
+ ('\xff\xff','996'),
+ ('\xff\xff\xff','9999'),
+ ])
+
+class _BcryptTestBase(TestCase):
+    "common bcrypt tests, run against the module stored in ``mod`` by subclasses"
+    #bcrypt implementation under test -- must provide hashpw() & gensalt()
+    mod = None
+
+    #(plaintext, salt, expected hash) triples, taken from the jBcrypt unittests
+    #NOTE(review): jBcrypt's last plaintext contains a run of spaces --
+    # confirm the whitespace survived copy/paste intact
+    test_vectors = [
+        [ "",
+          "$2a$06$DCq7YPn5Rq63x1Lad4cll.",
+          "$2a$06$DCq7YPn5Rq63x1Lad4cll.TV4S6ytwfsfvkgY8jIucDrjc8deX1s." ],
+        [ "",
+          "$2a$08$HqWuK6/Ng6sg9gQzbLrgb.",
+          "$2a$08$HqWuK6/Ng6sg9gQzbLrgb.Tl.ZHfXLhvt/SgVyWhQqgqcZ7ZuUtye" ],
+        [ "",
+          "$2a$10$k1wbIrmNyFAPwPVPSVa/ze",
+          "$2a$10$k1wbIrmNyFAPwPVPSVa/zecw2BCEnBwVS2GbrmgzxFUOqW9dk4TCW" ],
+        [ "",
+          "$2a$12$k42ZFHFWqBp3vWli.nIn8u",
+          "$2a$12$k42ZFHFWqBp3vWli.nIn8uYyIkbvYRvodzbfbK18SSsY.CsIQPlxO" ],
+        [ "a",
+          "$2a$06$m0CrhHm10qJ3lXRY.5zDGO",
+          "$2a$06$m0CrhHm10qJ3lXRY.5zDGO3rS2KdeeWLuGmsfGlMfOxih58VYVfxe" ],
+        [ "a",
+          "$2a$08$cfcvVd2aQ8CMvoMpP2EBfe",
+          "$2a$08$cfcvVd2aQ8CMvoMpP2EBfeodLEkkFJ9umNEfPD18.hUF62qqlC/V." ],
+        [ "a",
+          "$2a$10$k87L/MF28Q673VKh8/cPi.",
+          "$2a$10$k87L/MF28Q673VKh8/cPi.SUl7MU/rWuSiIDDFayrKk/1tBsSQu4u" ],
+        [ "a",
+          "$2a$12$8NJH3LsPrANStV6XtBakCe",
+          "$2a$12$8NJH3LsPrANStV6XtBakCez0cKHXVxmvxIlcz785vxAIZrihHZpeS" ],
+        [ "abc",
+          "$2a$06$If6bvum7DFjUnE9p2uDeDu",
+          "$2a$06$If6bvum7DFjUnE9p2uDeDu0YHzrHM6tf.iqN8.yx.jNN1ILEf7h0i" ],
+        [ "abc",
+          "$2a$08$Ro0CUfOqk6cXEKf3dyaM7O",
+          "$2a$08$Ro0CUfOqk6cXEKf3dyaM7OhSCvnwM9s4wIX9JeLapehKK5YdLxKcm" ],
+        [ "abc",
+          "$2a$10$WvvTPHKwdBJ3uk0Z37EMR.",
+          "$2a$10$WvvTPHKwdBJ3uk0Z37EMR.hLA2W6N9AEBhEgrAOljy2Ae5MtaSIUi" ],
+        [ "abc",
+          "$2a$12$EXRkfkdmXn2gzds2SSitu.",
+          "$2a$12$EXRkfkdmXn2gzds2SSitu.MW9.gAVqa9eLS1//RYtYCmB1eLHg.9q" ],
+        [ "abcdefghijklmnopqrstuvwxyz",
+          "$2a$06$.rCVZVOThsIa97pEDOxvGu",
+          "$2a$06$.rCVZVOThsIa97pEDOxvGuRRgzG64bvtJ0938xuqzv18d3ZpQhstC" ],
+        [ "abcdefghijklmnopqrstuvwxyz",
+          "$2a$08$aTsUwsyowQuzRrDqFflhge",
+          "$2a$08$aTsUwsyowQuzRrDqFflhgekJ8d9/7Z3GV3UcgvzQW3J5zMyrTvlz." ],
+        [ "abcdefghijklmnopqrstuvwxyz",
+          "$2a$10$fVH8e28OQRj9tqiDXs1e1u",
+          "$2a$10$fVH8e28OQRj9tqiDXs1e1uxpsjN0c7II7YPKXua2NAKYvM6iQk7dq" ],
+        [ "abcdefghijklmnopqrstuvwxyz",
+          "$2a$12$D4G5f18o7aMMfwasBL7Gpu",
+          "$2a$12$D4G5f18o7aMMfwasBL7GpuQWuP3pkrZrOAnqP.bmezbMng.QwJ/pG" ],
+        [ "~!@#$%^&*() ~!@#$%^&*()PNBFRD",
+          "$2a$06$fPIsBO8qRqkjj273rfaOI.",
+          "$2a$06$fPIsBO8qRqkjj273rfaOI.HtSV9jLDpTbZn782DC6/t7qT67P6FfO" ],
+        [ "~!@#$%^&*() ~!@#$%^&*()PNBFRD",
+          "$2a$08$Eq2r4G/76Wv39MzSX262hu",
+          "$2a$08$Eq2r4G/76Wv39MzSX262huzPz612MZiYHVUJe/OcOql2jo4.9UxTW" ],
+        [ "~!@#$%^&*() ~!@#$%^&*()PNBFRD",
+          "$2a$10$LgfYWkbzEvQ4JakH7rOvHe",
+          "$2a$10$LgfYWkbzEvQ4JakH7rOvHe0y8pHKF9OaFgwUZ2q7W2FFZmZzJYlfS" ],
+        [ "~!@#$%^&*() ~!@#$%^&*()PNBFRD",
+          "$2a$12$WApznUOJfkEGSmYRfnkrPO",
+          "$2a$12$WApznUOJfkEGSmYRfnkrPOr466oFDCaj4b6HY3EXGvfxm43seyhgC" ],
+    ]
+
+    def test_00_hashpw(self):
+        "test hashpw() generates expected result for a given plaintext & salt"
+        hashpw = self.mod.hashpw
+        for plain, salt, expected in self.test_vectors:
+            hashed = hashpw(plain, salt)
+            self.assertEquals(hashed, expected)
+
+    def test_01_hashpw_success(self):
+        "test hashpw() verifies knowns correctly"
+        #hashing w/ the full hash as the salt should reproduce the hash
+        hashpw = self.mod.hashpw
+        for plain, _, expected in self.test_vectors:
+            hash = hashpw(plain, expected)
+            self.assertEquals(hash, expected)
+
+    def test_02_hashpw_failure(self):
+        "test hashpw() negatively verifies incorrect knowns"
+        #a perturbed password must not reproduce the stored hash
+        hashpw = self.mod.hashpw
+        for plain, _, expected in self.test_vectors:
+            hash = hashpw(plain + 'number 15', expected)
+            self.assertNotEquals(hash, expected)
+
+    def test_03_gensalt(self):
+        "test new salts verify correctly"
+        hashpw = self.mod.hashpw
+        gensalt = self.mod.gensalt
+        seen = set()
+        for plain, _, _ in self.test_vectors:
+            #only test each distinct plaintext once
+            if plain in seen:
+                continue
+            seen.add(plain)
+
+            #create salt
+            salt = gensalt()
+
+            #hash it
+            hashed1 = hashpw(plain, salt)
+
+            #run check again
+            hashed2 = hashpw(plain, hashed1)
+
+            #hashes shouldn't have changed
+            self.assertEquals(hashed1, hashed2)
+
+    def test_04_gensalt(self):
+        "test gensalt options"
+        hashpw = self.mod.hashpw
+        gensalt = self.mod.gensalt
+        seen = set()
+        for plain, _, _ in self.test_vectors:
+            #only test each distinct plaintext once
+            if plain in seen:
+                continue
+            seen.add(plain)
+
+            #create salt w/ an explicit option
+            #NOTE(review): presumably 4 is the log-rounds parameter -- confirm
+            salt = gensalt(4)
+
+            #hash it
+            hashed1 = hashpw(plain, salt)
+
+            #run check again
+            hashed2 = hashpw(plain, hashed1)
+
+            #hashes shouldn't have changed
+            self.assertEquals(hashed1, hashed2)
+
+    #=========================================================
+    #eoc
+    #=========================================================
+
+if enable_suite("slow_bcrypt"):
+ class SlowBcryptTest(_BcryptTestBase):
+ "test slow bcrypt module"
+ mod = slow_bcrypt
+
+if pybcrypt and enable_suite("bcrypt"):
+ #if pybcrypt is installed, run our unitest on them too,
+ #just to ensure slow_bcrypt's interface is compatible.
+ class PyBcryptTest(_BCryptTestBase):
+ "make sure slow_bcrypt is compatible w/ pybcrypt"
+ mod = pybcrypt
+
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/tests/test_security_policy.py b/bps/tests/test_security_policy.py
new file mode 100644
index 0000000..ac726f7
--- /dev/null
+++ b/bps/tests/test_security_policy.py
@@ -0,0 +1,1710 @@
+"""bps.security.policy unit tests"""
+
+#=========================================================
+#imports
+#=========================================================
+#core
+from __future__ import with_statement
+import warnings
+#site
+#pkg
+from bps import *
+from bps.types import stub
+from bps.meta import is_iter
+from bps.tests.utils import TestCase, catch_all_warnings
+from bps.security.policy import Policy, Role, Permission, PERM
+
+#=========================================================
+#helpers
+#=========================================================
+def policy_01x(**kwds):
+ "default roleset used by many tests"
+ #NOTE: this is a non-sensical inheritance hierarchy,
+ #which mainly exists to provide a variety of shapes for testing purposes.
+ # admin -> (user, pirate) -> base
+ policy = Policy(**kwds)
+ b = policy.create_role("base", grantable=False)
+ u = policy.create_role("user", "base")
+ p = policy.create_role("pirate", "base")
+ a = policy.create_role("admin", 'user,pirate')
+ return policy,b,u,p,a
+
+def policy_01(**kwds):
+ return policy_01x(**kwds)[0]
+
+def policy_02x():
+ "default roleset+permset used by many tests"
+ #NOTE: this is a non-sensical set of permissions,
+ #which mainly exists to provide a perms for testing purposes
+ policy = policy_01()
+ a = policy.permit("admin", "perm-a")
+ b = policy.permit("base", "perm-b")
+ u = policy.permit("user", "perm-u")
+ p = policy.permit("pirate", "perm-p")
+ policy.freeze()
+ #NOTE: order should be order of definition
+ return policy,a,b,u,p
+
+def policy_02():
+ return policy_02x()[0]
+
+def policy_03x():
+ "example roleset+permset"
+ #NOTE: this role & permset should make sense,
+ # and are a stripped down version of the policy for a web application.
+
+ policy = Policy()
+
+ #
+ #create a nice user account object
+ #
+ class User(BaseClass):
+ policy = None
+
+ def __init__(self, name=None, roles=None):
+ self.name = name or "NoName"
+ self.roles = roles or ()
+
+ def has_role(self, role):
+ #NOTE: by calling this instead of reading .roles, we get benefit of inherited role system
+ return self.policy.user_has_role(self,role)
+
+ def has_permission(self, *a, **k):
+ return self.policy.user_has_permission(self, *a, **k)
+ User.policy = policy
+
+ #
+ #create roles
+ #
+ policy.create_role("person", desc="base role for all users")
+ policy.create_role("client", inherits="person", desc="role for end users of system")
+ policy.create_role("employee", inherits="person", desc="role for people running system")
+ policy.create_role("admin", inherits="employee", desc="system administrator")
+
+ #
+ #create some helpful guard funcs
+ #
+ def is_own_account(user, item=None):
+ "only permit action if it's on user's own account"
+ if item:
+ return user == item
+ return True
+
+ def is_client_account(user, item=None):
+ "only permit action if it's on a client user account"
+ if item:
+ return item.has_role("client")
+ return True
+
+ #
+ #grant actions to roles
+ #
+
+ #all users can edit own account, and log in
+ policy.permit_list(["person"],[
+ dict(action="sign-in", klass=False),
+ dict(action="view", klass="user", guard=is_own_account),
+ dict(action="update", klass="user", guard=is_own_account),
+ ])
+
+ #all clients can manage their journal entries
+ #NOTE: in real-world, a guard would be used to
+ # require journal was owned by user:
+ ##def is_own_journal(user, item=None):
+ ## if item:
+ ## return item.owner == user
+ ## return True
+ policy.permit_list(["client"],[
+ dict(action="list", klass="journal"),
+ dict(action="create", klass="journal"),
+ dict(action="view", klass="journal"),
+ dict(action="update", klass="journal", attrs=("owner",), deny=True),
+ dict(action="update", klass="journal"),
+ dict(action="retire", klass="journal"),
+ ])
+
+ #all employees can manage client accounts
+ policy.permit_list(["employee"],[
+ dict(action="list", klass="user", guard=is_client_account),
+ dict(action="create", klass="user", guard=is_client_account),
+ dict(action="view", klass="user", guard=is_client_account),
+ dict(action="update", klass="user", guard=is_client_account),
+ dict(action="retire", klass="user", guard=is_client_account),
+ ])
+
+ #and admins can perform all std actions on any class
+ policy.permit_list(["admin"],[
+ dict(action="list", klass=True),
+ dict(action="create", klass=True),
+ dict(action="view", klass=True),
+ dict(action="update", klass=True),
+ dict(action="retire", klass=True),
+ dict(action="delete", klass=True), #and only admins can delete things forever
+ ])
+
+ #
+ #freeze policy & create some users
+ #
+ policy.freeze()
+ admin = User(roles=("admin",))
+ employee = User(roles=("employee",))
+ client = User(roles=("client",))
+ return policy, admin, employee, client, User
+
+#TODO: remove this stub
+def check_rtype(self, func, deftype, elems, ordered=False):
+ "run test for func with rtype return"
+ return self.check_function_rtype(func, retval=elems, rtype=deftype, ordered=ordered)
+
+#=========================================================
+#support classes
+#=========================================================
+class RoleClassTest(TestCase):
+ "test role class itself"
+
+ def test_const_basic(self):
+ "test basic role constructor"
+ r = Role("admin")
+ self.assertEquals(r.name, "admin")
+ self.assertEquals(r.title, "Admin")
+ self.assertIs(r.desc, None)
+ self.assertEquals(r.inherits, frozenset())
+ self.assertEquals(r.grantable, True)
+
+ def test_const_invalid(self):
+ "test invalid role constructors"
+ self.assertRaises(TypeError, Role)
+ self.assertRaises(TypeError, Role, "admin", xxx=123)
+
+ def test_const_full(self):
+ "test common role constructor"
+ r = Role("admin", title="captain", inherits=["user", "pirate", "user"],
+ desc="descy", grantable=False)
+ self.assertEquals(r.name, "admin")
+ self.assertEquals(r.title, "captain")
+ self.assertEquals(r.desc, "descy")
+ self.assertEquals(r.grantable, False)
+ self.assertEquals(r.inherits, frozenset(["user", "pirate"]))
+
+ def test_eq(self):
+ "test role equal to itself"
+
+ r1 = Role("admin")
+ self.assertEquals(r1,r1)
+
+ r2 = Role("other")
+ self.assertNotEquals(r1,r2)
+
+ #NOTE: module doesn't currently assert either way on this,
+ # an eq operator may be defined in the future.
+ # this test is just to ensure current behavior is reliable.
+ r3 = Role("admin")
+ self.assertNotEquals(r1,r3)
+
+class PermissionClassTest(TestCase):
+ "test permission class itself"
+
+ #=========================================================
+ #constructors
+ #=========================================================
+ def test_const_basic(self):
+ "test basic perm constructor"
+ p = Permission("sign-in")
+ self.assertEquals(p.action, "sign-in")
+ self.assertEquals(p.desc, None)
+ self.assertEquals(p.klass, None)
+ self.assertEquals(p.attrs, None)
+ self.assertEquals(p.guard, None)
+ self.assertEquals(p.deny, False)
+ self.assertEquals(p.priority, 0)
+
+ def test_const_invalid(self):
+ "test invalid perm constructors"
+
+ #make sure action is required
+ self.assertRaises(TypeError, Permission)
+ self.assertRaises(TypeError, Permission, klass=False)
+
+ #make sure unknowns raise err
+ self.assertRaises(TypeError, Permission, "sign-in", xxx=123)
+
+ def test_const_full(self):
+ "test full perm constructor"
+ def g(user):
+ return True
+ p = Permission(
+ action="update",
+ klass="user",
+ attrs=("bob","sue"),
+ guard=g,
+ deny=True,
+ desc="desc",
+ priority=-100,
+ )
+ self.assertEquals(p.action, "update")
+ self.assertEquals(p.desc, "desc")
+ self.assertEquals(p.klass, "user")
+ self.assertEquals(p.attrs, frozenset(["bob", "sue"]))
+ self.assertEquals(p.guard, g)
+ self.assertEquals(p.guard_kwds, frozenset(["user"]))
+ self.assertEquals(p.deny, True)
+ self.assertEquals(p.priority, -100)
+
+ #=========================================================
+ #check() basic usage
+ #=========================================================
+ def test_check_params(self):
+ "test check() params"
+ u = stub()
+ p = Permission("update", "user", attrs=("bob", None))
+
+ self.assertEquals(p.check(u,"update","user",u, attr=None), PERM.ALLOW)
+ self.assertEquals(p.check(u,"update","journal",u, attr=None), PERM.PENDING)
+
+ p = Permission("sign-in", False)
+ self.assertEquals(p.check(u,"sign-in"), PERM.ALLOW)
+ self.assertEquals(p.check(u,"sign-in",None), PERM.ALLOW)
+ self.assertEquals(p.check(u,"sign-in","user"), PERM.PENDING)
+
+ def test_check_invalid_values(self):
+ "test passing invalid values to check"
+ u = stub()
+ p = Permission("update")
+
+ #haven't decided the policy for these border cases,
+ #so they're currently forbidden..
+ self.assertRaises(ValueError, p.check, u, "update", attr="")
+ self.assertRaises(ValueError, p.check, u, "update", attr=False)
+ self.assertRaises(ValueError, p.check, u, "update", klass="")
+ self.assertRaises(ValueError, p.check, u, "update", klass=False)
+
+ #=========================================================
+ #could_allow() basic usage
+ #=========================================================
+ def test_could_allow_params(self):
+ #simple check against action + klass
+ p = Permission("update", "user", attrs=("bob", None))
+ self.assert_(p.could_allow("update","user", attr=None))
+ self.assert_(not p.could_allow("update","journal", attr=None))
+
+ #simple check against action - klass
+ p = Permission("sign-in", False)
+ self.assert_(p.could_allow("sign-in"))
+ self.assert_(p.could_allow("sign-in",None))
+ self.assert_(not p.could_allow("sign-in","user"))
+
+ def test_could_allow_guard(self):
+ #simple check that guard is ignored
+ p = Permission("update", "user", guard=lambda user: False)
+ self.assert_(p.could_allow("update","user"))
+ self.assert_(not p.could_allow("update","journal"))
+
+ #=========================================================
+ #action matching
+ #=========================================================
+ def test_action(self):
+ "test generic action specifier"
+ u = stub()
+ p = Permission("update")
+ self.assertEquals(p.check(u, None), PERM.PENDING)
+ self.assertEquals(p.check(u, "update"), PERM.ALLOW)
+ self.assertEquals(p.check(u, "zzz"), PERM.PENDING)
+
+ #=========================================================
+ #klass matching
+ #=========================================================
+ #NOTE: klass="" and klass=False shouldn't be passed into check normally,
+ # only pass in a non-empty string or None.
+
+ def test_klass_false(self):
+ "test klass=False matches only if klass missing"
+ u = stub()
+ p = Permission("sign-in", klass=False)
+ self.assertEquals(p.klass, False)
+
+ self.assertEquals(p.check(u,"sign-in"), PERM.ALLOW)
+ self.assertEquals(p.check(u,"sign-in",None), PERM.ALLOW)
+ self.assertEquals(p.check(u,"sign-in","user"), PERM.PENDING)
+ self.assertEquals(p.check(u,"sign-in","journal"), PERM.PENDING)
+
+ def test_klass_none(self):
+ "test klass=None matches anything"
+ u = stub()
+
+ p1 = Permission("sign-in")
+ self.assertEquals(p1.klass, None)
+
+ p = Permission("sign-in", klass=None)
+ self.assertEquals(p.klass, None)
+
+ self.assertEquals(p.check(u,"sign-in"), PERM.ALLOW)
+ self.assertEquals(p.check(u,"sign-in",None), PERM.ALLOW)
+ self.assertEquals(p.check(u,"sign-in","user"), PERM.ALLOW)
+ self.assertEquals(p.check(u,"sign-in","journal"), PERM.ALLOW)
+
+ def test_klass_true(self):
+ "test klass=True matches anything except missing klass"
+ u = stub()
+
+ p = Permission("sign-in", klass=True)
+ self.assertEquals(p.klass, True)
+
+ self.assertEquals(p.check(u,"sign-in"), PERM.PENDING)
+ self.assertEquals(p.check(u,"sign-in",None), PERM.PENDING)
+ self.assertEquals(p.check(u,"sign-in","user"), PERM.ALLOW)
+ self.assertEquals(p.check(u,"sign-in","journal"), PERM.ALLOW)
+
+ def test_klass_exact(self):
+ "test klass=<str> matches only that string"
+ u = stub()
+
+ p = Permission("sign-in", klass="user")
+ self.assertEquals(p.klass, "user")
+
+ self.assertEquals(p.check(u,"sign-in"), PERM.PENDING)
+ self.assertEquals(p.check(u,"sign-in",None), PERM.PENDING)
+ self.assertEquals(p.check(u,"sign-in","user"), PERM.ALLOW)
+ self.assertEquals(p.check(u,"sign-in","journal"), PERM.PENDING)
+
+ #=========================================================
+ #attr matching
+ #=========================================================
+ #NOTE: no policy is currently set for attr="" and attr=False,
+ # these shouldn't be used.
+
+ def test_attrs_none(self):
+ "test attrs=None matches any/no attrs"
+ u = stub()
+
+ p1 = Permission("update")
+ self.assertEquals(p1.attrs, None)
+
+ p = Permission("update", attrs=None)
+ self.assertEquals(p.attrs, None)
+
+ self.assertEquals(p.check(u,"update"), PERM.ALLOW)
+ self.assertEquals(p.check(u,"update", attr=None), PERM.ALLOW)
+ self.assertEquals(p.check(u,"update", attr="xxx"), PERM.ALLOW)
+ self.assertEquals(p.check(u,"update", attr="zzz"), PERM.ALLOW)
+
+ def test_attrs_empty(self):
+ "test attrs=(), attrs=(None,), and attrs=False match no attr"
+ u = stub()
+
+ p1 = Permission("update", attrs=False)
+ self.assertEquals(p1.attrs, frozenset([None]))
+
+ p1 = Permission("update", attrs=(None,))
+ self.assertEquals(p1.attrs, frozenset([None]))
+
+ p = Permission("update", attrs=())
+ self.assertEquals(p.attrs, frozenset([None]))
+
+ self.assertEquals(p.check(u,"update"), PERM.ALLOW)
+ self.assertEquals(p.check(u,"update", attr=None), PERM.ALLOW)
+ self.assertEquals(p.check(u,"update", attr="xxx"), PERM.PENDING)
+ self.assertEquals(p.check(u,"update", attr="zzz"), PERM.PENDING)
+
+ def test_attrs_explicit(self):
+ "test attrs=('a','b') matches only those attrs"
+ u = stub()
+
+ p = Permission("update", attrs=("xxx", "yyy","xxx"))
+ self.assertEquals(p.attrs, frozenset(["xxx", "yyy"]))
+
+ self.assertEquals(p.check(u,"update"), PERM.PENDING)
+ self.assertEquals(p.check(u,"update", attr=None), PERM.PENDING)
+ self.assertEquals(p.check(u,"update", attr="xxx"), PERM.ALLOW)
+ self.assertEquals(p.check(u,"update", attr="zzz"), PERM.PENDING)
+
+ def test_attrs_explicit2(self):
+ "test attrs=('a','b',None) matches only those or no attrs"
+ u = stub()
+
+ p = Permission("update", attrs=("xxx", "yyy",None))
+ self.assertEquals(p.attrs, frozenset(["xxx", "yyy",None]))
+
+ self.assertEquals(p.check(u,"update"), PERM.ALLOW)
+ self.assertEquals(p.check(u,"update", attr=None), PERM.ALLOW)
+ self.assertEquals(p.check(u,"update", attr="xxx"), PERM.ALLOW)
+ self.assertEquals(p.check(u,"update", attr="zzz"), PERM.PENDING)
+
+ #=========================================================
+ #guard matching
+ #=========================================================
+ result = None #used as temp location for some guards in these tests
+
+ def test_guard_retval(self):
+ "test guard w/o args, using diff retvals"
+
+ #test True return value
+ def g():
+ self.result = True
+ return True
+ u = stub()
+ p = Permission("update", guard=g)
+ self.result = False
+ self.assertEquals(p.check(u,"update"), PERM.ALLOW)
+ self.assert_(self.result)
+
+ #test False return value
+ def g():
+ self.result = True
+ return False
+ u = stub()
+ p = Permission("update", guard=g)
+ self.result = False
+ self.assertEquals(p.check(u,"update"), PERM.PENDING)
+ self.assert_(self.result)
+
+ #test None return value
+ def g():
+ self.result = True
+ return None
+ u = stub()
+ p = Permission("update", guard=g)
+ self.result = False
+ self.assertEquals(p.check(u,"update"), PERM.PENDING)
+ self.assert_(self.result)
+
+ #test false-as-bool return value
+ def g():
+ self.result = True
+ return ""
+ u = stub()
+ p = Permission("update", guard=g)
+ self.result = False
+ self.assertEquals(p.check(u,"update"), PERM.PENDING)
+ self.assert_(self.result)
+
+ #test true-as-bool return value
+ def g():
+ self.result = True
+ return "xxx"
+ u = stub()
+ p = Permission("update", guard=g)
+ self.result = False
+ self.assertEquals(p.check(u,"update"), PERM.ALLOW)
+ self.assert_(self.result)
+
+ def test_guard_after_patterns(self):
+ "test guard called after patterns"
+ def g():
+ self.result = True
+ return True
+ u = stub()
+
+ #check action
+ self.result = False
+ p = Permission("update", guard=g)
+ self.assertEquals(p.check(u,"list"),PERM.PENDING)
+ self.assert_(not self.result)
+ self.assertEquals(p.check(u,"update"),PERM.ALLOW)
+ self.assert_(self.result)
+
+ #check klass
+ self.result = False
+ p = Permission("update", "user", guard=g)
+ self.assertEquals(p.check(u,"update", "journal"),PERM.PENDING)
+ self.assert_(not self.result)
+ self.assertEquals(p.check(u,"update", "user"),PERM.ALLOW)
+ self.assert_(self.result)
+
+ #check attrs
+ self.result = False
+ p = Permission("update", attrs=["x"], guard=g)
+ self.assertEquals(p.check(u,"update", attr="y"),PERM.PENDING)
+ self.assert_(not self.result)
+ self.assertEquals(p.check(u,"update", attr="x"),PERM.ALLOW)
+ self.assert_(self.result)
+
+ def test_guard_wildcard(self):
+ "test guard w/ all kwds defaulting"
+ def g(**kwds):
+ self.result = kwds
+ return True
+ u = stub()
+ p = Permission("update", guard=g)
+ self.result = False
+ self.assertEquals(p.check(u,"update"), PERM.ALLOW)
+ self.assertEquals(self.result,dict(
+ user=u,
+ action="update",
+ klass=None,
+ attr=None,
+ item=None,
+ scope=None,
+ perm=p,
+ ))
+
+ def test_guard_full(self):
+ "test guard w/ all kwds filled in"
+ def g(**kwds):
+ self.result = kwds
+ return True
+ u = stub()
+ p = Permission("update", guard=g)
+ self.result = False
+ self.assertEquals(p.check(u,"update","user",u,attr="xxx",scope=self), PERM.ALLOW)
+ self.assertEquals(self.result,dict(
+ user=u,
+ action="update",
+ klass="user",
+ attr="xxx",
+ item=u,
+ scope=self,
+ perm=p,
+ ))
+
+ def test_guard_some(self):
+ "test guard w/ all kwds filled in but few used"
+ def g(user, action=None, item=None):
+ self.result = dict(user=user, action=action, item=item)
+ return True
+ u = stub()
+ p = Permission("update", guard=g)
+ self.result = False
+ self.assertEquals(p.check(u,"update","user",u,attr="xxx",scope=self), PERM.ALLOW)
+ self.assertEquals(self.result,dict(
+ user=u,
+ action="update",
+ item=u,
+ ))
+
+ #=========================================================
+ #test eq
+ #=========================================================
+ def test_eq(self):
+ "test permission equality operator works"
+
+ #test match considers action, klass, attrs, guard, deny
+ for k,a,b in (
+ ("action", "update", "delete"),
+ ("klass", None, False),
+ ("klass", "user", False),
+ ("klass", "user", "journal"),
+ ("attrs", ("xxx",), ("yyy",)),
+ ("attrs", (), ("yyy",)),
+ ("guard", lambda : True, lambda : True),
+ ("deny", True, False),
+ ):
+ ad = {k:a}
+ bd = {k:b}
+ if k != "action":
+ ad["action"] = bd["action"] = "update"
+ p1 = Permission(**ad)
+ p2 = Permission(**ad)
+ p3 = Permission(**bd)
+ self.assertEquals(p1,p2)
+ self.assertEquals(p1,p1)
+ self.assertNotEquals(p1,p3)
+ self.assertNotEquals(p2,p3)
+
+ #test match discards non-perms
+ p = Permission("update")
+ self.assertNotEquals(p, None)
+ self.assertNotEquals(p, "xxx")
+ self.assertNotEquals(p, Role("admin"))
+
+ #test match ignores desc
+ p1 = Permission("update", desc="xxx")
+ p2 = Permission("update", desc="yyy")
+ self.assertEquals(p1,p1)
+ self.assertEquals(p1,p2)
+
+ #=========================================================
+ #eoc
+ #=========================================================
+
+#=========================================================
+#
+#=========================================================
+class RoleManagementTest(TestCase):
+ "test policy's role management functions"
+
+ #=========================================================
+ #test create_role(name, *a, **k) -> role_obj
+ #=========================================================
+ def test_create_role_simple(self):
+ "test creating a role works properly"
+ policy = Policy()
+
+ r = policy.create_role("admin")
+ self.assertIsInstance(r, Role)
+ self.assertEquals(r.name, "admin")
+ self.assertEquals(r.title, "Admin")
+ self.assertEquals(r.inherits, frozenset())
+
+ self.assertElementsEqual(policy.get_role_objs(), [r])
+
+ def test_create_role_inherits(self):
+ "test role inheritance works properly"
+ policy = Policy()
+
+ p = policy.create_role("pirate")
+ u = policy.create_role("user")
+
+ r = policy.create_role("admin", inherits=["user", "pirate", "user"])
+ self.assertEquals(r.name, "admin")
+ self.assertEquals(r.inherits, frozenset(["user", "pirate"]))
+
+ self.assertElementsEqual(policy.get_role_objs(), [u,p,r])
+
+ def test_create_role_inherits_undefined(self):
+ "test role inheritance requires existing parents"
+ policy = Policy()
+
+ #shouldn't be able to create role w/ out parents
+ self.assertRaises(ValueError, policy.create_role, "admin", ["user", "pirate"])
+ #KeyError: undefined roles: user, pirate
+
+ u = policy.create_role("user")
+
+ #without _all_ parents
+ self.assertRaises(ValueError, policy.create_role, "admin", ["user", "pirate"])
+ #KeyError: undefined roles: pirate
+
+ self.assertElementsEqual(policy.get_role_objs(), [u])
+
+ def test_create_role_inherit_self(self):
+ "test role can't inherit from self"
+ policy = Policy()
+
+ self.assertRaises(ValueError, policy.create_role, "user", ['user'])
+ #ValueError: role can't inherit from self
+
+ def test_create_role_frozen(self):
+ "test role can't be created after frozen"
+ policy = Policy()
+ policy.freeze()
+
+ self.assertRaises(AssertionError, policy.create_role, "user")
+ #AssertionError: policy frozen
+
+ def test_create_role_subclassed(self):
+ "test create_role() honors Policy.Role"
+ #make sure orig Role won't take "xxx"
+ self.assertRaises(TypeError, Policy.Role, "test", xxx=23)
+
+ #create role subclass which accepts "xxx"
+ class MyRole(Policy.Role):
+ def __init__(self, *a, **k):
+ self.xxx = k.pop("xxx",None)
+ self.__super.__init__(*a,**k)
+
+ #create policy class which uses MyRole
+ class MyPolicy(Policy):
+ Role = MyRole
+
+ #try creating role, make sure MyRole was used
+ policy = MyPolicy()
+ r = policy.create_role("test", xxx=23)
+ self.assertIsInstance(r,MyRole)
+ self.assertEqual(r.xxx, 23)
+
+ #=========================================================
+ #test get_role_obj(name, default=Undef) -> role_obj|default/Error
+ #=========================================================
+ def test_get_role_obj(self):
+ policy = Policy()
+
+ #check raises error by default
+ self.assertRaises(KeyError, policy.get_role_obj, "user")
+
+ #test default works
+ self.assertIs(policy.get_role_obj("user",None), None)
+
+ #test returns correct result
+ u = policy.create_role("user")
+ self.assertIs(policy.get_role_obj("user"), u)
+
+ #=========================================================
+ #test get_role_objs(roles=None, grantable=None, rtype=set)-> role_objs
+ #=========================================================
+ def test_gro_plain(self):
+ "test get_role_objs() w/o args"
+ policy,b,u,p,a = policy_01x()
+ result = policy.get_role_objs()
+ self.assertElementsEqual(result, [b,u,p,a])
+
+ def test_gro_roles(self):
+ "test get_role_objs() with role name filter"
+ policy,b,u,p,a = policy_01x()
+
+ #check normal
+ result = policy.get_role_objs(['admin','pirate'])
+ self.assertElementsEqual(result,[a,p])
+
+ #check w/ unknown
+ self.assertRaises(KeyError, policy.get_role_objs, ['admin', 'person'])
+
+ #check ordering matches input list
+ result = policy.get_role_objs(["admin","base","user","pirate"],
+ rtype=list)
+ self.assertEquals(result,[a,b,u,p])
+
+ #check ordering matches input list (just in case 1st was accident)
+ result = policy.get_role_objs(["base","user","admin","pirate"],
+ rtype=list)
+ self.assertEquals(result,[b,u,a,p])
+
+ def test_gro_grantable(self):
+ "test get_role_objs() with grantable filter"
+ policy,b,u,p,a = policy_01x()
+ self.assertElementsEqual(policy.get_role_objs(grantable=None),[b,u,p,a])
+ self.assertElementsEqual(policy.get_role_objs(grantable=True),[u,p,a])
+ self.assertElementsEqual(policy.get_role_objs(grantable=False),[b])
+
+ def test_gro_grantable_roles(self):
+ "test get_role_objs() with grantable & role name filters"
+ policy,b,u,p,a = policy_01x()
+ self.assert_sets_equal(
+ policy.get_role_objs(['admin','base'], grantable=True),
+ [a])
+ self.assert_sets_equal(
+ policy.get_role_objs(['admin','base'], grantable=False),
+ [b])
+
+ def test_gro_rtype(self):
+ "test get_role_objs() rtype option"
+ policy,b,u,p,a = policy_01x()
+ elems = [b,u,p,a]
+ check_rtype(self, policy.get_role_objs, set, elems)
+
+ func = partial(policy.get_role_objs, ["base","user","pirate","admin"])
+ check_rtype(self, func, set, elems, ordered=True)
+
+ #=========================================================
+ #test get_roles(grantable=None, rtype=set)
+ #=========================================================
+ def test_gr_plain(self):
+ "test get_roles() w/o args"
+ policy = policy_01()
+ result = policy.get_roles()
+ self.assert_sets_equal(result, ["base", "admin", "user", "pirate"])
+
+ def test_gr_grantable(self):
+ "test get_roles() with grantable filter"
+ policy = policy_01()
+ self.assertElementsEqual(
+ policy.get_roles(grantable=None),
+ ["base","admin","user","pirate"])
+ self.assertElementsEqual(
+ policy.get_roles(grantable=True),
+ ["admin","user","pirate"])
+ self.assertElementsEqual(
+ policy.get_roles(grantable=False),
+ ["base"])
+
+ def test_gr_rtype(self):
+ "test get_roles() rtype option"
+ policy = policy_01()
+ elems = ["base", "user", "pirate", "admin"]
+ out = policy.get_roles(rtype=list)
+ self.assert_sets_equal(out,elems)
+ check_rtype(self, policy.get_roles, set, out, ordered=True)
+
+ #=========================================================
+ #test has_role(role, grantable=None)
+ #=========================================================
+ def test_has_role(self):
+ "test has_role()"
+ policy = policy_01()
+
+ #test grantable role
+ self.assert_(policy.has_role("admin"))
+ self.assert_(policy.has_role("admin", grantable=True))
+ self.assert_(not policy.has_role("admin", grantable=False))
+
+ #test ungrantable role
+ self.assert_(policy.has_role("base"))
+ self.assert_(not policy.has_role("base", grantable=True))
+ self.assert_(policy.has_role("base", grantable=False))
+
+ #test unknown role
+ self.assert_(not policy.has_role("fooey"))
+ self.assert_(not policy.has_role("fooey", grantable=True))
+ self.assert_(not policy.has_role("fooey", grantable=False))
+
+ def test_has_role_inherits(self):
+ "test has_role() inherits kwd"
+ policy = policy_01()
+
+ self.assertEqual(policy.has_role("admin", inherits="base"), True)
+ self.assertEqual(policy.has_role("admin", inherits="pirate"), True)
+ self.assertEqual(policy.has_role("admin", inherits=["user", "base"]), True)
+
+ self.assertEqual(policy.has_role("user", inherits="base"), True)
+ self.assertEqual(policy.has_role("user", inherits="pirate"), False)
+ self.assertEqual(policy.has_role("user", inherits=["admin", "base"]), True)
+
+ #make sure can't inherit from self
+ self.assertEqual(policy.has_role("user", inherits="user"), False)
+
+ #or from child
+ self.assertEqual(policy.has_role("user", inherits="admin"), False)
+
+ #=========================================================
+ #test get_user_roles(user, inherited=True, rtype=set)
+ #=========================================================
+ def test_gur_simple(self):
+ "test basic get_user_roles behavior"
+ policy = policy_01()
+ user = stub(roles=("user",))
+
+ self.assert_sets_equal(
+ policy.get_user_roles(user),
+ ['user','base'],
+ )
+
+ self.assert_sets_equal(
+ policy.get_user_roles(user, inherited=False),
+ ['user',],
+ )
+
+ def test_gur_rtype(self):
+ "test get_user_roles() rtype"
+ policy = policy_01()
+ user = stub(roles=("user",))
+ check_rtype(self, partial(policy.get_user_roles,user),
+ set,["user","base"])
+
+ def test_gur_ungrantable(self):
+ "test get_user_roles() prevents ungrantable roles"
+ policy = policy_01()
+ user = stub(roles=("user","base"))
+ self.assertRaises(ValueError, policy.get_user_roles, user)
+ self.assertRaises(ValueError, policy.get_user_roles, user, inherited=False)
+
+ #=========================================================
+ #test user_has_role(user,role,inherited=True)
+ #=========================================================
+ def test_uhr_simple(self):
+ "test user_has_role() basic behavior"
+ policy = policy_01()
+
+ user = stub(roles=("user",))
+ def func(role):
+ return policy.user_has_role(user,role)
+ self.assertEqual(func("base"),True)
+ self.assertEqual(func("user"),True)
+ self.assertEqual(func("pirate"),False)
+ self.assertEqual(func("admin"),False)
+
+ user2 = stub(roles=("admin",))
+ def func(role):
+ return policy.user_has_role(user2,role)
+ self.assertEqual(func("base"),True)
+ self.assertEqual(func("user"),True)
+ self.assertEqual(func("pirate"),True)
+ self.assertEqual(func("admin"),True)
+
+ def test_uhr_inherit(self):
+ "test user_has_role() inherited=False flag"
+ policy = policy_01()
+
+ user = stub(roles=("user",))
+ def func(role):
+ return policy.user_has_role(user,role,inherited=False)
+ self.assertEqual(func("base"),False)
+ self.assertEqual(func("user"),True)
+ self.assertEqual(func("pirate"),False)
+ self.assertEqual(func("admin"),False)
+
+ user2 = stub(roles=("admin",))
+ def func(role):
+ return policy.user_has_role(user2,role,inherited=False)
+ self.assertEqual(func("base"),False)
+ self.assertEqual(func("user"),False)
+ self.assertEqual(func("pirate"),False)
+ self.assertEqual(func("admin"),True)
+
+ #=========================================================
+ #test user_has_any_role(user,roles,inherited=True)
+ #=========================================================
+ def test_uhar_simple(self):
+ "test user_has_any_role() basic behavior"
+ policy = policy_01()
+
+ user = stub(roles=("user",))
+ def func(*roles):
+ return policy.user_has_any_role(user,roles)
+ self.assertEqual(func("base","pirate"),True)
+ self.assertEqual(func("user","pirate"),True)
+ self.assertEqual(func("pirate","pirate"),False)
+ self.assertEqual(func("admin","pirate"),False)
+
+ user2 = stub(roles=("user","pirate"))
+ def func(*roles):
+ return policy.user_has_any_role(user2,roles)
+ self.assertEqual(func("base"),True)
+ self.assertEqual(func("user","pirate"),True)
+ self.assertEqual(func("pirate"),True)
+ self.assertEqual(func("admin","pirate"),True)
+
+ def test_uhar_inherited(self):
+ "test user_has_any_role() inherited=False"
+ policy = policy_01()
+
+ user = stub(roles=("user",))
+ def func(*roles):
+ return policy.user_has_any_role(user,roles, inherited=False)
+ self.assertEqual(func("base","pirate"),False)
+ self.assertEqual(func("user","pirate"),True)
+ self.assertEqual(func("pirate","pirate"),False)
+ self.assertEqual(func("admin","pirate"),False)
+
+ user2 = stub(roles=("user","pirate"))
+ def func(*roles):
+ return policy.user_has_any_role(user2,roles, inherited=False)
+ self.assertEqual(func("base"),False)
+ self.assertEqual(func("user","pirate"),True)
+ self.assertEqual(func("pirate"),True)
+ self.assertEqual(func("admin","pirate"),True)
+
+ #=========================================================
+ #eoc
+ #=========================================================
+
+class RoleHelperTest(TestCase):
+ "test policy's role helper functions"
+ #=========================================================
+ #test expand_roles(roles,rtype=set)
+ #=========================================================
+ def test_expand_roles(self):
+ policy = policy_01()
+
+ #expand from 1
+ self.assertElementsEqual(policy.expand_roles(["admin"]),
+ ["admin", "user", "pirate", "base"])
+
+ #expand from 1 + dup
+ self.assertElementsEqual(policy.expand_roles(["admin","user"]),
+ ["admin", "user", "pirate", "base"])
+
+ #expand from 1 lower
+ self.assertElementsEqual(policy.expand_roles(["user"]),
+ ["user", "base"])
+
+ #expand from 1 lowest
+ self.assertElementsEqual(policy.expand_roles(["base"]),
+ ["base"])
+
+ #expand from none
+ self.assertElementsEqual(policy.expand_roles([]),
+ [])
+
+ def test_expand_roles_rtype(self):
+ policy = policy_01()
+ elems = ["admin", "pirate", "user", "base"]
+ check_rtype(self, partial(policy.expand_roles,["admin", "user"]), set, elems)
+
+ #=========================================================
+ #test collapse_roles(roles,rtype=set)
+ #=========================================================
+ def test_collapse_roles(self):
+ #TODO: test rtype option
+ policy = policy_01()
+
+ #collapse to top
+ self.assertElementsEqual(policy.collapse_roles(["admin", "user", "pirate", "base"]),
+ ["admin"])
+
+ #collapse to medium
+ self.assertElementsEqual(policy.collapse_roles(["user", "pirate", "base"]),
+ ["pirate","user"])
+
+ #collapse to same
+ self.assertElementsEqual(policy.collapse_roles(["user","pirate"]),
+ ["pirate", "user"])
+
+ #collapse none
+ self.assertElementsEqual(policy.collapse_roles([]),
+ [])
+
+ def test_collapse_roles_rtype(self):
+ policy = policy_01()
+ elems = ["pirate", "user"]
+ check_rtype(self, partial(policy.collapse_roles,["pirate","base", "user"]), set, elems)
+
+ #=========================================================
+ #test ascend_roles(roles,rtype=set)
+ #=========================================================
+ def test_ascend_roles(self):
+ policy = policy_01()
+
+ #ascend top
+ self.assertElementsEqual(
+ policy.ascend_roles(["admin"]),
+ [])
+
+ #ascend top and medium
+ self.assertElementsEqual(
+ policy.ascend_roles(["admin", "pirate"]),
+ ["admin"])
+
+ #ascend medium & dup
+ self.assertElementsEqual(
+ policy.ascend_roles(["user", "pirate","base"]),
+ ["admin","user", "pirate"])
+
+ #ascend medium
+ self.assertElementsEqual(
+ policy.ascend_roles(["user", "pirate"]),
+ ["admin"])
+
+ #ascend lowest
+ self.assertElementsEqual(
+ policy.ascend_roles(["base"]),
+ ["admin", "user", "pirate"])
+
+ #ascend none
+ self.assertElementsEqual(
+ policy.ascend_roles([]),
+ [])
+
+ def test_ascend_roles_keep(self):
+ policy = policy_01()
+
+ #ascend top
+ self.assertElementsEqual(
+ policy.ascend_roles(["admin"],keep=True),
+ ["admin"])
+
+ #ascend top and medium
+ self.assertElementsEqual(
+ policy.ascend_roles(["admin", "pirate"],keep=True),
+ ["admin","pirate"])
+
+ #ascend medium & dup
+ self.assertElementsEqual(
+ policy.ascend_roles(["user", "pirate","base"],keep=True),
+ ["admin","user", "pirate","base"])
+
+ #ascend medium
+ self.assertElementsEqual(
+ policy.ascend_roles(["user", "pirate"],keep=True),
+ ["admin","user", "pirate"])
+
+ #ascend lowest
+ self.assertElementsEqual(
+ policy.ascend_roles(["base"],keep=True),
+ ["admin", "user", "pirate","base"])
+
+ #ascend none
+ self.assertElementsEqual(
+ policy.ascend_roles([],keep=True),
+ [])
+
+ def test_ascend_roles_rtype(self):
+ policy = policy_01()
+ elems = [ "admin" ]
+ check_rtype(self, partial(policy.ascend_roles, ["pirate"]), set, elems)
+
+ #=========================================================
+ #test descend_roles(roles,rtype=set)
+ #=========================================================
+ def test_descend_roles(self):
+ policy = policy_01()
+
+ #descend top
+ self.assertElementsEqual(
+ policy.descend_roles(["admin"]),
+ ["user", "pirate", "base"])
+
+ #descend top and medium
+ self.assertElementsEqual(
+ policy.descend_roles(["admin", "pirate"]),
+ ["user", "pirate", "base"])
+
+ #descend medium & dup
+ self.assertElementsEqual(
+ policy.descend_roles(["user", "pirate","base"]),
+ ["base"])
+
+ #descend medium
+ self.assertElementsEqual(
+ policy.descend_roles(["user", "pirate"]),
+ ["base"])
+
+ #descend lowest
+ self.assertElementsEqual(
+ policy.descend_roles(["base"]),
+ [])
+
+ #descend none
+ self.assertElementsEqual(
+ policy.descend_roles([]),
+ [])
+
+ def test_descend_roles_keep(self):
+ policy = policy_01()
+
+ #descend top
+ self.assertElementsEqual(
+ policy.descend_roles(["admin"], keep=True),
+ ["user", "pirate", "base","admin"])
+
+ #descend top and medium
+ self.assertElementsEqual(
+ policy.descend_roles(["admin", "pirate"], keep=True),
+ ["user", "pirate", "base","admin"])
+
+ #descend medium & dup
+ self.assertElementsEqual(
+ policy.descend_roles(["user", "pirate","base"], keep=True),
+ ["base","user","pirate"])
+
+ #descend medium
+ self.assertElementsEqual(
+ policy.descend_roles(["user", "pirate"], keep=True),
+ ["base","user","pirate"])
+
+ #descend lowest
+ self.assertElementsEqual(
+ policy.descend_roles(["base"], keep=True),
+ ["base"])
+
+ #descend none
+ self.assertElementsEqual(
+ policy.descend_roles([], keep=True),
+ [])
+
+ def test_descend_roles_rtype(self):
+ policy = policy_01()
+ elems = [ "base"]
+ check_rtype(self, partial(policy.descend_roles, ["user","pirate"]), set, elems)
+
+ #=========================================================
+ #test ensure_valid_roles(roles,grantable=None)
+ #=========================================================
+ def test_ensure_valid_roles(self):
+ "test ensure_valid_roles()"
+ policy = policy_01()
+
+ #test no roles
+ self.assert_(policy.ensure_valid_roles([]))
+ self.assert_(policy.ensure_valid_roles([], grantable=True))
+ self.assert_(policy.ensure_valid_roles([], grantable=False))
+
+ #test known roles
+ self.assert_(policy.ensure_valid_roles(["user", "base"]))
+
+ #test unknown roles (and combinations w/ known roles)
+ self.assertRaises(ValueError, policy.ensure_valid_roles, ["user", "xxx"])
+ self.assertRaises(ValueError, policy.ensure_valid_roles, [None])
+
+ #test grantable role
+ self.assert_(policy.ensure_valid_roles(["user"], grantable=True))
+ self.assertRaises(ValueError, policy.ensure_valid_roles, ["user"], grantable=False)
+
+ #test ungrantable role
+ self.assert_(policy.ensure_valid_roles(["base"], grantable=False))
+ self.assertRaises(ValueError, policy.ensure_valid_roles, ["base"], grantable=True)
+
+ #check accepts single role string
+ #NOTE: this is deprecated, and may be removed in future.
+ with catch_all_warnings() as wmsgs:
+ self.assert_(policy.ensure_valid_roles("user", grantable=True))
+ wmsgs.pop()
+ self.assertRaises(ValueError, policy.ensure_valid_roles, "user", grantable=False)
+ wmsgs.pop()
+
+ #=========================================================
+ #test ensure_valid_role(role,grantable=None)
+ #=========================================================
+ def test_ensure_valid_role(self):
+ "test ensure_valid_role()"
+ policy = policy_01()
+
+ #test unknown role
+ self.assertRaises(ValueError, policy.ensure_valid_role, None)
+ self.assertRaises(ValueError, policy.ensure_valid_role, "xxx")
+ self.assertRaises(ValueError, policy.ensure_valid_role, "xxx", grantable=True)
+ self.assertRaises(ValueError, policy.ensure_valid_role, "xxx", grantable=False)
+
+ #test grantable role
+ self.assert_(policy.ensure_valid_role("user"))
+ self.assert_(policy.ensure_valid_role("user", grantable=True))
+ self.assertRaises(ValueError, policy.ensure_valid_role, "user", grantable=False)
+
+ #test ungrantable role
+ self.assert_(policy.ensure_valid_role("base"))
+ self.assertRaises(ValueError, policy.ensure_valid_role, "base", grantable=True)
+ self.assert_(policy.ensure_valid_role("base", grantable=False))
+
+ #=========================================================
+ #eoc
+ #=========================================================
+
+class PermissionCreationTest(TestCase):
+ "test policy's permission & link creation functions"
+ #=========================================================
+ #test permit(roles, action, *a, **k) -> perm_obj
+ #=========================================================
+ def test_permit(self):
+ "test permit()"
+ policy = policy_01()
+ self.assertEquals(policy._links,[])
+
+ a = policy.permit(["admin"], "perm-a")
+ b = policy.permit(["user","pirate"],"perm-b")
+ l1,l2 = policy._links
+
+ self.assertEqual(l1.perm_objs, [a])
+ self.assert_sets_equal(l1.base_roles,["admin"])
+ ##self.assert_sets_equal(l1.expanded_roles,["admin"])
+
+ self.assertEqual(l2.perm_objs, [b])
+ self.assert_sets_equal(l2.base_roles,["user",'pirate'])
+ ##self.assert_sets_equal(l2.expanded_roles,["admin","user","pirate"])
+
+ def test_permit_string(self):
+ "test permit() w/ single role as string"
+ policy = policy_01()
+ self.assertEquals(policy._links,[])
+ a = policy.permit("admin","perm-a")
+ l1, = policy._links
+ self.assertEqual(l1.perm_objs, [a])
+ self.assert_sets_equal(l1.base_roles,["admin"])
+
+ #=========================================================
+ #test permit_list(roles, perm_descs) -> perm_objs
+ #=========================================================
+ def test_permit_list(self):
+ "test permit_list()"
+ policy = policy_01()
+ self.assertEquals(policy._links,[])
+ a, = policy.permit_list(["admin"],[dict(action="perm-a")])
+ b,c = policy.permit_list(["user","pirate"],[dict(action="perm-a"),
+ dict(action="perm-b")])
+ l1,l2 = policy._links
+ self.assertEqual(l1.perm_objs, [a])
+ self.assert_sets_equal(l1.base_roles,["admin"])
+ ##self.assert_sets_equal(l1.expanded_roles,["admin"])
+
+ self.assertEqual(l2.perm_objs, [b,c])
+ self.assert_sets_equal(l2.base_roles,["user",'pirate'])
+ ##self.assert_sets_equal(l2.expanded_roles,["admin","user","pirate"])
+
+ def test_permit_list_string(self):
+ "test permit_list() w/ single role as string"
+ policy = policy_01()
+ self.assertEquals(policy._links,[])
+ a, = policy.permit_list("admin",[dict(action="perm-a")])
+ l1, = policy._links
+ self.assertEqual(l1.perm_objs, [a])
+ self.assert_sets_equal(l1.base_roles,["admin"])
+
+ #=========================================================
+ #test create_permission(action, *a, **k) -> perm
+ #=========================================================
+ def test_create_permission(self):
+ "test create_permission"
+ policy = Policy()
+ p = policy.create_permission("update")
+ self.assertEqual(p.action, "update")
+ self.assertIs(p.klass, None)
+ self.assertIs(p.attrs,None)
+
+ p2 = policy.create_permission("update", "test")
+ self.assertEqual(p2.action, "update")
+ self.assertIs(p2.klass, "test")
+ self.assertIs(p.attrs,None)
+
+ p3 = policy.create_permission("update", "test", attrs=("bob",))
+ self.assertEqual(p3.action, "update")
+ self.assertIs(p3.klass, "test")
+ self.assert_sets_equal(p3.attrs, ["bob"])
+
+ policy.freeze()
+ self.assertRaises(AssertionError, policy.create_permission, "edit")
+
+ def test_create_permission_subclassed(self):
+ "test create_permission() honors Policy.Permission"
+ #make sure orig Perm won't take "xxx"
+ self.assertRaises(TypeError, Policy.Permission, "test", xxx=23)
+
+        #create Permission subclass which accepts "xxx"
+ class MyPermission(Policy.Permission):
+ def __init__(self, *a, **k):
+ self.xxx = k.pop("xxx",None)
+ self.__super.__init__(*a,**k)
+
+        #create policy class which uses MyPermission
+ class MyPolicy(Policy):
+ Permission = MyPermission
+
+        #try creating permission, make sure MyPermission was used
+ policy = MyPolicy()
+ r = policy.create_permission("test", xxx=23)
+ self.assertIsInstance(r,MyPermission)
+ self.assertEqual(r.xxx, 23)
+
+ #=========================================================
+ #test create_link(perm_objs, roles)
+ #=========================================================
+ def test_create_link(self):
+ "test create_link()"
+ policy = policy_01()
+ a = policy.create_permission("perm-a")
+ b = policy.create_permission("perm-b")
+ c = policy.create_permission("perm-c")
+ self.assertEquals(policy._links,[])
+
+ policy.create_link([a],['admin'])
+ policy.create_link([b,c],['user','pirate'])
+ l1,l2 = policy._links
+
+ self.assertEqual(l1.perm_objs, [a])
+ self.assert_sets_equal(l1.base_roles,["admin"])
+ ##self.assert_sets_equal(l1.expanded_roles,["admin"])
+
+ self.assertEqual(l2.perm_objs, [b,c])
+ self.assert_sets_equal(l2.base_roles,["user",'pirate'])
+ ##self.assert_sets_equal(l2.expanded_roles,["user","pirate","admin"])
+
+ def test_create_link_string(self):
+ "test create_link() with single role as string"
+ policy = policy_01()
+ a = policy.create_permission("perm-a")
+ b = policy.create_permission("perm-b")
+ c = policy.create_permission("perm-c")
+ self.assertEquals(policy._links,[])
+
+ policy.create_link([a],'admin')
+ l1, = policy._links
+
+ self.assertEqual(l1.perm_objs, [a])
+ self.assert_sets_equal(l1.base_roles,["admin"])
+ ##self.assert_sets_equal(l1.expanded_roles,["admin"])
+
+ #=========================================================
+ #eoc
+ #=========================================================
+
+class PermissionExaminationTest(TestCase):
+ "test policy's permission & link examination functions"
+ #=========================================================
+ #user_has_permission
+ #=========================================================
+ #test positional params (action, klass, item)
+ #test kwd params (attr, scope)
+
+ def test_uhp(self):
+ "test basic user_has_permission() functions"
+ policy, admin, employee, client, User = policy_03x()
+
+ #check everyone inherits from 'person'
+ self.assert_(admin.has_permission("sign-in"))
+ self.assert_(employee.has_permission("sign-in"))
+ self.assert_(client.has_permission("sign-in"))
+
+ #double-check guards are working
+ self.assert_(employee.has_permission("update","user",employee))
+ self.assert_(not employee.has_permission("update","user",admin))
+
+ #check guard works for generic case
+ self.assert_(employee.has_permission("retire", "user"))
+ self.assert_(employee.has_permission("retire", "user", client))
+ self.assert_(not employee.has_permission("retire", "user", employee))
+
+ #check klass=False works
+ self.assert_(not admin.has_permission("sign-in", "user"))
+
+ #check klass=True works
+ self.assert_(not admin.has_permission("delete"))
+ self.assert_(admin.has_permission("delete", "user"))
+
+ #check inheritance matches down, not up
+ self.assert_(not employee.has_permission("delete", "user"))
+
+ #check attrs & deny are working properly
+ self.assert_(client.has_permission("update","journal"))
+ self.assert_(client.has_permission("update","journal", attr="date"))
+ self.assert_(not client.has_permission("update","journal", attr="owner"))
+
+ def test_uhp_params(self):
+ "test user_has_permission() positional vs kwd arguments"
+
+ policy, admin, employee, client, User = policy_03x()
+
+ #all kwds
+ self.assert_(employee.has_permission(action="update", klass="user", attr="xxx", item=employee))
+
+ #min positionals
+ self.assert_(employee.has_permission("sign-in"))
+ self.assert_(employee.has_permission("sign-in",attr="xxx"))
+
+ #too few positionals
+ self.assertRaises(TypeError, employee.has_permission, attr="xxx")
+
+ #all possible positionals
+ self.assert_(employee.has_permission("update", "user", employee, attr="xxx"))
+
+ #too many positionals
+ self.assertRaises(TypeError, employee.has_permission,
+ "update", "user", employee, "xxx")
+
+ #unknown kwds
+ self.assertRaises(TypeError, employee.has_permission,
+ "update", "user", employee, xxx="xxx")
+
+ #=========================================================
+ #test get_user_permissions(user,rtype=tuple)
+ #=========================================================
+    #NOTE: get_user_permissions() wraps get_role_permissions,
+ # so we rely on that test for more complicated inputs.
+ def test_gup_simple(self):
+ "test get_user_permissions()"
+ policy,a,b,u,p = policy_02x()
+
+ user = stub(roles=())
+ self.assertEquals(policy.get_user_permissions(user,rtype=list),[])
+
+ user = stub(roles=("user",))
+ self.assertEquals(policy.get_user_permissions(user,rtype=list),[b,u])
+
+ user = stub(roles=("admin",))
+ self.assertEquals(policy.get_user_permissions(user,rtype=list),[a,b,u,p])
+
+ def test_gup_rtype(self):
+ policy,a,b,u,p = policy_02x()
+
+ user = stub(roles=("user", "pirate"))
+ elems = [b,u,p]
+ check_rtype(self, partial(policy.get_user_permissions,user), tuple, elems, ordered=True)
+
+ #=========================================================
+ #test get_role_permissions(roles, inherited=True, rtype=tuple)
+ #=========================================================
+ def test_grp_00(self):
+ "test get_role_permissions() works under frozen & thawed modes"
+ policy = policy_01()
+ self.assertElementsEqual(policy.get_role_permissions(["user"]),[])
+ policy.freeze()
+ self.assertElementsEqual(policy.get_role_permissions(["user"]),[])
+ #NOTE: current implementation no longer forces policy to be frozen
+ ## "test get_role_permissions() requires frozen policy object"
+ ## policy = policy_01()
+ ## self.assertRaises(AssertionError, policy.get_role_permissions, ["client"])
+ ## policy.freeze()
+ ## self.assertElementsEqual(policy.get_role_permissions(["client"]),[])
+
+ def test_grp_01(self):
+ "test get_role_permissions() obeys role inheritance chain"
+ #test perms reported for correct roles
+ policy,a,b,u,p = policy_02x()
+
+ #test non-inherited permissions are listed correctly
+ def grp(*roles):
+ return list(policy.get_role_permissions(roles, inherited=False))
+ self.assertEqual(grp("base"),[b])
+ self.assertEqual(grp("user"),[u])
+ self.assertEqual(grp("admin"),[a])
+ self.assertEqual(grp("pirate"),[p])
+
+ #test inherited permissions are listed correctly and in order
+ def grp(*roles):
+ return list(policy.get_role_permissions(roles))
+ self.assertEqual(grp("base"),[b])
+ self.assertEqual(grp("user"),[b,u])
+ self.assertEqual(grp("admin"),[a,b,u,p])
+ self.assertEqual(grp("pirate"),[b,p])
+
+ def test_grp_02(self):
+ "test get_role_permissions() handles multiple roles"
+ #test perms reported for correct roles
+ policy,a,b,u,p = policy_02x()
+
+ #test non-inherited permissions are listed correctly
+ def grp(*roles):
+ return list(policy.get_role_permissions(roles, inherited=False))
+ self.assertEqual(grp("base","user"),[b,u])
+ self.assertEqual(grp("pirate","user"),[u,p])
+ self.assertEqual(grp("admin","base"),[a,b])
+ self.assertEqual(grp("admin","pirate"),[a,p])
+
+ #test inherited permissions are listed correctly and in order
+ def grp(*roles):
+ return list(policy.get_role_permissions(roles))
+ self.assertEqual(grp("base","user"),[b,u])
+ self.assertEqual(grp("pirate","user"),[b,u,p])
+ self.assertEqual(grp("admin","base"),[a,b,u,p])
+ self.assertEqual(grp("admin","pirate"),[a,b,u,p])
+
+ def test_grp_priority(self):
+ "test get_role_permissions() obeys priority"
+ policy = policy_01()
+ b = policy.permit("base", "perm-b")
+ p = policy.permit("pirate", "perm-p", priority=-10)
+ u = policy.permit("user", "perm-u", priority=10)
+ a = policy.permit("admin", "perm-a")
+ #NOTE: w/o priority, order would be b,p,u,a
+ policy.freeze()
+
+ def grp(*roles):
+ return list(policy.get_role_permissions(roles))
+ self.assertEqual(grp("base"),[b])
+ self.assertEqual(grp("user"),[u,b])
+ self.assertEqual(grp("admin"),[u,b,a,p])
+ self.assertEqual(grp("pirate"),[b,p])
+
+ def test_grp_rtype(self):
+ "test get_role_permissions() rtype option"
+ policy,a,b,u,p = policy_02x()
+ elems = [a,b,u,p]
+ check_rtype(self,partial(policy.get_role_permissions,["admin"]), tuple, elems)
+
+ def test_grp_string(self):
+ "test get_role_permissions() w/ single role as string"
+ policy,a,b,u,p = policy_02x()
+ self.assertElementsEqual(policy.get_role_permissions("user"),[b,u])
+
+ #=========================================================
+ #get_linked_roles(perm_obj, inherited=True, limit_roles=None, rtype=set)
+ #=========================================================
+ def test_glr_basic(self):
+ "test get_linked_roles() basic behavior"
+ policy,a,b,u,p = policy_02x()
+
+ results = policy.get_linked_roles(a)
+ self.assert_sets_equal(results,["admin"])
+
+ results = policy.get_linked_roles(b)
+ self.assert_sets_equal(results,["admin","base","user", "pirate"])
+
+ results = policy.get_linked_roles(u)
+ self.assert_sets_equal(results,["admin","user"])
+
+ results = policy.get_linked_roles(p)
+ self.assert_sets_equal(results,["admin","pirate"])
+
+ def test_glr_inherited(self):
+ "test get_linked_roles() inherited=False flag"
+ policy,a,b,u,p = policy_02x()
+
+ results = policy.get_linked_roles(a, inherited=False)
+ self.assert_sets_equal(results,["admin"])
+
+ results = policy.get_linked_roles(b, inherited=False)
+ self.assert_sets_equal(results,["base"])
+
+ results = policy.get_linked_roles(u, inherited=False)
+ self.assert_sets_equal(results,["user"])
+
+ results = policy.get_linked_roles(p, inherited=False)
+ self.assert_sets_equal(results,["pirate"])
+
+ def test_glr_limit_roles(self):
+ "test get_linked_roles() limit_roles kwd"
+ policy,a,b,u,p = policy_02x()
+
+ #check no roles
+ results = policy.get_linked_roles(a, limit_roles=[])
+ self.assert_sets_equal(results,[])
+
+ #check restricting roles
+ results = policy.get_linked_roles(b, limit_roles=["user", "pirate"])
+ self.assert_sets_equal(results,["user", "pirate"])
+
+ #check unused + used roles
+ results = policy.get_linked_roles(u, limit_roles=["admin", "user", "base"])
+ self.assert_sets_equal(results,["admin","user"])
+
+ #check unused roles
+ results = policy.get_linked_roles(p, limit_roles=["user"])
+ self.assert_sets_equal(results,[])
+
+ def test_glr_rtype(self):
+ "test get_linked_roles() rtype kwd"
+ policy,a,b,u,p = policy_02x()
+
+ match = ["admin", "base", "user", "pirate"]
+ check_rtype(self, partial(policy.get_linked_roles,b), set, match)
+
+ #=========================================================
+ #test could_allow()
+ #=========================================================
+ def test_ca_basic(self):
+ "test policy.could_allow()"
+ policy, admin, employee, client, User = policy_03x()
+
+ #test one with a guard in the perms
+ self.assert_(policy.could_allow("update", "user"))
+
+ #test one that's not listed
+ self.assert_(not policy.could_allow("smile-at", "user"))
+
+ #test one with a deny rule in the perms
+ self.assert_(policy.could_allow("update", "journal", attr="owner"))
+
+ #=========================================================
+ #eoc
+ #=========================================================
+
+class MiscTest(TestCase):
+ "test misc policy functions"
+ #=========================================================
+ #inspect_user_roles
+ #=========================================================
+ def test_iur_default(self):
+ policy = policy_01()
+ u = stub(roles=("user",))
+ self.assertElementsEqual(policy.get_user_roles(u,inherited=False),["user"])
+ self.assertElementsEqual(policy.get_user_roles(u),["user", "base"])
+
+ def test_iur_override(self):
+ policy = policy_01(inspect_user_roles=lambda u: u.alt_roles)
+ u = stub(alt_roles=("user",))
+ self.assertElementsEqual(policy.get_user_roles(u,inherited=False),["user"])
+ self.assertElementsEqual(policy.get_user_roles(u),["user", "base"])
+
+ def test_iur_valid(self):
+ policy = policy_01()
+ u = stub(roles=("user","xxx"))
+ self.assertRaises(ValueError, policy.get_user_roles, u)
+ #KeyError: xxx role undefined
+
+ def test_iur_grantable(self):
+ policy = policy_01()
+ u = stub(roles=("user","base"))
+ self.assertRaises(ValueError, policy.get_user_roles, u)
+ #ValueError: base role not grantable
+
+ #=========================================================
+ #freeze() and frozen
+ #=========================================================
+ def test_freeze(self):
+ "test freeze() and .frozen"
+ policy = Policy()
+ self.assert_(not policy.frozen)
+ policy.freeze()
+ self.assert_(policy.frozen)
+ self.assertRaises(AssertionError, policy.freeze)
+
+ #=========================================================
+ #ensure_frozen
+ #=========================================================
+ def test_ensure_frozen(self):
+ policy = Policy()
+ self.assertRaises(AssertionError, policy.ensure_frozen)
+ policy.freeze()
+ self.assert_(policy.ensure_frozen())
+
+ #=========================================================
+ #ensure_thawed
+ #=========================================================
+ def test_ensure_thawed(self):
+ policy = Policy()
+ self.assert_(policy.ensure_thawed())
+ policy.freeze()
+ self.assertRaises(AssertionError, policy.ensure_thawed)
+
+ #=========================================================
+ #eoc
+ #=========================================================
+
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/tests/test_security_pwgen.py b/bps/tests/test_security_pwgen.py
new file mode 100644
index 0000000..6c8c34a
--- /dev/null
+++ b/bps/tests/test_security_pwgen.py
@@ -0,0 +1,109 @@
+"""tests for bps.security.pwgen -- (c) Assurance Technologies 2003-2009"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+import os
+from unittest import TestCase
+import hashlib
+import warnings
+from logging import getLogger
+#site
+#pkg
+from bps.security import pwgen
+from bps.rng import drandom, srandom
+from bps.text import asbool
+from bps.tests.utils import enable_suite
+#module
+log = getLogger(__name__)
+
+#=========================================================
+#
+#=========================================================
+if enable_suite("pwgen_dups"):
+
+ class DupTest(TestCase):
+ "Test the rate of duplicate generation for various algorithms"
+ rounds = 10**5
+ duplicates = [
+ #check random engine, mainly as a sanity check
+ dict(alg='hex', size=7, dups=25),
+
+ #check cvc engine
+ dict(alg="cvc", size=8, dups=10),
+ dict(alg="cvc", size=10, dups=0),
+
+ #check the gpw engine for various languages
+ dict(alg="gpw", language="gpw", size=8, dups=350),
+ dict(alg="gpw", language="gpw", size=10, dups=20),
+
+            #the public presets should always have dup counts at least this low
+ dict(alg='strong', dups=0),
+ dict(alg='human', dups=0),
+
+ ]
+
+ def test_duplicates(self):
+ "test rate of duplicate password generation"
+ for preset in self.duplicates:
+ self._test_duplicates_preset(preset)
+
+ def _test_duplicates_preset(self, preset):
+ info = preset.copy()
+ max_dups = info.pop('dups')
+ max_dup_rate = info.pop("dup_rate", 2)
+ log.info("Testing config for duplicates: config=%r rounds=%r", info, self.rounds)
+ gen = pwgen.generate_secret(count="iter", **info)
+ seen = set()
+ hist = {}
+ dups = 0
+ for c in xrange(self.rounds):
+ secret = gen.next()
+ if secret in seen:
+ dups += 1
+ if secret in hist:
+ hist[secret] += 1
+ else:
+ hist[secret] = 2
+ else:
+ seen.add(secret)
+ log.info("\tresults: rate=%.2f%% dups=%r max_dups=%r",
+ 100.0 * dups / self.rounds, dups, max_dups)
+ if hist:
+ def sk(pair):
+ return pair[1], pair[0]
+ values = sorted(hist.iteritems(), key=sk)[:10]
+ self.assertTrue(values[0][1] <= max_dup_rate, "bias detected: %r" % (values, ))
+ log.debug("\ttop dups: %s", values)
+
+ #NOTE: having no better measurement, we'll accept a .08% dup rate, but nothing more.
+ self.assertTrue(dups <= max_dups, "too many duplicates: %r > %r" % (dups, max_dups))
+
+class ConstantTest(TestCase):
+ "make sure predefined constants work"
+ #NOTE: this is _very_ dependant on number of times rand is called,
+ #if code changes, these constants may change.
+ presets = [
+ [ dict(alg="alphanum", count=3),
+ ['xR0uwaf5lE1mLcc9', 'B70Ux4XSb5ZGYdTM', 'FvSW1V08TN3aAY8w'] ],
+ [ dict(alg="human", count=3),
+ ['yuicoboeradai', 'rahimeyiinaa', 'fiyoujaefafuk'] ],
+ ]
+
+ def setUp(self):
+ pwgen.srandom = drandom
+
+ def tearDown(self):
+ pwgen.srandom = srandom
+
+ def test_presets(self):
+ #test preset password known to be created for a given seed value
+ for kwds, result in self.presets:
+ drandom.seed(1234)
+ out = pwgen.generate_secret(**kwds)
+ self.assertEqual(out, result)
+
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/tests/test_security_pwhash.py b/bps/tests/test_security_pwhash.py
new file mode 100644
index 0000000..08601d0
--- /dev/null
+++ b/bps/tests/test_security_pwhash.py
@@ -0,0 +1,1220 @@
+"""tests for bps.security.pwhash -- (c) Assurance Technologies 2003-2009"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+import hashlib
+import warnings
+from logging import getLogger
+#site
+#pkg
+from bps.security import pwhash
+from bps.tests.utils import TestCase, enable_suite, catch_warnings
+#module
+log = getLogger(__name__)
+
+#=========================================================
+#helper password algorithms - these serve both as simple
+# examples, and are used in the unittests
+#=========================================================
class UnsaltedAlg(pwhash.CryptAlgorithm):
    """example algorithm using a constant-salt hash (no per-hash salt)"""
    name = "unsalted"
    salt_bits = 0

    def identify(self, hash):
        #a hash belongs to this alg iff it's exactly 40 hex digits
        if hash is None or len(hash) != 40:
            return False
        try:
            int(hash, 16)
        except ValueError:
            return False
        return True

    def encrypt(self, secret, salt=None, keep_salt=False):
        #NOTE: salt & keep_salt are accepted for interface compat, but ignored
        return hashlib.sha1("boblious" + secret).hexdigest()
+
class SaltedAlg(pwhash.CryptAlgorithm):
    """example naive salted algorithm which never obeys keep_salt
    (note that the default verify() is implemented in this case)
    """
    name = "salted"
    salt_bits = 6*2

    def identify(self, hash):
        #our hashes carry a literal "@salt" marker
        return hash is not None and hash.startswith("@salt")

    def _raw(self, secret, salt):
        #layout: "@salt" marker + 2-char salt + sha1 hexdigest of salt+secret
        return "@salt" + salt + hashlib.sha1(salt + secret).hexdigest()

    def encrypt(self, secret, salt=None, keep_salt=False):
        #a fresh salt is always drawn; salt/keep_salt are deliberately ignored
        return self._raw(secret, pwhash.h64_gen_salt(2))

    def verify(self, secret, hash):
        #re-hash using the 2-char salt embedded after the marker, then compare
        if hash is None:
            return False
        return self._raw(secret, hash[5:7]) == hash
+
class SampleAlg(pwhash.CryptAlgorithm):
    """example salted algorithm which honors the keep_salt keyword"""
    name = "sample"
    salt_bits = 6*2

    def identify(self, hash):
        #our hashes carry a literal "@sam" marker
        return hash is not None and hash.startswith("@sam")

    def encrypt(self, secret, salt=None, keep_salt=False):
        #reuse the 2-char salt embedded in the old hash only when asked to
        if salt and keep_salt:
            real_salt = salt[4:6]
        else:
            real_salt = pwhash.h64_gen_salt(2)
        digest = hashlib.sha1(real_salt + secret).hexdigest()
        return "@sam" + real_salt + digest
+
+#=========================================================
+#other unittest helpers
+#=========================================================
+
+#list of various distinct secrets that all algs are tested with
#list of various distinct secrets that all algs are tested with
#FIX: a missing comma after 'testq' used to merge it with 'testtest' into
#the single string 'testqtesttest', silently dropping two test secrets.
SECRETS = [
    '',
    ' ',
    'test',
    'testa',
    'test test',
    'test bcdef',
    'testq',
    'testtest',
    'Compl3X AlphaNu3meric',
    '4lpHa N|_|M3r1K W/ Cur51|\\|g: #$%(*)(*%#',
    'Really Long Password (tm), which is all the rage nowadays with the cool kids'
    ]
+
class _CryptTestCase(TestCase):
    "base class for CryptAlgorithm subclass testing"

    #=========================================================
    #subclass attrs
    #=========================================================
    alg = None #plugin for class
    positive_knowns = () #list of (secret,hash) pairs to verify they do match
    negative_knowns = () #list of (secret,hash) pairs to verify they don't match
    negative_identify = () # list of hashes that shouldn't identify as this one
    invalid_identify = () # list of this alg's hashes w/ typo

    def _prefix(self):
        #label prefix for this suite -- defaults to the algorithm's name
        return self.alg.name

    secrets = SECRETS #list of default secrets to check

    #=========================================================
    #identify
    #=========================================================
    def test_01_identify_positive_knowns(self):
        "test identify() against known correct algorithm hashes"
        for _, hash in self.positive_knowns:
            self.assertEqual(self.do_identify(hash), True)
        #negative_knowns pair wrong secrets with hashes that still belong
        #to this algorithm, so those hashes must identify as well
        for _, hash in self.negative_knowns:
            self.assertEqual(self.do_identify(hash), True)

    def test_02_identify_negative_knowns(self):
        "test identify() against known wrong algorithm hashes"
        for hash in self.negative_identify:
            self.assertEqual(self.do_identify(hash), False)

    def test_03_identify_invalid_knowns(self):
        "test identify() against known invalid algorithm hashes"
        for hash in self.invalid_identify:
            self.assertEqual(self.do_identify(hash), False)

    def test_04_identify_none(self):
        "test identify() reports hash=None as False"
        self.assertEqual(self.do_identify(None), False)

    #=========================================================
    #verify
    #=========================================================
    def test_10_verify_positive_knowns(self):
        "test verify() against algorithm-specific known positive matches"
        for secret, hash in self.positive_knowns:
            self.assertEqual(self.do_verify(secret, hash), True)

    def test_11_verify_negative_knowns(self):
        "test verify() against algorithm-specific known negative matches"
        for secret, hash in self.negative_knowns:
            self.assertEqual(self.do_verify(secret, hash), False)

    def test_12_verify_derived_negative_knowns(self):
        "test verify() against algorithm-specific deliberate negative matches"
        #mangle each known-good secret and make sure it no longer verifies
        for secret, hash in self.positive_knowns:
            self.assertEqual(self.do_verify(self.do_concat(secret,'x'), hash), False)

#XXX: haven't decided if this should be part of protocol
## def test_13_verify_secret_none(self):
##     "test verify() accepts secret=None and reports False"
##     for _, hash in self.positive_knowns:
##         self.assert_(not self.do_verify(None, hash))

    def test_14_verify_hash_none(self):
        "test verify() reports hash=None as not matching"
        for secret in (None, "", "xxx"):
            self.assert_(not self.do_verify(secret, None))

    #=========================================================
    #encrypt
    #=========================================================
    def test_30_encrypt(self):
        "test encrypt() against standard secrets"
        for secret in self.secrets:
            self.check_encrypt(secret)
        for secret, _ in self.positive_knowns:
            self.check_encrypt(secret)
        for secret, _ in self.negative_knowns:
            self.check_encrypt(secret)

    def test_31_encrypt_gen_salt(self):
        "test encrypt() generates new salt each time"
        if not self.alg.has_salt:
            return
        #the old hash is passed in as salt source; without keep_salt a
        #fresh salt must be chosen, so the result should differ
        for secret, hash in self.positive_knowns:
            hash2 = self.do_encrypt(secret, hash)
            self.assertNotEqual(hash, hash2)

    #NOTE(review): numbered 31 like the previous test; unittest still runs
    #both since the full method names differ, but this was probably meant
    #to carry a distinct number.
    def test_31_encrypt_keep_salt(self):
        "test encrypt() honors keep_salt keyword"
        if not self.alg.has_salt:
            return
        for secret, hash in self.positive_knowns:
            hash2 = self.do_encrypt(secret, hash, keep_salt=True)
            self.assertEqual(hash, hash2)

    def check_encrypt(self, secret):
        "check encrypt() behavior for a given secret"
        #hash the secret
        hash = self.do_encrypt(secret)

        #test identification
        self.assertEqual(self.do_identify(hash), True)

        #test positive verification
        self.assertEqual(self.do_verify(secret, hash), True)

        #test negative verification
        for other in ['', 'test', self.do_concat(secret,'x')]:
            if other != secret:
                self.assertEqual(self.do_verify(other, hash), False,
                    "hash collision: %r and %r => %r" % (secret, other, hash))

    def test_32_secret_chars(self):
        "test secret_chars limitation"
        #hash a really long secret
        secret = "too many secrets" * 16
        tail = "my socrates note" * 8
        hash = self.do_encrypt(secret)

        sc = self.alg.secret_chars
        if sc:
            #algs that truncate input (bcrypt, unixcrypt): anything agreeing
            #on the first sc chars verifies, anything shorter doesn't
            assert sc < len(secret), "need to increase test secret size"
            self.assert_(self.do_verify(secret[:sc], hash))
            self.assert_(self.do_verify(secret + tail, hash))
            self.assert_(not self.do_verify(secret[:sc-1], hash))
        else:
            #if no limit, neither a truncated nor an extended secret verifies
            self.assert_(not self.do_verify(secret[:16], hash))
            self.assert_(not self.do_verify(secret+tail, hash))

    def test_33_encrypt_none(self):
        "test encrypt() refused secret=None"
        self.assertRaises(TypeError, self.do_encrypt, None)

    #=========================================================
    #alg interface
    #=========================================================
    def do_concat(self, secret, prefix):
        #hook so tuple-secret algs (eg postgres) can override the mangling
        return prefix + secret

    def do_encrypt(self, *args, **kwds):
        return self.alg().encrypt(*args, **kwds)

    def do_verify(self, secret, hash):
        return self.alg().verify(secret, hash)

    def do_identify(self, hash):
        return self.alg().identify(hash)

    #=========================================================
    #eoc
    #=========================================================
+
+#=========================================================
+#dummy algorithms
+#=========================================================
+#this tests the dummy algorithms defined above,
+#to make sure creating custom algorithms works properly.
+
#run the generic crypt suite against the constant-salt example alg
class UnsaltedDummyAlgTest(_CryptTestCase):
    alg = UnsaltedAlg
+
#run the generic crypt suite against the naive salted example alg
class SaltedDummyAlgTest(_CryptTestCase):
    alg = SaltedAlg
+
#run the generic crypt suite against the keep_salt-aware example alg
class SampleDummyAlgTest(_CryptTestCase):
    alg = SampleAlg
+
+#=========================================================
+#database hashes
+#=========================================================
class Mysql10CryptTest(_CryptTestCase):
    "test the legacy (pre-4.1) mysql password hash"
    alg = pwhash.Mysql10Crypt

    #remove single space from secrets
    #(this alg ignores whitespace -- see test_whitespace -- so ' '
    #would collide with the empty string and break check_encrypt)
    secrets = [ x for x in _CryptTestCase.secrets if x != ' ' ]

    positive_knowns = (
        ('mypass', '6f8c114b58f2ce9e'),
    )
    invalid_identify = (
        #bad char in otherwise correct hash
        '6z8c114b58f2ce9e',
    )
    negative_identify = (
        #other hashes
        '$6$rounds=123456$asaltof16chars..$BtCwjqMJGx5hrJhZywWvt0RLE8uZ4oPwc',
        '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o.'
    )

    def test_whitespace(self):
        "check whitespace is ignored properly"
        h = self.do_encrypt("mypass")
        h2 = self.do_encrypt("my pass")
        self.assertEqual(h, h2)
+
class Mysql41CryptTest(_CryptTestCase):
    "test the mysql 4.1+ password hash (sha1-based, '*' prefixed)"
    alg = pwhash.Mysql41Crypt
    positive_knowns = (
        ('mypass', '*6C8989366EAF75BB670AD8EA7A7FC1176A95CEF4'),
    )
    invalid_identify = (
        #bad char in otherwise correct hash
        '*6Z8989366EAF75BB670AD8EA7A7FC1176A95CEF4',
    )
    negative_identify = (
        #other hashes
        #FIX: a missing comma used to fuse the md5-crypt and mysql-1.0
        #entries into one string, so neither was actually being tested.
        '$6$rounds=123456$asaltof16chars..$BtCwjqMJGx5hrJhZywWvt0RLE8uZ4oPwc',
        '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o.',
        '6f8c114b58f2ce9e',
    )
+
class PostgresMd5CryptTest(_CryptTestCase):
    """test the postgres md5 password hash.

    this alg hashes a (secret, user) pair rather than a plain secret,
    so the do_concat/do_encrypt/do_verify hooks are overridden below to
    thread the username through the generic test suite.
    """
    alg = pwhash.PostgresMd5Crypt
    positive_knowns = (
        # ((secret,user),hash)
        (('mypass', 'postgres'), 'md55fba2ea04fd36069d2574ea71c8efe9d'),
        (('mypass', 'root'), 'md540c31989b20437833f697e485811254b'),
        (("testpassword",'testuser'), 'md5d4fc5129cc2c25465a5370113ae9835f'),
    )
    invalid_identify = (
        #bad char in otherwise correct hash
        'md54zc31989b20437833f697e485811254b',
    )
    negative_identify = (
        #other hashes
        #FIX: a missing comma used to fuse the md5-crypt and mysql-1.0
        #entries into one string, so neither was actually being tested.
        '$6$rounds=123456$asaltof16chars..$BtCwjqMJGx5hrJhZywWvt0RLE8uZ4oPwc',
        '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o.',
        '6f8c114b58f2ce9e',
    )

    def test_tuple_mode(self):
        "check tuple mode works for encrypt/verify"
        self.assertEquals(self.alg().encrypt(('mypass', 'postgres')),
            'md55fba2ea04fd36069d2574ea71c8efe9d')
        self.assertEquals(self.alg().verify(('mypass', 'postgres'),
            'md55fba2ea04fd36069d2574ea71c8efe9d'), True)

    def test_user(self):
        "check user kwd is required for encrypt/verify"
        self.assertRaises(ValueError, self.alg().encrypt, 'mypass')
        self.assertRaises(ValueError, self.alg().verify, 'mypass', 'md55fba2ea04fd36069d2574ea71c8efe9d')

    def do_concat(self, secret, prefix):
        #mangle only the secret half of a (secret, user) tuple
        if isinstance(secret, tuple):
            secret, user = secret
            secret = prefix + secret
            return secret, user
        else:
            return prefix + secret

    def do_encrypt(self, secret, *args, **kwds):
        #unpack (secret, user) tuples; plain secrets get a default user
        if isinstance(secret, tuple):
            secret, user = secret
        else:
            user = 'default'
        assert 'user' not in kwds
        kwds['user'] = user
        return self.alg().encrypt(secret, *args, **kwds)

    def do_verify(self, secret, hash):
        #unpack (secret, user) tuples; plain secrets get a default user
        if isinstance(secret, tuple):
            secret, user = secret
        else:
            user = 'default'
        return self.alg().verify(secret, hash, user=user)
+
+#=========================================================
+#UnixCrypt
+#=========================================================
+
class UnixCryptTest(_CryptTestCase):
    "test UnixCrypt algorithm"
    alg = pwhash.UnixCrypt
    positive_knowns = (
        #secret, example hash which matches secret
        ('', 'OgAwTx2l6NADI'),
        (' ', '/Hk.VPuwQTXbc'),
        ('test', 'N1tQbOFcM5fpg'),
        ('Compl3X AlphaNu3meric', 'um.Wguz3eVCx2'),
        ('4lpHa N|_|M3r1K W/ Cur5Es: #$%(*)(*%#', 'sNYqfOyauIyic'),
        ('AlOtBsOl', 'cEpWz5IUCShqM'),
    )
    invalid_identify = (
        #bad char in otherwise correctly formatted hash
        '!gAwTx2l6NADI',
    )
    negative_identify = (
        #hashes using other algs, which shouldn't match this algorithm
        '$6$rounds=123456$asaltof16chars..$BtCwjqMJGx5hrJhZywWvt0RLE8uZ4oPwc',
        '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o.'
    )
+
class UnixCryptBackendTest(TestCase):
    "test builtin unix crypt backend"
    unix_crypt = pwhash.unix_crypt

    #reuse the known (secret, hash) vectors from the frontend suite
    positive_knowns = UnixCryptTest.positive_knowns

    def test_knowns(self):
        "test lowlevel unix_crypt function"
        unix_crypt = self.unix_crypt
        #the salt argument may be the bare 2-char salt, a partial hash,
        #or the whole hash -- all three must reproduce the known result
        for secret, result in self.positive_knowns:
            #make sure crypt verifies using salt
            out = unix_crypt(secret, result[:2])
            self.assertEqual(out, result)
            #make sure crypt verifies using partial hash
            out = unix_crypt(secret, result[:6])
            self.assertEqual(out, result)
            #make sure crypt verifies using whole hash
            out = unix_crypt(secret, result)
            self.assertEqual(out, result)

    #TODO: deal with border cases where host crypt & bps crypt differ
    # (none of which should impact the normal use cases)
    #border cases:
    # no salt given, empty salt given, 1 char salt
    # salt w/ non-b64 chars (linux crypt handles this _somehow_)
    #test that \x00 is NOT allowed
    #test that other chars _are_ allowed
+
+#=========================================================
+#Md5Crypt
+#=========================================================
class Md5CryptTest(_CryptTestCase):
    "test the md5-crypt ($1$) algorithm"
    alg = pwhash.Md5Crypt
    positive_knowns = (
        ('', '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o.'),
        (' ', '$1$m/5ee7ol$bZn0kIBFipq39e.KDXX8I0'),
        ('test', '$1$ec6XvcoW$ghEtNK2U1MC5l.Dwgi3020'),
        ('Compl3X AlphaNu3meric', '$1$nX1e7EeI$ljQn72ZUgt6Wxd9hfvHdV0'),
        ('4lpHa N|_|M3r1K W/ Cur5Es: #$%(*)(*%#', '$1$jQS7o98J$V6iTcr71CGgwW2laf17pi1'),
        #same secret under a different salt, to exercise salt handling
        ('test', '$1$SuMrG47N$ymvzYjr7QcEQjaK5m1PGx1'),
    )
    invalid_identify = (
        #bad char in otherwise correct hash
        '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o!',
    )
    negative_identify = (
        #other hashes
        '!gAwTx2l6NADI',
        '$6$rounds=123456$asaltof16chars..$BtCwjqMJGx5hrJhZywWvt0RLE8uZ4oPwc',
    )
+
+#=========================================================
+#test raw sha-crypt functions
+#=========================================================
class Sha256CryptTest(_CryptTestCase):
    "test the sha256-crypt ($5$) algorithm"
    alg = pwhash.Sha256Crypt
    positive_knowns = (
        ('', '$5$rounds=10428$uy/jIAhCetNCTtb0$YWvUOXbkqlqhyoPMpN8BMe.ZGsGx2aBvxTvDFI613c3'),
        (' ', '$5$rounds=10376$I5lNtXtRmf.OoMd8$Ko3AI1VvTANdyKhBPavaRjJzNpSatKU6QVN9uwS9MH.'),
        ('test', '$5$rounds=11858$WH1ABM5sKhxbkgCK$aTQsjPkz0rBsH3lQlJxw9HDTDXPKBxC0LlVeV69P.t1'),
        ('Compl3X AlphaNu3meric', '$5$rounds=10350$o.pwkySLCzwTdmQX$nCMVsnF3TXWcBPOympBUUSQi6LGGloZoOsVJMGJ09UB'),
        ('4lpHa N|_|M3r1K W/ Cur5Es: #$%(*)(*%#', '$5$rounds=11944$9dhlu07dQMRWvTId$LyUI5VWkGFwASlzntk1RLurxX54LUhgAcJZIt0pYGT7'),
    )
    invalid_identify = (
        #bad char in otherwise correct hash
        #FIX: the trailing comma was missing, which made invalid_identify a
        #plain *string* rather than a 1-tuple -- the generic suite then
        #iterated its characters instead of testing this hash.
        '$5$rounds=10428$uy/jIAhCetNCTtb0$YWvUOXbkqlqhyoPMpN8BMe!ZGsGx2aBvxTvDFI613c3',
    )
    negative_identify = (
        #other hashes
        '!gAwTx2l6NADI',
        '$6$rounds=123456$asaltof16chars..$BtCwjqMJGx5hrJhZywWvt0RLE8uZ4oPwc',
        '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6ox',
    )
+
class Sha512CryptTest(_CryptTestCase):
    "test the sha512-crypt ($6$) algorithm"
    alg = pwhash.Sha512Crypt
    positive_knowns = (
        ('', '$6$rounds=11021$KsvQipYPWpr93wWP$v7xjI4X6vyVptJjB1Y02vZC5SaSijBkGmq1uJhPr3cvqvvkd42Xvo48yLVPFt8dvhCsnlUgpX.//Cxn91H4qy1'),
        (' ', '$6$rounds=11104$ED9SA4qGmd57Fq2m$q/.PqACDM/JpAHKmr86nkPzzuR5.YpYa8ZJJvI8Zd89ZPUYTJExsFEIuTYbM7gAGcQtTkCEhBKmp1S1QZwaXx0'),
        ('test', '$6$rounds=11531$G/gkPn17kHYo0gTF$Kq.uZBHlSBXyzsOJXtxJruOOH4yc0Is13uY7yK0PvAvXxbvc1w8DO1RzREMhKsc82K/Jh8OquV8FZUlreYPJk1'),
        ('Compl3X AlphaNu3meric', '$6$rounds=10787$wakX8nGKEzgJ4Scy$X78uqaX1wYXcSCtS4BVYw2trWkvpa8p7lkAtS9O/6045fK4UB2/Jia0Uy/KzCpODlfVxVNZzCCoV9s2hoLfDs/'),
        ('4lpHa N|_|M3r1K W/ Cur5Es: #$%(*)(*%#', '$6$rounds=11065$5KXQoE1bztkY5IZr$Jf6krQSUKKOlKca4hSW07MSerFFzVIZt/N3rOTsUgKqp7cUdHrwV8MoIVNCk9q9WL3ZRMsdbwNXpVk0gVxKtz1'),
    )
    negative_identify = (
        #other hashes
        '!gAwTx2l6NADI',
        '$5$rounds=10428$uy/jIAhCetNCTtb0$YWvUOXbkqlqhyoPMpN8BMe.ZGsGx2aBvxTvDFI613c3',
        '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6ox',
    )
    invalid_identify = (
        #bad char in otherwise correct hash
        '$6$rounds=11021$KsvQipYPWpr9!wWP$v7xjI4X6vyVptJjB1Y02vZC5SaSijBkGmq1uJhPr3cvqvvkd42Xvo48yLVPFt8dvhCsnlUgpX.//Cxn91H4qy1',
    )
+
class Sha512BackendTest(TestCase):
    "test sha512-crypt backend against specification unittest"
    #each case: (salt/config string, secret, expected full hash)
    cases512 = [
        #salt-hash, secret, result -- taken from alg definition page
        ("$6$saltstring", "Hello world!",
        "$6$saltstring$svn8UoSVapNtMuq1ukKS4tPQd8iKwSMHWjl/O817G3uBnIFNjnQJu"
        "esI68u4OTLiBFdcbYEdFCoEOfaS35inz1" ),

        ( "$6$rounds=10000$saltstringsaltstring", "Hello world!",
        "$6$rounds=10000$saltstringsaltst$OW1/O6BYHV6BcXZu8QVeXbDWra3Oeqh0sb"
        "HbbMCVNSnCM/UrjmM0Dp8vOuZeHBy/YTBmSK6H9qs/y3RnOaw5v." ),

        ( "$6$rounds=5000$toolongsaltstring", "This is just a test",
        "$6$rounds=5000$toolongsaltstrin$lQ8jolhgVRVhY4b5pZKaysCLi0QBxGoNeKQ"
        "zQ3glMhwllF7oGDZxUhx1yxdYcz/e1JSbq3y6JMxxl8audkUEm0" ),

        ( "$6$rounds=1400$anotherlongsaltstring",
        "a very much longer text to encrypt. This one even stretches over more"
        "than one line.",
        "$6$rounds=1400$anotherlongsalts$POfYwTEok97VWcjxIiSOjiykti.o/pQs.wP"
        "vMxQ6Fm7I6IoYN3CmLs66x9t0oSwbtEW7o7UmJEiDwGqd8p4ur1" ),

        ( "$6$rounds=77777$short",
        "we have a short salt string but not a short password",
        "$6$rounds=77777$short$WuQyW2YR.hBNpjjRhpYD/ifIw05xdfeEyQoMxIXbkvr0g"
        "ge1a1x3yRULJ5CCaUeOxFmtlcGZelFl5CxtgfiAc0" ),

        ( "$6$rounds=123456$asaltof16chars..", "a short string",
        "$6$rounds=123456$asaltof16chars..$BtCwjqMJGx5hrJhZywWvt0RLE8uZ4oPwc"
        "elCjmw2kSYu.Ec6ycULevoBK25fs2xXgMNrCzIMVcgEJAstJeonj1" ),

        ( "$6$rounds=10$roundstoolow", "the minimum number is still observed",
        "$6$rounds=1000$roundstoolow$kUMsbe306n21p9R.FRkW3IGn.S9NPN0x50YhH1x"
        "hLsPuWGsUSklZt58jaTfF4ZEQpyUNGc0dqbpBYYBaHHrsX." ),
        ]
    def test512(self):
        crypt = pwhash.Sha512Crypt()
        for hash, secret, result in self.cases512:
            rec = crypt.parse(hash)
            self.assertEqual(rec.alg, '6')
            out = crypt.encrypt(secret, hash, keep_salt=True)
            #NOTE(review): this re-parses *hash* rather than *out*, so the
            #two salt/chk assertions below compare a record against an
            #identical re-parse and can never fail; presumably
            #crypt.parse(out) was intended -- confirm before changing,
            #since rec.chk of a bare salt string may differ from out's.
            rec2 = crypt.parse(hash)
            self.assertEqual(rec2.salt, rec.salt, "hash=%r secret=%r" % (hash, secret))
            self.assertEqual(rec2.chk, rec.chk, "hash=%r secret=%r" % (hash, secret))
            #the real check: encryption reproduces the specification result
            self.assertEqual(out, result, "hash=%r secret=%r" % (hash, secret))
+
+#=========================================================
+#BCrypt
+#=========================================================
#bcrypt is slow, so the suite is only built when explicitly enabled
if enable_suite("bcrypt"):
    class BCryptTest(_CryptTestCase):
        alg = pwhash.BCrypt
        positive_knowns = (
            #test cases taken from bcrypt spec
            ('', '$2a$06$DCq7YPn5Rq63x1Lad4cll.TV4S6ytwfsfvkgY8jIucDrjc8deX1s.'),
            ('', '$2a$08$HqWuK6/Ng6sg9gQzbLrgb.Tl.ZHfXLhvt/SgVyWhQqgqcZ7ZuUtye'),
            ('', '$2a$10$k1wbIrmNyFAPwPVPSVa/zecw2BCEnBwVS2GbrmgzxFUOqW9dk4TCW'),
            ('', '$2a$12$k42ZFHFWqBp3vWli.nIn8uYyIkbvYRvodzbfbK18SSsY.CsIQPlxO'),
            ('a', '$2a$06$m0CrhHm10qJ3lXRY.5zDGO3rS2KdeeWLuGmsfGlMfOxih58VYVfxe'),
            ('a', '$2a$08$cfcvVd2aQ8CMvoMpP2EBfeodLEkkFJ9umNEfPD18.hUF62qqlC/V.'),
            ('a', '$2a$10$k87L/MF28Q673VKh8/cPi.SUl7MU/rWuSiIDDFayrKk/1tBsSQu4u'),
            ('a', '$2a$12$8NJH3LsPrANStV6XtBakCez0cKHXVxmvxIlcz785vxAIZrihHZpeS'),
            ('abc', '$2a$06$If6bvum7DFjUnE9p2uDeDu0YHzrHM6tf.iqN8.yx.jNN1ILEf7h0i'),
            ('abc', '$2a$08$Ro0CUfOqk6cXEKf3dyaM7OhSCvnwM9s4wIX9JeLapehKK5YdLxKcm'),
            ('abc', '$2a$10$WvvTPHKwdBJ3uk0Z37EMR.hLA2W6N9AEBhEgrAOljy2Ae5MtaSIUi'),
            ('abc', '$2a$12$EXRkfkdmXn2gzds2SSitu.MW9.gAVqa9eLS1//RYtYCmB1eLHg.9q'),
            ('abcdefghijklmnopqrstuvwxyz', '$2a$06$.rCVZVOThsIa97pEDOxvGuRRgzG64bvtJ0938xuqzv18d3ZpQhstC'),
            ('abcdefghijklmnopqrstuvwxyz', '$2a$08$aTsUwsyowQuzRrDqFflhgekJ8d9/7Z3GV3UcgvzQW3J5zMyrTvlz.'),
            ('abcdefghijklmnopqrstuvwxyz', '$2a$10$fVH8e28OQRj9tqiDXs1e1uxpsjN0c7II7YPKXua2NAKYvM6iQk7dq'),
            ('abcdefghijklmnopqrstuvwxyz', '$2a$12$D4G5f18o7aMMfwasBL7GpuQWuP3pkrZrOAnqP.bmezbMng.QwJ/pG'),
            ('~!@#$%^&*() ~!@#$%^&*()PNBFRD', '$2a$06$fPIsBO8qRqkjj273rfaOI.HtSV9jLDpTbZn782DC6/t7qT67P6FfO'),
            ('~!@#$%^&*() ~!@#$%^&*()PNBFRD', '$2a$08$Eq2r4G/76Wv39MzSX262huzPz612MZiYHVUJe/OcOql2jo4.9UxTW'),
            ('~!@#$%^&*() ~!@#$%^&*()PNBFRD', '$2a$10$LgfYWkbzEvQ4JakH7rOvHe0y8pHKF9OaFgwUZ2q7W2FFZmZzJYlfS'),
            ('~!@#$%^&*() ~!@#$%^&*()PNBFRD', '$2a$12$WApznUOJfkEGSmYRfnkrPOr466oFDCaj4b6HY3EXGvfxm43seyhgC'),
        )
        negative_identify = (
            #other hashes
            '!gAwTx2l6NADI',
            '$6$rounds=123456$asaltof16chars..$BtCwjqMJGx5hrJhZywWvt0RLE8uZ4oPwc',
            '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6ox',
        )
        invalid_identify = (
            #unsupported version
            "$2b$12$EXRkfkdmXn!gzds2SSitu.MW9.gAVqa9eLS1//RYtYCmB1eLHg.9q",
            #bad char in otherwise correct hash
            "$2a$12$EXRkfkdmXn!gzds2SSitu.MW9.gAVqa9eLS1//RYtYCmB1eLHg.9q",
        )

    #NOTE: BCrypt backend tests stored in test_security_bcrypt
else:
    #export None so importers can detect the suite was skipped
    BCryptTest = None
+
+#=========================================================
+#utils
+#=========================================================
class UtilsTest(TestCase):
    "test util funcs and core class behavior"

    def test_has_salt(self):
        "check CryptAlgorithm.has_salt property works"
        #has_salt must be usable both on the class itself and on instances
        for alg, expected in ((UnsaltedAlg, False), (SaltedAlg, True)):
            self.assertEqual(alg.has_salt, expected)
            self.assertEqual(alg().has_salt, expected)
+
+#=========================================================
+#CryptContext
+#=========================================================
+
+CryptContext = pwhash.CryptContext
+
+class CryptContextTest(TestCase):
+ "test CryptContext object's behavior"
+
+ #=========================================================
+ #0 constructor
+ #=========================================================
    def test_00_constructor(self):
        "test CryptContext constructor using classes"
        #create crypt context
        cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])

        #classes passed in should have been instantiated, preserving order
        a, b, c = cc
        self.assertIsInstance(a, UnsaltedAlg)
        self.assertIsInstance(b, SaltedAlg)
        self.assertIsInstance(c, SampleAlg)
+
    def test_01_constructor(self):
        "test CryptContext constructor using instances"
        #create crypt context
        a = UnsaltedAlg()
        b = SaltedAlg()
        c = SampleAlg()
        cc = CryptContext([a,b,c])

        #instances should be stored as-is, preserving order
        self.assertEquals(list(cc), [a, b, c])
+
+ #=========================================================
+ #1 list getters
+ #=========================================================
    def test_10_getitem(self):
        "test CryptContext.__getitem__[idx]"
        #create crypt context
        cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])
        a, b, c = cc

        #verify len
        self.assertEquals(len(cc), 3)

        #verify getitem, incl. negative index and out-of-range error
        self.assertEquals(cc[0], a)
        self.assertEquals(cc[1], b)
        self.assertEquals(cc[2], c)
        self.assertEquals(cc[-1], c)
        self.assertRaises(IndexError, cc.__getitem__, 3)
+
    def test_11_index(self):
        "test CryptContext.index(elem)"
        #create crypt context
        cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])
        a, b, c = cc
        d = SampleAlg() #same alg class as c, but a distinct instance

        self.assertEquals(cc.index(a), 0)
        self.assertEquals(cc.index(b), 1)
        self.assertEquals(cc.index(c), 2)
        #NOTE: unlike list.index(), a missing element returns -1 (no raise)
        self.assertEquals(cc.index(d), -1)
+
    def test_12_contains(self):
        "test CryptContext.__contains__(elem)"
        #create crypt context
        cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])
        a, b, c = cc
        d = SampleAlg() #distinct instance, should not count as contained

        self.assertEquals(a in cc, True)
        self.assertEquals(b in cc, True)
        self.assertEquals(c in cc, True)
        self.assertEquals(d in cc, False)
+
+ #=========================================================
+ #2 list setters
+ #=========================================================
    def test_20_setitem(self):
        "test CryptContext.__setitem__"
        cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])
        a, b, c = cc
        d = SampleAlg() #duplicate of c's algorithm
        self.assertIsNot(c, d)
        e = pwhash.Md5Crypt() #algorithm not yet present in cc

        #check baseline
        self.assertEquals(list(cc), [a, b, c])

        #replace 0 w/ d should raise error (SampleAlg already in list)
        self.assertRaises(KeyError, cc.__setitem__, 0, d)
        self.assertEquals(list(cc), [a, b, c])

        #replace 0 w/ e
        cc[0] = e
        self.assertEquals(list(cc), [e, b, c])

        #replace 2 w/ d
        cc[2] = d
        self.assertEquals(list(cc), [e, b, d])

        #replace -1 w/ c
        cc[-1] = c
        self.assertEquals(list(cc), [e, b, c])

        #replace -2 w/ d should raise error (SampleAlg at -1 collides)
        self.assertRaises(KeyError, cc.__setitem__, -2, d)
        self.assertEquals(list(cc), [e, b, c])
+
    def test_21_append(self):
        "test CryptContext.append"
        #(docstring previously said "__setitem__" -- copy/paste slip)
        cc = CryptContext([UnsaltedAlg])
        a, = cc
        b = SaltedAlg()
        c = SampleAlg()
        d = SampleAlg() #duplicate of c's algorithm

        self.assertEquals(list(cc), [a])

        #try append
        cc.append(b)
        self.assertEquals(list(cc), [a, b])

        #and again
        cc.append(c)
        self.assertEquals(list(cc), [a, b, c])

        #try append dup
        self.assertRaises(KeyError, cc.append, d)
        self.assertEquals(list(cc), [a, b, c])
+
    #NOTE(review): numbered 20 like test_20_setitem above; unittest still
    #runs both since the full names differ, but this was probably meant
    #to carry a distinct number.
    def test_20_insert(self):
        "test CryptContext.insert"
        cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])
        a, b, c = cc
        d = SampleAlg() #duplicate of c's algorithm
        self.assertIsNot(c, d)
        e = pwhash.Md5Crypt()
        f = pwhash.Sha512Crypt()
        g = pwhash.UnixCrypt()

        #check baseline
        self.assertEquals(list(cc), [a, b, c])

        #inserting d at 0 should raise error (SampleAlg already in list)
        self.assertRaises(KeyError, cc.insert, 0, d)
        self.assertEquals(list(cc), [a, b, c])

        #insert e at start
        cc.insert(0, e)
        self.assertEquals(list(cc), [e, a, b, c])

        #insert f before the last element (list.insert semantics for -1)
        cc.insert(-1, f)
        self.assertEquals(list(cc), [e, a, b, f, c])

        #insert g at end
        cc.insert(5, g)
        self.assertEquals(list(cc), [e, a, b, f, c, g])
+
+ #=========================================================
+ #3 list dellers
+ #=========================================================
    def test_30_remove(self):
        "test CryptContext.remove raises on missing elements"
        cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])
        a, b, c = cc
        d = SampleAlg() #distinct instance, not in cc
        self.assertIsNot(c, d)

        self.assertEquals(list(cc), [a, b, c])

        #removing an element that isn't present raises ValueError
        self.assertRaises(ValueError, cc.remove, d)
        self.assertEquals(list(cc), [a, b, c])

        cc.remove(a)
        self.assertEquals(list(cc), [b, c])

        #a second removal of the same element also raises
        self.assertRaises(ValueError, cc.remove, a)
        self.assertEquals(list(cc), [b, c])
+
    def test_31_discard(self):
        "test CryptContext.discard returns a success flag instead of raising"
        cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])
        a, b, c = cc
        d = SampleAlg() #distinct instance, not in cc
        self.assertIsNot(c, d)

        self.assertEquals(list(cc), [a, b, c])

        self.assertEquals(cc.discard(d), False)
        self.assertEquals(list(cc), [a, b, c])

        self.assertEquals(cc.discard(a), True)
        self.assertEquals(list(cc), [b, c])

        self.assertEquals(cc.discard(a), False)
        self.assertEquals(list(cc), [b, c])
+
+ #=========================================================
+ #4 list composition
+ #=========================================================
+
    def test_40_add(self, lsc=False):
        "test CryptContext + list"
        #build and join cc to list
        a = UnsaltedAlg()
        b = SaltedAlg()
        c = SampleAlg()
        cc = CryptContext([a, b, c])
        ls = [pwhash.Md5Crypt, pwhash.Sha512Crypt]
        if lsc: #when called from test_41, exercise context+context instead
            ls = CryptContext(ls)
        cc2 = cc + ls

        #verify types
        self.assertIsInstance(cc, CryptContext)
        self.assertIsInstance(cc2, CryptContext)
        self.assertIsInstance(ls, CryptContext if lsc else list)

        #verify elements: + must build a new object, not alias an operand
        self.assertIsNot(cc, ls)
        self.assertIsNot(cc, cc2)
        self.assertIsNot(ls, cc2)

        #verify cc was left unmodified
        a, b, c = cc
        self.assertIsInstance(a, UnsaltedAlg)
        self.assertIsInstance(b, SaltedAlg)
        self.assertIsInstance(c, SampleAlg)

        #verify ls was left unmodified
        d, e = ls
        if lsc:
            self.assertIsInstance(d, pwhash.Md5Crypt)
            self.assertIsInstance(e, pwhash.Sha512Crypt)
        else:
            self.assertIs(d, pwhash.Md5Crypt)
            self.assertIs(e, pwhash.Sha512Crypt)

        #verify cc2 reuses lhs instances and instantiates rhs classes
        a2, b2, c2, d2, e2 = cc2
        self.assertIs(a2, a)
        self.assertIs(b2, b)
        self.assertIs(c2, c)
        if lsc:
            self.assertIs(d2, d)
            self.assertIs(e2, e)
        else:
            self.assertIsInstance(d2, pwhash.Md5Crypt)
            self.assertIsInstance(e2, pwhash.Sha512Crypt)
+
    def test_41_add(self):
        "test CryptContext + CryptContext"
        #same as test_40, with the rhs wrapped in a CryptContext
        self.test_40_add(lsc=True)
+
    def test_42_iadd(self, lsc=False):
        "test CryptContext += list"
        #build and join cc to list
        a = UnsaltedAlg()
        b = SaltedAlg()
        c = SampleAlg()
        cc = CryptContext([a, b, c])
        ls = [pwhash.Md5Crypt, pwhash.Sha512Crypt]
        if lsc: #when called from test_43, exercise context+=context instead
            ls = CryptContext(ls)

        #baseline
        self.assertEquals(list(cc), [a, b, c])
        self.assertIsInstance(cc, CryptContext)
        self.assertIsInstance(ls, CryptContext if lsc else list)
        if lsc:
            d, e = ls
            self.assertIsInstance(d, pwhash.Md5Crypt)
            self.assertIsInstance(e, pwhash.Sha512Crypt)

        #add
        cc += ls

        #verify types
        self.assertIsInstance(cc, CryptContext)
        self.assertIsInstance(ls, CryptContext if lsc else list)

        #verify elements
        self.assertIsNot(cc, ls)

        #verify cc gained the new elements in place
        a2, b2, c2, d2, e2 = cc
        self.assertIs(a2, a)
        self.assertIs(b2, b)
        self.assertIs(c2, c)
        if lsc:
            self.assertIs(d2, d)
            self.assertIs(e2, e)
        else:
            self.assertIsInstance(d2, pwhash.Md5Crypt)
            self.assertIsInstance(e2, pwhash.Sha512Crypt)

        #verify ls was left unmodified
        d, e = ls
        if lsc:
            self.assertIsInstance(d, pwhash.Md5Crypt)
            self.assertIsInstance(e, pwhash.Sha512Crypt)
        else:
            self.assertIs(d, pwhash.Md5Crypt)
            self.assertIs(e, pwhash.Sha512Crypt)
+
    def test_43_iadd(self):
        "test CryptContext += CryptContext"
        #same as test_42, with the rhs wrapped in a CryptContext
        self.test_42_iadd(lsc=True)
+
    def test_44_extend(self):
        "test CryptContext.extend instantiates classes and rejects dups"
        a = UnsaltedAlg()
        b = SaltedAlg()
        c = SampleAlg()
        cc = CryptContext([a, b, c])
        ls = [pwhash.Md5Crypt, pwhash.Sha512Crypt]

        cc.extend(ls)

        #existing instances kept, new classes instantiated at the end
        a2, b2, c2, d2, e2 = cc
        self.assertIs(a2, a)
        self.assertIs(b2, b)
        self.assertIs(c2, c)
        self.assertIsInstance(d2, pwhash.Md5Crypt)
        self.assertIsInstance(e2, pwhash.Sha512Crypt)

        #extending with an alg already present raises, class or instance
        self.assertRaises(KeyError, cc.extend, [pwhash.Sha512Crypt ])
        self.assertRaises(KeyError, cc.extend, [pwhash.Sha512Crypt() ])
+
+ #=========================================================
+ #5 basic crypt interface
+ #=========================================================
    def test_50_resolve(self):
        "test CryptContext.resolve()"
        cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])
        a, b, c = cc

        #single name -> matching instance, or None if absent
        self.assertEquals(cc.resolve('unsalted'), a)
        self.assertEquals(cc.resolve('salted'), b)
        self.assertEquals(cc.resolve('sample'), c)
        self.assertEquals(cc.resolve('md5-crypt'), None)

        #list of names -> 'salted' wins over 'unsalted' below; presumably
        #the match ranked highest by the context -- confirm in pwhash docs
        self.assertEquals(cc.resolve(['unsalted']), a)
        self.assertEquals(cc.resolve(['md5-crypt']), None)
        self.assertEquals(cc.resolve(['unsalted', 'salted', 'md5-crypt']), b)
+
    def test_51_identify(self):
        "test CryptContext.identify"
        cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])
        a, b, c = cc

        for crypt in (a, b, c):
            h = crypt.encrypt("test")
            #resolve=True returns the alg instance, default returns its name
            self.assertEquals(cc.identify(h, resolve=True), crypt)
            self.assertEquals(cc.identify(h), crypt.name)

        #unrecognized hash formats return None
        self.assertEquals(cc.identify('$1$232323123$1287319827', resolve=True), None)
        self.assertEquals(cc.identify('$1$232323123$1287319827'), None)

        #make sure "None" is accepted
        self.assertEquals(cc.identify(None), None)
+
    def test_52_encrypt_and_verify(self):
        "test CryptContext.encrypt & verify"
        cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])
        a, b, c = cc

        #check encrypt/id/verify pass for all algs
        for crypt in (a, b, c):
            h = cc.encrypt("test", alg=crypt.name)
            self.assertEquals(cc.identify(h, resolve=True), crypt)
            self.assertEquals(cc.verify('test', h), True)
            self.assertEquals(cc.verify('notest', h), False)

        #check default alg (the last one in the context, per this assert)
        h = cc.encrypt("test")
        self.assertEquals(cc.identify(h, resolve=True), c)

        #check verify honors an explicit alg, even a mismatched one
        self.assertEquals(cc.verify('test', h, alg='sample'), True)
        self.assertEquals(cc.verify('test', h, alg='salted'), False)
+
+ def test_53_encrypt_salting(self):
+ "test CryptContext.encrypt salting options"
+ cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])
+ a, b, c = cc
+ self.assert_(c.has_salt)
+
+ h = cc.encrypt("test")
+ self.assertEquals(cc.identify(h, resolve=True), c)
+
+ h2 = cc.encrypt("test", h)
+ self.assertEquals(cc.identify(h2, resolve=True), c)
+ self.assertNotEquals(h2, h)
+
+ h3 = cc.encrypt("test", h, keep_salt=True)
+ self.assertEquals(cc.identify(h3, resolve=True), c)
+ self.assertEquals(h3, h)
+
+ def test_54_verify_empty(self):
+ "test CryptContext.verify allows hash=None"
+ cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])
+ self.assertEquals(cc.verify('xxx', None), False)
+ for crypt in cc:
+ self.assertEquals(cc.verify('xxx', None, alg=crypt.name), False)
+
+#XXX: haven't decided if this should be part of protocol
+## def test_55_verify_empty_secret(self):
+## "test CryptContext.verify allows secret=None"
+## cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])
+## h = cc.encrypt("test")
+## self.assertEquals(cc.verify(None,h), False)
+
+ #=========================================================
+ #6 crypt-enhanced list interface
+ #=========================================================
+ def test_60_getitem(self):
+ "test CryptContext.__getitem__[algname]"
+ #create crypt context
+ cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])
+ a, b, c = cc
+
+ #verify getitem
+ self.assertEquals(cc['unsalted'], a)
+ self.assertEquals(cc['salted'], b)
+ self.assertEquals(cc['sample'], c)
+ self.assertRaises(KeyError, cc.__getitem__, 'md5-crypt')
+
+ def test_61_get(self):
+ "test CryptContext.get(algname)"
+ #create crypt context
+ cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])
+ a, b, c = cc
+
+ #verify getitem
+ self.assertEquals(cc.get('unsalted'), a)
+ self.assertEquals(cc.get('salted'), b)
+ self.assertEquals(cc.get('sample'), c)
+ self.assertEquals(cc.get('md5-crypt'), None)
+
+ def test_62_index(self):
+ "test CryptContext.index(algname)"
+ #create crypt context
+ cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])
+
+ #verify getitem
+ self.assertEquals(cc.index('unsalted'), 0)
+ self.assertEquals(cc.index('salted'), 1)
+ self.assertEquals(cc.index('sample'), 2)
+ self.assertEquals(cc.index('md5-crypt'), -1)
+
+ def test_63_contains(self):
+ "test CryptContext.__contains__(algname)"
+ #create crypt context
+ cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])
+ self.assertEquals('salted' in cc, True)
+ self.assertEquals('unsalted' in cc, True)
+ self.assertEquals('sample' in cc, True)
+ self.assertEquals('md5-crypt' in cc, False)
+
+ def test_64_keys(self):
+ "test CryptContext.keys()"
+ cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])
+ self.assertEquals(cc.keys(), ['unsalted', 'salted', 'sample'])
+
+ def test_65_remove(self):
+ cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])
+ a, b, c = cc
+
+ self.assertEquals(list(cc), [a, b, c])
+
+ self.assertRaises(KeyError, cc.remove, 'md5-crypt')
+ self.assertEquals(list(cc), [a, b, c])
+
+ cc.remove('unsalted')
+ self.assertEquals(list(cc), [b, c])
+
+ self.assertRaises(KeyError, cc.remove, 'unsalted')
+ self.assertEquals(list(cc), [b, c])
+
+ def test_66_discard(self):
+ cc = CryptContext([UnsaltedAlg, SaltedAlg, SampleAlg])
+ a, b, c = cc
+
+ self.assertEquals(list(cc), [a, b, c])
+
+ self.assertEquals(cc.discard('md5-crypt'), False)
+ self.assertEquals(list(cc), [a, b, c])
+
+ self.assertEquals(cc.discard('unsalted'), True)
+ self.assertEquals(list(cc), [b, c])
+
+ self.assertEquals(cc.discard('unsalted'), False)
+ self.assertEquals(list(cc), [b, c])
+ #=========================================================
+ #eoc
+ #=========================================================
+
+#=========================================================
+#quick access functions
+#=========================================================
class QuickAccessTest(TestCase):
    """test the module-level quick access functions:
    pwhash.identify / verify / encrypt, their deprecated *_secret
    aliases, and the pwhash.default_context they operate on.
    """

    # crypt test-case classes whose algs should be usable via the quick
    # access funcs; BCryptTest is optional (falsy when backend missing)
    crypt_cases = [ UnixCryptTest, Md5CryptTest, Sha256CryptTest]
    if BCryptTest:
        crypt_cases.append(BCryptTest)
    crypt_cases.extend([ Sha512CryptTest ])

    def test_00_identify(self):
        "test pwhash.identify()"
        identify = pwhash.identify
        for cc in self.crypt_cases:
            name = cc.alg.name
            # known hashes of this alg (right OR wrong secret) identify as it
            for _, hash in cc.positive_knowns:
                self.assertEqual(identify(hash), name)
            for _, hash in cc.negative_knowns:
                self.assertEqual(identify(hash), name)
            # hashes of other schemes must not identify as this alg
            for hash in cc.negative_identify:
                self.assertNotEqual(identify(hash), name)
            # malformed hashes identify as nothing at all
            for hash in cc.invalid_identify:
                self.assertEqual(identify(hash), None)

    def test_01_verify(self):
        "test pwhash.verify()"
        verify = pwhash.verify
        for cc in self.crypt_cases:
            name = cc.alg.name
            for secret, hash in cc.positive_knowns[:3]:
                self.assert_(verify(secret, hash))
                self.assert_(verify(secret, hash, alg=name))
            for secret, hash in cc.negative_knowns[:3]:
                self.assert_(not verify(secret, hash))
                self.assert_(not verify(secret, hash, alg=name))
            for hash in cc.invalid_identify[:3]:
                #context should raise ValueError because hash can't be identified
                #FIX: original code passed the stale 'secret' local leaked from
                #the loop above (NameError if negative_knowns were empty);
                #the secret's value is irrelevant here, so use a constant.
                self.assertRaises(ValueError, verify, 'test', hash)

    def test_02_encrypt(self):
        "test pwhash.encrypt()"
        identify = pwhash.identify
        verify = pwhash.verify
        encrypt = pwhash.encrypt
        for cc in self.crypt_cases:
            alg = cc.alg.name
            s = 'test'
            # a fresh hash must identify & verify round-trip
            h = encrypt(s, alg=alg)
            self.assertEqual(identify(h), alg)
            self.assertEqual(verify(s, h), True)
            # re-encrypting against an existing hash keeps the same alg
            h2 = encrypt(s, h)
            self.assertEqual(identify(h2), alg)
            self.assertEqual(verify(s, h2, alg=alg), True)

    def test_03_legacy(self):
        "test legacy pwhash quick access funcs"
        # each deprecated alias must still work, and must emit exactly one
        # warning naming its replacement; wmsgs.pop() also proves the 1:1
        # call-to-warning pairing, and the final assert that none are left.
        with catch_warnings(record=True) as wmsgs:
            warnings.filterwarnings("always")

            h = pwhash.encrypt_secret('test', alg='md5-crypt')
            self.assertWarningEquals(wmsgs.pop(),
                message="bps.security.pwhash: function 'encrypt_secret' is deprecated, use 'bps.security.pwhash.encrypt' instead",
                filename=__file__,
                )

            self.assertEqual(pwhash.identify_secret(h), 'md5-crypt')
            self.assertWarningEquals(wmsgs.pop(),
                message="bps.security.pwhash: function 'identify_secret' is deprecated, use 'bps.security.pwhash.identify' instead",
                filename=__file__,
                )

            self.assertEqual(pwhash.verify_secret('test', h), True)
            self.assertWarningEquals(wmsgs.pop(),
                message="bps.security.pwhash: function 'verify_secret' is deprecated, use 'bps.security.pwhash.verify' instead",
                filename=__file__,
                )

            self.assertEqual(pwhash.verify_secret('notest', h), False)
            self.assertWarningEquals(wmsgs.pop(),
                message="bps.security.pwhash: function 'verify_secret' is deprecated, use 'bps.security.pwhash.verify' instead",
                filename=__file__,
                )

            h2 = pwhash.encrypt_secret('test', h)
            self.assertWarningEquals(wmsgs.pop(),
                message="bps.security.pwhash: function 'encrypt_secret' is deprecated, use 'bps.security.pwhash.encrypt' instead",
                filename=__file__,
                )

            self.assertEqual(pwhash.identify_secret(h2), 'md5-crypt')
            self.assertWarningEquals(wmsgs.pop(),
                message="bps.security.pwhash: function 'identify_secret' is deprecated, use 'bps.security.pwhash.identify' instead",
                filename=__file__,
                )

            self.assertEqual(pwhash.verify_secret('test', h2, alg="md5-crypt"), True)
            self.assertWarningEquals(wmsgs.pop(),
                message="bps.security.pwhash: function 'verify_secret' is deprecated, use 'bps.security.pwhash.verify' instead",
                filename=__file__,
                )

            # no stray warnings may remain
            self.assert_(not wmsgs)

    def test_04_default_context(self):
        "test pwhash.default_context contents"
        dc = pwhash.default_context
        # every known alg should be registered in the default context...
        for case in self.crypt_cases:
            self.assert_(case.alg.name in dc)

        # ...with sha512-crypt registered last, i.e. as the default alg
        last = 'sha512-crypt'
        self.assertEqual(dc.keys()[-1], last)
        h = dc.encrypt("test")
        self.assertEqual(dc.identify(h), last)
        self.assertEqual(dc.verify('test', h, alg=last), True)
+
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/tests/test_stream.py b/bps/tests/test_stream.py
new file mode 100644
index 0000000..daa3eab
--- /dev/null
+++ b/bps/tests/test_stream.py
@@ -0,0 +1,33 @@
+"""tests for bps.stream -- (c) Assurance Technologies 2003-2009"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+import os.path
+#site
+#pkg
+from bps import stream
+from bps.stream import BT
+from bps.meta import Params as ak
+#module
+from bps.tests.utils import TestCase
+
+#=========================================================
+#
+#=========================================================
+
#TODO: test MUCH MORE of stream
class SourceTypeTest(TestCase):
    "sanity checks for stream.get_input_type()"

    def test_get_source_type(self):
        cases = [
            ak(BT.RAW, ""),

            #TODO: many more test cases!

            ]
        self.check_function_results(stream.get_input_type, cases)
+
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/tests/test_text.py b/bps/tests/test_text.py
new file mode 100644
index 0000000..5b5d90c
--- /dev/null
+++ b/bps/tests/test_text.py
@@ -0,0 +1,927 @@
+"""tests for bps.text -- (c) Assurance Technologies 2003-2009"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+import os.path
+#site
+#pkg
+from bps.text import EnglishInflector, condense, asbool, clean_filename, \
+ split_condense, fmt_has_field, get_fmt_fields, parse_fmt_field, \
+ parse_fmt_string
+from bps.unstable import ellipsize
+from bps.meta import Params as ak
+#module
+from bps.tests.utils import TestCase
+#=========================================================
+#
+#=========================================================
class ParseTest(TestCase):
    "test the string parsing/cleaning helpers in bps.text"

    #---------------------------------------------------------
    #condense() -- whitespace / charset condensing
    #---------------------------------------------------------

    #(input, expected) pairs for the default whitespace mode
    condense_cases = [
        (None, None),
        ("",""),
        (" ", ""),
        ("a", "a"),
        ("a\r\n", "a"),
        (" a", "a"),
        (" a", "a"),
        ("a ", "a"),
        ("a ", "a"),
        (" a ", "a"),
        (" a b ", "a b"),
        (" aaa bbb ", "aaa bbb"),
        (" aaa bbb", "aaa bbb"),
        (" a a ", "a a"),
        (" asas asdas ", "asas asdas"),
        ("asas asdas", "asas asdas"),
        (" asas asdas ", "asas asdas"),
        ("\t asas \r\n asdas \n", "asas asdas"),
        ]

    #(input, charset, expected) triples for an explicit condense charset
    condense_other = [
        ("", "xyz", ""),
        ("xy", "xyz", ""),
        ("xayyybz", "xyz", "axb"),
        ("xayyybz", " xyz", "a b"),
        (". .. a .. d .. c ..", " .", "a d c"),
        (". .. a .. d .. c ..", ". ", "a.d.c"),

        #make sure regexp escaping works
        ("x--[[-y]-]---z", "[-]", "x[y[z"),
        ]

    def test_condense(self):
        "test condense() default (whitespace) behavior"
        for i,o in self.condense_cases:
            self.assertEqual(condense(i), o)

    def test_condense_other(self):
        "test condense() with explicit charsets"
        for i,c,o in self.condense_other:
            r = condense(i, c)
            self.assertEqual(r, o, "case %r: got %r, expected %r" % ((i, c), r, o))

    #---------------------------------------------------------
    #split_condense()
    #---------------------------------------------------------
    sc_cases = [
        ak([], None),
        ak([''], ''),
        ak([], '', empty="strip"),
        ak(['a'], "a"),
        ak(['a'], " a "),
        ak(['a b'], " a b"),
        ak(['a','b'], " a , b"),
        ak(['a','','b'], " a ,, b"),
        ak(['a','','b', ''], " a , , b,"),
        ak(['a','b'], " a , , b,", empty="strip"),
        ak(['a','c ; d','b ;; b'], " a , c ; d , b ;; b,", empty="strip"),
        ak(['a','c ; d','b','b'], " a , c ; d , b ;; b,", sep=[';;', ','], empty="strip"),

        #bug - char in sep occurs in strip chars, was causing sep to be lost
        ak(['a', 'b', '', 'c', 'd'], "a\n b \n , c \t \n d", sep=['\n', ','])
        ]

    def test_split_condense(self):
        self.check_function_results(split_condense, self.sc_cases)

    #---------------------------------------------------------
    #asbool()
    #---------------------------------------------------------
    #rows: (input, expected) or (input, default, expected)
    asbool_cases = [
        ('yes', True),
        (' y ', True),
        ('y', False, True),
        ('y', True, True),

        ('no', False),
        ('n', True, False),
        #FIX: this row was an exact duplicate of the previous one;
        #changed so the default=False path is exercised for 'n' as well
        ('n', False, False),

        (None, None),
        (None, False, False),
        ('', None),
        ('null', None),
        ('null', None, None),
        ('null', False, False),
        ('null', True, True),
        ]

    def test_asbool(self):
        for elem in self.asbool_cases:
            if len(elem) == 2:
                i, o = elem
                self.assertEqual(asbool(i), o)
            else:
                i, d, o = elem
                #default may be passed positionally or by keyword
                self.assertEqual(asbool(i, d), o)
                self.assertEqual(asbool(i, default=d), o)
        self.assertRaises(ValueError, asbool, "not a bool")

    #---------------------------------------------------------
    #clean_filename()
    #---------------------------------------------------------
    cleanfn_cases = [
        #row format: (expected_result, *args, **kwds)

        #
        #check safe preset
        #

        #random data
        ak("c def ____ ___", "/a/b/c def ::!% ***"),
        ak("c def", "/a/b/c def ::!% ***", safe_char=''),
        ak("_.._sbin run", "../../sbin run"),
        ak("sbin run", "../../sbin run", safe_char=' '),

        #
        #check clean preset
        #

        #random data
        ak("c def", "/a/b/c def ::!% ***", preset='clean'),
        ak("c def", "/a/b/c def ::!% ***", safe_char='', preset='clean'),
        ak("sbin run", "../../sbin run", preset='clean'),
        ak("sbin run", "../../sbin run", safe_char=' ', preset='clean'),

        #
        #check paranoid preset
        #

        #random data
        ak("c_def", "/a/b/c def", preset="paranoid"),
        ak("x_y_z_123", "% .. / / .. $#^& *(x)[]/ y\\!!! z 123",preset="paranoid"),

        #
        #check other presets
        #
##        ak("/a/b/c", " / a / b / c ::\\!%", preset="posix-path", safe=""),
##        ak("/a/b/c", "/a/b/c", dialect="posix-path"),
        ak("_.._sbin run _this_ _ this", "../../sbin run [this] & this", preset="excel_sheet"),

        #
        #default should be honored for None and badset
        #
        ak(None, None), #the default default is None
        ak("xxx", None, "xxx"), #custom default is honored
        ak("xxx", ".", "xxx"),
        ak("xxx", "..", "xxx"),

        #
        #should strip absolute paths entirely
        #
        ak("name", "/a/b/c/name"), #absolute posix path
        ak("name", "xxx:\\bbb\\ccc\\name"), #absolute dos path
        ak("name", "\\\\bbb\\ccc\\name"), #absolute win network path

        #
        #should clean path-like chars (slashes, etc)
        #
        ak("_x_y_z run", "../x/y/z run"),
        ak("x_y_z__", "x\\y\\z%%"),
        ak("x_y_z__", "x\\y\\z$$"),
        ak("_ProgramFiles__EvilApp.exe", "%ProgramFiles%\\EvilApp.exe"),

        #
        #should remove badset entirely
        #
        ak(None, ""),
        ak(None, " "),
        ak(None, "."),
        ak(None, ".."),
        ak(None, "..."),
        ak(None, " !.!!.! ", safe_char=""),

        #
        #should strip dots/spaces
        #
        ak("x y z", "... .. x y z ..."),

        #
        #shouldn't allow unsafe chars to sneak into safe_chars
        #
        ak("xy__z", "xy%!z"),
        ak("xyz", "xy%!z", safe_char="%"),

        #
        #test safe_char mapping
        #
        ak("_--y-z_", "%!&y&z%", safe_char={"default": '_', "&!": "-"}),
        ak("_d-y-z_", "%d&y&z%", safe_char={"default": "_", "&": "-"}),

        #
        #ext_list testing
        #
        ak("a", "a", ext_list=""), #should strip all extension
        ak("a", "a.b", ext_list=""),

        ak("a", "a", ext_list=[]), #should strip all extensions
        ak("a", "a.b", ext_list=[]),

        ak("a.b", "a", ext_list=".b"), #should enforce '.b' extension
        ak("a.b", "a.b", ext_list=".b"),
        ak("a.b", "a.c", ext_list=".b"),

        ak("a.b", "a", ext_list=".b:.c"), #should enforce '.b' or '.c', using ':' sep
        ak("a.b", "a.b", ext_list=".b:.c"),
        ak("a.c", "a.c", ext_list=".b:.c"),
        ak("a.b", "a.d", ext_list=".b:.c"),

        ak("a.b", "a", ext_list=".b;.c"), #should enforce '.b' or '.c', using ';' sep
        ak("a.b", "a.b", ext_list=".b;.c"),
        ak("a.c", "a.c", ext_list=".b;.c"),
        ak("a.b", "a.d", ext_list=".b:.c"),

        ak("a.b", "a", ext_list=[".b",".c"]), #should enforce '.b' or '.c', using list
        ak("a.b", "a.b", ext_list=[".b",".c"]),
        ak("a.c", "a.c", ext_list=[".b",".c"]),
        ak("a.b", "a.d", ext_list=".b:.c"),

        ak("a", "a", ext_list=".b:"), #should allow empty or '.b'
        ak("a.b", "a.b", ext_list=".b:"),
        ak("a.b", "a.c", ext_list=".b:"),

        ak("a.c", "a", "x.c", ext_list=".b:.c"), #should promote default to front
        ak("a.b", "a.b", "x.c", ext_list=".b:.c"),
        ak("a.c", "a.c", "x.c", ext_list=".b:.c"),
        ak("a.c", "a.d", "x.c", ext_list=".b:.c"),

        ak("a.b", "a", "x.d", ext_list=".b:.c"), #should ignore default
        ak("a.b", "a.d", "x.d", ext_list=".b:.c"),
        ak("a.c", "a.c", "x.d", ext_list=".b:.c"),
        ak("a.b", "a.d", "x.d", ext_list=".b:.c"),
        ]

    def test_cleanfn(self):
        self.check_cases(clean_filename, self.cleanfn_cases)

    #TODO: replace w/ check_function_results()
    def check_cases(self, func, cases):
        "helper for running through function call cases"
        for elem in cases:
            correct = elem.args[0]
            result = func(*elem.args[1:], **elem.kwds)
            self.assertEqual(result, correct,
                "error for case %s, got %r, expected %r" % (elem.render(1), result, correct)
                )
+
+#=========================================================
+#formatting
+#=========================================================
S = os.path.sep  # platform path separator, used to build path-like test strings

class MiscTest(TestCase):
    # tests for bps.unstable.ellipsize() -- truncating strings with an
    # ellipsis on the right (default), left ("<") or center ("^")

    def test_ellipsize(self):
        # rows: ak(expected, source, max_len, [align], **options)
        self.check_function_results(ellipsize, [
            #too short
            ak("abc", "abc", 3),
            ak("...", "abcd", 3),
            ak("...", "abcd", 3, "<"),
            ak("...", "abcd", 3, "^"),

            #right
            ak("", "", 6),
            ak("abc", "abc", 6),
            ak("abcdef", "abcdef", 6),
            ak("abc...", "abcdefghi", 6),

            #left
            ak("", "", 6, "<"),
            ak("abc", "abc", 6, "<"),
            ak("abcdef", "abcdef", 6, "<"),
            ak("...ghi", "abcdefghi", 6, "<"),
            ak("...jkl", "abcdefghijkl", 6, "<"),

            #center
            ak("", "", 6, "^"),
            ak("abc", "abc", 6, "^"),
            ak("abcdef", "abcdef", 6, "^"),
            ak("a...hi", "abcdefghi", 6, "^"),
            ak("a...kl", "abcdefghijkl", 6, "^"),

            #custom char
            ak("abc!!!", "abcdefghi", 6, ellipsis="!!!"),

            #left / smart  (smart mode appears to prefer breaking at
            #whitespace/separators within a window -- TODO confirm)
            ak("...ghijkl", "abcdefghijkl", 9, "<", mode="smart"),
            ak("... hijkl", "abcdef hijkl", 9, "<", mode="smart"),
            ak("... ijkl", "abcdefg ijkl", 9, "<", mode="smart"),
            ak("... l", "abcdefghij l", 9, "<", mode="smart"),
            ak("... l", "abcdefghij l", 9, "<", mode="smart", window=5),
            ak("...ghij l", "abcdefghij l", 9, "<", mode="smart", window=4),

            #right / plain
            ak("abc...", "abcdefghijkl", 6),
            ak("a c...", "a cdefghijkl", 6),
            ak("a"+S+"c...", "a"+S+"cdefghijkl", 6),

            #right / smart
            ak("abc...", "abcdefghijkl", 6, mode="smart"),
            ak("a ...", "a cdefghijkl", 6, mode="smart"),
            ak("a" + S + "c...", "a" + S + "cdefghijkl", 6, mode="smart"),

            #right / filepath
            ak("abc...", "abcdefghijkl", 6, mode="filepath"),
            ak("a c...", "a cdefghijkl", 6, mode="filepath"),
            ak("a/...", "a"+S+"cdefghijkl", 6, mode="filepath"),

            ])
+
+#=========================================================
+#inflection
+#=========================================================
class EnglishInflectorTest(TestCase):
    "test EnglishInflector: pluralize/singularize, countof, oneof, ordinal"
    #=========================================================
    #setup
    #=========================================================
    def setUp(self):
        # fresh inflector per test; all helpers below go through self.inf
        self.inf = EnglishInflector()

    #standard (singular, plural) pairs to test in both directions
    pairs = [
        #various random words
        ('money', 'money'),
        ('cow', 'cows'),
        ('user', 'users'),
        ('matrix', 'matrices'),
        ('array', 'arrays'),
        ('baby', 'babies'),
        ('permission', 'permissions'),
        ('fez', 'fezzes'),
        ('pez', 'pez'),
        ('fetus', 'fetuses'),

        #from medicred
        ('certification', 'certifications'),
        ('policy', 'policies'),
        ('product', 'products'),
        ('contract', 'contracts'),
        ('attachment', 'attachments'),
        ('cert', 'certs'),
        ('entry', 'entries'),
        ('license', 'licenses'),
        ('affiliation', 'affiliations'),
        ('record', 'records'),

        #from other jobs
        ('loaf','loaves'),

        #from python_inflector - http://www.bermi.org/inflector/download
        ("search" , "searches"),
        ("switch" , "switches"),
        ("fix" , "fixes"),
        ("box" , "boxes"),
        ("process" , "processes"),
        ("address" , "addresses"),
        ("case" , "cases"),
        ("stack" , "stacks"),
        ("wish" , "wishes"),
        ("fish" , "fish"),

        ("category" , "categories"),
        ("query" , "queries"),
        ("ability" , "abilities"),
        ("agency" , "agencies"),
        ("movie" , "movies"),

        ("archive" , "archives"),

        ("index" , "indices"),

        ("wife" , "wives"),
        ("safe" , "saves"),
        ("half" , "halves"),

        ("move" , "moves"),

        ("salesperson" , "salespeople"),
        ("person" , "people"),

        ("spokesman" , "spokesmen"),
        ("man" , "men"),
        ("woman" , "women"),

        ("basis" , "bases"),
        ("diagnosis" , "diagnoses"),

        ("datum" , "data"),
        ("medium" , "media"),
        ("analysis" , "analyses"),

        ("node_child" , "node_children"),
        ("child" , "children"),

        ("experience" , "experiences"),
        ("day" , "days"),

        ("comment" , "comments"),
        ("foobar" , "foobars"),
        ("newsletter" , "newsletters"),

        ("old_news" , "old_news"),
        ("news" , "news"),

        ("series" , "series"),
        ("species" , "species"),

        ("quiz" , "quizzes"),

        ("perspective" , "perspectives"),

        ("ox" , "oxen"),
        ("photo" , "photos"),
        ("buffalo" , "buffaloes"),
        ("tomato" , "tomatoes"),
        ("dwarf" , "dwarves"),
        ("elf" , "elves"),
        ("information" , "information"),
        ("equipment" , "equipment"),
        ("bus" , "buses"),
        ("status" , "statuses"),
        ("mouse" , "mice"),

        ("louse" , "lice"),
        ("house" , "houses"),
        ("octopus" , "octopi"),
        ("virus" , "viri"),
        ("alias" , "aliases"),
        ("portfolio" , "portfolios"),

        ("vertex" , "vertices"),
        ("matrix" , "matrices"),

        ("axis" , "axes"),
        ("testis" , "testes"),
        ("crisis" , "crises"),

        ("rice" , "rice"),
        ("shoe" , "shoes"),

        ("horse" , "horses"),
        ("prize" , "prizes"),
        ("edge" , "edges"),
        ]

    #various prefixes the inflector should pass through untouched
    prefixes = [
        '',
        'the ',
        'man-',
        'the baby-',
        'the kinda-slow ',
        'the kinda-slow baby-',
        ]

    #=========================================================
    #test base singularize & pluralize behavior
    #=========================================================
    def test_empty(self):
        # None and "" both normalize to the empty string
        inf = self.inf
        self.assertEqual(inf.pluralize(None), '')
        self.assertEqual(inf.singularize(None), '')
        self.assertEqual(inf.pluralize(''), '')
        self.assertEqual(inf.singularize(''), '')

    def test_uncountable(self):
        # uncountable words are their own plural
        for word in self.inf.uncountable_words:
            self.check_pair(word, word)

    def test_irregular(self):
        # the inflector's own irregular table must round-trip
        for singular, plural in self.inf.irregular_plurals.iteritems():
            self.check_pair(singular, plural)

    def test_std(self):
        for singular, plural in self.pairs:
            self.check_pair(singular, plural)

    #=========================================================
    #test prefix handling
    #=========================================================
    def test_prefixes(self):
        # inflection should only touch the final word, leaving prefixes alone
        inf = self.inf
        for singular, plural in self.pairs:
            for prefix in self.prefixes:
                self.check_pair(prefix + singular, prefix + plural)

    #TODO: test caps preservation

    def test_countof(self):
        # countof(n, word) renders "<n> <word>", pluralizing unless n == 1
        inf = self.inf
        singular = "cow"
        plural = "cows"
        self.assertEqual(inf.countof(0, singular), "0 " + plural)
        self.assertEqual(inf.countof(1, singular), "1 " + singular)
        self.assertEqual(inf.countof(2, singular), "2 " + plural)
        self.assertEqual(inf.countof(100, singular), "100 " + plural)

    #=========================================================
    #test articles
    #=========================================================
    #(word, expected "a/an word") pairs for oneof()
    oneof_cases = [

        #soft H rule (the exception)
        ("hourglass", "an hourglass"),

        #hard H rule
        ("horse", "a horse"),
        ("hoe", "a hoe"),
        ("house", "a house"),

        #exceptions to the vowel rule
        ("university", "a university"), #soft U
        ("unicorn", "a unicorn"), #soft U

        #normal vowels
        ("avian", "an avian"),
        ("avian planet", "an avian planet"), #catches a normalization bug
        ("umbrella", "an umbrella"),

        #normal consonants
        ("car", "a car"),
        ]
    def test_oneof(self):
        inf = self.inf
        for input, output in self.oneof_cases:
            result = inf.oneof(input)
            self.assertEqual(result, output)

    #=========================================================
    #test ordinals
    #=========================================================
    #(number, expected ordinal string) pairs
    ordinal_cases = [
        (1, "1st"),
        (2, "2nd"),
        (3, "3rd"),
        (5, "5th"),
        (199, "199th"),
        (1042, "1042nd")
        ]
    def test_ordinals(self):
        inf = self.inf
        # ordinal() only accepts positive integers
        self.assertRaises(ValueError, inf.ordinal, -1)
        self.assertRaises(ValueError, inf.ordinal, 0)
        for input, output in self.ordinal_cases:
            result = inf.ordinal(input)
            self.assertEqual(result, output)

    #=========================================================
    #helpers
    #=========================================================
    def check_pair(self, singular, plural):
        "check a given pair translated back and forth"
        self.check_pluralize(singular, plural)
        self.check_singularize(plural, singular)

    def check_pluralize(self, singular, plural):
        # assert pluralize(singular) == plural, with a readable message
        test = self.inf.pluralize(singular)
        self.assertEqual(test, plural, "Plural of %r is %r, not %r" % (singular, plural, test))

        #TODO: alg isn't idempotent yet
##        test = self.inf.pluralize(plural)
##        self.assertEqual(test, plural, "Plural %r mishandled as %r" % (plural, test))

    def check_singularize(self, plural, singular):
        # assert singularize(plural) == singular, with a readable message
        test = self.inf.singularize(plural)
        self.assertEqual(test, singular, "Singular of %r is %r, not %r" % (plural, singular, test))

    #=========================================================
    #EOC
    #=========================================================
+
+#=========================================================
+#format introspection tests
+#=========================================================
class FormatTest(TestCase):
    "test format-string introspection helpers"

    #TODO: test parse_fmt_string
    #TODO: test that render_format() works

    def test_parse_fmt_field(self):
        check = self.check_parse_fmt_field

        #detect bug under 2.5 where we get 'None' back
        check("d", "d")

        #detect nested attr mode
        check("1[{2}]", 1, (False, "{2}"))
        check("1.{2}", 1, (True, "{2}"))

        #check dup attrs raises error
        head, tail = parse_fmt_field("1..x")
        self.assertEquals(head, 1, "head:")
        self.assertRaises(ValueError, tuple, tail)
        #ValueError: Empty attribute in format string

        #allow weird chars (this is what py26 does)
        check("1\x00", '1\x00')
        check("1.\x00", 1, (True, "\x00"))
        check("1[\x00]", 1, (False, "\x00"))

        #not sure what right thing to do here is,
        #but this is what py26 does
        check("1{2}", "1{2}")

    def check_parse_fmt_field(self, source, head, *tail):
        "assert parse_fmt_field(source) yields the given head & tail"
        got_head, got_tail = parse_fmt_field(source)
        self.assertEquals(got_head, head, "head:")
        self.assertIsNot(got_tail, None, "tail:") #should always be iter
        self.assertEquals(tuple(got_tail), tail, "tail:")

    def test_get_fmt_fields(self):
        cases = [
            (set([0, 'a', 'b', 'c', 'd']), '{0.1:{c}d} {a[{b}]} {d}'),

            #this is what py2.6 does, so I guess it's right..
            (set([0,1,2,'a{b}']), '{0} {1.{2}} {a{b}}'),
            ]
        self.check_function_results(get_fmt_fields, cases)

    def test_fmt_has_field(self):
        cases = [
            #check numbers
            (True, "{0} {1} {a:s}", 0),
            (True, "{0} {1.{2}} {a:s} {b:{c}d}", 1),
            (True, "{0} {1.{2}} {a:s} {b:{c}d}", 2),

            #check letters
            (True, "{0} {a:s}", 'a'),
            (True, "{0} {1} {a:s} {b:{c}d}", 'b'),
            (True, "{0} {1} {a:s} {b:{c}d}", 'c'),

            #check stringified numbers
            (False, "{0} {1} {b:d}", '0'),
            (False, "{0} {1} {b:d}", '1'),
            (False, "{0} {1} {b:d}", '2'),

            #check missing numbers
            (False, "{0} {1} {b:d}", 2),

            #check missing letters
            (False, "{0} {1} {a:s} {b:d}", 'x'),
            (False, "{0} {1} {a:s} {b:{c}d}", 'd'),
            ]
        self.check_function_results(fmt_has_field, cases)
+
+#=========================================================
+#email
+#=========================================================
+from bps.unstable import parse_email_addr, compile_email_addr, validate_email_parts, norm_email_addr
+
class ParseEmailTest(TestCase):
    "test parse_email_addr()"

    #=========================================================
    #parse_email_addr
    #=========================================================
    # Each table row is (source, name, local, domain) where name/local/domain
    # are the expected parse results; the tables are grouped by which keyword
    # flags (strip / strict / clarify / allow_empty) are needed to parse them.

    #addrs that should always parse
    valid_addrs = [
        #check simple addrs & chars
        ("abc@def", None, "abc", "def"),
        ("abc@def", None, "abc", "def"),
        ('abc+def@369.live.com', None, 'abc+def', '369.live.com'),
        (u'Pel\xe9@live.com', None, u'Pel\xe9', u'live.com'),

        #check name parser
        ("Name <abc@def>", "Name", "abc", "def"),
        ("John Jackson <abc@def>", "John Jackson", "abc", "def"),
        ('"John Jackson" <abc@def>', "John Jackson", "abc", "def"),
        ("'John Jackson' <abc@def>", "John Jackson", "abc", "def"),
        ("N@me <abc@def>", "N@me", "abc", "def"), #this is questionable

        #check periods in local part
        ("a.b.c@def", None, "a.b.c","def"),

        #check periods in domain
        ("abc@def.com", None, "abc", "def.com"),
        ("abc@def.com.", None, "abc", "def.com."),
        ("abc@def.abc.com", None, "abc", "def.abc.com"),

        #check hyphens in domain
        ("abc@def-ghi", None, "abc", "def-ghi"),
        ("abc@def-ghi.xmas-fun.", None, "abc", "def-ghi.xmas-fun."),
        ]

    #addrs that require strip=True to parse
    strip_addrs = [
        #should parse space between parts & at ends
        (" user @ example.com ", None, "user","example.com"),

        #should get space w/in brackets
        ("Jeff Harris < user@example.com >", "Jeff Harris", "user","example.com"),

        #should condense name part
        (" Jeff Harris< user @ example.com >", "Jeff Harris", "user","example.com"),
        ]

    #addrs that require strict=False to parse
    relaxed_addrs = [
        #source, name, local, domain

        #there only be one '@'
        ("A@b@c@example.com", None, "A@b@c", "example.com"), # only one @ is allowed outside quotations marks
        #NOTE: the fact that this parses as A@b@c / example.com, not A / b@c@example.com, is a border case whose behavior is not guaranteed

        #local part can't have periods at start, end, or doubled
        ("Abc.@example.com", None, "Abc.", "example.com"),
        (" user. @ example.com ", None, "user.", "example.com"),
        (" .user @ example.com ", None, ".user", "example.com"),
        ("Abc..123@example.com", None, "Abc..123", "example.com"),
        (" user..x @ example.com ", None, "user..x", "example.com"),

        #local part can't have []
        ("user[xxx]@def.eu", None, "user[xxx]","def.eu"),

        #domain part can't have []
        ("user@def[xxx].eu", None, "user","def[xxx].eu"),

        #domain part can't have period at start, or doubled
        ("user@.def.eu", None, "user",".def.eu"),
        ("user@def..eu", None, "user","def..eu"),

        #domain part can't have hypen at start or end of element
        ("user@-def", None, "user", "-def"),
        ("user@abc.-def", None, "user", "abc.-def"),
        ("user@def-", None, "user", "def-"),
        ("user@def-.ghi", None, "user", "def-.ghi"),

        #invalid attrs (when not in unicode)
        ('Pel\x01@live.com', None, 'Pel\x01', 'live.com'),
        ('Pel@liv\x02.com', None, 'Pel', 'liv\x02.com'),
        ]

    #addrs that will always be rejected
    invalid_addrs = [
        #must at least have local & domain parts
        " @ ",
        "l@ ",
        "@d",

        #must have matching <> in correct spot
        "n <l@d",
        "n l@d>",
        "n <l@d> x",
        "<n> l@d",
        "n> l@d",

        #must have @
        "Abc.example.com",
        "jimmy abc.example.com",

        #always invalid attrs
        "xyz<>@example.com",
        ]

    #addrs that should all parse as empty addresses
    empty_addrs = [
        None,
        "",
        " \t ",
        " <>",
        "'' < > ",
        ]

    #test addrs that require clarify=True to parse
    #(de-obfuscating "at"/"dot" spellings)
    clarify_addrs = [
        ("steven <jimmy at well dot net>", "steven", "jimmy","well.net"),
        ("jimmy at well dot net", None, "jimmy","well.net"),
        ("jimmy (at) well (dot) net", None, "jimmy","well.net"),
        ("jimmy (at)well(dot) net", None, "jimmy","well.net"),
        ("jimmy [at] well [dot] net", None, "jimmy","well.net"),
        ]

    def test_parse_valid(self):
        "test parse_email_addr() with valid addresses"
        for addr, name, local, domain in self.valid_addrs:
            result = parse_email_addr(addr, strip=False)
            self.assertEquals(result, (name,local,domain))

    def test_parse_strip(self):
        "test parse_email_addr() with valid addresses that require strip=True"
        for addr, name, local, domain in self.strip_addrs:
            # must fail without strip, succeed with the default
            self.assertRaises(ValueError, parse_email_addr, addr, strip=False)
            result = parse_email_addr(addr)
            self.assertEquals(result, (name,local,domain))

    def test_parse_relaxed(self):
        "test parse_email_addr() with valid addresses that require strict=False"
        for addr, name, local, domain in self.relaxed_addrs:
            # must fail under strict rules, succeed when relaxed
            self.assertRaises(ValueError, parse_email_addr, addr)
            result = parse_email_addr(addr, strict=False)
            self.assertEquals(result, (name,local,domain))

    def test_parse_invalid(self):
        "test parse_email_addr() with invalid addrs"
        for addr in self.invalid_addrs:
            # rejected even with every relaxation enabled
            self.assertRaises(ValueError, parse_email_addr, addr, strict=False)
            self.assertRaises(ValueError, parse_email_addr, addr, strict=False, clarify=True)

    def test_parse_clarify(self):
        "test parse_email_addr() with valid addresses that require clarify=True"
        for addr, name, local, domain in self.clarify_addrs:
            self.assertRaises(ValueError, parse_email_addr, addr)
            result = parse_email_addr(addr, clarify=True)
            self.assertEquals(result, (name,local,domain))

    def test_parse_empty(self):
        "test parse_email_addr() with empty strings"
        for value in self.empty_addrs:
            # empty input raises unless allow_empty is set, then -> 3x None
            self.assertRaises(ValueError, parse_email_addr, value)
            result = parse_email_addr(value, allow_empty=True)
            self.assertEquals(result,(None,None,None))
+
class ValidateEmailTest(TestCase):
    "test validate_email_parts()"

    #(local, domain) pairs accepted under strict rules
    valid_parts = [
        ("loc", "dom"),
        ("loc","dom"),

        ("a.b","dom"),
        ("a.b.c","dom"),
        ("ab+c","dom"),
        ("ab-c","dom"),

        ("loc","dom.dom"),
        ("loc","dom.dom."),
        ("loc","dom-ghi"),
        ("loc","3com.com"),
        ]

    #pairs rejected when strict, but accepted with strict=False
    relaxed_parts = [
        ("loc.", "dom"),
        (".loc", "dom"),
        ("loc..loc", "dom"),
        ("loc loc", "dom"),

        ("loc",".dom"),
        ("loc","dom..dom"),
        ("loc","dom-"),
        ("loc","dom-.dom"),
        ("loc","-dom"),
        ("loc","dom.-dom"),
        ]

    #pairs rejected regardless of strictness
    invalid_parts = [
        ("", "dom"),
        ("loc", ""),
        ]

    def test_validate_valid(self):
        "test validate_email_parts() against valid tuples"
        for local, domain in self.valid_parts:
            validate_email_parts(None, local, domain)

    def test_validate_relaxed(self):
        "test validate_email_parts() against valid tuples that require strict=False"
        for local, domain in self.relaxed_parts:
            self.assertRaises(ValueError, validate_email_parts, None, local, domain)
            validate_email_parts(None, local, domain, strict=False)

    def test_validate_invalid(self):
        "test validate_email_parts() against invalid tuples"
        for local, domain in self.invalid_parts:
            self.assertRaises(ValueError, validate_email_parts, None, local, domain)
            self.assertRaises(ValueError, validate_email_parts, None, local, domain, strict=False)
+
class NormEmailTest(TestCase):
    "test norm_email_addr()"

    #(input, expected-normalized-output) pairs accepted by default
    valid_addrs = [
        ("user@local","user@local"),
        (" user @ local ","user@local"),
        ("<user@local>","user@local"),

        ("Name J<user@local>",'"Name J" <user@local>'),
        (" ' Name J ' <user@local>",'"Name J" <user@local>'),
        ]

    #obfuscated "at"/"dot" spellings, need clarify=True
    clarify_addrs =[
        ("joe at cell dot net","joe@cell.net"),
        ("joe (at) cell.net","joe@cell.net"),
        ("joe (at) cell (dot) net","joe@cell.net"),
        ]

    #addrs that only normalize with strict=False
    relaxed_addrs = [
        ("user..name@local", "user..name@local"),
        ]

    #addrs rejected outright
    invalid_addrs = [
        "user",
        "user\x01@local",
        ]

    def test_norm_valid(self):
        "test norm_email_addr() with valid addrs"
        for value, real in self.valid_addrs:
            result = norm_email_addr(value)
            self.assertEquals(result, real)

    def test_norm_relaxed(self):
        "test norm_email_addr() with valid addrs which require strict=False"
        for value, real in self.relaxed_addrs:
            # must fail under strict rules, succeed when relaxed
            self.assertRaises(ValueError, norm_email_addr, value)
            result = norm_email_addr(value, strict=False)
            self.assertEquals(result, real)

    def test_norm_invalid(self):
        "test norm_email_addr() with invalid addrs"
        for value in self.invalid_addrs:
            self.assertRaises(ValueError, norm_email_addr, value)

    def test_norm_clarify(self):
        "test norm_email_addr() with obfuscated addrs"
        for value, real in self.clarify_addrs:
            # must fail without clarify, succeed with it
            self.assertRaises(ValueError, norm_email_addr, value)
            result = norm_email_addr(value, clarify=True)
            self.assertEquals(result, real)
+
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/tests/test_text_format.py b/bps/tests/test_text_format.py
new file mode 100644
index 0000000..ec7b10a
--- /dev/null
+++ b/bps/tests/test_text_format.py
@@ -0,0 +1,1118 @@
+"""tests for bps.text._string_format.
+
+Most of these tests are adapted from the python source,
+to make sure our custom implementation passes all the
+tests the real format() has to pass.
+"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+import sys
+from functools import partial
+#site
+#pkg
+import bps.text.patch_format
+import bps.text._string_format as tsf
+#module
+from bps.types import stub
+from bps.meta import Params as ak
+from bps.tests.utils import TestCase
+#=========================================================
+#
+#=========================================================
+
+string = stub(Formatter=tsf.Formatter)
+
+#=========================================================
+#custom string.format tests
+#=========================================================
+class BaseClassTest(TestCase):
+ def setUp(self):
+ self.formatter = tsf.Formatter()
+
+ def test_formatter(self, format=None):
+ if format is None:
+ format = self.formatter.format
+ self.check_function_results(format, [
+ ak("1",
+ "{a}", a=1),
+ ak("1 2",
+ "{0} {a}", 1, a=2),
+ ak("This is a test of } 3e8 hex 3 a 200000{",
+ "This is a test of }} {0:x} {x} {y[2]} {2[2]} {1:5n}{{",
+ 1000, 200000, 'grag', x='hex', y=[1,2,3]),
+ ak("5 5 5 +5", "{0:n} {0: n} {0:-n} {0:+n}", 5),
+ ak("-5 -5 -5 -5", "{0:n} {0: n} {0:-n} {0:+n}", -5),
+ ])
+
+ if sys.version_info >= (2, 6):
+ def test_formatter_test(self):
+ "run test cases through python's builtin format, errors here mean test itself is wrong"
+ def wrapper(s, *a, **k):
+ return s.format(*a, **k)
+ self.test_formatter(wrapper)
+
+class StdlibTest(TestCase):
+ "this is a copy of Python 2.6.2's Formatter tests"
+
+ #from Lib/test/test_builtin line 1368
+ def test_format_builtin(self):
+ # Test the basic machinery of the format() builtin. Don't test
+ # the specifics of the various formatters
+ self.assertEqual(format(3, ''), '3')
+
+ # Returns some classes to use for various tests. There's
+ # an old-style version, and a new-style version
+ def classes_new():
+ class A(object):
+ def __init__(self, x):
+ self.x = x
+ def __format__(self, format_spec):
+ return str(self.x) + format_spec
+ class DerivedFromA(A):
+ pass
+
+ class Simple(object): pass
+ class DerivedFromSimple(Simple):
+ def __init__(self, x):
+ self.x = x
+ def __format__(self, format_spec):
+ return str(self.x) + format_spec
+ class DerivedFromSimple2(DerivedFromSimple): pass
+ return A, DerivedFromA, DerivedFromSimple, DerivedFromSimple2
+
+ # In 3.0, classes_classic has the same meaning as classes_new
+ def classes_classic():
+ class A:
+ def __init__(self, x):
+ self.x = x
+ def __format__(self, format_spec):
+ return str(self.x) + format_spec
+ class DerivedFromA(A):
+ pass
+
+ class Simple: pass
+ class DerivedFromSimple(Simple):
+ def __init__(self, x):
+ self.x = x
+ def __format__(self, format_spec):
+ return str(self.x) + format_spec
+ class DerivedFromSimple2(DerivedFromSimple): pass
+ return A, DerivedFromA, DerivedFromSimple, DerivedFromSimple2
+
+ def class_test(A, DerivedFromA, DerivedFromSimple, DerivedFromSimple2):
+ self.assertEqual(format(A(3), 'spec'), '3spec')
+ self.assertEqual(format(DerivedFromA(4), 'spec'), '4spec')
+ self.assertEqual(format(DerivedFromSimple(5), 'abc'), '5abc')
+ self.assertEqual(format(DerivedFromSimple2(10), 'abcdef'),
+ '10abcdef')
+
+ class_test(*classes_new())
+ class_test(*classes_classic())
+
+ def empty_format_spec(value):
+ # test that:
+ # format(x, '') == str(x)
+ # format(x) == str(x)
+ self.assertEqual(format(value, ""), str(value))
+ self.assertEqual(format(value), str(value))
+
+ # for builtin types, format(x, "") == str(x)
+ empty_format_spec(17**13)
+ empty_format_spec(1.0)
+ empty_format_spec(3.1415e104)
+ empty_format_spec(-3.1415e104)
+ empty_format_spec(3.1415e-104)
+ empty_format_spec(-3.1415e-104)
+ empty_format_spec(object)
+ empty_format_spec(None)
+
+ # TypeError because self.__format__ returns the wrong type
+ class BadFormatResult:
+ def __format__(self, format_spec):
+ return 1.0
+ self.assertRaises(TypeError, format, BadFormatResult(), "")
+
+ # TypeError because format_spec is not unicode or str
+ self.assertRaises(TypeError, format, object(), 4)
+ self.assertRaises(TypeError, format, object(), object())
+
+ # tests for object.__format__ really belong elsewhere, but
+ # there's no good place to put them
+ #JEC -- replaced below ##x = object().__format__('')
+ x = tsf.object_format(object(), '')
+ self.assert_(x.startswith('<object object at'))
+
+ # first argument to object.__format__ must be string
+ self.assertRaises(TypeError, partial(tsf.object_format, object()), 3)
+ self.assertRaises(TypeError, partial(tsf.object_format, object()), object())
+ self.assertRaises(TypeError, partial(tsf.object_format, object()), None)
+
+ # make sure we can take a subclass of str as a format spec
+ class DerivedFromStr(str): pass
+ self.assertEqual(format(0, DerivedFromStr('10')), ' 0')
+
+ # Lib/test/test_datetime, line 860
+## def test_format_date(self):
+## dt = self.theclass(2007, 9, 10)
+## self.assertEqual(dt.__format__(''), str(dt))
+##
+## # check that a derived class's __str__() gets called
+## class A(self.theclass):
+## def __str__(self):
+## return 'A'
+## a = A(2007, 9, 10)
+## self.assertEqual(a.__format__(''), 'A')
+##
+## # check that a derived class's strftime gets called
+## class B(self.theclass):
+## def strftime(self, format_spec):
+## return 'B'
+## b = B(2007, 9, 10)
+## self.assertEqual(b.__format__(''), str(dt))
+##
+## for fmt in ["m:%m d:%d y:%y",
+## "m:%m d:%d y:%y H:%H M:%M S:%S",
+## "%z %Z",
+## ]:
+## self.assertEqual(dt.__format__(fmt), dt.strftime(fmt))
+## self.assertEqual(a.__format__(fmt), dt.strftime(fmt))
+## self.assertEqual(b.__format__(fmt), 'B')
+
+ # Lib/test/test_datetime, line 1169
+## def test_format_datetime(self):
+## dt = self.theclass(2007, 9, 10, 4, 5, 1, 123)
+## self.assertEqual(dt.__format__(''), str(dt))
+##
+## # check that a derived class's __str__() gets called
+## class A(self.theclass):
+## def __str__(self):
+## return 'A'
+## a = A(2007, 9, 10, 4, 5, 1, 123)
+## self.assertEqual(a.__format__(''), 'A')
+##
+## # check that a derived class's strftime gets called
+## class B(self.theclass):
+## def strftime(self, format_spec):
+## return 'B'
+## b = B(2007, 9, 10, 4, 5, 1, 123)
+## self.assertEqual(b.__format__(''), str(dt))
+##
+## for fmt in ["m:%m d:%d y:%y",
+## "m:%m d:%d y:%y H:%H M:%M S:%S",
+## "%z %Z",
+## ]:
+## self.assertEqual(dt.__format__(fmt), dt.strftime(fmt))
+## self.assertEqual(a.__format__(fmt), dt.strftime(fmt))
+## self.assertEqual(b.__format__(fmt), 'B')
+
+ # Lib/test/test_datetime, line 1827
+## def test_format(self):
+## t = self.theclass(1, 2, 3, 4)
+## self.assertEqual(t.__format__(''), str(t))
+##
+## # check that a derived class's __str__() gets called
+## class A(self.theclass):
+## def __str__(self):
+## return 'A'
+## a = A(1, 2, 3, 4)
+## self.assertEqual(a.__format__(''), 'A')
+##
+## # check that a derived class's strftime gets called
+## class B(self.theclass):
+## def strftime(self, format_spec):
+## return 'B'
+## b = B(1, 2, 3, 4)
+## self.assertEqual(b.__format__(''), str(t))
+##
+## for fmt in ['%H %M %S',
+## ]:
+## self.assertEqual(t.__format__(fmt), t.strftime(fmt))
+## self.assertEqual(a.__format__(fmt), t.strftime(fmt))
+## self.assertEqual(b.__format__(fmt), 'B')
+
+
+ #from Lib/test/test_string line 109
+ def test_formatter_class(self):
+ fmt = string.Formatter()
+ self.assertEqual(fmt.format("foo"), "foo")
+
+ self.assertEqual(fmt.format("foo{0}", "bar"), "foobar")
+ self.assertEqual(fmt.format("foo{1}{0}-{1}", "bar", 6), "foo6bar-6")
+ self.assertEqual(fmt.format("-{arg!r}-", arg='test'), "-'test'-")
+
+ # override get_value ############################################
+ class NamespaceFormatter(string.Formatter):
+ def __init__(self, namespace={}):
+ string.Formatter.__init__(self)
+ self.namespace = namespace
+
+ def get_value(self, key, args, kwds):
+ if isinstance(key, str):
+ try:
+ # Check explicitly passed arguments first
+ return kwds[key]
+ except KeyError:
+ return self.namespace[key]
+ else:
+ string.Formatter.get_value(key, args, kwds)
+
+ fmt = NamespaceFormatter({'greeting':'hello'})
+ self.assertEqual(fmt.format("{greeting}, world!"), 'hello, world!')
+
+
+ # override format_field #########################################
+ class CallFormatter(string.Formatter):
+ def format_field(self, value, format_spec):
+ return format(value(), format_spec)
+
+ fmt = CallFormatter()
+ self.assertEqual(fmt.format('*{0}*', lambda : 'result'), '*result*')
+
+
+ # override convert_field ########################################
+ class XFormatter(string.Formatter):
+ def convert_field(self, value, conversion):
+ if conversion == 'x':
+ return None
+ return super(XFormatter, self).convert_field(value, conversion)
+
+ fmt = XFormatter()
+ self.assertEqual(fmt.format("{0!r}:{0!x}", 'foo', 'foo'), "'foo':None")
+
+
+ # override parse ################################################
+ class BarFormatter(string.Formatter):
+ # returns an iterable that contains tuples of the form:
+ # (literal_text, field_name, format_spec, conversion)
+ def parse(self, format_string):
+ for field in format_string.split('|'):
+ if field[0] == '+':
+ # it's markup
+ field_name, _, format_spec = field[1:].partition(':')
+ yield '', field_name, format_spec, None
+ else:
+ yield field, None, None, None
+
+ fmt = BarFormatter()
+ self.assertEqual(fmt.format('*|+0:^10s|*', 'foo'), '* foo *')
+
+ # test all parameters used
+ class CheckAllUsedFormatter(string.Formatter):
+ def check_unused_args(self, used_args, args, kwargs):
+                # Track which arguments actually got used
+ unused_args = set(kwargs.keys())
+ unused_args.update(range(0, len(args)))
+
+ for arg in used_args:
+ unused_args.remove(arg)
+
+ if unused_args:
+ raise ValueError("unused arguments")
+
+ fmt = CheckAllUsedFormatter()
+ self.assertEqual(fmt.format("{0}", 10), "10")
+ self.assertEqual(fmt.format("{0}{i}", 10, i=100), "10100")
+ self.assertEqual(fmt.format("{0}{i}{1}", 10, 20, i=100), "1010020")
+ self.assertRaises(ValueError, fmt.format, "{0}{i}{1}", 10, 20, i=100, j=0)
+ self.assertRaises(ValueError, fmt.format, "{0}", 10, 20)
+ self.assertRaises(ValueError, fmt.format, "{0}", 10, 20, i=100)
+ self.assertRaises(ValueError, fmt.format, "{i}", 10, 20, i=100)
+
+ # Alternate formatting is not supported
+ self.assertRaises(ValueError, format, '', '#')
+ self.assertRaises(ValueError, format, '', '#20')
+
+ #from Lib/test/test_str, line 141
+ def test_format(self):
+ self.assertEqual(''.format(), '')
+ self.assertEqual('a'.format(), 'a')
+ self.assertEqual('ab'.format(), 'ab')
+ self.assertEqual('a{{'.format(), 'a{')
+ self.assertEqual('a}}'.format(), 'a}')
+ self.assertEqual('{{b'.format(), '{b')
+ self.assertEqual('}}b'.format(), '}b')
+ self.assertEqual('a{{b'.format(), 'a{b')
+
+ # examples from the PEP:
+ import datetime
+ self.assertEqual("My name is {0}".format('Fred'), "My name is Fred")
+ self.assertEqual("My name is {0[name]}".format(dict(name='Fred')),
+ "My name is Fred")
+ self.assertEqual("My name is {0} :-{{}}".format('Fred'),
+ "My name is Fred :-{}")
+
+ d = datetime.date(2007, 8, 18)
+ self.assertEqual("The year is {0.year}".format(d),
+ "The year is 2007")
+
+ # classes we'll use for testing
+ class C:
+ def __init__(self, x=100):
+ self._x = x
+ def __format__(self, spec):
+ return spec
+
+ class D:
+ def __init__(self, x):
+ self.x = x
+ def __format__(self, spec):
+ return str(self.x)
+
+ # class with __str__, but no __format__
+ class E:
+ def __init__(self, x):
+ self.x = x
+ def __str__(self):
+ return 'E(' + self.x + ')'
+
+ # class with __repr__, but no __format__ or __str__
+ class F:
+ def __init__(self, x):
+ self.x = x
+ def __repr__(self):
+ return 'F(' + self.x + ')'
+
+ # class with __format__ that forwards to string, for some format_spec's
+ class G:
+ def __init__(self, x):
+ self.x = x
+ def __str__(self):
+ return "string is " + self.x
+ def __format__(self, format_spec):
+ if format_spec == 'd':
+ return 'G(' + self.x + ')'
+ #jec - replaced below
+ return tsf.object_format(self, format_spec)
+
+ # class that returns a bad type from __format__
+ class H:
+ def __format__(self, format_spec):
+ return 1.0
+
+ class I(datetime.date):
+ def __format__(self, format_spec):
+ return self.strftime(format_spec)
+
+ class J(int):
+ def __format__(self, format_spec):
+ #jec -- replaced below
+ return tsf.int_format(self * 2, format_spec)
+
+
+ self.assertEqual(''.format(), '')
+ self.assertEqual('abc'.format(), 'abc')
+ self.assertEqual('{0}'.format('abc'), 'abc')
+ self.assertEqual('{0:}'.format('abc'), 'abc')
+ self.assertEqual('X{0}'.format('abc'), 'Xabc')
+ self.assertEqual('{0}X'.format('abc'), 'abcX')
+ self.assertEqual('X{0}Y'.format('abc'), 'XabcY')
+ self.assertEqual('{1}'.format(1, 'abc'), 'abc')
+ self.assertEqual('X{1}'.format(1, 'abc'), 'Xabc')
+ self.assertEqual('{1}X'.format(1, 'abc'), 'abcX')
+ self.assertEqual('X{1}Y'.format(1, 'abc'), 'XabcY')
+ self.assertEqual('{0}'.format(-15), '-15')
+ self.assertEqual('{0}{1}'.format(-15, 'abc'), '-15abc')
+ self.assertEqual('{0}X{1}'.format(-15, 'abc'), '-15Xabc')
+ self.assertEqual('{{'.format(), '{')
+ self.assertEqual('}}'.format(), '}')
+ self.assertEqual('{{}}'.format(), '{}')
+ self.assertEqual('{{x}}'.format(), '{x}')
+ self.assertEqual('{{{0}}}'.format(123), '{123}')
+ self.assertEqual('{{{{0}}}}'.format(), '{{0}}')
+ self.assertEqual('}}{{'.format(), '}{')
+ self.assertEqual('}}x{{'.format(), '}x{')
+
+ # weird field names
+ self.assertEqual("{0[foo-bar]}".format({'foo-bar':'baz'}), 'baz')
+ self.assertEqual("{0[foo bar]}".format({'foo bar':'baz'}), 'baz')
+ self.assertEqual("{0[ ]}".format({' ':3}), '3')
+
+ self.assertEqual('{foo._x}'.format(foo=C(20)), '20')
+ self.assertEqual('{1}{0}'.format(D(10), D(20)), '2010')
+ self.assertEqual('{0._x.x}'.format(C(D('abc'))), 'abc')
+ self.assertEqual('{0[0]}'.format(['abc', 'def']), 'abc')
+ self.assertEqual('{0[1]}'.format(['abc', 'def']), 'def')
+ self.assertEqual('{0[1][0]}'.format(['abc', ['def']]), 'def')
+ self.assertEqual('{0[1][0].x}'.format(['abc', [D('def')]]), 'def')
+
+ # strings
+ self.assertEqual('{0:.3s}'.format('abc'), 'abc')
+ self.assertEqual('{0:.3s}'.format('ab'), 'ab')
+ self.assertEqual('{0:.3s}'.format('abcdef'), 'abc')
+ self.assertEqual('{0:.0s}'.format('abcdef'), '')
+ self.assertEqual('{0:3.3s}'.format('abc'), 'abc')
+ self.assertEqual('{0:2.3s}'.format('abc'), 'abc')
+ self.assertEqual('{0:2.2s}'.format('abc'), 'ab')
+ self.assertEqual('{0:3.2s}'.format('abc'), 'ab ')
+ self.assertEqual('{0:x<0s}'.format('result'), 'result')
+ self.assertEqual('{0:x<5s}'.format('result'), 'result')
+ self.assertEqual('{0:x<6s}'.format('result'), 'result')
+ self.assertEqual('{0:x<7s}'.format('result'), 'resultx')
+ self.assertEqual('{0:x<8s}'.format('result'), 'resultxx')
+ self.assertEqual('{0: <7s}'.format('result'), 'result ')
+ self.assertEqual('{0:<7s}'.format('result'), 'result ')
+ self.assertEqual('{0:>7s}'.format('result'), ' result')
+ self.assertEqual('{0:>8s}'.format('result'), ' result')
+ self.assertEqual('{0:^8s}'.format('result'), ' result ')
+ self.assertEqual('{0:^9s}'.format('result'), ' result ')
+ self.assertEqual('{0:^10s}'.format('result'), ' result ')
+ self.assertEqual('{0:10000}'.format('a'), 'a' + ' ' * 9999)
+ self.assertEqual('{0:10000}'.format(''), ' ' * 10000)
+ self.assertEqual('{0:10000000}'.format(''), ' ' * 10000000)
+
+ # format specifiers for user defined type
+ self.assertEqual('{0:abc}'.format(C()), 'abc')
+
+        # !r and !s coercions
+ self.assertEqual('{0!s}'.format('Hello'), 'Hello')
+ self.assertEqual('{0!s:}'.format('Hello'), 'Hello')
+ self.assertEqual('{0!s:15}'.format('Hello'), 'Hello ')
+ self.assertEqual('{0!s:15s}'.format('Hello'), 'Hello ')
+ self.assertEqual('{0!r}'.format('Hello'), "'Hello'")
+ self.assertEqual('{0!r:}'.format('Hello'), "'Hello'")
+ self.assertEqual('{0!r}'.format(F('Hello')), 'F(Hello)')
+
+ # test fallback to object.__format__
+ self.assertEqual('{0}'.format({}), '{}')
+ self.assertEqual('{0}'.format([]), '[]')
+ self.assertEqual('{0}'.format([1]), '[1]')
+ self.assertEqual('{0}'.format(E('data')), 'E(data)')
+ self.assertEqual('{0:^10}'.format(E('data')), ' E(data) ')
+ self.assertEqual('{0:^10s}'.format(E('data')), ' E(data) ')
+ self.assertEqual('{0:d}'.format(G('data')), 'G(data)')
+ self.assertEqual('{0:>15s}'.format(G('data')), ' string is data')
+ self.assertEqual('{0!s}'.format(G('data')), 'string is data')
+
+ self.assertEqual("{0:date: %Y-%m-%d}".format(I(year=2007,
+ month=8,
+ day=27)),
+ "date: 2007-08-27")
+
+ # test deriving from a builtin type and overriding __format__
+ self.assertEqual("{0}".format(J(10)), "20")
+
+
+ # string format specifiers
+ self.assertEqual('{0:}'.format('a'), 'a')
+
+ # computed format specifiers
+ self.assertEqual("{0:.{1}}".format('hello world', 5), 'hello')
+ self.assertEqual("{0:.{1}s}".format('hello world', 5), 'hello')
+ self.assertEqual("{0:.{precision}s}".format('hello world', precision=5), 'hello')
+ self.assertEqual("{0:{width}.{precision}s}".format('hello world', width=10, precision=5), 'hello ')
+ self.assertEqual("{0:{width}.{precision}s}".format('hello world', width='10', precision='5'), 'hello ')
+
+ # test various errors
+ self.assertRaises(ValueError, '{'.format)
+ self.assertRaises(ValueError, '}'.format)
+ self.assertRaises(ValueError, 'a{'.format)
+ self.assertRaises(ValueError, 'a}'.format)
+ self.assertRaises(ValueError, '{a'.format)
+ self.assertRaises(ValueError, '}a'.format)
+ self.assertRaises(IndexError, '{0}'.format)
+ self.assertRaises(IndexError, '{1}'.format, 'abc')
+ self.assertRaises(KeyError, '{x}'.format)
+ self.assertRaises(ValueError, "}{".format)
+ self.assertRaises(ValueError, "{".format)
+ self.assertRaises(ValueError, "}".format)
+ self.assertRaises(ValueError, "abc{0:{}".format)
+ self.assertRaises(ValueError, "{0".format)
+ self.assertRaises(IndexError, "{0.}".format)
+ self.assertRaises(ValueError, "{0.}".format, 0)
+ self.assertRaises(IndexError, "{0[}".format)
+ self.assertRaises(ValueError, "{0[}".format, [])
+ self.assertRaises(KeyError, "{0]}".format)
+ self.assertRaises(ValueError, "{0.[]}".format, 0)
+ self.assertRaises(ValueError, "{0..foo}".format, 0)
+ self.assertRaises(ValueError, "{0[0}".format, 0)
+ self.assertRaises(ValueError, "{0[0:foo}".format, 0)
+ self.assertRaises(KeyError, "{c]}".format)
+ self.assertRaises(ValueError, "{{ {{{0}}".format, 0)
+ self.assertRaises(ValueError, "{0}}".format, 0)
+ self.assertRaises(KeyError, "{foo}".format, bar=3)
+ self.assertRaises(ValueError, "{0!x}".format, 3)
+ self.assertRaises(ValueError, "{0!}".format, 0)
+ self.assertRaises(ValueError, "{0!rs}".format, 0)
+ self.assertRaises(ValueError, "{!}".format)
+ self.assertRaises(ValueError, "{:}".format)
+ self.assertRaises(ValueError, "{:s}".format)
+ self.assertRaises(ValueError, "{}".format)
+
+ # can't have a replacement on the field name portion
+ self.assertRaises(TypeError, '{0[{1}]}'.format, 'abcdefg', 4)
+
+ # exceed maximum recursion depth
+ self.assertRaises(ValueError, "{0:{1:{2}}}".format, 'abc', 's', '')
+ self.assertRaises(ValueError, "{0:{1:{2:{3:{4:{5:{6}}}}}}}".format,
+ 0, 1, 2, 3, 4, 5, 6, 7)
+
+ # string format spec errors
+ self.assertRaises(ValueError, "{0:-s}".format, '')
+ self.assertRaises(ValueError, format, "", "-")
+ self.assertRaises(ValueError, "{0:=s}".format, '')
+
+ # Lib/test/test_types, line 93
+ def test_float_to_string(self):
+ def test(f, result):
+ self.assertEqual(tsf.float_format(f,'e'), result)
+ self.assertEqual('%e' % f, result)
+
+ # test all 2 digit exponents, both with __format__ and with
+ # '%' formatting
+ for i in range(-99, 100):
+ test(float('1.5e'+str(i)), '1.500000e{0:+03d}'.format(i))
+
+ # test some 3 digit exponents
+ self.assertEqual(tsf.float_format(1.5e100, 'e'), '1.500000e+100')
+ self.assertEqual('%e' % 1.5e100, '1.500000e+100')
+
+ self.assertEqual(tsf.float_format(1.5e101, 'e'), '1.500000e+101')
+ self.assertEqual('%e' % 1.5e101, '1.500000e+101')
+
+ self.assertEqual(tsf.float_format(1.5e-100, 'e'), '1.500000e-100')
+ self.assertEqual('%e' % 1.5e-100, '1.500000e-100')
+
+ self.assertEqual(tsf.float_format(1.5e-101, 'e'), '1.500000e-101')
+ self.assertEqual('%e' % 1.5e-101, '1.500000e-101')
+
+ # Lib/test/test_unicode, line 880
+ def test_format_unicode(self):
+ self.assertEqual(u''.format(), u'')
+ self.assertEqual(u'a'.format(), u'a')
+ self.assertEqual(u'ab'.format(), u'ab')
+ self.assertEqual(u'a{{'.format(), u'a{')
+ self.assertEqual(u'a}}'.format(), u'a}')
+ self.assertEqual(u'{{b'.format(), u'{b')
+ self.assertEqual(u'}}b'.format(), u'}b')
+ self.assertEqual(u'a{{b'.format(), u'a{b')
+
+ # examples from the PEP:
+ import datetime
+ self.assertEqual(u"My name is {0}".format(u'Fred'), u"My name is Fred")
+ self.assertEqual(u"My name is {0[name]}".format(dict(name=u'Fred')),
+ u"My name is Fred")
+ self.assertEqual(u"My name is {0} :-{{}}".format(u'Fred'),
+ u"My name is Fred :-{}")
+
+ # datetime.__format__ doesn't work with unicode
+ #d = datetime.date(2007, 8, 18)
+ #self.assertEqual("The year is {0.year}".format(d),
+ # "The year is 2007")
+
+ # classes we'll use for testing
+ class C:
+ def __init__(self, x=100):
+ self._x = x
+ def __format__(self, spec):
+ return spec
+
+ class D:
+ def __init__(self, x):
+ self.x = x
+ def __format__(self, spec):
+ return str(self.x)
+
+ # class with __str__, but no __format__
+ class E:
+ def __init__(self, x):
+ self.x = x
+ def __str__(self):
+ return u'E(' + self.x + u')'
+
+ # class with __repr__, but no __format__ or __str__
+ class F:
+ def __init__(self, x):
+ self.x = x
+ def __repr__(self):
+ return u'F(' + self.x + u')'
+
+ # class with __format__ that forwards to string, for some format_spec's
+ class G:
+ def __init__(self, x):
+ self.x = x
+ def __str__(self):
+ return u"string is " + self.x
+ def __format__(self, format_spec):
+ if format_spec == 'd':
+ return u'G(' + self.x + u')'
+ return tsf.object_format(self, format_spec)
+
+ # class that returns a bad type from __format__
+ class H:
+ def __format__(self, format_spec):
+ return 1.0
+
+ class I(datetime.date):
+ def __format__(self, format_spec):
+ return self.strftime(format_spec)
+
+ class J(int):
+ def __format__(self, format_spec):
+ return tsf.int_format(self * 2, format_spec)
+
+
+ self.assertEqual(u''.format(), u'')
+ self.assertEqual(u'abc'.format(), u'abc')
+ self.assertEqual(u'{0}'.format(u'abc'), u'abc')
+ self.assertEqual(u'{0:}'.format(u'abc'), u'abc')
+ self.assertEqual(u'X{0}'.format(u'abc'), u'Xabc')
+ self.assertEqual(u'{0}X'.format(u'abc'), u'abcX')
+ self.assertEqual(u'X{0}Y'.format(u'abc'), u'XabcY')
+ self.assertEqual(u'{1}'.format(1, u'abc'), u'abc')
+ self.assertEqual(u'X{1}'.format(1, u'abc'), u'Xabc')
+ self.assertEqual(u'{1}X'.format(1, u'abc'), u'abcX')
+ self.assertEqual(u'X{1}Y'.format(1, u'abc'), u'XabcY')
+ self.assertEqual(u'{0}'.format(-15), u'-15')
+ self.assertEqual(u'{0}{1}'.format(-15, u'abc'), u'-15abc')
+ self.assertEqual(u'{0}X{1}'.format(-15, u'abc'), u'-15Xabc')
+ self.assertEqual(u'{{'.format(), u'{')
+ self.assertEqual(u'}}'.format(), u'}')
+ self.assertEqual(u'{{}}'.format(), u'{}')
+ self.assertEqual(u'{{x}}'.format(), u'{x}')
+ self.assertEqual(u'{{{0}}}'.format(123), u'{123}')
+ self.assertEqual(u'{{{{0}}}}'.format(), u'{{0}}')
+ self.assertEqual(u'}}{{'.format(), u'}{')
+ self.assertEqual(u'}}x{{'.format(), u'}x{')
+
+ # weird field names
+ self.assertEqual(u"{0[foo-bar]}".format({u'foo-bar':u'baz'}), u'baz')
+ self.assertEqual(u"{0[foo bar]}".format({u'foo bar':u'baz'}), u'baz')
+ self.assertEqual(u"{0[ ]}".format({u' ':3}), u'3')
+
+ self.assertEqual(u'{foo._x}'.format(foo=C(20)), u'20')
+ self.assertEqual(u'{1}{0}'.format(D(10), D(20)), u'2010')
+ self.assertEqual(u'{0._x.x}'.format(C(D(u'abc'))), u'abc')
+ self.assertEqual(u'{0[0]}'.format([u'abc', u'def']), u'abc')
+ self.assertEqual(u'{0[1]}'.format([u'abc', u'def']), u'def')
+ self.assertEqual(u'{0[1][0]}'.format([u'abc', [u'def']]), u'def')
+ self.assertEqual(u'{0[1][0].x}'.format(['abc', [D(u'def')]]), u'def')
+
+ # strings
+ self.assertEqual(u'{0:.3s}'.format(u'abc'), u'abc')
+ self.assertEqual(u'{0:.3s}'.format(u'ab'), u'ab')
+ self.assertEqual(u'{0:.3s}'.format(u'abcdef'), u'abc')
+ self.assertEqual(u'{0:.0s}'.format(u'abcdef'), u'')
+ self.assertEqual(u'{0:3.3s}'.format(u'abc'), u'abc')
+ self.assertEqual(u'{0:2.3s}'.format(u'abc'), u'abc')
+ self.assertEqual(u'{0:2.2s}'.format(u'abc'), u'ab')
+ self.assertEqual(u'{0:3.2s}'.format(u'abc'), u'ab ')
+ self.assertEqual(u'{0:x<0s}'.format(u'result'), u'result')
+ self.assertEqual(u'{0:x<5s}'.format(u'result'), u'result')
+ self.assertEqual(u'{0:x<6s}'.format(u'result'), u'result')
+ self.assertEqual(u'{0:x<7s}'.format(u'result'), u'resultx')
+ self.assertEqual(u'{0:x<8s}'.format(u'result'), u'resultxx')
+ self.assertEqual(u'{0: <7s}'.format(u'result'), u'result ')
+ self.assertEqual(u'{0:<7s}'.format(u'result'), u'result ')
+ self.assertEqual(u'{0:>7s}'.format(u'result'), u' result')
+ self.assertEqual(u'{0:>8s}'.format(u'result'), u' result')
+ self.assertEqual(u'{0:^8s}'.format(u'result'), u' result ')
+ self.assertEqual(u'{0:^9s}'.format(u'result'), u' result ')
+ self.assertEqual(u'{0:^10s}'.format(u'result'), u' result ')
+ self.assertEqual(u'{0:10000}'.format(u'a'), u'a' + u' ' * 9999)
+ self.assertEqual(u'{0:10000}'.format(u''), u' ' * 10000)
+ self.assertEqual(u'{0:10000000}'.format(u''), u' ' * 10000000)
+
+ # format specifiers for user defined type
+ self.assertEqual(u'{0:abc}'.format(C()), u'abc')
+
+        # !r and !s coercions
+ self.assertEqual(u'{0!s}'.format(u'Hello'), u'Hello')
+ self.assertEqual(u'{0!s:}'.format(u'Hello'), u'Hello')
+ self.assertEqual(u'{0!s:15}'.format(u'Hello'), u'Hello ')
+ self.assertEqual(u'{0!s:15s}'.format(u'Hello'), u'Hello ')
+ self.assertEqual(u'{0!r}'.format(u'Hello'), u"u'Hello'")
+ self.assertEqual(u'{0!r:}'.format(u'Hello'), u"u'Hello'")
+ self.assertEqual(u'{0!r}'.format(F(u'Hello')), u'F(Hello)')
+
+ # test fallback to object.__format__
+ self.assertEqual(u'{0}'.format({}), u'{}')
+ self.assertEqual(u'{0}'.format([]), u'[]')
+ self.assertEqual(u'{0}'.format([1]), u'[1]')
+ self.assertEqual(u'{0}'.format(E(u'data')), u'E(data)')
+ self.assertEqual(u'{0:^10}'.format(E(u'data')), u' E(data) ')
+ self.assertEqual(u'{0:^10s}'.format(E(u'data')), u' E(data) ')
+ self.assertEqual(u'{0:d}'.format(G(u'data')), u'G(data)')
+ self.assertEqual(u'{0:>15s}'.format(G(u'data')), u' string is data')
+ self.assertEqual(u'{0!s}'.format(G(u'data')), u'string is data')
+
+ self.assertEqual("{0:date: %Y-%m-%d}".format(I(year=2007,
+ month=8,
+ day=27)),
+ "date: 2007-08-27")
+
+ # test deriving from a builtin type and overriding __format__
+ self.assertEqual("{0}".format(J(10)), "20")
+
+
+ # string format specifiers
+ self.assertEqual('{0:}'.format('a'), 'a')
+
+ # computed format specifiers
+ self.assertEqual("{0:.{1}}".format('hello world', 5), 'hello')
+ self.assertEqual("{0:.{1}s}".format('hello world', 5), 'hello')
+ self.assertEqual("{0:.{precision}s}".format('hello world', precision=5), 'hello')
+ self.assertEqual("{0:{width}.{precision}s}".format('hello world', width=10, precision=5), 'hello ')
+ self.assertEqual("{0:{width}.{precision}s}".format('hello world', width='10', precision='5'), 'hello ')
+
+ # test various errors
+ self.assertRaises(ValueError, '{'.format)
+ self.assertRaises(ValueError, '}'.format)
+ self.assertRaises(ValueError, 'a{'.format)
+ self.assertRaises(ValueError, 'a}'.format)
+ self.assertRaises(ValueError, '{a'.format)
+ self.assertRaises(ValueError, '}a'.format)
+ self.assertRaises(IndexError, '{0}'.format)
+ self.assertRaises(IndexError, '{1}'.format, 'abc')
+ self.assertRaises(KeyError, '{x}'.format)
+ self.assertRaises(ValueError, "}{".format)
+ self.assertRaises(ValueError, "{".format)
+ self.assertRaises(ValueError, "}".format)
+ self.assertRaises(ValueError, "abc{0:{}".format)
+ self.assertRaises(ValueError, "{0".format)
+ self.assertRaises(IndexError, "{0.}".format)
+ self.assertRaises(ValueError, "{0.}".format, 0)
+ self.assertRaises(IndexError, "{0[}".format)
+ self.assertRaises(ValueError, "{0[}".format, [])
+ self.assertRaises(KeyError, "{0]}".format)
+ self.assertRaises(ValueError, "{0.[]}".format, 0)
+ self.assertRaises(ValueError, "{0..foo}".format, 0)
+ self.assertRaises(ValueError, "{0[0}".format, 0)
+ self.assertRaises(ValueError, "{0[0:foo}".format, 0)
+ self.assertRaises(KeyError, "{c]}".format)
+ self.assertRaises(ValueError, "{{ {{{0}}".format, 0)
+ self.assertRaises(ValueError, "{0}}".format, 0)
+ self.assertRaises(KeyError, "{foo}".format, bar=3)
+ self.assertRaises(ValueError, "{0!x}".format, 3)
+ self.assertRaises(ValueError, "{0!}".format, 0)
+ self.assertRaises(ValueError, "{0!rs}".format, 0)
+ self.assertRaises(ValueError, "{!}".format)
+ self.assertRaises(ValueError, "{:}".format)
+ self.assertRaises(ValueError, "{:s}".format)
+ self.assertRaises(ValueError, "{}".format)
+
+ # can't have a replacement on the field name portion
+ self.assertRaises(TypeError, '{0[{1}]}'.format, 'abcdefg', 4)
+
+ # exceed maximum recursion depth
+ self.assertRaises(ValueError, "{0:{1:{2}}}".format, 'abc', 's', '')
+ self.assertRaises(ValueError, "{0:{1:{2:{3:{4:{5:{6}}}}}}}".format,
+ 0, 1, 2, 3, 4, 5, 6, 7)
+
+ # string format spec errors
+ self.assertRaises(ValueError, "{0:-s}".format, '')
+ self.assertRaises(ValueError, format, "", "-")
+ self.assertRaises(ValueError, "{0:=s}".format, '')
+
+ # test combining string and unicode
+ self.assertEqual(u"foo{0}".format('bar'), u'foobar')
+ # This will try to convert the argument from unicode to str, which
+ # will succeed
+ self.assertEqual("foo{0}".format(u'bar'), 'foobar')
+ # This will try to convert the argument from unicode to str, which
+ # will fail
+ self.assertRaises(UnicodeEncodeError, "foo{0}".format, u'\u1000bar')
+
+
+#=========================================================
+# Lib/test/test_format.py, in its entirety
+#=========================================================
+
+import sys
+verbose = False
+have_unicode = True
+TestFailed = AssertionError
+##from test.test_support import verbose, have_unicode, TestFailed
+##import test.test_support as test_support
+import unittest
+
+##maxsize = test_support.MAX_Py_ssize_t
+maxsize = 1<<32
+
+# test string formatting operator (I am not sure if this is being tested
+# elsewhere but, surely, some of the given cases are *not* tested because
+# they crash python)
+# test on unicode strings as well
+
+overflowok = 1
+overflowrequired = 0
+
+def checkformat(formatstr, args, output=None, limit=None):
+ if verbose:
+ if output:
+ print "%s %% %s =? %s ..." %\
+ (repr(formatstr), repr(args), repr(output)),
+ else:
+ print "%s %% %s works? ..." % (repr(formatstr), repr(args)),
+ try:
+ result = formatstr % args
+ except OverflowError:
+ if not overflowok:
+ raise
+ if verbose:
+ print 'overflow (this is fine)'
+ else:
+ if overflowrequired:
+ if verbose:
+ print 'no'
+ print "overflow expected on %s %% %s" % \
+ (repr(formatstr), repr(args))
+ elif output and limit is None and result != output:
+ if verbose:
+ print 'no'
+ print "%s %% %s == %s != %s" % \
+ (repr(formatstr), repr(args), repr(result), repr(output))
+ # when 'limit' is specified, it determines how many characters
+ # must match exactly; lengths must always match.
+ # ex: limit=5, '12345678' matches '12345___'
+ # (mainly for floating point format tests for which an exact match
+ # can't be guaranteed due to rounding and representation errors)
+ elif output and limit is not None and (
+ len(result)!=len(output) or result[:limit]!=output[:limit]):
+ if verbose:
+ print 'no'
+ print "%s %% %s == %s != %s" % \
+ (repr(formatstr), repr(args), repr(result), repr(output))
+ else:
+ if verbose:
+ print 'yes'
+
+def checkboth(formatstr, *args):
+ checkformat(formatstr, *args)
+ if have_unicode:
+ checkformat(unicode(formatstr), *args)
+
+class FormatTest(unittest.TestCase):
+ def test_format(self):
+ checkboth("%.1d", (1,), "1")
+ checkboth("%.*d", (sys.maxint,1)) # expect overflow
+ checkboth("%.100d", (1,), '0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001')
+ checkboth("%#.117x", (1,), '0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001')
+ checkboth("%#.118x", (1,), '0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001')
+
+ checkboth("%f", (1.0,), "1.000000")
+ # these are trying to test the limits of the internal magic-number-length
+ # formatting buffer, if that number changes then these tests are less
+ # effective
+ checkboth("%#.*g", (109, -1.e+49/3.))
+ checkboth("%#.*g", (110, -1.e+49/3.))
+ checkboth("%#.*g", (110, -1.e+100/3.))
+
+ # test some ridiculously large precision, expect overflow
+ checkboth('%12.*f', (123456, 1.0))
+
+ # check for internal overflow validation on length of precision
+ overflowrequired = 1
+ checkboth("%#.*g", (110, -1.e+100/3.))
+ checkboth("%#.*G", (110, -1.e+100/3.))
+ checkboth("%#.*f", (110, -1.e+100/3.))
+ checkboth("%#.*F", (110, -1.e+100/3.))
+ overflowrequired = 0
+
+ # Formatting of long integers. Overflow is not ok
+ overflowok = 0
+ checkboth("%x", 10L, "a")
+ checkboth("%x", 100000000000L, "174876e800")
+ checkboth("%o", 10L, "12")
+ checkboth("%o", 100000000000L, "1351035564000")
+ checkboth("%d", 10L, "10")
+ checkboth("%d", 100000000000L, "100000000000")
+
+ big = 123456789012345678901234567890L
+ checkboth("%d", big, "123456789012345678901234567890")
+ checkboth("%d", -big, "-123456789012345678901234567890")
+ checkboth("%5d", -big, "-123456789012345678901234567890")
+ checkboth("%31d", -big, "-123456789012345678901234567890")
+ checkboth("%32d", -big, " -123456789012345678901234567890")
+ checkboth("%-32d", -big, "-123456789012345678901234567890 ")
+ checkboth("%032d", -big, "-0123456789012345678901234567890")
+ checkboth("%-032d", -big, "-123456789012345678901234567890 ")
+ checkboth("%034d", -big, "-000123456789012345678901234567890")
+ checkboth("%034d", big, "0000123456789012345678901234567890")
+ checkboth("%0+34d", big, "+000123456789012345678901234567890")
+ checkboth("%+34d", big, " +123456789012345678901234567890")
+ checkboth("%34d", big, " 123456789012345678901234567890")
+ checkboth("%.2d", big, "123456789012345678901234567890")
+ checkboth("%.30d", big, "123456789012345678901234567890")
+ checkboth("%.31d", big, "0123456789012345678901234567890")
+ checkboth("%32.31d", big, " 0123456789012345678901234567890")
+ #python 2.5 chokes on this...
+## checkboth("%d", float(big), "123456________________________", 6)
+
+ big = 0x1234567890abcdef12345L # 21 hex digits
+ checkboth("%x", big, "1234567890abcdef12345")
+ checkboth("%x", -big, "-1234567890abcdef12345")
+ checkboth("%5x", -big, "-1234567890abcdef12345")
+ checkboth("%22x", -big, "-1234567890abcdef12345")
+ checkboth("%23x", -big, " -1234567890abcdef12345")
+ checkboth("%-23x", -big, "-1234567890abcdef12345 ")
+ checkboth("%023x", -big, "-01234567890abcdef12345")
+ checkboth("%-023x", -big, "-1234567890abcdef12345 ")
+ checkboth("%025x", -big, "-0001234567890abcdef12345")
+ checkboth("%025x", big, "00001234567890abcdef12345")
+ checkboth("%0+25x", big, "+0001234567890abcdef12345")
+ checkboth("%+25x", big, " +1234567890abcdef12345")
+ checkboth("%25x", big, " 1234567890abcdef12345")
+ checkboth("%.2x", big, "1234567890abcdef12345")
+ checkboth("%.21x", big, "1234567890abcdef12345")
+ checkboth("%.22x", big, "01234567890abcdef12345")
+ checkboth("%23.22x", big, " 01234567890abcdef12345")
+ checkboth("%-23.22x", big, "01234567890abcdef12345 ")
+ checkboth("%X", big, "1234567890ABCDEF12345")
+ checkboth("%#X", big, "0X1234567890ABCDEF12345")
+ checkboth("%#x", big, "0x1234567890abcdef12345")
+ checkboth("%#x", -big, "-0x1234567890abcdef12345")
+ checkboth("%#.23x", -big, "-0x001234567890abcdef12345")
+ checkboth("%#+.23x", big, "+0x001234567890abcdef12345")
+ checkboth("%# .23x", big, " 0x001234567890abcdef12345")
+ checkboth("%#+.23X", big, "+0X001234567890ABCDEF12345")
+ checkboth("%#-+.23X", big, "+0X001234567890ABCDEF12345")
+ checkboth("%#-+26.23X", big, "+0X001234567890ABCDEF12345")
+ checkboth("%#-+27.23X", big, "+0X001234567890ABCDEF12345 ")
+ checkboth("%#+27.23X", big, " +0X001234567890ABCDEF12345")
+ # next one gets two leading zeroes from precision, and another from the
+ # 0 flag and the width
+ checkboth("%#+027.23X", big, "+0X0001234567890ABCDEF12345")
+ # same, except no 0 flag
+ checkboth("%#+27.23X", big, " +0X001234567890ABCDEF12345")
+ #python 2.5 chokes on this...
+## checkboth("%x", float(big), "123456_______________", 6)
+
+ big = 012345670123456701234567012345670L # 32 octal digits
+ checkboth("%o", big, "12345670123456701234567012345670")
+ checkboth("%o", -big, "-12345670123456701234567012345670")
+ checkboth("%5o", -big, "-12345670123456701234567012345670")
+ checkboth("%33o", -big, "-12345670123456701234567012345670")
+ checkboth("%34o", -big, " -12345670123456701234567012345670")
+ checkboth("%-34o", -big, "-12345670123456701234567012345670 ")
+ checkboth("%034o", -big, "-012345670123456701234567012345670")
+ checkboth("%-034o", -big, "-12345670123456701234567012345670 ")
+ checkboth("%036o", -big, "-00012345670123456701234567012345670")
+ checkboth("%036o", big, "000012345670123456701234567012345670")
+ checkboth("%0+36o", big, "+00012345670123456701234567012345670")
+ checkboth("%+36o", big, " +12345670123456701234567012345670")
+ checkboth("%36o", big, " 12345670123456701234567012345670")
+ checkboth("%.2o", big, "12345670123456701234567012345670")
+ checkboth("%.32o", big, "12345670123456701234567012345670")
+ checkboth("%.33o", big, "012345670123456701234567012345670")
+ checkboth("%34.33o", big, " 012345670123456701234567012345670")
+ checkboth("%-34.33o", big, "012345670123456701234567012345670 ")
+ checkboth("%o", big, "12345670123456701234567012345670")
+ checkboth("%#o", big, "012345670123456701234567012345670")
+ checkboth("%#o", -big, "-012345670123456701234567012345670")
+ checkboth("%#.34o", -big, "-0012345670123456701234567012345670")
+ checkboth("%#+.34o", big, "+0012345670123456701234567012345670")
+ checkboth("%# .34o", big, " 0012345670123456701234567012345670")
+ checkboth("%#+.34o", big, "+0012345670123456701234567012345670")
+ checkboth("%#-+.34o", big, "+0012345670123456701234567012345670")
+ checkboth("%#-+37.34o", big, "+0012345670123456701234567012345670 ")
+ checkboth("%#+37.34o", big, " +0012345670123456701234567012345670")
+ # next one gets one leading zero from precision
+ checkboth("%.33o", big, "012345670123456701234567012345670")
+ # base marker shouldn't change that, since "0" is redundant
+ checkboth("%#.33o", big, "012345670123456701234567012345670")
+ # but reduce precision, and base marker should add a zero
+ checkboth("%#.32o", big, "012345670123456701234567012345670")
+ # one leading zero from precision, and another from "0" flag & width
+ checkboth("%034.33o", big, "0012345670123456701234567012345670")
+ # base marker shouldn't change that
+ checkboth("%0#34.33o", big, "0012345670123456701234567012345670")
+ #python 2.5 chokes on this...
+## checkboth("%o", float(big), "123456__________________________", 6)
+
+ # Some small ints, in both Python int and long flavors).
+ checkboth("%d", 42, "42")
+ checkboth("%d", -42, "-42")
+ checkboth("%d", 42L, "42")
+ checkboth("%d", -42L, "-42")
+ checkboth("%d", 42.0, "42")
+ checkboth("%#x", 1, "0x1")
+ checkboth("%#x", 1L, "0x1")
+ checkboth("%#X", 1, "0X1")
+ checkboth("%#X", 1L, "0X1")
+ checkboth("%#x", 1.0, "0x1")
+ checkboth("%#o", 1, "01")
+ checkboth("%#o", 1L, "01")
+ checkboth("%#o", 0, "0")
+ checkboth("%#o", 0L, "0")
+ checkboth("%o", 0, "0")
+ checkboth("%o", 0L, "0")
+ checkboth("%d", 0, "0")
+ checkboth("%d", 0L, "0")
+ checkboth("%#x", 0, "0x0")
+ checkboth("%#x", 0L, "0x0")
+ checkboth("%#X", 0, "0X0")
+ checkboth("%#X", 0L, "0X0")
+
+ checkboth("%x", 0x42, "42")
+ checkboth("%x", -0x42, "-42")
+ checkboth("%x", 0x42L, "42")
+ checkboth("%x", -0x42L, "-42")
+ checkboth("%x", float(0x42), "42")
+
+ checkboth("%o", 042, "42")
+ checkboth("%o", -042, "-42")
+ checkboth("%o", 042L, "42")
+ checkboth("%o", -042L, "-42")
+ checkboth("%o", float(042), "42")
+
+ # Test exception for unknown format characters
+ if verbose:
+ print 'Testing exceptions'
+
+ def test_exc(formatstr, args, exception, excmsg):
+ try:
+ checkformat(formatstr, args)
+ except exception, exc:
+ if str(exc) == excmsg:
+ if verbose:
+ print "yes"
+ else:
+ if verbose: print 'no'
+ print 'Unexpected ', exception, ':', repr(str(exc))
+ except:
+ if verbose: print 'no'
+ print 'Unexpected exception'
+ raise
+ else:
+ raise TestFailed, 'did not get expected exception: %s' % excmsg
+
+ test_exc('abc %a', 1, ValueError,
+ "unsupported format character 'a' (0x61) at index 5")
+ if have_unicode:
+ test_exc(unicode('abc %\u3000','raw-unicode-escape'), 1, ValueError,
+ "unsupported format character '?' (0x3000) at index 5")
+
+ test_exc('%d', '1', TypeError, "int argument required") ##"%d format: a number is required, not str")
+ test_exc('%g', '1', TypeError, "float argument required") ##", not str")
+ test_exc('no format', '1', TypeError,
+ "not all arguments converted during string formatting")
+ test_exc('no format', u'1', TypeError,
+ "not all arguments converted during string formatting")
+ test_exc(u'no format', '1', TypeError,
+ "not all arguments converted during string formatting")
+ test_exc(u'no format', u'1', TypeError,
+ "not all arguments converted during string formatting")
+
+ class Foobar(long):
+ def __oct__(self):
+ # Returning a non-string should not blow up.
+ return self + 1
+
+ test_exc('%o', Foobar(), TypeError,
+ "expected string or Unicode object, long found")
+
+ if maxsize == 2**31-1:
+ # crashes 2.2.1 and earlier:
+ try:
+ "%*d"%(maxsize, -127)
+ except MemoryError:
+ pass
+ else:
+ raise TestFailed, '"%*d"%(maxsize, -127) should fail'
+
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/tests/test_types.py b/bps/tests/test_types.py
new file mode 100644
index 0000000..8b6327b
--- /dev/null
+++ b/bps/tests/test_types.py
@@ -0,0 +1,278 @@
+"""bps.types unittest script -- (c) 2004-2009 Assurance Technologies 2003-2006"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+import os
+import warnings
+import gc
+#site
+#pkg
+from bps.tests.utils import TestCase
+#module
+#=========================================================
+#baseclass
+#=========================================================
+from bps.types import BaseClass
+class BaseClassTest(TestCase):
+
+ def test_multisuper(self):
+ "test MultipleSuperProperty handler for BaseClass"
+ buffer = []
+ class Core(BaseClass):
+ def bob(self):
+ buffer.append("core")
+
+ class Test(Core):
+ "test 1"
+ def bob(self):
+ buffer.append("test1")
+ self.__super.bob()
+ Test1 = Test
+
+ class Test(Test):
+ "test 2"
+ def bob(self):
+ buffer.append("test2")
+ self.__super.bob()
+ Test2 = Test
+
+ #check core
+ del buffer[:]
+ test0 = Core()
+ test0.bob()
+ self.assertEqual(buffer, ['core'])
+
+ #check single-class call
+ del buffer[:]
+ test1 = Test1()
+ test1.bob()
+ self.assertEqual(buffer, ['test1', 'core'])
+
+ #check multi-class call
+ #if MultipleSuperProperty fails, we'll have dups of test1 or test2 in buffer.
+ del buffer[:]
+ test2 = Test2()
+ test2.bob()
+ self.assertEqual(buffer, ['test2', 'test1', 'core'])
+
+#=========================================================
+#closeableclass
+#=========================================================
+from bps.types import CloseableClass
+class CloseableClassTest(TestCase):
+
+ def test_closed_property(self):
+ #check init state
+ c = CloseableClass()
+ self.assert_(not c.closed)
+
+ #make sure it changes
+ c.close()
+ self.assert_(c.closed)
+
+ #make sure double call is NOOP
+ c.close()
+ self.assert_(c.closed)
+
+ def test_close_on_del(self):
+ #NOTE: since this relies on GC flushing, and implicitly calling del
+ #(which CPython doesn't guarantee will happen), this test may fail.
+ #should write better way to test things.
+ d = [False]
+ def setter():
+ d[0] = True
+ c = CloseableClass()
+ c.on_close(setter)
+ del c
+ gc.collect()
+ self.assert_(d[0])
+
+ def test_cleanup(self):
+ class Test(CloseableClass):
+ x = 1
+
+ def _cleanup(self):
+ self.x += 1
+
+ #check init state
+ c = Test()
+ self.assertEquals(c.x, 1)
+
+ #make sure it's called right
+ c.close()
+ self.assertEquals(c.x, 2)
+
+ #make sure it's not double-called
+ c.close()
+ self.assertEquals(c.x, 2)
+
+ #NOTE: 'close' method not explicitly tested,
+ #as at least one of the other tests should fail
+ #if something was wrong with it
+
+ def test_close(self):
+
+ #test init state
+ c = CloseableClass()
+ self.assert_(not c.closed)
+
+ #test 'close' returns true on success
+ self.assert_(c.close())
+ self.assert_(c.closed)
+
+ #test 'close' returns false if already closed
+ self.assert_(not c.close())
+ self.assert_(c.closed)
+
+ def test_recursive_close(self):
+ c = CloseableClass()
+ c._in_closer = False
+
+ def closer():
+ self.assert_(not c._in_closer)
+ c._in_closer = True
+ try:
+ #NOTE: policy is to simply ignore recursive calls, might revise in future
+ self.assert_(not c.closed)
+ r = c.close()
+ self.assert_(not c.closed)
+ self.assertIs(r, None)
+ finally:
+ c._in_closer = False
+ c.on_close(closer)
+
+ r = c.close()
+ self.assert_(r)
+ self.assert_(c.closed)
+
+ #TODO: should decide on / test policy for what happens if on_close() / delete_on_close()
+ # is called AFTER class has been closed!
+
+ def test_on_close_func(self):
+ c = CloseableClass()
+
+ #prepare & register callback
+ d = []
+ def func():
+ d.append(1)
+ c.on_close(func)
+ self.assertEquals(d,[])
+
+ #make sure it's called right
+ c.close()
+ self.assertEquals(d,[1])
+
+ #make sure it's not double-called
+ c.close()
+ self.assertEquals(d,[1])
+
+ def test_on_close_multi_func(self):
+ c = CloseableClass()
+
+ #prepare & register two callbacks
+ d = []
+ c.on_close(d.append, 1)
+ c.on_close(d.append, 2)
+ self.assertEquals(d,[])
+
+ #make sure they're calling in LIFO order
+ c.close()
+ self.assertEquals(d,[2,1])
+
+ #make sure they're not double-called
+ c.close()
+ self.assertEquals(d,[2,1])
+
+ #XXX: if there's cyclic ref in kwds, GC will have loop
+
+ def test_on_close_func_kwds(self):
+ c = CloseableClass()
+
+ #prepare & register callback
+ d = []
+ def func(value=1):
+ d.append(value)
+ c.on_close(func, value=5)
+ self.assertEquals(d,[])
+
+ #make sure it's called right
+ c.close()
+ self.assertEquals(d,[5])
+
+ #make sure it's not double-called
+ c.close()
+ self.assertEquals(d,[5])
+
+ def test_delete_on_close(self):
+ c = CloseableClass()
+
+ #prepare & register attrs to purge
+ c.x = 1
+ c.y = 2
+ c.z = 3
+ c.delete_on_close('x','y')
+
+ #check attrs kept
+ self.assertEquals(c.x, 1)
+ self.assertEquals(c.y, 2)
+ self.assertEquals(c.z, 3)
+
+ #check purge works
+ c.close()
+ self.assertIs(c.x, None)
+ self.assertIs(c.y, None)
+ self.assertEquals(c.z, 3)
+
+ #check purge doesn't get called again (though I guess it could)
+ c.x = 5
+ c.y = 10
+ c.close()
+ self.assertEquals(c.x, 5)
+ self.assertEquals(c.y, 10)
+ self.assertEquals(c.z, 3)
+
+ def test_callback_order(self):
+ #delete_on_close & on_close calls should share same LIFO stack
+ #_cleanup() should be called last
+
+ class Test(CloseableClass):
+ x = 1
+ y = 2
+
+ def __init__(self):
+ self.buf = []
+
+ def _cleanup(self):
+ self.buf.append("cleanup")
+
+ #prepare test to check ordering of events
+ c = Test()
+ def read_x():
+ c.buf.append(c.x)
+ def read_y():
+ c.buf.append(c.y)
+
+ #_cleanup - fourth
+ c.on_close(read_x) #third
+ c.delete_on_close('x','y') #second
+ c.on_close(read_y) #first
+
+ #check init state
+ self.assertEquals(c.buf, [])
+ self.assertEquals(c.x, 1)
+ self.assertEquals(c.y, 2)
+
+ #call
+ c.close()
+ self.assertEquals(c.buf, [2,None,"cleanup"])
+ self.assertIs(c.x, None)
+ self.assertIs(c.y, None)
+
+ #=========================================================
+ #EOC
+ #=========================================================
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/tests/test_unstable_ansi.py b/bps/tests/test_unstable_ansi.py
new file mode 100644
index 0000000..80ead52
--- /dev/null
+++ b/bps/tests/test_unstable_ansi.py
@@ -0,0 +1,344 @@
+"""bps.unstable.ansi tests"""
+#=========================================================
+#
+#=========================================================
+from bps.tests.utils import TestCase
+from bps.meta import Params as ak
+from bps.unstable.ansi import CODESET, AnsiCode, \
+ AnsiError, AnsiCommandError, AnsiParseError, \
+ parse_ansi_string
+
+#=========================================================
+#
+#=========================================================
+class AnsiCodeTest(TestCase):
+ "test AnsiCode class"
+
+ #=========================================================
+ #constants
+ #=========================================================
+ c0_good = [
+ #source=code
+ '\x00',
+ '\x01',
+ '\x02',
+ '\x03',
+ '\x04',
+ '\x05',
+ '\x06',
+ '\x07',
+ '\n',
+ ]
+
+ c1_good = [
+ #source, code
+ ('\x1bA','A'),
+ ('\x1bZ','Z'),
+ ]
+
+ icf_good = [
+ #source, code
+ ('\x1b~','~'),
+ ]
+
+ cseq_good = [
+ #source, code, argstr, **other-attrs-to-test
+
+ ak("\x1b[31;33x","x","31;33", args=(31,33)),
+
+ ak("\x1b[A","A","", args=(), offset=1),
+ ak("\x1b[1A","A","1", args=(1,), offset=1),
+ ak("\x1b[3A","A","3", args=(3,), offset=3),
+
+ ak("\x1b[m","m","", args=()),
+ ak("\x1b[31m","m","31", args=(31,)),
+ ak("\x1b[31;32;33m","m","31;32;33", args=(31,32,33)),
+
+ ak("\x1b[31!x", "!x","31"),
+ ]
+
+ bad_parse = [
+ "", #empty string not allowed
+ " ",
+ "a",
+ "abc",
+ "\x1b", #raw ESC isn't allowed
+ "\x1b\x7F", #code not in c1 or icf range
+ "\x1b[", #raw CSI isn't allowed
+ "\x1b[31~x", #'~' not a valid intermediate byte
+ "\x1b[31!31x", #'!' valid intermediate byte, but can't be interspersed w/ params
+ "\x1b[!", #not a valid code
+ ]
+
+ #source strings which should parse correctly, and are in normalized form
+ all_good = c0_good + \
+ [ x[0] for x in c1_good ] + \
+ [ x[0] for x in icf_good ] + \
+ [ x[0] for x in cseq_good ]
+
+ #source strings which should get normalized
+ all_unnormalized = [
+ ("\x1b[0;0f", "\x1b[0;0H"),
+ ]
+
+ #=========================================================
+ #parse tests
+ #=========================================================
+ def test_c0_good(self):
+ "test known good c0 codes parse"
+ codeset = CODESET.C0
+ for source in self.c0_good:
+ code = source
+ c = AnsiCode.parse(source)
+ self.assertEqual(c.codeset, codeset)
+ self.assertEqual(c.code, code)
+ self.assertEqual(repr(c), repr(AnsiCode(codeset, code)))
+
+ def test_c1_good(self):
+ "test known good c1 codes parse"
+ codeset = CODESET.C1
+ for source, code in self.c1_good:
+ c = AnsiCode.parse(source)
+ self.assertEqual(c.codeset, codeset)
+ self.assertEqual(c.code, code)
+ self.assertEqual(repr(c), repr(AnsiCode(codeset, code)))
+
+ def test_icf_good(self):
+ "test known good c1 codes parse"
+ codeset = CODESET.ICF
+ for source, code in self.icf_good:
+ c = AnsiCode.parse(source)
+ self.assertEqual(c.codeset, codeset)
+ self.assertEqual(c.code, code)
+ self.assertEqual(repr(c), repr(AnsiCode(codeset, code)))
+
+ def test_cseq_good(self):
+ "test known good cseq codes parse"
+ codeset = CODESET.CSEQ
+ for row in self.cseq_good:
+ source, code, argstr = row.args
+ c = AnsiCode.parse(source)
+ self.assertEqual(c.codeset, codeset)
+ self.assertEqual(c.code, code)
+ self.assertEqual(c.argstr, argstr)
+ for k,v in row.kwds.iteritems():
+ self.assertEqual(getattr(c,k),v)
+ self.assertEqual(repr(c), repr(AnsiCode(codeset, code, argstr=argstr)))
+
+ def test_bad(self):
+ "test known bad codes don't parse"
+ for source in self.bad_parse:
+ self.assertRaises(AnsiParseError, AnsiCode.parse, source)
+
+ #=========================================================
+ #test try-parse
+ #=========================================================
+ def test_try_parse_good(self):
+ for source in self.all_good:
+ ok, result = AnsiCode.try_parse(source)
+ self.assert_(ok)
+ self.assertIsInstance(result, AnsiCode)
+ self.assertEqual(result.render(), source)
+
+ def test_try_parse_bad(self):
+ for source in self.bad_parse:
+ ok, result = AnsiCode.try_parse(source)
+ self.assert_(not ok)
+ self.assertIsInstance(result, AnsiParseError)
+
+ #=========================================================
+ #render
+ #=========================================================
+ def test_render_good(self):
+ "test known good codes render/str like source"
+ for source in self.all_good:
+ c = AnsiCode.parse(source)
+ self.assertEquals(c.render(), source)
+ self.assertEquals(str(c), source)
+
+ def test_render_normalized(self):
+ "test known redundant codes render/str properly"
+ for source, result in self.all_unnormalized:
+ c = AnsiCode.parse(source)
+ norm = c.render()
+ self.assertEquals(norm, result)
+ self.assertEquals(str(c), source) #since this contains 'source', will render original
+ c2 = AnsiCode.parse(norm)
+ self.assertEquals(c2.render(), result)
+ self.assertEquals(str(c2), result)
+
+ #=========================================================
+ #repr
+ #=========================================================
+ def test_repr(self):
+ "test known good codes have working repr"
+ for source in self.all_good:
+ c = AnsiCode.parse(source)
+ c2 = eval(repr(c))
+ self.assertEqual(repr(c), repr(c2))
+ c2.source = source #so dicts match
+ self.assertEquals(c.__dict__, c2.__dict__)
+
+ def test_repr_normalized(self):
+ for source, result in self.all_unnormalized:
+ c = AnsiCode.parse(source)
+ c2 = eval(repr(c))
+ self.assertEqual(repr(c), repr(c2))
+ #NOTE: 'source' should have been preserved in repr
+ self.assertEquals(c2.source, source)
+ self.assertEquals(c.__dict__, c2.__dict__)
+
+ #TODO: test malformed
+
+ #=========================================================
+ #test eq
+ #=========================================================
+ def test_eq(self):
+ "test known good codes have working __eq__"
+ codeset = CODESET.C0
+ for source in self.all_good:
+ c1 = AnsiCode.parse(source)
+ for other in self.all_good:
+ c2 = AnsiCode.parse(other)
+ if other == source:
+ self.assert_(c1 == c2)
+ self.assert_(not c1 != c2)
+ else:
+ self.assert_(not c1 == c2)
+ self.assert_(c1 != c2)
+
+ #TODO: test malformed
+
+ #=========================================================
+ #xxx_code attrs
+ #=========================================================
+ def test_code_attrs(self):
+ for source in self.all_good:
+ c = AnsiCode.parse(source)
+ for cs in (CODESET.C0, CODESET.C1, CODESET.ICF, CODESET.CSEQ):
+ v = getattr(c, cs + "_code")
+ if cs == c.codeset:
+ self.assertEquals(v, c.code)
+ else:
+ self.assertIs(v,None)
+
+ #=========================================================
+ #test specific init funcs
+ #=========================================================
+
+ #=========================================================
+ #test malformed methods (kwd, is_malformed, get_malformed_reasons)
+ #=========================================================
+
+ #=========================================================
+ #eoc
+ #=========================================================
+
+#=========================================================
+#utils
+#=========================================================
+class UtilTest(TestCase):
+ "test util funcs"
+
+ # is_ansi_code
+ # is_malformed_ansi_code
+ # len_ansi_string
+ # strip_ansi_string
+
+class AnsiStripperTest(TestCase):
+ "test AnsiStripper class"
+
+#=========================================================
+#
+#=========================================================
+class ParseTest(TestCase):
+ 'test parse_ansi_string(source, rtype=list, malformed_codes="ignore")'
+
+ def test_sample_normal(self):
+ "test normal functioning"
+ text = "\x1b[34mXYZ\x1b[Dtest test\x1b(sdasda\x00asdasda\x1b~sdfsdf\n\nrwerwer"
+ result = parse_ansi_string(text)
+ self.assertEquals(result,[
+ AnsiCode('cseq','m','34'),
+ 'XYZ',
+ AnsiCode('cseq','D'),
+ 'test test',
+ AnsiCode('c1','('),
+ "sdasda",
+ AnsiCode('c0','\x00'),
+ 'asdasda',
+ AnsiCode('icf','~'),
+ 'sdfsdf\n\nrwerwer',
+ ])
+
+ def test_empty(self):
+ result = parse_ansi_string("")
+ self.assertEquals(result,[""])
+
+ def test_alpha(self):
+ result = parse_ansi_string("abc")
+ self.assertEquals(result,["abc"])
+
+ def test_1code(self):
+ result = parse_ansi_string("\x1b[X")
+ self.assertEquals(result,[AnsiCode("cseq",'X')])
+
+ def test_rtype(self):
+ "test rtype kwd"
+ text = "abc\ndef\x1b[34mghi\x1b~"
+ correct = [
+ 'abc\ndef',
+ AnsiCode('cseq','m','34'),
+ 'ghi',
+ AnsiCode('icf','~'),
+ ]
+
+ #test 'list'
+ result = parse_ansi_string(text)
+ self.assertIsInstance(result,list)
+ self.assertEquals(result, correct)
+
+ #test 'iter'
+ result = parse_ansi_string(text, rtype=iter)
+ self.assert_(hasattr(result,"next"))
+ self.assertIs(iter(result),result)
+ self.assertEquals(list(result), correct)
+
+ #test 'tuple'
+ result = parse_ansi_string(text, rtype=tuple)
+ self.assertIsInstance(result,tuple)
+ self.assertEquals(list(result), correct)
+
+ def test_sample_malformed_codes(self):
+ "test malformed_codes kwd"
+ text = "XYZ\x1b[4;Dtest test"
+
+ #try the default ("ignore")
+ result = parse_ansi_string(text)
+ self.assertEquals(result,[
+ 'XYZ',
+ '\x1b[4;D',
+ 'test test',
+ ])
+
+ #try explicit parse mode
+ result = parse_ansi_string(text, malformed_codes="parse")
+ self.assertEquals(result,[
+ 'XYZ',
+ AnsiCode(None, None, malformed="argstr contains non-integer: '4;'", source='\x1b[4;D'),
+ 'test test',
+ ])
+
+ #try strip mode
+ result = parse_ansi_string(text, malformed_codes="strip")
+ self.assertEquals(result,[
+ 'XYZ',
+ 'test test',
+ ])
+
+ #try raise-error mode
+ self.assertRaises(AnsiParseError, parse_ansi_string, text, malformed_codes="raise")
+
+#=========================================================
+#
+#=========================================================
diff --git a/bps/tests/test_warndep.py b/bps/tests/test_warndep.py
new file mode 100644
index 0000000..ba4b525
--- /dev/null
+++ b/bps/tests/test_warndep.py
@@ -0,0 +1,480 @@
+"""tests for bps.warnup -- (c) Assurance Technologies 2009"""
+#=========================================================
+#imports
+#=========================================================
+from __future__ import with_statement
+#core
+from unittest import __file__ as ut_file
+#site
+#pkg
+from bps import warndep, filepath
+from bps.error.types import ParamError
+#module
+from bps.tests.utils import TestCase, catch_warnings, __file__ as util_file
+
+def print_warnings(msgs):
+ print "warning list: %r warnings" % len(msgs)
+ for idx, msg in enumerate(msgs):
+ print "\t%d: %s" % (idx,msg)
+ print
+
+#=========================================================
+#dep func
+#=========================================================
class DepFuncTest(TestCase):
    "test deprecated_function decorator"
    #NOTE: every test drives a decorated function inside catch_warnings(record=True),
    #then checks each recorded warning in order via assert_warning().

    # use, name, msg, removal

    def test_plain(self):
        "test basic depfunc call"
        @warndep.deprecated_function()
        def myfunc(a,b=2):
            return a*b
        with catch_warnings(record=True) as msgs:
            #make sure func can be called in various ways
            self.assertEquals(myfunc(3), 6)
            self.assertEquals(myfunc(2.5,b=3), 7.5)
            #and fails if not called correctly
            self.assertRaises(TypeError, myfunc)
##        print_warnings(msgs)
        x = "bps.tests.test_warndep: function 'myfunc' is deprecated"
        #first two calls originate in this file; the third is made inside
        #TestCase.assertRaises, so its warning is attributed to utils.py
        self.assert_warning(msgs.pop(0), message=x, category=DeprecationWarning, filename=__file__)
        self.assert_warning(msgs.pop(0), message=x, category=DeprecationWarning, filename=__file__)
        self.assert_warning(msgs.pop(0), message=x, category=DeprecationWarning, filename=util_file)
        self.assert_(not msgs)

    def test_removal(self):
        "test depfunc removal kwd"
        @warndep.deprecated_function(removal=True)
        def myfunc1():
            return 1
        @warndep.deprecated_function(removal="2009-10-1")
        def myfunc2():
            return 2
        with catch_warnings(record=True) as msgs:
            myfunc1()
            myfunc2()
##        print_warnings(msgs)
        #removal=True yields a generic clause, a date string is echoed verbatim
        x1 = "bps.tests.test_warndep: function 'myfunc1' is deprecated; it will be removed in the future"
        x2 = "bps.tests.test_warndep: function 'myfunc2' is deprecated; it will be removed after 2009-10-1"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_warning(msgs.pop(0), message=x2, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_use(self):
        "test depfunc use kwd"
        @warndep.deprecated_function(use="otherfunc")
        def myfunc():
            return 1
        with catch_warnings(record=True) as msgs:
            myfunc()
##        print_warnings(msgs)
        x1 = "bps.tests.test_warndep: function 'myfunc' is deprecated, use 'otherfunc' instead"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_use_old(self):
        "test depfunc use kwd"
        #NOTE(review): identical to test_use -- presumably meant to exercise a
        #legacy spelling of the kwd; confirm intent or drop one of the pair.
        @warndep.deprecated_function(use="otherfunc")
        def myfunc():
            return 1
        with catch_warnings(record=True) as msgs:
            myfunc()
##        print_warnings(msgs)
        x1 = "bps.tests.test_warndep: function 'myfunc' is deprecated, use 'otherfunc' instead"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_name(self):
        "test depfunc name kwd"
        #name kwd overrides the reported function name in the warning
        @warndep.deprecated_function(name="otherfunc")
        def myfunc():
            return 1
        with catch_warnings(record=True) as msgs:
            myfunc()
        x1 = "bps.tests.test_warndep: function 'otherfunc' is deprecated"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_name_old(self):
        "test depfunc name kwd"
        #NOTE(review): identical to test_name -- see test_use_old.
        @warndep.deprecated_function(name="otherfunc")
        def myfunc():
            return 1
        with catch_warnings(record=True) as msgs:
            myfunc()
        x1 = "bps.tests.test_warndep: function 'otherfunc' is deprecated"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_msg(self):
        "test depfunc msg kwd"
        #custom template: %(name)s is interpolated, module prefix still added
        @warndep.deprecated_function(msg="help me, %(name)s")
        def myfunc():
            return 1
        with catch_warnings(record=True) as msgs:
            myfunc()
        x1 = "bps.tests.test_warndep: help me, myfunc"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_msg2(self):
        "test depfunc msg kwd 2"
        #per the expected string, a template referencing %(mod)s gets no
        #"module:" prefix prepended
        @warndep.deprecated_function(msg="help me, %(mod)s.%(name)s")
        def myfunc():
            return 1
        with catch_warnings(record=True) as msgs:
            myfunc()
        x1 = "help me, bps.tests.test_warndep.myfunc"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)
+
def rf_01(x=1, y=2):
    "module-level target function used by the relocated_function tests"
    return x, y
+
#NOTE: this is wrong, "name" and "use" are flipped,
# used for unittest
# (relocated_function's signature is (name, use); passing them reversed
# lets test_mistake_00_reversed check the ParamError raised at call time)
rf_02 = warndep.relocated_function("rf_01", "rf_02")
+
class RelFuncTest(TestCase):
    "test relocated function proxy-maker"
    #NOTE: each proxy should warn on *call*, naming both the alias and the
    #fully-qualified target it forwards to.

    def test_basic_00(self):
        "basic operation w/ explicit module path"
        b = warndep.relocated_function("b", __name__ + ".rf_01")
        self.assertEquals(b.__name__, "b")
        with catch_warnings(record=True) as msgs:
            self.assertEqual(b(5,y=3), (5,3))
##        print_warnings(msgs)
        x1 = "bps.tests.test_warndep: function 'b' is deprecated, use 'bps.tests.test_warndep.rf_01' instead"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_basic_01(self):
        "basic operation w/ explicit module path 2"
        #same as test_basic_00, using the "module:attr" spelling of the target
        b = warndep.relocated_function("b", __name__ + ":rf_01")
        self.assertEquals(b.__name__, "b")
        with catch_warnings(record=True) as msgs:
            self.assertEqual(b(5,y=3), (5,3))
##        print_warnings(msgs)
        x1 = "bps.tests.test_warndep: function 'b' is deprecated, use 'bps.tests.test_warndep.rf_01' instead"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_basic_02(self):
        "basic operation w/in module"
        #bare attribute name should resolve relative to the calling module
        b = warndep.relocated_function("b", "rf_01")
        self.assertEquals(b.__name__, "b")
        with catch_warnings(record=True) as msgs:
            self.assertEqual(b(5,y=3), (5,3))
##        print_warnings(msgs)
        x1 = "bps.tests.test_warndep: function 'b' is deprecated, use 'bps.tests.test_warndep.rf_01' instead"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_basic_03(self):
        "basic operation w/callable"
        #target may be passed as the function object itself
        b = warndep.relocated_function("b", rf_01)
        self.assertEquals(b.__name__, "b")
        with catch_warnings(record=True) as msgs:
            self.assertEqual(b(5,y=3), (5,3))
##        print_warnings(msgs)
        x1 = "bps.tests.test_warndep: function 'b' is deprecated, use 'bps.tests.test_warndep.rf_01' instead"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_nonlazy(self):
        "basic operation w/in module"
        #FIXME: do something harsher to test for sure, such as contained module.
        b = warndep.relocated_function("b", "rf_01", lazy=False)
        with catch_warnings(record=True) as msgs:
            self.assertEqual(b(5,y=3), (5,3))
##        print_warnings(msgs)
        x1 = "bps.tests.test_warndep: function 'b' is deprecated, use 'bps.tests.test_warndep.rf_01' instead"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_removal(self):
        "test depfunc removal kwd"
        f1 = warndep.relocated_function("b1", "rf_01", removal=True)
        f2 = warndep.relocated_function("b2", "rf_01", removal="2009-10-1")
        self.assertEquals(f1.__name__, "b1")
        self.assertEquals(f2.__name__, "b2")
        with catch_warnings(record=True) as msgs:
            f1()
            f2()
        x1 = "bps.tests.test_warndep: function 'b1' is deprecated, use 'bps.tests.test_warndep.rf_01' instead; it will be removed in the future"
        x2 = "bps.tests.test_warndep: function 'b2' is deprecated, use 'bps.tests.test_warndep.rf_01' instead; it will be removed after 2009-10-1"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_warning(msgs.pop(0), message=x2, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_mistake_00_reversed(self):
        #rf_02 (module level) was built with name/use deliberately swapped;
        #calling it should raise ParamError, after the usual warning
        with catch_warnings(record=True) as msgs:
            self.assertRaises(ParamError,rf_02)
            #^ "name and use parameters reversed"
##        print_warnings(msgs)
        x1 = "bps.tests.test_warndep: function 'rf_01' is deprecated, use 'bps.tests.test_warndep.rf_02' instead"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=util_file)
        self.assert_(not msgs)

    def test_mistake_01_missing_module(self):
        b = warndep.relocated_function("b", __name__ + "_xxx.rf_01")
        with catch_warnings(record=True) as msgs:
            self.assertRaises(ImportError,b)
            #^ no such module bps.tests.test_warndep_xxx
        x1 = "bps.tests.test_warndep: function 'b' is deprecated, use 'bps.tests.test_warndep_xxx.rf_01' instead"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=util_file)
        self.assert_(not msgs)

    def test_mistake_02_missing_func(self):
        b = warndep.relocated_function("b", __name__ + ".rf_xxx")
        with catch_warnings(record=True) as msgs:
            self.assertRaises(AttributeError,b)
            #^ module has no such attr rf_xxx
##        print_warnings(msgs)
        x1 = "bps.tests.test_warndep: function 'b' is deprecated, use 'bps.tests.test_warndep.rf_xxx' instead"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=util_file)
        self.assert_(not msgs)

    def test_mistake_03_missing_nonlazy(self):
        #with lazy=False the target is resolved at definition time,
        #so the errors surface from relocated_function itself
        self.assertRaises(AttributeError, warndep.relocated_function, "b", __name__ + ".rf_xxx", lazy=False)
        self.assertRaises(ImportError, warndep.relocated_function, "b", __name__ + "_xxx.rf_01", lazy=False)
+
+#=========================================================
+#dep meth
+#=========================================================
class DepMethTest(TestCase):
    "test deprecated_method decorator"
    #NOTE: mirrors DepFuncTest, but the warning prefix is "module.Class:"
    #instead of just the module name.

    def test_plain(self):
        "test basic depmeth call"
        class Test:
            @warndep.deprecated_method()
            def myfunc(self,a,b=2):
                return self,a,b
        with catch_warnings(record=True) as msgs:
            t = Test()
            #make sure func can be called in various ways
            self.assertEquals(t.myfunc(3), (t,3,2))
            self.assertEquals(t.myfunc(2.5,b=3), (t,2.5,3))
            #and fails if not called correctly
            self.assertRaises(TypeError, t.myfunc)
##        print_warnings(msgs)
        x = "bps.tests.test_warndep.Test: method 'myfunc' is deprecated"
        #third warning comes from the call made inside TestCase.assertRaises,
        #hence attributed to utils.py
        self.assert_warning(msgs.pop(0), message=x, category=DeprecationWarning, filename=__file__)
        self.assert_warning(msgs.pop(0), message=x, category=DeprecationWarning, filename=__file__)
        self.assert_warning(msgs.pop(0), message=x, category=DeprecationWarning, filename=util_file)
        self.assert_(not msgs)

    def test_removal(self):
        "test depmeth removal kwd"
        class Test:
            @warndep.deprecated_method(removal=True)
            def myfunc1(self):
                return 1
            @warndep.deprecated_method(removal="2009-10-1")
            def myfunc2(self):
                return 2
        with catch_warnings(record=True) as msgs:
            t = Test()
            t.myfunc1()
            t.myfunc2()
##        print_warnings(msgs)
        x1 = "bps.tests.test_warndep.Test: method 'myfunc1' is deprecated; it will be removed in the future"
        x2 = "bps.tests.test_warndep.Test: method 'myfunc2' is deprecated; it will be removed after 2009-10-1"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_warning(msgs.pop(0), message=x2, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_use(self):
        "test depmeth use kwd"
        class Test:
            @warndep.deprecated_method(use="otherfunc")
            def myfunc(self):
                return 1
        with catch_warnings(record=True) as msgs:
            t = Test()
            t.myfunc()
##        print_warnings(msgs)
        x1 = "bps.tests.test_warndep.Test: method 'myfunc' is deprecated, use 'otherfunc' instead"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_use_old(self):
        "test depmeth use kwd"
        #NOTE(review): identical to test_use -- presumably meant to exercise a
        #legacy spelling of the kwd; confirm intent or drop one of the pair.
        class Test:
            @warndep.deprecated_method(use="otherfunc")
            def myfunc(self):
                return 1
        with catch_warnings(record=True) as msgs:
            t = Test()
            t.myfunc()
##        print_warnings(msgs)
        x1 = "bps.tests.test_warndep.Test: method 'myfunc' is deprecated, use 'otherfunc' instead"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_name(self):
        "test depmeth name kwd"
        class Test:
            @warndep.deprecated_method(name="otherfunc")
            def myfunc(self):
                return 1
        with catch_warnings(record=True) as msgs:
            t = Test()
            t.myfunc()
        x1 = "bps.tests.test_warndep.Test: method 'otherfunc' is deprecated"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_name_old(self):
        "test depmeth name kwd"
        #NOTE(review): identical to test_name -- see test_use_old.
        class Test:
            @warndep.deprecated_method(name="otherfunc")
            def myfunc(self):
                return 1
        with catch_warnings(record=True) as msgs:
            t = Test()
            t.myfunc()
        x1 = "bps.tests.test_warndep.Test: method 'otherfunc' is deprecated"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_msg(self):
        "test depmeth msg kwd"
        class Test:
            @warndep.deprecated_method(msg="help me, %(name)s")
            def myfunc(self):
                return 1
        with catch_warnings(record=True) as msgs:
            t = Test()
            t.myfunc()
        x1 = "bps.tests.test_warndep.Test: help me, myfunc"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_msg2(self):
        "test depmeth msg kwd"
        #per the expected string, %(cls)s is available to method templates,
        #and referencing %(mod)s suppresses the default prefix
        class Test:
            @warndep.deprecated_method(msg="help me, %(mod)s.%(cls)s:%(name)s")
            def myfunc(self):
                return 1
        with catch_warnings(record=True) as msgs:
            t = Test()
            t.myfunc()
        x1 = "help me, bps.tests.test_warndep.Test:myfunc"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)
+
class RelMethTest(TestCase):
    "test relocated method proxy-maker"

    def test_basic_00(self):
        "basic operation w/ method name"
        class Test:
            def a(self,x=1,y=2):
                return (self,x,y)
            b = warndep.relocated_method("b","a")
        self.assertEquals(Test.b.__name__, "b")
        with catch_warnings(record=True) as msgs:
            t = Test()
            self.assertEqual(t.b(5,y=3), (t,5,3))
##        print_warnings(msgs)
        x1 = "bps.tests.test_warndep.Test: method 'b' is deprecated, use 'a' instead"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_basic_01(self):
        "basic operation w/ method callable"
        #target may be passed as the function object instead of its name
        class Test:
            def a(self,x=1,y=2):
                return (self,x,y)
            b = warndep.relocated_method("b", a)
        self.assertEquals(Test.b.__name__, "b")
        with catch_warnings(record=True) as msgs:
            t = Test()
            self.assertEqual(t.b(5,y=3), (t,5,3))
##        print_warnings(msgs)
        x1 = "bps.tests.test_warndep.Test: method 'b' is deprecated, use 'a' instead"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_name_detect(self):
        "basic operation w/ name not specified"
        #with name=None the proxy gets a placeholder __name__, but the warning
        #still reports the attribute it was bound to ('b')
        class Test:
            def a(self,x=1,y=2):
                return (self,x,y)
            b = warndep.relocated_method(None, "a")
        self.assertEquals(Test.b.__name__, "<deprecated alias for 'a'>")
        with catch_warnings(record=True) as msgs:
            t = Test()
            self.assertEqual(t.b(5,y=3), (t,5,3))
##        print_warnings(msgs)
        x1 = "bps.tests.test_warndep.Test: method 'b' is deprecated, use 'a' instead"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_removal(self):
        "test depfunc removal kwd"
        class Test:
            def a(self,x=1,y=2):
                return (self,x,y)
            f1 = warndep.relocated_method("b1", "a", removal=True)
            f2 = warndep.relocated_method("b2", "a", removal="2009-10-1")
        self.assertEquals(Test.f1.__name__, "b1")
        self.assertEquals(Test.f2.__name__, "b2")
        with catch_warnings(record=True) as msgs:
            t = Test()
            t.f1()
            t.f2()
        x1 = "bps.tests.test_warndep.Test: method 'b1' is deprecated, use 'a' instead; it will be removed in the future"
        x2 = "bps.tests.test_warndep.Test: method 'b2' is deprecated, use 'a' instead; it will be removed after 2009-10-1"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=__file__)
        self.assert_warning(msgs.pop(0), message=x2, category=DeprecationWarning, filename=__file__)
        self.assert_(not msgs)

    def test_mistake_00_reversed(self):
        #name/use deliberately swapped: the proxy shadows the real method 'a',
        #and calling it should raise ParamError after the warning
        class Test:
            def a(self,x=1,y=2):
                return (self,x,y)
            b = warndep.relocated_method("a", "b")
        self.assertEquals(Test.b.__name__,"a")
        self.assertEquals(Test.a.__name__,"a")
        with catch_warnings(record=True) as msgs:
            t = Test()
            self.assertRaises(ParamError, t.b)
            #^ "name and use parameters reversed"
##        print_warnings(msgs)
        x1 = "bps.tests.test_warndep.Test: method 'a' is deprecated, use 'b' instead"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=util_file)
        self.assert_(not msgs)

    def test_mistake_01_missing_attr(self):
        class Test:
            b = warndep.relocated_method("b", "a")
        self.assertEquals(Test.b.__name__,"b")
        with catch_warnings(record=True) as msgs:
            t = Test()
            self.assertRaises(AttributeError,t.b)
            #^ class has no such attr 'a'
##        print_warnings(msgs)
        x1 = "bps.tests.test_warndep.Test: method 'b' is deprecated, use 'a' instead"
        self.assert_warning(msgs.pop(0), message=x1, category=DeprecationWarning, filename=util_file)
        self.assert_(not msgs)

    def test_mistake_02_no_use(self):
        #a target must always be provided
        self.assertRaises(ValueError, warndep.relocated_method,"a",None)
+
+#=========================================================
+#dep attr
+#=========================================================
+#TODO: deprecated_property -
+# test basic functionality,
+# test options,
+# test that deprecated_method wrapped funcs are unwrapped via ._deprecated_func
+
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/tests/utils.py b/bps/tests/utils.py
new file mode 100644
index 0000000..03a9b4a
--- /dev/null
+++ b/bps/tests/utils.py
@@ -0,0 +1,384 @@
+"""helpers for bps unittests"""
+#=========================================================
+#imports
+#=========================================================
+#core
+import warnings
+import os
+import atexit
+import tempfile
+import unittest
+import sys
+from logging import getLogger; log = getLogger(__name__)
+from cStringIO import StringIO
+import logging
+#pkg
+from bps.types import BaseClass
+from bps.fs import filepath
+from bps.meta import Params, is_oseq, is_iter
+from bps.logs import config_logging
+from bps.logs.handlers import purge_handlers
+#local
+__all__ = [
+ 'ak'
+ 'TestCase',
+ 'get_tmp_path', 'enable_suite',
+ 'catch_warnings', 'capture_logger',
+]
+
#shorthand alias: test modules spell Params as ``ak`` when building case tables
ak = Params
+
+#=========================================================
+#custom test base
+#=========================================================
class TestCase(unittest.TestCase, BaseClass):
    "bps-specific test case class, mainly contains messaging enhancements"

    #optional string (or callable returning one) prepended to every test
    #method's docstring, so verbose suite output groups related cases
    _prefix = None

    def __init__(self, *a, **k):
        #set the doc strings to begin w/ prefix
        #yes, this is incredibly hacked
        prefix = self._prefix
        if prefix:
            if callable(prefix):
                prefix = prefix()
            for attr in dir(self):
                if not attr.startswith("test_"):
                    continue
                v = getattr(self, attr)
                #NOTE: im_func is py2-only; mutating the underlying function's
                #__doc__ so all bound copies see the prefixed docstring
                d = v.im_func.__doc__ or v.im_func.__name__
                idx = d.find(": ")
                if idx > -1:
                    #docstring already carries a "prefix: " -- strip it first
                    #so re-instantiation doesn't stack prefixes
                    d = d[idx+1:]
                v.im_func.__doc__ = d = "%s: %s" % (prefix, d.lstrip())
                assert v.__doc__ == d
        unittest.TestCase.__init__(self, *a, **k)

    def assertEquals(self, real, correct, msg=None):
        #NOTE: overriding this to get msg formatting capability
        msg = self._format_msg(msg, "got %r, expected would equal %r", real, correct)
        return self.assert_(real == correct, msg)

##    assert_equals = assertEquals

    def assertEqual(self, *a, **k):
        return self.assertEquals(*a, **k)

    def assertElementsEqual(self, real, correct, msg=None):
        "test that two objects have same set of elements"
        real = set(real)
        correct = set(correct)
        msg = self._format_msg(msg, "got %r, expected would have same elements as %r", sorted(real), sorted(correct))
        return self.assert_(real == correct, msg)
    assert_sets_equal = assertElementsEqual #deprecated
    assert_same_set = assertElementsEqual #preferred

    def assert_same_order(self, real, correct, msg=None):
        "test that two objects are sequences w/ same elements in same order"
        real = list(real)
        correct = list(correct)
        msg = self._format_msg(msg, "got %r, expected would have same elements, in same order, as %r", real, correct)
        return self.assert_(real == correct, msg)

    def assertNotEquals(self, real, correct, msg=None):
        #NOTE: overriding this to get msg formatting capability
        #NOTE(review): the failure template reads "expected would equal" but
        #this asserts inequality -- looks like it should say "would NOT
        #equal"; confirm before changing the message text.
        msg = self._format_msg(msg, "got %r, expected would equal %r", real, correct)
        return self.assert_(real != correct, msg)
##    assert_not_equals = assertNotEquals

    def assertNotEqual(self, *a, **k):
        return self.assertNotEquals(*a, **k)

    def assertIs(self, real, correct, msg=None):
        #identity (``is``) check, with formatted failure message
        msg = self._format_msg(msg, "got %r, expected would be %r", real, correct)
        return self.assert_(real is correct, msg)

    def assertIsNot(self, real, correct, msg=None):
        msg = self._format_msg(msg, "expected would not be %r", real)
        return self.assert_(real is not correct, msg)

    def assertIsInstance(self, obj, klass, msg=None):
        msg = self._format_msg(msg, "got %r, expected instance of %r", obj, klass)
        return self.assert_(isinstance(obj, klass), msg)

    def assertRaises(self, type, func, *args, **kwds):
        #reimplemented (rather than inherited) so failures get formatted
        #messages; an optional custom message is smuggled in via __msg__
        #NOTE: 'type' deliberately mirrors the stdlib parameter name here,
        #shadowing the builtin within this method only
        msg = kwds.pop("__msg__", None)
        err = None
        try:
            result = func(*args, **kwds)
        except Exception, err:
            pass
        if err is None:
            msg = self._format_msg(msg, "function returned %r, expected it to raise %r", result, type)
            raise AssertionError(msg)
        elif not isinstance(err, type):
            msg = self._format_msg(msg, "function raised %r, expected %r", err, type)
            raise AssertionError(msg)

    def assertAttrRaises(self, excClass, obj, attr):
        "assert that reading ``obj.attr`` raises the given exception"
        #XXX: default msg?
        self.assertRaises(excClass, getattr, obj, attr)

    def assertWarningEquals(self, warning, **kwds):
        "check if WarningMessage instance matches parameters"
        #only the kwds actually passed are checked; others are ignored
        for key in ("message", "category", "filename", "lineno", "file", "line"):
            if key not in kwds:
                continue
            real = getattr(warning, key)
            if key == "message":
                real = str(real) #usually a UserWarning(value), etc
            value = kwds[key]
            if key == "filename":
                #normalize compiled-module paths so callers can pass __file__
                if value.endswith(".pyc") or value.endswith(".pyo"):
                    value = value[:-1]
            if real != value:
                raise AssertionError("warning %s doesn't match pattern %r" % (warning, kwds))

    assert_warning = assertWarningEquals

    def check_function_results(self, func, cases):
        "helper for running through function call cases"
        #cases should be list of ak objects,
        #whose first element is the function's return value
        for elem in cases:
            elem = Params.normalize(elem)
            correct = elem.args[0]
            result = func(*elem.args[1:], **elem.kwds)
            self.assertEqual(result, correct,
                "error for case %s: got %r, expected would equal %r" % (elem.render(1), result, correct)
            )

    def check_function_rtype(self, func, retval=None, rtype=None, ordered=False):
        """helper for testing functions that allow return type to be specified via rtype kwd.

        :arg func: function (w/ parameters bound via partial)
        :arg retval: expected result (can be set, list, etc)
        :arg rtype: default rtype
        :param ordered: if order must match retval when rtype is ordered
        """
        #NOTE: 'self' should be test case
        has_retval = (retval is not None)

        #check default rtype is correct
        result = func()
        if rtype is None:
            pass
        elif rtype is iter:
            self.assert_(is_iter(result))
        else:
            self.assertIsInstance(result, rtype)
        if has_retval:
            #order can only be checked against ordered containers
            if ordered and (is_oseq(result) or is_iter(result)):
                self.assert_same_order(result, retval)
            else:
                self.assert_same_set(result, retval)

        #check unordered types work
        for t in (set, frozenset):
            result = func(rtype=t)
            self.assertIsInstance(result,t)
            if has_retval:
                self.assert_same_set(result, retval)

        #check ordered types work
        for t in (list, tuple):
            result = func(rtype=t)
            self.assertIsInstance(result,t)
            if has_retval:
                if ordered:
                    self.assert_same_order(result, retval)
                else:
                    self.assert_same_set(result, retval)

        #check rtype=iter works
        result = func(rtype=iter)
        self.assert_(is_iter(result))
        if has_retval:
            if ordered:
                self.assert_same_order(result, retval)
            else:
                self.assert_same_set(result, retval)

    def _format_msg(self, msg, template, *args, **kwds):
        #contract: a caller-supplied msg replaces the template entirely,
        #unless it ends with ":", in which case the rendered template is
        #appended after it
        if msg and not msg.endswith(":"):
            return msg
        if args:
            template %= args
        if kwds:
            template %= kwds
        if msg:
            return msg + " " + template
        return template
+
+#=========================================================
+#helper funcs
+#=========================================================
_tmp_files = [] #paths handed out by get_tmp_path(), removed again at exit

def _tmpfile_cleaner():
    "atexit hook: remove any temp paths that are still lying around"
    for path in _tmp_files:
        try:
            path.discard()
        except Exception:
            #FIX: was a bare ``except:`` -- that would also swallow
            #SystemExit/KeyboardInterrupt; one bad path still shouldn't
            #abort cleanup of the others, so log and keep going.
            log.warning("error removing temp file: %r", path, exc_info=True)
atexit.register(_tmpfile_cleaner)

def get_tmp_path():
    "returns a temporary path suitable for any use, which will be removed on exit"
    #mkstemp is used (rather than mktemp) so the name is reserved atomically;
    #the file itself is then deleted so callers get a free path
    fd, path = tempfile.mkstemp(prefix=__name__ + "-")
    os.close(fd) #close the descriptor
    path = filepath(path)
    _tmp_files.append(path) #register it with cleanup routine
    path.remove() #remove the file which was there
    assert path.ismissing
    return path
+
def enable_suite(name):
    """check whether the named optional test suite is enabled.

    Reads the comma-separated ``BPS_TEST_SUITE`` env var (case-insensitive);
    returns True if it lists *name* or the special flag ``all``.

    test flags:
        bcrypt          enable basic bcrypt tests
        slow_bcrypt     enable extra check for slow bcrypt implementation
        pwgen_dups      enable duplication rate checks for pwgen
    """
    enabled = [
        flag.strip()
        for flag in os.environ.get("BPS_TEST_SUITE", "").lower().split(",")
    ]
    return 'all' in enabled or name in enabled
+
+#=========================================================
+#python backports
+#=========================================================
+
#this was copied from the python 2.6.2 warnings.py file,
#so it would always be available for unit-tests
#NOTE: keep this a verbatim stdlib copy -- if it needs changing,
#re-sync it with the stdlib version rather than patching it locally.
try:
    from warnings import catch_warnings, WarningMessage
except ImportError:
    class WarningMessage(object):

        """Holds the result of a single showwarning() call."""

        _WARNING_DETAILS = ("message", "category", "filename", "lineno", "file",
                            "line")

        def __init__(self, message, category, filename, lineno, file=None,
                     line=None):
            local_values = locals()
            for attr in self._WARNING_DETAILS:
                setattr(self, attr, local_values[attr])
            self._category_name = category.__name__ if category else None

        def __str__(self):
            return ("{message : %r, category : %r, filename : %r, lineno : %s, "
                    "line : %r}" % (self.message, self._category_name,
                                    self.filename, self.lineno, self.line))

    class catch_warnings(object):

        """A context manager that copies and restores the warnings filter upon
        exiting the context.

        The 'record' argument specifies whether warnings should be captured by a
        custom implementation of warnings.showwarning() and be appended to a list
        returned by the context manager. Otherwise None is returned by the context
        manager. The objects appended to the list are arguments whose attributes
        mirror the arguments to showwarning().

        The 'module' argument is to specify an alternative module to the module
        named 'warnings' and imported under that name. This argument is only useful
        when testing the warnings module itself.

        """

        def __init__(self, record=False, module=None):
            """Specify whether to record warnings and if an alternative module
            should be used other than sys.modules['warnings'].

            For compatibility with Python 3.0, please consider all arguments to be
            keyword-only.

            """
            self._record = record
            self._module = sys.modules['warnings'] if module is None else module
            self._entered = False

        def __repr__(self):
            args = []
            if self._record:
                args.append("record=True")
            if self._module is not sys.modules['warnings']:
                args.append("module=%r" % self._module)
            name = type(self).__name__
            return "%s(%s)" % (name, ", ".join(args))

        def __enter__(self):
            if self._entered:
                raise RuntimeError("Cannot enter %r twice" % self)
            self._entered = True
            self._filters = self._module.filters
            self._module.filters = self._filters[:]
            self._showwarning = self._module.showwarning
            if self._record:
                log = []
                def showwarning(*args, **kwargs):
                    log.append(WarningMessage(*args, **kwargs))
                self._module.showwarning = showwarning
                return log
            else:
                return None

        def __exit__(self, *exc_info):
            if not self._entered:
                raise RuntimeError("Cannot exit %r without entering first" % self)
            self._module.filters = self._filters
            self._module.showwarning = self._showwarning
+
class catch_all_warnings(catch_warnings):
    """record-mode catch_warnings which also forces every warning through,
    overriding any filters inherited from the surrounding process"""

    def __init__(self):
        #always record, so __enter__ hands back the capture list
        super(catch_all_warnings, self).__init__(record=True)

    def __enter__(self):
        entries = super(catch_all_warnings, self).__enter__()
        #the parent saved the filter list, so this override is undone on exit
        warnings.filterwarnings("always")
        return entries
+
+#=========================================================
+#capture logging output
+#=========================================================
+
class capture_logger(object):
    "capture output of logger, returning StringIO buffer output is written to"
    #NOTE: stores saved state on the instance, so a given capture_logger
    #object should not be entered re-entrantly.

    def __init__(self, name=""):
        #name="" captures the root logger
        self.log = getLogger(name)

    def __enter__(self):
        #remove handlers but don't delete them (we'll restore later)
        self.propagate = self.log.propagate
        self.handlers = purge_handlers(self.log, close=False)

        #create new handler writing to an in-memory buffer
        buffer = StringIO()
        handler = logging.StreamHandler(buffer)
        handler.formatter = logging.Formatter("%(name)s: %(levelname)s: %(message)s")
        self.log.addHandler(handler)
        #disable propagation so captured records don't also reach ancestors
        self.log.propagate = False
        return buffer

    def __exit__(self, *exc_info):
        #remove handler we added
        purge_handlers(self.log)

        #restore original list of handlers
        self.log.handlers[:] = self.handlers
        self.log.propagate = self.propagate
+
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/text/__init__.py b/bps/text/__init__.py
new file mode 100644
index 0000000..8b0d804
--- /dev/null
+++ b/bps/text/__init__.py
@@ -0,0 +1,1294 @@
+"""bps.text -- useful text manipulation funcs -- (c) Assurance Technologies 2003-2006
+"""
+#=========================================================
+#imports
+#=========================================================
+#core
+import sys
+import os.path
+import re
+from warnings import warn
+import logging; log = logging.getLogger(__name__)
+#pkg
+from bps.types import BaseClass, namedtuple
+from bps.meta import abstractmethod, isstr
+#local
+__all__ = [
+ #misc string manipulation
+ 'condense', 'split_condense',
+ 'split_at',
+ 'asbool',
+
+ #inflection helpers
+ 'countof', 'oneof',
+ 'pluralize',
+ 'singularize',
+
+ #capitalization converters
+ 'lu_to_cc',
+
+ #aliased exports
+ "isstr",
+]
+
#some constants used various places
EMPTY = "" #empty string
DOT = "." #extension separator / hidden-file marker
SPACE = " " #single space, used when condensing whitespace
WSCHARS = " \t\n\r" #default set of whitespace chars stripped by condense()
+
+#=========================================================
+#cleaning up input strings
+#=========================================================
+_cleaner_re = re.compile("\s+")
+_re_cache = {} #cache of regexps we've compiled for various char sets
+
def condense(value, chars=None):
    """Strip leading & trailing whitespace, reduce internal whitespace chunks to a single space.

    This function is an enhanced version of ``str.strip()``,
    which takes care of reducing duplicate internal whitespace as well.
    This is primarily useful when normalizing user input.

    :arg value: the string to strip whitespace from (``None`` passes through)
    :param chars:
        String containing characters that should be stripped.
        Internal occurences of any combination of these characters
        will be replaced with the first character in the string.
        If not specified, this defaults to ``" \\t\\n\\r"``.

    :returns: the resulting string, or ``None`` if *value* was ``None``

    Some examples::

        >>> from bps.text import condense
        >>> condense(" john smithson jr \\n the third \\t")
        'john smithson jr the third'
        >>> condense(" 123-465 -0000- 43526 ", " -")
        '123 465 0000 43526'
    """
    #NOTE: the original declared ``global _re_cache, _cleaner_re`` (a no-op,
    #since neither name was rebound) and kept its own module-level pattern
    #cache; re.sub is used directly now, relying on re's internal cache of
    #compiled patterns, so the function is self-contained.
    if value is None:
        return None
    if chars:
        #collapse any run of the given chars down to the first char in the set
        pattern = "[" + re.escape(chars) + "]+"
        return re.sub(pattern, chars[0], value.strip(chars))
    #common case: strip & collapse all whitespace, equiv to chars=" \t\r\n"
    return re.sub(r"\s+", " ", value.strip())
+
def split_condense(value, sep=",", empty="keep", chars=None):
    """Split string into list based on separator, and then run :func:`condense` on each element.

    This function is commonly used when parsing user input,
    and a list of values is provided in string form,
    using a predefined separator. The string will be broken
    along the separator, and then any whitespace removed from the elements.

    :arg value: The string to split. ``None`` yields an empty list.
    :param sep:
        The separator to split by.
        this can either be a single string,
        or a list of strings, in which case
        all elements of the list will
        be considered as possible separators.
    :param empty:
        The policy for empty elements.
        By default, when the string has been split,
        any empty elements will be returned (``empty="keep"``).
        Frequently, the need arises to strip out
        any empty elements (such as happens with a trailing separator).
        To do this, set ``empty="strip"``.
    :param chars:
        This is the list of whitespace characters to strip.
        See :func:`condense` for details of its behavior.

    :returns: a list of the resulting elements (may be empty)

    Some examples::

        >>> from bps.text import split_condense
        >>> split_condense("a, b,c , d")
        ['a', 'b', 'c', 'd']
        >>> split_condense("123; ; 456 ; ", sep=";")
        ['123', '', '456', '']
        >>> split_condense("123; 456 ; ", sep=";", empty="strip")
        ['123', '456']
    """
    assert empty in ("keep", "strip")
    if value is None:
        return [] #XXX: what None policy should we use? treat like ""?

    #NOTE: it would be faster to do the condense first, and .strip() later,
    # but for the case where sep chars are in the strip char list,
    # separators would be stripped out too early.

    if isstr(sep):
        result = value.split(sep)
    else:
        #seq of split characters, ala java's split method:
        #split by each separator in turn
        result = [value]
        for s in sep:
            source = result
            result = []
            for chunk in source:
                result.extend(chunk.split(s))

    cleaned = [condense(elem, chars) for elem in result]
    if empty == "strip":
        #NOTE: list comprehension rather than filter() so a real list is
        #returned under py3 as well (filter() there yields an iterator,
        #which would break the documented return type)
        cleaned = [elem for elem in cleaned if elem]
    return cleaned
+
def split_at(s, *indexes):
    """split a sequence at an arbitrary number of specified points.

    :arg s: string (or other sliceable sequence) to split
    :arg indexes: list of indexes to act as split points

    :returns: list of ``len(indexes)+1`` pieces covering all of *s*

    Example Usage::

        >>> from bps.text import split_at
        >>> split_at('abc', 1)
        ['a', 'bc']
        >>> split_at('abcdef', 2, 5)
        ['ab', 'cde', 'f']
        >>> split_at('abc')
        ['abc']
    """
    #NOTE: the original special-cased 0 and 1 indexes, but the one-index
    #branch referenced an undefined name ('index' instead of 'indexes[0]',
    #a NameError), and the general branch assigned 'idx = last' instead of
    #'last = idx', so every slice started at 0. the single loop below
    #handles all counts correctly.
    out = []
    last = 0
    for idx in indexes:
        out.append(s[last:idx])
        last = idx
    out.append(s[last:])
    return out
+
+#clean_string - could put all kinds of fancier things here
+# cleaning up names, detecting/cleaning up urls,
+# fixing typos, ???
+
+#=========================================================
+#clean_filename - sanitize those filthy external inputs :)
+#=========================================================
#regexps used by FileCleaner.strip_paths() to detect an absolute path
#embedded in a submitted filename; in each one, group(1) captures the
#trailing filename component after the last separator.
_dos_path_re = re.compile(r"^[A-Z]+:\\.*?([^\\]*)$", re.I) #dos drive path, eg C:\dir\file
_dosnet_path_re = re.compile(r"\\\\.*?([^\\]*)$", re.I) #dos UNC network path, eg \\host\share\file
_posix_path_re = re.compile("^/.*?([^/]*)$") #absolute posix path, eg /dir/file
_badset = set([ EMPTY, SPACE, DOT, '..' ]) #filenames we simply don't allow, ever
+
+def _compile_safe_char(unsafe_chars, safe_char):
+ """compile safe_char description into a function which replaces safe chars.
+
+ this function first validates the safe_char input
+ so that no unsafe_chars are listed in safe char.
+ then, given the format of safe_char, it generates
+ a function of the form ``cleaner(unsafe_char) -> safe_replacement_char``.
+ """
+ #empty string would already work, None would just be too much
+## if safe_char is None:
+## #if safe char is None, we replace unsafe char 'c' with empty string
+## return lambda c: EMPTY
+ if isinstance(safe_char, str):
+ #if safe char is a str, use it as the replacement (assuming it's safe)
+ if safe_char and safe_char in unsafe_chars:
+ log.warning("safe_char %r is unsafe: %r", safe_char, unsafe_chars)
+ safe_char = EMPTY
+ return lambda c: safe_char
+ #this option is just too ornate to live...
+## elif isinstance(safe_char, (list, tuple)):
+## #if it's a list, assume it's made of (unsafe_list, replacement_char) elements,
+## #the initial element may be a single string, used as the default element.
+## if not safe_char:
+## return lambda c: EMPTY
+## if isinstance(safe_char[0], str):
+## default = safe_char[0]
+## safe_char = safe_char[1:]
+## if default in unsafe_chars:
+## log.warning("default safe_char %r is unsafe: %r", default, unsafe_chars)
+## default = EMPTY
+## else:
+## default = EMPTY
+## out = {}
+## for chars, alt in safe_char:
+## if alt and alt in unsafe_chars:
+## log.warning("safe_char %r for %r is unsafe: %r", alt, chars, unsafe_chars)
+## continue
+## for c in chars:
+## out[c] = alt
+## return lambda c: out.get(c, default)
+ #this option, while not as ornate, disabled until it's needed
+ elif isinstance(safe_char, dict):
+ #safe_char is a dict mapping unsafe chars to their replacement.
+ #the 'default' key (if present) is used for all unsafe chars not found in dict
+ if not safe_char:
+ return lambda c: EMPTY
+ default = safe_char.get("default", EMPTY)
+ if default and default in unsafe_chars:
+ log.warning("default safe_char %r is unsafe: %r", default, unsafe_chars)
+ default = EMPTY
+ out = {}
+ for chars, alt in safe_char.iteritems():
+ if chars == "default":
+ continue
+ if alt and alt in unsafe_chars:
+ log.warning("safe_char %r for %r is unsafe: %r", alt, chars, unsafe_chars)
+ continue
+ for c in chars:
+ out[c] = alt
+ return lambda c: out.get(c, default)
+ elif callable(safe_char):
+ #safe char is a callable which is used directly (with validation that unsafe chars can't slip through)
+ def wrapper(c):
+ alt = safe_char(c)
+ if alt in unsafe_chars:
+ log.warning("safe_char %r for %r is unsafe: %r", alt, c, unsafe_chars)
+ return EMPTY
+ return alt
+ return wrapper
+ else:
+ raise ValueError, "invalid safe_char: %r" % (safe_char,)
+
+def _compile_ext_list(default_filename, ext_list):
+ "normalize ext_list, integrating default's extension"
+ if ext_list is None:
+ return None
+ elif isinstance(ext_list, str):
+ #parse into list
+ ext_list = os.path.normcase(ext_list)
+ if ';' in ext_list:
+ ext_list = ext_list.split(";")
+ elif ':' in ext_list:
+ ext_list = ext_list.split(":")
+ else:
+ ext_list = [ext_list] #assume a single extension
+ elif isinstance(ext_list, (list, tuple)):
+ ext_list = [ os.path.normcase(elem) for elem in ext_list ]
+ else:
+ raise ValueError, "invalid ext_list: %r" % (ext_list,)
+ assert all(ext == EMPTY or ext.startswith(DOT) for ext in ext_list)
+ #put default_filename's ext at beginning, if it's valid at all.
+ if default_filename:
+ idx = default_filename.rfind(".", 1)
+ if idx > -1:
+ ext = os.path.normcase(default_filename[idx:])
+ if ext in ext_list:
+ if ext != ext_list[0]:
+ ext_list.remove(ext)
+ ext_list.insert(0, ext)
+ else:
+ log.info("default filename's extension not in ext_list: default=%r ext_list=%r",
+ default_filename, ext_list)
+ return ext_list
+
class FileCleaner(BaseClass):
    """Base class implementing routines for cleaning up a filename, used by :func:`clean_filename`.

    Instances of this class will sanitize any filename passed into their ``clean()`` method,
    according to the configuration of the instance.

    New instances may be created from the class, or from existing instances using the ``copy()`` method.

    See :func:`clean_filename` for the details on the various options:
    each of its keywords corresponds directly to an attribute in this class.
    """
    #=========================================================
    #instance attrs
    #=========================================================
    default_filename = None #default filename to use if the original filename has to be scrapped
    ext_list = None #optional list of extensions which file is limited to,
                    #first one is used as the default

    strip_dos_paths = True #remove dir-part of absolute dos paths if detected?
    strip_posix_paths = True #remove dir-part of absolute posix paths if detected?

    unsafe_chars = '\r\n\'"`~!#$&%^*|\\:;<>?/'
        #list of characters considered "unsafe"
        #the default set contains all the common chars which could be
        #potentially dangerous, as they have special meaning in some OS.
    safe_char = "_" #character/dict used to replace any unsafe chars found

    allow_hidden = False #allow hidden files?

    space_char = None #if defined, all spaces are replaced with this character

    #get_safe_char - compiled replacement-char function, filled in by __init__

    #=========================================================
    #framework
    #=========================================================
    def __init__(self, _copy=None, **kwds):
        #NOTE: _copy (if given) is an existing FileCleaner whose config is
        #inherited wholesale; kwds then override individual attrs. derived
        #attrs are only rebuilt when one of their inputs was overridden.
        if _copy:
            self.__dict__.update(_copy.__dict__)
        self.__dict__.update(kwds)
        if not _copy or 'safe_char' in kwds or 'unsafe_chars' in kwds:
            #rebuild compiled safe-char replacement function
            self.get_safe_char = _compile_safe_char(self.unsafe_chars or EMPTY, self.safe_char)
        if not _copy or 'ext_list' in kwds or 'default_filename' in kwds:
            #renormalize extension list (folds in default_filename's ext)
            self.ext_list = _compile_ext_list(self.default_filename, self.ext_list)
        if not _copy or 'space_char' in kwds:
            if self.space_char is None:
                self.space_char = SPACE
        if not _copy or 'default_filename' in kwds:
            if self.default_filename:
                #make sure the fallback name itself carries a permitted extension
                self.default_filename = self.clean_extension(self.default_filename)

    def copy(self, **kwds):
        "make a (possibly mutated) copy"
        return FileCleaner(_copy=self, **kwds)

    def __call__(self, filename, **kwds):
        "main frontend"
        #any kwd overrides are applied to a temporary copy of the config,
        #leaving this instance untouched
        if kwds:
            self = self.copy(**kwds)
        return self.clean(filename)

    #=========================================================
    #cleaning
    #=========================================================
    def clean(self, filename):
        "main frontend which is used to clean a filename"
        if not filename:
            return self.default_filename

        #remove absolute paths
        #NOTE: we strip these paths since it's common
        #for them to be present in (for example) cgi form submissions,
        #where they aren't even part of the intended name.
        filename, path_type = self.strip_paths(filename)

        #remove any unsafe characters
        get_safe_char = self.get_safe_char
        for c in self.unsafe_chars:
            if c in filename:
                alt = get_safe_char(c)
                filename = filename.replace(c, alt)

        #replace space_char with SPACE for condense()
        space_char = self.space_char
        if space_char not in (EMPTY, SPACE):
            filename = filename.replace(space_char, SPACE)

        #condense spaces
        #(remember hidden status first, since the strip below removes leading dots)
        hidden = self.allow_hidden and filename.startswith(DOT)
        filename = condense(filename).strip(" .")

        #condense space around path elements
        #FIXME: would like to this path stuff recursively, near strip_paths call
        if path_type == "dos":
            filename = "\\".join(elem.strip() for elem in filename.split("\\"))
            while "\\\\" in filename:
                filename = filename.replace("\\\\", "\\")
        elif path_type == "posix":
            filename = "/".join(elem.strip() for elem in filename.split("/"))
            while "//" in filename:
                filename = filename.replace("//", "/")

        #replace SPACE with space_char
        if space_char != SPACE:
            filename = filename.replace(SPACE, space_char)

        #restore hidden file status if it got stripped
        if hidden and not filename.startswith(DOT):
            filename = DOT + filename

        #don't let forbidden names sneak through
        if filename in _badset:
            return self.default_filename

        #allow only permitted extensions
        return self.clean_extension(filename)

    def strip_paths(self, filename):
        "strip any path-like prefixes if asked, return (filename,detected)"
        #try each path flavor in turn: dos drive, dos UNC, then posix
        m = _dos_path_re.match(filename)
        if m:
            if self.strip_dos_paths:
                filename = m.group(1)
                return filename, None #return 'None' since path is no longer there
            else:
                return filename, "dos"
        m = _dosnet_path_re.match(filename)
        if m:
            if self.strip_dos_paths:
                filename = m.group(1)
                return filename, None #return 'None' since path is no longer there
            else:
                return filename, "dos"
        m = _posix_path_re.match(filename)
        if m:
            if self.strip_posix_paths:
                filename = m.group(1)
                return filename, None
            else:
                return filename, "posix"
        return filename, None

    def clean_extension(self, filename):
        "make sure extension is valid, replacing with alternate if it isn't"
        assert filename

        #check if we have anything to do
        ext_list = self.ext_list
        if ext_list is None:
            return filename

        #check if extension is acceptable
        #NOTE: rfind(".", 1) deliberately ignores a dot at position 0 (hidden files)
        idx = filename.rfind(".", 1)
        if idx > -1:
            ext = os.path.normcase(filename[idx:])
        else:
            idx = len(filename)
            ext = ""
        if ext in ext_list:
            return filename

        #return filename w/ default extension
        if ext_list:
            return filename[:idx] + ext_list[0]
        else:
            return filename[:idx]

    #=========================================================
    #EOC
    #=========================================================
+
def _init_cfn_presets():
    "setup initial presets for clean_filename"
    default = FileCleaner()
    return {
        #"safe" - the default: replace anything dangerous with underscores
        "safe": default,

        #"clean" - like "safe", but unsafe chars become spaces
        #(prettier, but hides evidence of malicious input)
        "clean": default.copy(
            safe_char=SPACE,
        ),

        #"minimal" - strip only the chars near-guaranteed to cause trouble
        #NOTE: the original spelled this string with a stray '\$' escape;
        #'$' needs no escaping in a plain string, and the old form both
        #kept a duplicate backslash char and draws a SyntaxWarning on
        #modern interpreters. the effective char set is unchanged.
        "minimal": default.copy(
            unsafe_chars='\r\n$%\\:;/',
        ),

        #"paranoid" - remove unsafe chars entirely, underscores for spaces
        "paranoid": default.copy(
            unsafe_chars=default.unsafe_chars + "\x00@()[]{},",
            safe_char=EMPTY,
            space_char="_",
        ),

        #"excel_sheet" - names valid for an excel spreadsheet tab
        "excel_sheet": default.copy(
            unsafe_chars=default.unsafe_chars + "[]",
        ),
    }
cfn_presets = _init_cfn_presets() #module-level registry used by clean_filename()
+
def clean_filename(
    #positional arguments
    filename, default_filename=None, ext_list=None,

    #kwd only arguments
    preset="safe",
    unsafe_chars=None,
    safe_char=None,
    space_char=None,
    allow_hidden=None,
    ):
    """Sanitize an externally-supplied filename.

    The filename has unsafe characters (``&``, ``:``, path separators, etc)
    replaced, duplicate whitespace condensed, embedded absolute paths
    stripped, and forbidden names (``"."``, ``".."``, empty) rejected;
    optionally the extension is restricted to a known list. This operates
    on bare filenames, not paths - separators are treated as unsafe.

    :type filename: str | None
    :arg filename:
        the potentially-dangerous filename to sanitize
        (``None`` is treated like an empty string).

    :type default_filename: str | None
    :arg default_filename:
        fallback name returned when the input has to be scrapped entirely.

    :type ext_list: str | seq of str | None
    :arg ext_list:
        permitted extensions (case-insensitive): a single extension, a
        ``;``/``:`` separated string, or a sequence. each entry must start
        with a dot ("" means 'no extension'). if the file's extension isn't
        listed, the first entry is substituted. ``None`` (the default)
        allows any extension.

    :type preset: str
    :param preset:
        name of the preset configuration supplying defaults for the
        remaining options; a KeyError is raised for unknown names.
        available presets:

        * ``safe`` (default) - keeps the name's structure, unsafe chars
          become underscores.
        * ``clean`` - like ``safe``, but unsafe chars become spaces.
        * ``paranoid`` - unsafe chars removed outright, spaces become
          underscores.
        * ``minimal`` - strips only chars near-certain to cause trouble
          (``\\/;:$%``).
        * ``excel_sheet`` - names valid for an Excel spreadsheet.

    :type unsafe_chars: str
    :param unsafe_chars: characters to treat as unsafe (preset default).

    :type safe_char: str
    :param safe_char:
        replacement for unsafe chars - a string (empty to remove them), or
        a callable ``safe_char(c) -> r`` (preset default).

    :type space_char: str
    :param space_char: if set, all spaces are replaced with this character.

    :type allow_hidden: bool
    :param allow_hidden:
        if ``True``, a leading dot (hidden file) is preserved; by default
        leading dots are stripped.

    Some usage examples::

        >>> from bps.text import clean_filename
        >>> #the default preset preserves structure, making attacks visible
        >>> clean_filename("../../../usr/bin/rm -rf")
        "_.._.._usr_bin_rm -rf"
        >>> #but if you just want to get a good clean name...
        >>> clean_filename("../../../usr/bin/rm -rf", preset="clean")
        "usr bin rm -rf"
        >>> #for those who want to feel _really_ safe
        >>> clean_filename("../../../usr/bin/rm -rf &; wget http://hack.tgz", preset="paranoid")
        "usrbinrm_-rf_wget_httphack.tgz"
    """
    #collect only the overrides the caller actually supplied; anything left
    #as None falls back to the value configured on the chosen preset.
    overrides = (
        ("default_filename", default_filename),
        ("ext_list", ext_list),
        ("unsafe_chars", unsafe_chars),
        ("safe_char", safe_char),
        ("space_char", space_char),
        ("allow_hidden", allow_hidden),
    )
    kwds = dict((key, value) for key, value in overrides if value is not None)

    #look up preset (KeyError for unknown names) & run its cleaner
    cleaner = cfn_presets[preset]
    return cleaner(filename, **kwds)
+
+#=========================================================
+#displaying strings
+#=========================================================
+#TODO: ellipsize() -- like to do this intelligently
+#TODO: ellipsize_block() -- take from medicred.backend.utils, does multiline version
+#TODO: might want medicred.backend.utils:decimal_format
+
+#=========================================================
+#shell
+#=========================================================
+
+#=========================================================
+#html utilities
+#=========================================================
def html_escape(data):
    """helper function for escaping html strings.

    ``<``, ``>`` and ``&`` become their html entities; tabs & newlines pass
    through unchanged; all other control or non-ascii chars (ord < 32 or
    ord > 127) are emitted using this module's historical ``%XX`` hex
    escape; everything else is copied as-is.

    :arg data: the object to escape (coerced to a string via ``str()``)
    :returns: the escaped string
    """
    out = []
    append = out.append
    for c in str(data):
        if c == "<":
            append("&lt;")
        elif c == ">":
            append("&gt;")
        elif c == "&":
            append("&amp;")
        elif c in "\n\t":
            append(c)
        else:
            val = ord(c)
            if val < 32 or val > 127:
                #NOTE: not a standard html entity - kept for compatibility
                append("%%%02x" % val)
            else:
                append(c)
    #single join at the end - the original grew the result with repeated
    #'+=', which is quadratic in the worst case
    return "".join(out)
+
+#=========================================================
+#boolean coercion
+#=========================================================
#str.__bases__[0] is basestring under py2 (so both str & unicode match the
#isinstance test in asbool). under py3 it is object, which would make that
#test match *every* value and crash on .strip() - fall back to plain str.
basestr = str.__bases__[0]
if basestr is object: #running under py3
    basestr = str
true_set = set([ 'true', 't', 'yes', 'y', 'on', '1', 'enable'])
false_set = set([ 'false', 'f', 'no', 'n', 'off', '0', 'disable'])
none_set = set([ 'none', 'null', '', 'noval', 'novalue' ])

def asbool(obj, default=None):
    """convert boolean string to boolean value.

    If the input object is a string, it will be coerced
    to one of ``True``, ``False``, or ``None`` based on preset recognized strings...
    spaces & case are ignored.

    If the input object is any other type, it is converted to
    one of ``True`` or ``False`` via ``bool()``.

    If the resulting value is ``None``, the default value will be
    returned if specified. This allows asbool to chain the default
    of other input source, with ``"none"`` and the like acting
    as a "use the default" option.

    :arg obj: the object to convert to boolean
    :param default: the default value to return if ``obj`` evalutes to ``None``.
    :raises ValueError: if a string is not a recognized boolean constant
    """
    if isinstance(obj, basestr):
        obj = obj.strip().lower()
        if obj in true_set:
            return True
        if obj in false_set:
            return False
        if obj in none_set:
            return default
        #NOTE: fixed message typo ("no a") and replaced the py2-only
        #'raise E, arg' statement with the portable call form
        raise ValueError("string is not a recognized boolean constant: %r" % (obj,))
    elif obj is None:
        return default
    else:
        return bool(obj)
+
+#=========================================================
+#inflector - inspired by RoR's inflector
+#=========================================================
class Inflector(BaseClass):
    "base inflector class, inspired by RoR's inflector, but not as complete"
    #=========================================================
    #subclass attrs
    #=========================================================
    uncountable_words = None #list of uncountable words
    irregular_plurals = None #dict of irregular singular => plural pairs
    irregular_indefinites = None #dict of words w/ irregular indefinite articles

    plural_rules = None #list of (re-pat, re-sub) strings for pluralization
    singular_rules = None #list of (re-pat, re-sub) strings for singularization
    indefinite_rules = None #list of (re-pat, re-sub) strings for indefinite articles

    #the following are (re)built by compile()
    _uncountable_words = None #frozenset of uncountable_words
    _irregular_singulars = None #reverse map of irregular_plurals
    _plural_rules = None #list of re's from irregular_plurals & plural_rules
    _singular_rules = None #list of re's from irregular_plurals & singular_rules
    _indefinite_rules = None #list of re's from indefinite_rules

    #=========================================================
    #init
    #=========================================================
    def __init__(self, **kwds):
        self.__super.__init__(**kwds)
        self.compile()

    #=========================================================
    #registry
    #=========================================================
    def compile(self):
        "(re)build the internal lookup tables from the subclass's word lists & rules"
        #build set of uncountables
        self._uncountable_words = frozenset(self.uncountable_words)

        #build reverse map for irregular_words
        #NOTE: .items() rather than py2-only .iteritems(), so this runs
        #under both python 2 & 3
        self._irregular_singulars = dict(
            (v, k) for k, v in self.irregular_plurals.items()
        )

        #compile plural rules
        self._plural_rules = [
            self.compile_rule(source)
            for source in self.plural_rules
        ]

        #compile singular rules
        self._singular_rules = [
            self.compile_rule(source)
            for source in self.singular_rules
        ]

        #compile indefinite rules
        self._indefinite_rules = [
            self.compile_rule(source)
            for source in self.indefinite_rules
        ]

    def compile_rule(self, rule):
        "compile a single (pattern, substitution) pair into (regexp, substitution)"
        #NOTE: the original used a py2-only tuple-unpacking parameter
        #"(pat, sub)", which is a SyntaxError under py3; callers still pass
        #the same single tuple argument.
        pat, sub = rule
        pat = re.compile(pat, re.IGNORECASE)
        return pat, sub

    #=========================================================
    #inflectors
    #=========================================================
    def _normalize_word(self, word):
        "lowercase word & reduce it to its final space/underscore/dash separated component"
        if word.rstrip() != word:
            #NOTE: py2-only raise statement replaced with call form
            raise ValueError("trailing whitespace not supported")
        word = word.lstrip().lower()
        #strip out everything to left of rightmost separator
        #NOTE: the original tested 'if idx:', which wrongly skipped the
        #valid case of a separator at position 0 (rfind returns -1, not a
        #falsey value, when nothing is found)
        idx = max(word.rfind(sep) for sep in " _-")
        if idx > -1:
            word = word[idx+1:]
        return word

    def is_uncountable(self, noun):
        "test if noun is a known uncountable noun (eg, 'information')"
        return self._normalize_word(noun) in self._uncountable_words

    def countof(self, count, noun, zero="0"):
        """Returns a string representation of a counted number of a given noun (eg "3 cows").

        :param count: the number of *noun* objects
        :param noun: a (countable) noun in singular form.
        :key zero:
            optional keyword to override text
            when count is 0. for example,
            ``countof(0,"goats", zero="no")`` would
            display "no goats" instead of "0 goats".

        :returns: an inflected string

        Some usage examples::

            >>> from bps.text import countof
            >>> countof(3,"cow")
            "3 cows"
            >> countof(1,"larch tree")
            "1 larch tree"
            >> countof(0,"goats")
            "0 goats"
            >> countof(-1,"goats") + " (uhoh!)"
            "-1 goats (uhoh!)"
        """
        if self.is_uncountable(noun):
            warn("countof() called with known uncountable noun: %r" % (noun,))
        if count == 0:
            return "%s %s" % (zero, self.pluralize(noun))
        elif count == 1:
            #NOTE: we assume the singular form was provided
            return "1 " + noun
        else:
            return "%s %s" % (count, self.pluralize(noun))

    @abstractmethod
    def ordinal(self, number, long=False):
        """return ordinal form of number (1st, 2nd, etc).

        :arg number: number to render
        :param long: if true, returns 'first' instead of '1st'
        """

    def oneof(self, noun):
        """returns indefinite article followed by noun (eg: "an allosaur").

        :arg noun: the noun to add the article to.
        :returns: noun with prepending article.

        Some examples::

            >>> from bps.text import oneof
            >>> oneof("cow")
            "a cow"
            >>> oneof("allosaur")
            "an allosaur"

        .. note::
            The english language a/an rules regarding the letter "h"
            are a little hazy, and implemented according to what
            "sounds right" to the BPS programmers.
        """
        if self.is_uncountable(noun):
            warn("oneof() called with known uncountable noun: %r" % (noun,))
            #we'll do this, but it doesn't make much sense, at least in english.
        test = noun.lstrip().lower()
        if not test:
            return ''

        #check for irregular indefinites, preserve case of first letter only
        #NOTE: this is pretty english-specific, as it provides suffixes only
        if test in self.irregular_indefinites:
            return self.irregular_indefinites[test] + " " + noun

        for pat, prefix in self._indefinite_rules:
            match = pat.search(test)
            if match:
                return prefix + " " + noun

        #if no rules matches, use the last prefix
        warn("no rules matches oneof(): %r" % (noun,))
        return self._indefinite_rules[-1][1] + " " + noun

    def pluralize(self, word):
        """Return plural form of singular noun.

        Some examples::

            >>> from bps.text import pluralize
            >>> pluralize("cow")
            "cows"
            >>> pluralize("horse fly")
            "horse flies"

        .. note::
            While it would be nice for this function to be idempotent,
            so that ``pluralize("cows")`` returned ``"cows"``, the english
            language rules are too complex for this to work in a context-free manner.
            It may work for some words, but don't rely on it.
        """
        if not word:
            return ''
        test = self._normalize_word(word)

        #check for uncountable words
        if test in self._uncountable_words:
            return word

        #check for irregular plurals, preserve case of first letter only
        if test in self.irregular_plurals:
            return word[:-len(test)+1] + self.irregular_plurals[test][1:]

        #apply normal plurality rules
        for pat, sub in self._plural_rules:
            match = pat.search(test)
            if match:
                groups = match.groups()
                #remove any unmatched groups from the substitution
                #NOTE: range() rather than py2-only xrange()
                for k in range(len(groups)):
                    if groups[k] is None:
                        sub = sub.replace('\\'+str(k+1), '')
                return pat.sub(sub, word)

        #assume it's plural
        return word

    def singularize(self, word):
        """Return single form of plural noun.

        Some examples::

            >>> from bps.text import singularize
            >>> singularize("cows")
            "cow"
            >>> singularize("horse flies")
            "horse fly"

        .. note::
            While it would be nice for this function to be idempotent,
            so that ``singularize("cow")`` returned ``"cow"``, the english
            language rules are too complex for this to work in a context-free manner.
            It may work for some words, but don't rely on it.
        """
        if not word:
            return ''
        test = self._normalize_word(word)

        #check for uncountable words
        if test in self._uncountable_words:
            return word

        #check for irregular singulars, preserve case of first letter only
        if test in self.irregular_plurals:
            return word
        if test in self._irregular_singulars:
            return word[:-len(test)+1] + self._irregular_singulars[test][1:]

        #apply normal plurality rules
        for pat, sub in self._singular_rules:
            match = pat.search(test)
            if match:
                groups = match.groups()
                #remove any unmatched groups from the substitution
                for k in range(len(groups)):
                    if groups[k] is None:
                        sub = sub.replace('\\'+str(k+1), '')
                return pat.sub(sub, word)

        #assume it's plural
        return word

    #=========================================================
    #EOC
    #=========================================================
+
class EnglishInflector(Inflector):
    """:class:`Inflector` subclass providing English inflection rules."""
    #XXX: this information was gotten from a source
    #which probably didn't do a very thorough job.
    #should study http://www.csse.monash.edu.au/~damian/papers/HTML/Plurals.html
    #for a replacement

    #nouns whose plural form equals their singular form
    uncountable_words = [
        'equipment', 'fish', 'information',
        'money', 'rice',
        'series', 'sheep', 'species',
        'pez',
    ]

    #singular -> plural exceptions the regex rules below can't express
    irregular_plurals = {
        'fez': 'fezzes',
        'child' : 'children',
        'goose': 'geese',
        'louse': 'lice',
        'mouse': 'mice',
        'move' : 'moves',
        'ox': 'oxen',
        'quiz': 'quizzes',
        'sex' : 'sexes',
        'woman': 'women',
        'fetus': 'fetuses',
        'loaf': 'loaves',
    }

    #(pattern, replacement) pairs tried in order by pluralize();
    #first match wins, so specific rules must precede the catchalls
    plural_rules = [
        ['(person)$', 'people' ], #irregular root
        ['(man)$', 'men' ], #irregular root
        ['(matr|vert|ind)ix|ex$' , '\\1ices'],
        ['(x|ch|ss|sh)$' , '\\1es'],
##        ['([^aeiouy]|qu)ies$' , '\\1y'], #does this belong here?
        ['([^aeiouy]|qu)y$' , '\\1ies'],
        ['(hive)$' , '\\1s'],
        ['([^f])fe$', '\\1ves'],
        ['([lr])f$', '\\1ves'],
        ['sis$' , 'ses'],
        ['([ti])um$' , '\\1a'],
        ['(buffal|tomat)o$' , '\\1oes'],
        ['(bu)s$' , '\\1ses'],
        ['(alias|status)$' , '\\1es'],
        ['(octop|vir)us$' , '\\1i'],
        ['(ax|test)is$' , '\\1es'],
        ['s$' , 's'], #already ends in 's' - leave it alone
        ['$' , 's'] #catchall - just append 's'
    ]

    #(pattern, replacement) pairs tried in order by singularize()
    singular_rules = [
        ['(people)$', 'person' ], #irregular root
        ['(men)$', 'man' ], #irregular root
        ['(matr)ices$' , '\\1ix'],
        ['(vert|ind)ices$' , '\\1ex'],
        ['(alias|status)es$' , '\\1'],
        #bugfix: this was written "[octop|vir]", a character class matching
        #any ONE of those letters (so e.g. "safari" -> "safarus");
        #an alternation group is what was intended
        ['(octop|vir)i$' , '\\1us'],
        ['(cris|ax|test)es$' , '\\1is'],
        ['(shoe)s$' , '\\1'],
        ['(o)es$' , '\\1'],
        ['(bus)es$' , '\\1'],
        ['([ml])ice$' , '\\1ouse'],
        ['(x|ch|ss|sh)es$' , '\\1'],
        ['(m)ovies$' , '\\1ovie'],
        ['(s)eries$' , '\\1eries'],
        ['([^aeiouy]|qu)ies$' , '\\1y'],
        ['([lr])ves$' , '\\1f'],
        ['(tive)s$' , '\\1'],
        ['(hive)s$' , '\\1'],
        ['([^f])ves$' , '\\1fe'],
        ['(^|\W)(analy)ses$', '\\1\\2sis'],
        ['((a)naly|(b)a|(d)iagno|(p)arenthe|(p)rogno|(s)ynop|(t)he)ses$' , '\\1\\2sis'],
        ['([ti])a$' , '\\1um'],
        ['(n)ews$' , '\\1ews'],
        ['s$' , ''],
    ]

    #english has no irregular indefinite articles
    irregular_indefinites = {}
    #(pattern, article) pairs for choosing "a" vs "an"; first match wins
    indefinite_rules = [
        #general rule.. all vowels + 'h', unless the vowel/h is soft
        ['^hour', 'an'], #soft H (the exception) uses an
        ['^h', 'a'], #the basic h rule uses 'a'
        ['^uni', 'a'], #soft vowel exceptions use 'a' not 'an'
        ['^[aeiuo]', 'an'], #the basic vowel rule uses 'an'
        ['.', 'a'], #the catchall for everything else uses 'a'
    ]

    #last-digit -> ordinal suffix (the teens are handled separately below)
    _ord_map = {
        0: "th",
        1: "st", 2: "nd", 3: "rd",
        4: "th", 5: "th", 6: "th",
        7: "th", 8: "th", 9: "th",
    }
    def ordinal(self, number, long=False):
        #NOTE: 'long' parameter name kept for interface compatibility,
        #even though it shadows the builtin
        if number < 1:
            raise ValueError("ordinal numbers must be >= 1: %r" % (number,))
        if long:
            raise NotImplementedError("should get to this someday")
        #bugfix: the teens are irregular - 11th/12th/13th (and 111th,
        #212th, ...), not 11st/12nd/13rd as the digit map alone gives
        if 10 <= number % 100 < 20:
            return "%dth" % (number,)
        return "%d%s" % (number, self._ord_map[number % 10])
    ordinal.__doc__ = Inflector.ordinal.__doc__
+
+#---------------------------------------------------
+#build a default inflector for easy shortcuts
+#---------------------------------------------------
+#TODO: have a way to specify language etc
+
#module-level inflector instance backing the shortcut functions below
default_inflector = EnglishInflector()

def pluralize(word):
    #thin delegation wrapper; docstring copied from the bound method below
    return default_inflector.pluralize(word)
pluralize.__doc__ = default_inflector.pluralize.__doc__

def singularize(word):
    return default_inflector.singularize(word)
singularize.__doc__ = default_inflector.singularize.__doc__

def countof(count, noun, zero="0"):
    return default_inflector.countof(count,noun,zero=zero)
countof.__doc__ = default_inflector.countof.__doc__

def oneof(noun):
    return default_inflector.oneof(noun)
oneof.__doc__ = default_inflector.oneof.__doc__

def ordinal(noun):
    #NOTE(review): the argument is a number, not a noun - parameter name
    #kept as-is for backward compatibility with any keyword callers
    return default_inflector.ordinal(noun)
ordinal.__doc__ = default_inflector.ordinal.__doc__
+
+#=========================================================
+#other inflection-related stuff
+#=========================================================
+#TODO: this could be made MUCH more flexible,
+# much more reliable, etc.
+
#matches each character that should be capitalized: the first char,
#and any char directly following an underscore
_lu_re = re.compile("(^|_)(.)")

def lu_to_cc(value):
    """convert variable lowercase w/ underscore (lu) -> camel case (cc).

    :raises ValueError: when input is not in lu format.

    for example::
        >>> from bps.text import lu_to_cc
        >>> lu_to_cc("my_variable_name")
        "MyVariableName"
    """
    if value != value.lower():
        raise ValueError("input value is not in LU format: %r" % (value,))
    return _lu_re.sub(lambda m: m.group(2).upper(), value)
+
+#TODO: cc_to_lu()
+
+#FIXME: _really_ wish the re module has a lower-case and upper-case wildcard
#matches an uppercase letter, plus the lowercase/digit char (if any)
#that precedes it
_cc_re = re.compile("(^|[a-z0-9])([A-Z])")

def cc_to_lu(value):
    """convert camel case (cc) -> lowercase w/ underscore (lu)."""
    #FIXME: this is a quick hack, probably fails in some cases
    #(e.g. runs of capitals such as "HTTPServer")
    def _repl(m):
        prefix = m.group(1)
        lowered = m.group(2).lower()
        return prefix + "_" + lowered if prefix else lowered
    return _cc_re.sub(_repl, value)
+
+#=========================================================
+#string format examination
+#=========================================================
+
#element yielded by parse_fmt_string(): (text, field, spec, conv).
#bugfix: typename was 'TemplateElement', which disagreed with the bound
#name and made reprs confusing; only repr/pickling behavior changes.
FormatElement = namedtuple('FormatElement', 'text field spec conv')
+
+#----------------------------------------------------
+#bps has a pure-python implementation of PEP3101,
+#which should be used only if native isn't available.
+#
+#this imports the native or backport versions of "format"
+#and "Formatter" if possible.
+#
+#also, this defines a publically visible set of functions
+#for parsing fmt strings.
+#----------------------------------------------------
#select native (>=2.6) vs pure-python (<=2.5) PEP3101 support.
#NOTE: (2,6,0,'final',0) compares greater than the bare (2,6) tuple,
#so 2.6.0 itself takes the native branch.
if sys.version_info > (2, 6):
    #running under >= py26, can use native support

    from __builtin__ import format
    from string import Formatter

    def render_format(format_string, *a, **k):
        return format_string.format(*a, **k)

    def parse_fmt_string(format_string):
        #TODO: support resolution of embedded templates?
        #NOTE: _formatter_parser() is the CPython hook that backs
        #string.Formatter.parse()
        for elem in format_string._formatter_parser():
            yield FormatElement(*elem)

    def parse_fmt_field(field_name):
        return field_name._formatter_field_name_split()

    def _get_field_head(field_name):
        #return just the leading argument index / keyword of the field
        return field_name._formatter_field_name_split()[0]

else:
    #use pure-python implementation (python 2.5 and earlier)
    from bps.text._string_format import format, Formatter, \
        _parse_template, _parse_field_name, _formatter

    def render_format(format_string, *args, **kwds):
        return _formatter.format(format_string, *args, **kwds)

    def parse_fmt_string(format_string):
        #TODO: support resolution of embedded templates?
        for elem in _parse_template(format_string):
            yield FormatElement(*elem)

    def parse_fmt_field(field_name):
        #TODO: support resolution of embedded templates?
        return _parse_field_name(field_name)

    def _get_field_head(field_name):
        return _parse_field_name(field_name)[0]

render_format.__doc__ = """renders a format string.

This uses the native string format method if available.
"""

parse_fmt_string.__doc__ = """iterates over the elements
of a {} format template.

Each element returned by the iterator will be a namedtuple of the form::

    (text, field, spec, conv)

Where the elements are defined as follows:

    text
        This will be a (possible empty) string
        containing all the text which came before
        the format directive.

    field
        This will be the name of the field, containing
        any item or attribute accessors. This can
        be parsed by :func:`parse_fmt_field`.
        If the format string has trailing text,
        the last element returned will have ``None`` for the field.

    spec
        The format specifier for the field,
        suitable for passing into the :func:`format` function.
        This is a (possibly empty) string.
        If the format string has trailing text,
        the last element returned will have ``None`` for the spec.

    conv
        Option conversion specifier ("r" or "s"),
        ``None`` if not present.

If any parsing errors occur, a :exc:`ValueError` will be raised.

.. note::
    This function is simply a wrapper for native implementation (if available),
    but under Python 2.5 a pure-python implementation is provided.
"""

parse_fmt_field.__doc__ = """Parses field name as returned by :func:`parse_fmt_string`.

The return value will be a tuple of ``(head, tail)``
where ``head`` is the int / string of the template argument / key to start with,
and ``tail`` is a list of ``(is_attr,value)`` tuples.

.. note::
    *value* may contain embedded template strings.
"""
+
+#----------------------------------------------------
+#quick testing
+#----------------------------------------------------
def _iter_fmt_fields(format_string):
    """iterate over every field head referenced by a format string.

    yields the leading argument index / keyword of each field,
    recursing into templates nested inside accessors and format
    specs (e.g. ``"{a[{b}]:{c}}"`` yields ``"a"``, ``"b"``, ``"c"``).
    """
    stack = [format_string]
    while stack:
        fmt = stack.pop()
        if not fmt:
            continue
        for elem in parse_fmt_string(fmt):
            #XXX: detect and handle "{} {} {}" style fields
            #XXX: should this honor & raise recursion error when >2 deep?
            if elem.field:
                head, tail = parse_fmt_field(elem.field)
                yield head
                for attr, name in tail:
                    #bugfix: only strings can contain nested templates.
                    #parse_fmt_field converts all-digit indexes to ints
                    #(e.g. "{a[1]}" yields 1), and pushing those onto the
                    #stack would crash parse_fmt_string().
                    if isinstance(name, basestring):
                        stack.append(name) #for nested fields
            if elem.spec:
                stack.append(elem.spec) #for nested fields
+
def fmt_has_field(format_string, key):
    "check if string references specified field name"
    for head in _iter_fmt_fields(format_string):
        if head == key:
            return True
    return False
+
def get_fmt_fields(format_string):
    "return set of position arguments and keywords referenced in format string"
    fields = set()
    for head in _iter_fmt_fields(format_string):
        fields.add(head)
    return fields
+
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/text/_string_format.py b/bps/text/_string_format.py
new file mode 100644
index 0000000..eb57b34
--- /dev/null
+++ b/bps/text/_string_format.py
@@ -0,0 +1,742 @@
+"""
+This is a cobbled together implementation of PEP3101, for pre-2.6 python.
+it should *not* be used for 2.6 and on.
+
+It implements a Formatter lifted directly from Python 2.6.1's string.Formatter,
+but with custom pure-python parsers replacing the CPython parsing functions.
+
+Its format() implementation uses code taken from the "historical purposes only"
+implementation stored in PEP3101's sandbox. This could probably use a cleanup.
+
+This module shouldn't be used directly,
+any applicable methods will be imported into bps.text.
+
+.. note::
+ This also contains some parsing code which
+ should be reimplemented so that they use
+ native python implementations when possible.
+"""
+#=========================================================
+#imports
+#=========================================================
+from logging import getLogger
+from array import array
+log = getLogger(__name__)
+import re
+from decimal import Decimal
+
+from math import log as logarithm
+try:
+ import locale
+except:
+ locale = None
+try:
+ import fpformat
+except:
+ fpformat = None
+
+__all__ = [
+ #backported funcs
+ 'format', #equivalent to python builtin
+ 'Formatter', #work-alike for 2.6's string.Formatter
+ ##NOTE: importing bps.text.patch_format will add format method to str & unicode
+
+ #format parsing helpers
+]
+
+from bps.numeric import int_to_base
+
+#=========================================================
+#constants
+#=========================================================
class FormatError(ValueError):
    "base exception for Formatter errors"
    #subclasses ValueError so callers may catch either type
    pass
+
#NOTE: these helpers *return* exception instances - callers must raise them
def EmptyFieldAttr():
    "helper constructing error for an empty attribute (e.g. 'a..b')"
    return FormatError("Empty attribute in format string")

def AltFormError(spec_type):
    "helper constructing error when '#' is used with a type that forbids it"
    return FormatError("Alternate form (#) not allowed in %s format specifier" % spec_type)

def UnknownFormatCodeError(code, value):
    "helper for raising error when ctype is unknown"
    return FormatError("Unknown format code %r for object of type %r" % (code, type(value)))

def InvalidFormatSpecError():
    "helper for raising error when spec malformed"
    return FormatError("Invalid conversion specification")
+
def UnexpectedFieldChar(field_name, ch):
    """helper constructing error for a stray character in a field name.

    like its sibling helpers, this *returns* the exception instance
    for the caller to raise.  (bugfix: it previously returned a
    ``(cls, msg)`` tuple; raising that under python 2 raises the
    class with no arguments, silently discarding the message.)
    """
    return FormatError("unexpected %r character in field %r" % (ch, field_name))
+
+#=========================================================
+#field formatting - taken from StringFormat.py example in PEP3101,
+# then recoded to pass the python 2.6.2 unitests
+#=========================================================
+
def format(value, spec=''):
    """pure-python implementation of python 2.6's builtin format() function"""
    log.debug("format(%r,%r)...", value, spec)

    #class-defined formatter
    #NOTE(review): hasattr() also matches old-style classes; objects
    #without __format__ fall back to the type-dispatched default below
    if hasattr(value, "__format__"):
        result = value.__format__(spec)
    else:
        result = object_format(value, spec)

    #validate the result type, mirroring CPython's own check
    log.debug("... format=%r", result)
    if not isinstance(result, (str, unicode)):
        raise TypeError, "%s.__format__ must return string or unicode, not %r" % (type(value), type(result),)
    return result
+
def object_format(value, spec):
    """helper for unitests, equivalent of object.__format__

    dispatches on the type of *value* to the matching ``*_format()`` helper.
    """

    #TODO: add support for python's datetime, date objects

    #bugfix: bool is a subclass of int, so it must be tested *before*
    #the generic integer branch - previously the int/long test ran
    #first, making the bool branch unreachable and rendering
    #format(True) as "1" instead of "True".
    if isinstance(value, bool):
        return bool_format(value, spec)

    #int formatters
    elif isinstance(value, (int, long)):
        return int_format(value, spec)

    #float formatters
    elif isinstance(value, float):
        return float_format(value, spec)
    elif isinstance(value, Decimal):
        return decimal_format(value, spec)

    #string & fallback formatters
    elif isinstance(value, (str, unicode)):
        return string_format(value, spec)
    else:
        return other_format(value, spec)
+
def other_format(value, spec):
    #fallback for unrecognized types: format the str() rendering as a string
    return string_format(str(value), spec)
+
+#=========================================================
+#string formatting
+#=========================================================
+
def string_format(value, spec):
    """format a str/unicode value according to *spec*; helper for format()"""
    fill, align, sign_fmt, alt, zero, width, prec, ctype = _parse_std_spec(spec)
    if ctype is None:
        ctype = 's' #strings default to 's' presentation
    elif ctype != 's':
        raise UnknownFormatCodeError(ctype, value)
    if zero and fill is None:
        #bare '0' flag acts as a '0' fill char for strings
        fill = '0'
    if sign_fmt:
        raise FormatError, "Sign not allowed in string format specifier"
    if alt:
        raise AltFormError("string")
    if align is None:
        align = "<" #strings left-align by default
    elif align == "=":
        #'=' only makes sense between a numeric sign and its digits
        raise ValueError, "'=' alignment not allowed in string format specifier"
    if prec is not None:
        #clip string (not documented, but py2.6 does it)
        value = value[:prec]
    return _pad_output('', value, fill, align, width)
+
+#=========================================================
+#int/long/bool formatting
+#=========================================================
def bool_format(value, spec):
    """format a bool according to *spec*; helper for format()"""
    fill, align, sign_fmt, alt, zero, width, prec, ctype = _parse_std_spec(spec)
    if ctype is None:
        #no explicit type code: render as the text "True"/"False"
        return other_format(value, spec)
    elif ctype in 'bcdoxXn':
        #integer presentation requested: coerce to 0/1
        value = 1 if value else 0
        return int_format(value, spec)
    elif ctype in 'eEfFgGn%':
        #float presentation requested: coerce to 0.0/1.0
        value = 1.0 if value else 0.0
        return float_format(value, spec)
    else:
        raise UnknownFormatCodeError(ctype, value)
+
def int_format(value, spec):
    """format an int/long according to *spec*; helper for format()"""
    fill, align, sign_fmt, alt, zero, width, prec, ctype = _parse_std_spec(spec)
    if ctype is None:
        ctype = 'd' #integers default to decimal presentation
    elif ctype not in 'bcdoxXn':
        raise UnknownFormatCodeError(ctype, value)
    if align is None:
        align = "=" #numbers pad between sign and digits by default
    if zero and fill is None:
        #FIXME: when 'alt' is enabled,
        # fill, align, and zero interact in a weird way,
        # not quite like a default fill
        fill = '0'
    sign, value = split_sign(value)
    prefix = _get_sign_char(sign, sign_fmt)
    if ctype == 'b':
        result = int_to_base(value, 2)
        if alt:
            prefix += '0b'
    elif ctype == 'c':
        #character presentation: value is an ordinal
        result = chr(value)
    elif ctype == 'd':
        result = '%d' % (value,)
    elif ctype == 'o':
        result = "%o" % (value,)
        if alt:
            prefix += '0o'
    elif ctype == 'x':
        result = "%x" % (value,)
        if alt:
            prefix += '0x'
    elif ctype == 'X':
        result = "%X" % (value,)
        if alt:
            prefix += '0X'
    elif ctype == 'n':
        #locale-aware decimal; plain %d when the locale module is missing
        if locale:
            result = locale.format("%d", value)
        else:
            result = "%d" % (value,)
    else:
        raise AssertionError, "shouldn't be here"
    return _pad_output(prefix, result, fill, align, width)
+
+#=========================================================
+#float / decimal formatting
+#=========================================================
def decimal_format(value, spec):
    #NOTE(review): Decimal is simply delegated to the float path; this
    #loses precision for values that don't round-trip through float
    return float_format(value, spec)
+
def float_format(value, spec):
    """format a float according to *spec*; helper for format()"""
    fill, align, sign_fmt, alt, zero, width, prec, ctype = _parse_std_spec(spec)
    if ctype is None:
        ctype = 'g'
    elif ctype not in 'eEfFgGn%':
        raise UnknownFormatCodeError(ctype, value)
    if zero and fill is None:
        fill = '0'
    if align is None:
        align = "=" #numbers pad between sign and digits by default
    if alt:
        #bugfix: error message previously labeled this the "string"
        #specifier; this is the float path
        raise AltFormError("float")
    sign, value = split_sign(value)
    prefix = _get_sign_char(sign, sign_fmt)
    if ctype == '%':
        #percentage: scale by 100 and render as fixed-point
        #NOTE(review): CPython also appends a literal '%' and defaults
        #precision to 6 here; this implementation omits both - TODO confirm
        ctype = 'f'
        value = value*100.0
    elif ctype == 'n':
        ctype = 'g' #FIXME: this doesn't _quite_ do the same thing (no locale grouping)
    if ctype == 'g' or ctype == 'G':
        p = prec
        if p is None:
            result = str(value)
        else:
            #choose scientific vs fixed notation based on magnitude
            tu = (ctype == 'G')
            if value < 10**-p or value > 10**p:
                ctype = 'e'
            else:
                ctype = 'f'
            if tu:
                ctype = ctype.upper()
    if ctype == 'e' or ctype == 'E':
        if prec is None:
            result = ('%' + ctype) % (value,)
        else:
            result = ("%." + str(prec) + ctype) % (value,) #to_sci
    elif ctype == 'f' or ctype == 'F':
        if prec is None:
            result = str(value)
            if ctype == 'F':
                result = result.upper()
        else:
            result = ("%." + str(prec) + ctype) % (value,) #to_fix
    return _pad_output(prefix, result, fill, align, width)
+
+#=========================================================
+#format helpers
+#=========================================================
def split_sign(val):
    """split number into sign char and positive value"""
    negative = val < 0
    if negative:
        return '-', -val
    return '+', val
+
+def _get_sign_char(sign, sign_fmt):
+ "return correct prefix"
+ if sign == '-':
+ return sign
+ elif sign == '+' and sign_fmt and sign_fmt in '+ ':
+ return sign_fmt
+ return ''
+
+def _pad_output(prefix, result, fill, align, width):
+ "helper for padding & aligning result"
+ if width is not None:
+ padding = width - len(result) - len(prefix)
+ if padding > 0:
+ if fill is None:
+ fill = ' ' #pick a default fillchar
+ if align is None:
+ align = ">" #pick a default align
+ if align == '>':
+ return fill * padding + prefix + result
+ elif align == "^":
+ left = padding//2
+ right = padding-left
+ return fill * left + prefix + result + fill * right
+ elif align == '=':
+ return prefix + fill * padding + result
+ else:
+ assert align == '<'
+ return prefix + result + fill * padding
+ return prefix + result
+
def _parse_std_spec(spec):
    """parse python's standard format specifier.

    described at
    http://docs.python.org/library/string.html#format-specification-mini-language
    the grammar is::
        format_spec ::= [[fill]align][sign][#][0][width][.precision][type]
        fill        ::= <a character other than '}'>
        align       ::= "<" | ">" | "=" | "^"
        sign        ::= "+" | "-" | " "
        width       ::= integer
        precision   ::= integer
        type        ::= "b" | "c" | "d" | "e" | "E" | "f" | "F" | "g" | "G" |
                        "n" | "o" | "x" | "X" | "%"

    this function returns a tuple of:
        (fill, align, sign, alt, zero, width, prec, type)

    any unspecified values are set to ``None``.
    'alt' indicates the presence of the hash mark (#)
    """
    fill_char = None
    align = None
    sign = None
    alt = None
    zero = None
    width = None
    precision = None
    ctype = None

    spec_len = len(spec)

    # If the second char is an alignment token,
    # then parse the fill char
    if spec_len >=2 and spec[ 1 ] in '<>=^':
        fill_char = spec[ 0 ]
        align = spec[ 1 ]
        index = 2
    # Otherwise, parse the alignment token
    elif spec_len >= 1 and spec[ 0 ] in '<>=^':
        align = spec[ 0 ]
        index = 1
    else:
        index = 0

    # Parse the sign char
    if index < spec_len and spec[ index ] in ' +-':
        sign = spec[ index ]
        index += 1

    # The special case for '#' (only used for integers)
    if index < spec_len and spec[index] == '#':
        alt = True
        index += 1

    # The special case for 0-padding
    if index < spec_len and spec [ index ] == '0':
        zero = True
        #NOTE: strings treat this like a fill_char='0',
        #but ints treat this slightly differently, IF # is enabled.
        index += 1

    # Parse field width
    saveindex = index
    while index < spec_len and spec[index].isdigit():
        index += 1

    if index > saveindex:
        width = int(spec[saveindex : index])

    # Parse field precision
    if index < spec_len and spec[index] == '.':
        index += 1
        saveindex = index
        while index < spec_len and spec[index].isdigit():
            index += 1
        if index > saveindex:
            precision = int(spec[saveindex:index])

    # Finally, parse the type field
    if index < spec_len:
        # anything left must be exactly one trailing type character
        if index < spec_len-1:
            raise InvalidFormatSpecError()
        ctype = spec[index]

    log.debug("_parse_std_spec(%r) => fill=%r align=%r sign=%r alt=%r zero=%r width=%r prec=%r type=%r",
        spec, fill_char, align, sign, alt, zero, width, precision, ctype)
    return fill_char, align, sign, alt, zero, width, precision, ctype
+
+#---------------------------------------------------
+#helpers for numeric formatting
+#---------------------------------------------------
+
+##if fpformat:
+## to_sci = fpformat.sci
+## to_fix = fpformat.fix
+##else:
+## def to_sci(val,precision):
+## """Pure python implementation of the C printf 'e' format specificer"""
+## # Split into sign and magnitude (not really needed for formatting
+## # since we already did this part. Mainly here in case 'sci'
+## # ever gets split out as an independent function.)
+## sign = ''
+## if val < 0:
+## sign = '-'
+## val = -val
+##
+## # Calculate the exponent
+## exp = int(floor(logarithm(val,10)))
+##
+## # Normalize the value
+## val *= 10**-exp
+##
+## # If the value is exactly an integer, then we don't want to
+## # print *any* decimal digits, regardless of precision
+## if val == floor(val):
+## val = int(val)
+## else:
+## # Otherwise, round it based on precision
+## val = round(val,precision)
+## # The rounding operation might have increased the
+## # number to where it is no longer normalized, if so
+## # then adjust the exponent.
+## if val >= 10.0:
+## exp += 1
+## val = val * 0.1
+##
+## # Convert the exponent to a string using only str().
+## # The existing C printf always prints at least 2 digits.
+## esign = '+'
+## if exp < 0:
+## exp = -exp
+## esign = '-'
+## if exp < 10: exp = '0' + str(exp)
+## else: exp = str(exp)
+##
+## # The final result
+## return sign + str(val) + 'e' + esign + exp
+##
+## def to_fix(value, precision):
+## #FIXME: implement this!
+## return str(value)
+
+#=========================================================
+#template formatting
+#=========================================================
class Formatter(object):
    """Formatter, taken directly from python 2.6.1.
    the only change is that the CPython hooks have been replaced
    by python code"""

    def format(self, format_string, *args, **kwargs):
        "render *format_string* against positional & keyword arguments"
        return self.vformat(format_string, args, kwargs)

    def vformat(self, format_string, args, kwargs):
        "render against pre-packed args/kwargs, tracking which args were used"
        used_args = set()
        result = self._vformat(format_string, args, kwargs, used_args, 2)
        self.check_unused_args(used_args, args, kwargs)
        return result

    def _vformat(self, format_string, args, kwargs, used_args, recursion_depth):
        #recursive worker; recursion_depth bounds nested format specs
        if not format_string: #accelerate the simple cases
            if isinstance(format_string, unicode):
                return u''
            else:
                return ''
        if recursion_depth <= 0:
            raise ValueError('Max string recursion exceeded')
        result = []
        for literal_text, field_name, format_spec, conversion in self.parse(format_string):

            # output the literal text
            if literal_text:
                result.append(literal_text)

            # if there's a field, output it
            if field_name is not None:
                # this is some markup, find the object and do
                # the formatting

                # given the field_name, find the object it references
                # and the argument it came from
                obj, arg_used = self.get_field(field_name, args, kwargs)
                used_args.add(arg_used)

                # do any conversion on the resulting object
                obj = self.convert_field(obj, conversion)

                # expand the format spec, if needed
                format_spec = self._vformat(format_spec, args, kwargs,
                                            used_args, recursion_depth-1)
                # format the object and append to the result
                text = self.format_field(obj, format_spec)
                if not isinstance(format_string, unicode) and isinstance(text, unicode):
                    text = str(text)
                result.append(text)

        return ''.join(result)

    def get_value(self, key, args, kwargs):
        "resolve a field head: ints index args, everything else keys kwargs"
        if isinstance(key, (int, long)):
            return args[key]
        else:
            return kwargs[key]

    def check_unused_args(self, used_args, args, kwargs):
        "hook for subclasses to complain about unreferenced arguments"
        pass

    def format_field(self, value, format_spec):
        "hook: format a single resolved value"
        return format(value, format_spec)

    def convert_field(self, value, conversion):
        "apply a '!r' / '!s' conversion flag to a resolved value"
        if conversion == 'r':
            return repr(value)
        elif conversion == 's':
            return str(value)
        elif conversion is None:
            return value
        #bugfix: error message previously misspelled "conversion"
        raise ValueError("Unknown conversion specifier %s" % (conversion,))

    def parse(self, format_string):
        "hook: split the template into (literal, field, spec, conv) chunks"
        return _parse_template(format_string)

    def get_field(self, field_name, args, kwargs):
        "resolve a (possibly dotted / indexed) field name to its object"
        first, rest = _parse_field_name(field_name)

        obj = self.get_value(first, args, kwargs)

        # loop through the rest of the field_name, doing
        # getattr or getitem as needed
        if rest: #save us creating a empty iterable
            for is_attr, i in rest:
                if is_attr:
                    obj = getattr(obj, i)
                else:
                    obj = obj[i]

        return obj, first
+
+#---------------------------------------------------
+#pure-python replacement parsers
+#---------------------------------------------------
def _parse_template(format_string):
    "parse template into chunks of (literal_text, field_name, format_spec, conversion)"
    #hand-rolled state machine: characters accumulate in 'buffer',
    #and render() converts the buffer back to str/unicode as needed
    buffer, render = _make_array_like(format_string)
    if isinstance(format_string, unicode):
        umap = unicode
    else:
        umap = lambda x: x
    state = 0
    depth = 0 #used by spec (tracks nested "{...}" inside a field/spec)
    head = None
    field_name = None
    conversion = None
    format_spec = None
    for ch in format_string:
        #
        #text parsing states
        #
        if state == 0: #just reading text into buffer
            if ch == "{":
                state = 1
            elif ch == "}":
                state = 2
            else:
                buffer.append(ch)

        elif state == 1: #just saw a single "{"
            if ch == "{": #unescape the "{{"
                buffer.append(umap("{"))
                state = 0
            elif ch == "}": # "{}" isn't allowed
                raise FormatError, "empty field specifier in %r" % (format_string,)
            elif ch == "!":
                raise FormatError, "end of format while looking for conversion specifier"
            elif ch == ":":
                raise FormatError, "zero length field name in format"
            else: #assume we're looking at a field name
                head = render()
                del buffer[:]
                buffer.append(ch)
                state = 3

        elif state == 2: #just saw a single "}"
            if ch == "}": #unescape the "}}"
                buffer.append(umap("}"))
                state = 0
            else:
                raise FormatError, "unmatched closing brace in %r" % (format_string,)

        #
        #field parsing states
        #
        elif state == 3: #parsing field name, at least 1 char in buffer, head is set
            if ch == "}": #field is entirely over
                if depth == 0: #end field
                    yield head, render(), None, None
                    del buffer[:]
                    state = 0
                else: #end a nested {}
                    depth -= 1
                    buffer.append(ch)
            elif ch == "{": #start a nested {}
                depth += 1
                buffer.append(ch)
            elif ch == "!": #begin conversion section
                field_name = render()
                del buffer[:]
                state = 4
            elif ch == ":": #begin spec
                field_name = render()
                conversion = None #skipping this field
                del buffer[:]
                depth = 0
                state = 5
            else: #assume it's part of field name
                buffer.append(ch)

        elif state == 4: #parsing conversion section, head & field_name are set
            if ch == ":": #end conversion, begin spec
                conversion = render()
                del buffer[:]
                depth = 0
                state = 5
            elif ch == "}": #end field
                yield head, field_name, None, render()
                del buffer[:]
                state = 0
            else: #add a char
                buffer.append(ch)

        elif state == 5: #parsing spec, head & field_name & conversion are set
            if ch == "}":
                if depth == 0: #end field
                    yield head, field_name, render(), conversion
                    del buffer[:]
                    state = 0
                else: #end a nested {}
                    depth -= 1
                    buffer.append(ch)
            elif ch == "{": #start a nested {}
                depth += 1
                buffer.append(ch)
            else: #add a char
                buffer.append(ch)

    #end parser loop - flush trailing text / report dangling fields
    if state == 0: #general text
        head = render()
        if head:
            yield head, None, None, None
    elif state == 1: #"{"
        raise FormatError, "unmatched open brace at end of %r" % (format_string,)
    elif state == 2: #"}"
        raise FormatError, "unmatched close brace at end of %r" % (format_string,)
    else: #states 3-5
        raise FormatError, "unfinished field at end of %r" % (format_string,)
+
def _parse_field_name(field_name):
    "parse field name in head, (is_attr,value)*"
    offset = field_name.find(".") #look for ATTR
    if offset == -1:
        offset = field_name.find("[") #look for IDX
        if offset == -1: #neither were found, we have plain field name
            if field_name.isdigit():
                #all-digit head is a positional argument index
                field_name = int(field_name)
            return field_name, []
    else: #found ATTR, check for closer IDX
        alt = field_name.find("[", 0, offset)
        if alt != -1:
            #an index accessor appears before the first dot; split there
            assert alt < offset
            offset = alt
    head = field_name[:offset]
    if head.isdigit():
        head = int(head)
    return head, _parse_field_tail(field_name, offset)
+
def _parse_field_tail(field_name, offset):
    "helper for _parse_field_name"
    #generator yielding (is_attr, value) for each ".attr" / "[idx]"
    #accessor found in field_name starting at *offset*
    state = 0
    buffer, render = _make_array_like(field_name)
    for ch in field_name[offset:]:
        if state == 0: #expecting either ATTR or IDX
            assert not buffer, "buffer should be empty"
            if ch == '.': #start parsing ATTR
                state = 1
            elif ch == '[': #start parsing IDX
                state = 2
            else:
                raise UnexpectedFieldChar(field_name, ch) #not sure how to get here
        elif state == 1: #parsing ATTR
            if ch == '.': #flush, start parsing new ATTR
                x = render()
                if not x:
                    raise EmptyFieldAttr()
                yield True, x
                del buffer[:]
            elif ch == '[': #flush, start parsing IDX
                x = render()
                if not x:
                    raise EmptyFieldAttr()
                yield True, x
                del buffer[:]
                state = 2
            else:
                buffer.append(ch)

        else: #parsing IDX
            if ch == ']': #flush, return to state 0
                text = render()
                if text.isdigit():
                    #all-digit index becomes an int key
                    text = int(text)
                yield False, text
                del buffer[:]
                state = 0
            else:
                buffer.append(ch)
    #end of input - make sure nothing was left unterminated
    if state == 0: #nothing to do
        assert not buffer, "buffer should be empty"
    elif state == 1: #flush last attr
        x = render()
        if not x:
            raise EmptyFieldAttr()
        yield True, x
    else:
        raise FormatError, "unmatched open bracket in field %r" % (field_name,)
+
def _make_array_like(source):
    """Use unicode array if the original string is unicode, else use string array

    returns ``(buffer, render)``, where *render* converts the buffer's
    current contents back into the source's string type.
    """
    if isinstance(source, unicode):
        arr = array('u')
        return arr, arr.tounicode
    else:
        #'c' (char) typecode only exists under python 2
        arr = array('c')
        return arr, arr.tostring
+
+#=========================================================
+#helper functions
+#=========================================================
#module-level singleton shared by the wrapper functions in bps.text
_formatter = Formatter() #the default formatter used by this module
+
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/text/patch_format.py b/bps/text/patch_format.py
new file mode 100644
index 0000000..4e95ee2
--- /dev/null
+++ b/bps/text/patch_format.py
@@ -0,0 +1,97 @@
+"""bps.text.patch_format -- automatically patches python 2.5 to support format()"""
+#=========================================================
+#patch python
+#=========================================================
+if not hasattr('', 'format'): #py26 at on will have proper implemenation
+ from bps.text._string_format import format, _formatter
+
+ __builtins__['format'] = format
+
+ def patch_string_types():
+ """insert a format() method to the builtin str & unicode types.
+
+ credit the following blog for this bit of ctypes evil :)
+ http://comex.wordpress.com/2009/01/19/how-to-add-methods-to-a-python-built-in-type/
+
+ =======================
+ WARNING WARNING WARNING
+ =======================
+ Seriously, monkeypatching the builtin types just shouldn't be done.
+ if you came this far, and want to use this code yourself,
+ PLEASE choose another route. If such behavior became prevalent,
+ the core types would become unpredictable in all kinds of ways,
+ and down that road lies madness. The only reason we *barely* have
+ an excuse in this case:
+ * It's backward compatible with python 2.5,
+ there was no format method on str or anything else to conflict with.
+ * It's a Python 2.6 feature, so nothing is being done to the namespace
+ of str or unicode which GvR didn't approve... He just didn't
+ approve it for Python 2.5 :)
+ * BPS tries to provide a reasonably faithful replica of Python 2.6's format method.
+ Any deviations from it will be patched as soon as they are found,
+ so 2.5 users should not come to rely on non-standard behavior.
+ * If the code you're patching in does not satisfy ALL of the above conditions,
+ please don't monkeypatch the builtin types!
+ * Though if you do get this far, we'll be happy to rework this code
+ so you can use BPS to handle the grunt work, so there's only
+ one implementation of the ctypes patcher floating around.
+
+ .. todo::
+ If this gets deployed under Jython or something else,
+ we'll need to use a different patching strategy.
+ """
+ from ctypes import pythonapi, Structure, c_long, c_char_p, POINTER, py_object
+
+ class py_type(Structure):
+ _fields_ = [
+ # 1, type, zero,
+ ('ob_refcnt', c_long),
+ ('ob_type', POINTER(c_long)), # could be different
+ ('ob_size', c_long), # size
+ ('name', c_char_p),
+ ('tp_basicsize', c_long),
+ ('tp_itemsize', c_long),
+ ('tp_dealloc', POINTER(c_long)),
+ ('tp_print', POINTER(c_long)),
+ ('tp_getattr', POINTER(c_long)),
+ ('tp_getattr', POINTER(c_long)),
+ ('tp_compare', POINTER(c_long)),
+ ('tp_repr', POINTER(c_long)),
+ ('tp_as_number', POINTER(c_long)),
+ ('tp_as_sequence', POINTER(c_long)),
+ ('tp_as_mapping', POINTER(c_long)),
+ ('tp_hash', POINTER(c_long)),
+ ('tp_call', POINTER(c_long)),
+ ('tp_str', POINTER(c_long)),
+ ('getattrofunc', POINTER(c_long)),
+ ('setattrofunc', POINTER(c_long)),
+ ('tp_as_buffer', POINTER(c_long)),
+ ('tp_flags', c_long),
+ ('tp_doc', c_char_p),
+ ('tp_traverse', POINTER(c_long)),
+ ('tp_clear', POINTER(c_long)),
+ ('tp_richcompare', POINTER(c_long)),
+ ('tp_weaklistoffset', POINTER(c_long)),
+ ('tp_iter', POINTER(c_long)),
+ ('tp_iternext', POINTER(c_long)),
+ ('tp_methods', POINTER(c_long)),
+ ('tp_members', POINTER(c_long)),
+ ('tp_getset', POINTER(c_long)),
+ ('tp_base', POINTER(c_long)),
+ ('tp_dict', py_object)
+
+ ]
+
+ render = _formatter.format
+ def wrapper(self, *args, **kwds):
+ """pure-python implementation of python 2.6's str.format() method, provided by BPS"""
+ return render(self, *args, **kwds)
+ wrapper.__name__ = "format"
+
+ po = py_type.in_dll(pythonapi, "PyString_Type")
+ po.tp_dict['format'] = wrapper
+
+ po = py_type.in_dll(pythonapi, "PyUnicode_Type")
+ po.tp_dict['format'] = wrapper
+
+ patch_string_types()
diff --git a/bps/types.py b/bps/types.py
new file mode 100644
index 0000000..3c8c48c
--- /dev/null
+++ b/bps/types.py
@@ -0,0 +1,732 @@
+"""bps.types -- helper datatypes
+"""
+#=========================================================
+#imports
+#=========================================================
+#core
+import sys
+import time
+from logging import getLogger; log = getLogger(__name__)
+#pkg
+from bps.meta import instrument_super, is_str
+from bps.undef import Undef
+from bps.warndep import deprecated_method, relocated_function
+#local
+__all__ = [
+ 'BaseMetaClass', 'BaseClass',
+ 'stub',
+ 'namedtuple',
+ 'CustomDict', 'OrderedDict',
+]
+
+#provide a 'bytes' alias for 2.5 (2.6 already has one)
+try:
+ bytes
+except NameError:
+ bytes = str
+
+#=========================================================
+#BaseClass
+#=========================================================
class BaseMetaClass(type):
    """meta class which provides some useful class behaviors.
    see `BaseClass` for details.
    """
    def __init__(self, name, bases, kwds):
        #NOTE: 'self' here is the newly created *class*, not an instance.
        #init parent stuff
        type.__init__(self,name,bases, kwds)

        #fill in __super descriptor so subclasses can use self.__super
        instrument_super(self)

        #call __initclass__ if defined directly in this class's body
        if '__initclass__' in kwds:
            kwds['__initclass__'](self)

        #does this class defined __initsubclass__?
        if '__initsubclass__' in kwds:
            #make sure it's stored as a classmethod
            value = kwds['__initsubclass__']
            if not isinstance(value, classmethod):
                self.__initsubclass__ = classmethod(value)
            #now call parent's __initsubclass__ if any
            # (defining our own shadows the parent's via normal lookup,
            #  so it must be invoked explicitly through super)
            parent = super(self,self)
            if hasattr(parent, "__initsubclass__"):
                parent.__initsubclass__()
        #else check if any of this class's parents defined __initsubclass__
        elif hasattr(self, '__initsubclass__'):
            self.__initsubclass__()

##    def __repr__(self):
##        return "<class '%s.%s' id=%r>" % (self.__module__, self.__name__, id(self))
+
+
class BaseClass(object):
    """Useful base class to inherit from, as a replacement for :class:`object`.

    Inheriting from this class provides three peices of magic behavior,
    courtesy of the metaclass :class:`BaseMetaClass`:

    * every subclass of BaseClass is provided with a ``self.__super`` attribute which can take
      the place of having to call super(cls,self) all the time.

    * the method ``cls.__initclass__()`` method will be invoked (if present) after a
      class is created, for doing additional runtime initialization.

    * the method ``cls.__initsubclass__()`` method will be invoked (if present)
      for every subclass of cls which is created.

    .. note::
        NOTE: __super.__initsubclass__ must be called to invoke a parent's
        __initsubclass__ method, if it has one, they will not be chained.

    .. todo::
        Some usage examples, especially to illustrate __initsubclass__.
    """
    #=========================================================
    #class attrs
    #=========================================================
    __metaclass__ = BaseMetaClass
    #policy for handling stray constructor args under py2.6+:
    # "log" (report & discard), "ignore" (discard), or "preserve" (pass through)
    #name-mangled to _BaseClass__extraneous_policy so subclasses can override it
    __extraneous_policy = "log" #XXX: may change default policy soon, now that non-conforming apps have this option
    __super = None #filled in per-class by instrument_super() via the metaclass

    if sys.version_info >= (2, 6):
        #HACK:
        # We were hit hard by 2.6's requirement that object.__init__()
        # never get passed arguments. This is a stopgap until our
        # code finally gets in compliance.
        # (though it may be left available as a optional flag in the future)

        def __init__(self, *args, **kwds):
            if args or kwds:
                #check if we're last before object, ie nothing further
                #in the MRO could consume the extra arguments
                if self.__class__.__mro__[-2] is BaseClass:
                    assert self.__class__.__mro__[-1] is object
                    #and if so, discard args and kwds
                    #these really are a sign of bad code
                    policy = self.__extraneous_policy
                    if policy == "log":
                        log.error("extraneous arguments passed to BaseClass: cls=%r args=%r kwds=%r",
                                  self.__class__, args, kwds)
                        self.__super.__init__()
                        return
                    elif policy == "ignore":
                        self.__super.__init__()
                        return
                    elif policy != "preserve":
                        raise ValueError, "bad _BaseClass__extraneous_policy value for %r: %r" % (self, policy)
            self.__super.__init__(*args, **kwds)

    #=========================================================
    #EOC BaseClass
    #=========================================================
+
+#=========================================================
+#closeable
+#=========================================================
class CloseableClass(BaseClass):
    """Represents an object which has resources that will need freeing when it's closed.

    This class provides a closed attribute, a close method,
    and methods for attaching callbacks which will be invoked
    when close is called. This is especially useful for objects
    which representing a external resource which needs to be freed
    at a certain time.

    .. todo::
        Document methods & examples
    """
    #=========================================================
    #instance attrs
    #=========================================================
    __closed = False #private flag indicating object has been "closed"
    __closing = False #private flag indicating close() method is working
    __closehooks = None #callbacks, stored as list of (func,args,kwds) tuples

    #=========================================================
    #internals
    #=========================================================

    def __del__(self):
        #when object is GC'd, make sure to close it.
        #FIXME: if attached hooks are methods of object,
        # we'll never get here due to cyclic problem :(
        self.close()

    #simulate a ".closed" attribute by proxying __closed
    class _Closed_Property(object):
        """Indicates whether :meth:`close` has been called for this object.

        This is a readonly boolean property.
        """
        def __get__(self, obj, cls):
            #class-level access returns the descriptor itself;
            #instance access reads the name-mangled __closed flag
            if obj is None: return self
            else: return obj._CloseableClass__closed

        def __set__(self, obj, value):
            raise AttributeError, "'closed' attribute is read-only"
    closed = _Closed_Property()

    def __purge_helper(self, *attrs):
        "helper used by delete_on_close; sets each named attr to None"
        for attr in attrs:
            #XXX: should we use hasattr/delattr?
            setattr(self, attr, None)

    #=========================================================
    #shared methods
    #=========================================================
    def _cleanup(self):
        "subclass this to add cleanup functions w/o using on_close()"
        pass

    #=========================================================
    #public methods
    #=========================================================
    def close(self):
        """close this object, and any attached resources.

        returns True the first time the object is successfully closed,
        False if it was already closed, and None if a recursive call
        is detected while closing is already in progress.
        """
        if self.__closed:
            return False
        if self.__closing:
            #a close hook (or _cleanup) called close() again; ignore it
            log.warning("ignoring recursive call to CloseableClass.close()")
            return None
        self.__closing = True
        try:
            if self.__closehooks:
                #XXX: might make things more resilient to purge callbacks as they're run
                for func,args,kwds in self.__closehooks:
                    assert callable(func)
                    func(*args,**kwds)
                self.__closehooks = None
            self._cleanup()
        finally:
            #object is considered closed even if a hook raised
            self.__closing = False
            self.__closed = True
        return True

    def on_close(self, func, *args, **kwds):
        """register a callback to invoke at cleanup time.

        ``func`` should be a callable, and will be invoked as ``func(*args, **kwds)``.

        Callbacks are run in LIFO order.

        Exceptions raised by ``func`` will not be caught,
        and it's return value will be ignored.
        """
        assert not self.__closed
        if self.__closehooks is None:
            self.__closehooks = []
        assert callable(func)
        #insert at front so hooks run in LIFO order
        self.__closehooks.insert(0,(func,args,kwds))

    def delete_on_close(self, *attrs):
        """
        on close, set the specified attrs to ``None`` to free any references
        """
        #NOTE: this uses a callback so that attrs are deleted in order
        # along with any callbacks, in case class relies on exact ordering.
        assert not self.__closed
        if len(attrs) == 0:
            return
        if self.__closehooks is None:
            self.__closehooks = []
        self.__closehooks.insert(0,(self.__purge_helper,attrs, {}))

    #old name for delete_on_close, kept for backward compatibility
    purge_on_close = deprecated_method("purge_on_close", delete_on_close)

    #=========================================================
    #EOC
    #=========================================================
+
+#=========================================================
+#helper classes
+#=========================================================
class stub(BaseClass):
    """create an anonymous object:

    * Any kwds passed to constructor are set as attributes.
    * All attribute are read-write.
    * There are no default attributes.

    For when even namedtuple isn't quick-n-dirty enough.

    The reserved kwds ``_stub__repr`` / ``_stub__str`` (via the
    name-mangled class attrs below) may be used to override the
    repr/str of an instance.
    """
    def __init__(self, **kwds):
        for k,v in kwds.iteritems():
            setattr(self, k, v)
        self.__super.__init__()

    #optional repr override: None -> default object repr,
    #a string -> used verbatim, anything else -> coerced via unicode()
    __repr = None
    def __repr__(self):
        value = self.__repr
        if value is None:
            return object.__repr__(self)
        elif is_str(value):
            return value
        else:
            return unicode(value)

    #optional str override, same semantics as __repr above
    __str = None
    def __str__(self):
        value = self.__str
        if value is None:
            return object.__str__(self)
        elif is_str(value):
            return value
        else:
            return unicode(value)
+
+#=========================================================
+#backports
+#=========================================================
try:
    from collections import namedtuple #use native version if available (py2.6+)
except ImportError:
    #
    # this class taken directly from the Python 2.6.2 source,
    # so py2.5 installs get identical behavior.
    # do not modify except to re-sync with the stdlib version.
    #
    from keyword import iskeyword as _iskeyword
    from operator import itemgetter as _itemgetter
    import sys as _sys
    def namedtuple(typename, field_names, verbose=False):
        """Returns a new subclass of tuple with named fields.
        [backported from python 2.6.2]

        >>> Point = namedtuple('Point', 'x y')
        >>> Point.__doc__                   # docstring for the new class
        'Point(x, y)'
        >>> p = Point(11, y=22)             # instantiate with positional args or keywords
        >>> p[0] + p[1]                     # indexable like a plain tuple
        33
        >>> x, y = p                        # unpack like a regular tuple
        >>> x, y
        (11, 22)
        >>> p.x + p.y                       # fields also accessable by name
        33
        >>> d = p._asdict()                 # convert to a dictionary
        >>> d['x']
        11
        >>> Point(**d)                      # convert from a dictionary
        Point(x=11, y=22)
        >>> p._replace(x=100)               # _replace() is like str.replace() but targets named fields
        Point(x=100, y=22)

        """

        # Parse and validate the field names.  Validation serves two purposes,
        # generating informative error messages and preventing template injection attacks.
        if isinstance(field_names, basestring):
            field_names = field_names.replace(',', ' ').split() # names separated by whitespace and/or commas
        field_names = tuple(map(str, field_names))
        for name in (typename,) + field_names:
            if not all(c.isalnum() or c=='_' for c in name):
                raise ValueError('Type names and field names can only contain alphanumeric characters and underscores: %r' % name)
            if _iskeyword(name):
                raise ValueError('Type names and field names cannot be a keyword: %r' % name)
            if name[0].isdigit():
                raise ValueError('Type names and field names cannot start with a number: %r' % name)
        seen_names = set()
        for name in field_names:
            if name.startswith('_'):
                raise ValueError('Field names cannot start with an underscore: %r' % name)
            if name in seen_names:
                raise ValueError('Encountered duplicate field name: %r' % name)
            seen_names.add(name)

        # Create and fill-in the class template
        numfields = len(field_names)
        argtxt = repr(field_names).replace("'", "")[1:-1]   # tuple repr without parens or quotes
        reprtxt = ', '.join('%s=%%r' % name for name in field_names)
        dicttxt = ', '.join('%r: t[%d]' % (name, pos) for pos, name in enumerate(field_names))
        template = '''class %(typename)s(tuple):
        '%(typename)s(%(argtxt)s)' \n
        __slots__ = () \n
        _fields = %(field_names)r \n
        def __new__(cls, %(argtxt)s):
            return tuple.__new__(cls, (%(argtxt)s)) \n
        @classmethod
        def _make(cls, iterable, new=tuple.__new__, len=len):
            'Make a new %(typename)s object from a sequence or iterable'
            result = new(cls, iterable)
            if len(result) != %(numfields)d:
                raise TypeError('Expected %(numfields)d arguments, got %%d' %% len(result))
            return result \n
        def __repr__(self):
            return '%(typename)s(%(reprtxt)s)' %% self \n
        def _asdict(t):
            'Return a new dict which maps field names to their values'
            return {%(dicttxt)s} \n
        def _replace(self, **kwds):
            'Return a new %(typename)s object replacing specified fields with new values'
            result = self._make(map(kwds.pop, %(field_names)r, self))
            if kwds:
                raise ValueError('Got unexpected field names: %%r' %% kwds.keys())
            return result \n
        def __getnewargs__(self):
            return tuple(self) \n\n''' % locals()
        for i, name in enumerate(field_names):
            template += '        %s = property(itemgetter(%d))\n' % (name, i)
        if verbose:
            print template

        # Execute the template string in a temporary namespace and
        # support tracing utilities by setting a value for frame.f_globals['__name__']
        namespace = dict(itemgetter=_itemgetter, __name__='namedtuple_%s' % typename)
        try:
            exec template in namespace
        except SyntaxError, e:
            raise SyntaxError(str(e) + ':\n' + template)
        result = namespace[typename]

        # For pickling to work, the __module__ variable needs to be set to the frame
        # where the named tuple is created.  Bypass this step in enviroments where
        # sys._getframe is not defined (Jython for example).
        if hasattr(_sys, '_getframe'):
            result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__')

        return result
+
+#=========================================================
+#CustomDict
+#=========================================================
+#TODO: would like to clean this up a lot more before exposing it
+
class CustomDict(dict):
    """This is basically a lift of the DictMixin code,
    but inheriting from dict itself, so that object is
    a newstyle class.

    It attempts to provide an easy
    framework for creating custom dict objects with
    arbitrary behavior, by providing useful defaults
    for most of the dictionary methods such that
    only a few methods need to be overidden.

    .. todo::
        * document the layers system, what methods
          are required, may be overridden, and probably shouldn't be.
          currently all of that is in the source code.
    """
    #=========================================================
    #init
    #=========================================================
    def __init__(self, *args, **kwds):
##        #use underlying dict's clear method if __delitem__ isn't overridden
##        #and neither is clear()
##        if (
##            (getattr(self.__delitem__, "im_func", None) == dict.__delitem__)
##            and
##            (getattr(self.clear, "im_func", None) == CustomDict.clear)
##        ):
##            self.clear = self.__raw_clear
        #init object - we pass everything through update() so that
        # custom update / setitem calls get to handle everything.
        dict.__init__(self)
        if args or kwds:
            self.update(*args, **kwds)


    #=========================================================
    #item methods [layer 1]
    # dict defaults can be used,
    # but you'll probably want to override them
    #=========================================================

    #def __getitem__(self, key):
    #def __setitem__(self, key, value):
    #def __delitem__(self, key):
    #def __contains__(self, key):

    #=========================================================
    #length methods [layer 2]
    # either leave them alone,
    # or overide both of them.
    #=========================================================
    #def __iter__(self):
    #def keys(self):

    #TODO: could auto-detect override of single, and patch

    #=========================================================
    #basic iterators [layer 3]
    # you can override these one-by-one if your
    # subclass could do one of them more efficiently,
    # but the defaults are usually sufficient.
    # (the dict default methods are shadowed since
    # these will respect whatever you do to the item methods)
    #=========================================================
    def iteritems(self):
        "iterate over (key, value) pairs, honoring any custom __getitem__"
        for k in self:
            yield (k, self[k])

    def items(self):
        "list of (key, value) pairs, honoring any custom __getitem__"
        return [ (k, self[k]) for k in self ]

    def itervalues(self):
        "iterate over values, honoring any custom __getitem__"
        for k in self:
            yield self[k]

    def values(self):
        "list of values, honoring any custom __getitem__"
        return [ self[k] for k in self ]

    def clear(self):
        "remove all keys, one __delitem__ call at a time"
        for key in self.keys():
            del self[key]

    #=========================================================
    #other methods [layer 4]
    # you may override these one-by-one if like,
    # but for almost all use-cases, the provided
    # defaults will allow efficient behavior
    # using the implementations of the lower layers
    #=========================================================
    def iterkeys(self):
        return self.__iter__()

    def has_key(self, key):
        return self.__contains__(key)

    def get(self, key, default=None):
        "return self[key], or *default* when key is missing"
        try:
            return self[key]
        except KeyError:
            return default

    def setdefault(self, key, default=None):
        "return self[key], storing & returning *default* when key is missing"
        try:
            return self[key]
        except KeyError:
            self[key] = default
            return default

    def pop(self, key, *args):
        "remove key and return its value; optional second arg is the fallback"
        if len(args) > 1:
            raise TypeError, "pop expected at most 2 arguments, got %d" % (1+len(args),)
        try:
            value = self[key]
        except KeyError:
            if args: return args[0]
            else: raise
        del self[key]
        return value

    def popitem(self):
        "remove and return an arbitrary (key, value) pair"
        try:
            k, v = self.iteritems().next()
        except StopIteration:
            raise KeyError, 'container is empty'
        del self[k]
        return (k, v)

    def update(self, *args, **kwds):
        "dict.update() equivalent, routed through our __setitem__"
        if args:
            if len (args) > 1:
                raise ValueError, "update expected at most 1 positional argument, got %d" % (len(args),)
            other = args[0]
            # Make progressively weaker assumptions about "other"
            if hasattr(other, 'iteritems'):  # iteritems saves memory and lookups
                for k, v in other.iteritems():
                    self[k] = v
            elif hasattr(other, 'keys'):
                for k in other.keys():
                    self[k] = other[k]
            else:
                for k, v in other:
                    self[k] = v
        if kwds:
            for k,v in kwds.iteritems():
                self[k] = v

##    def __repr__(self):
##        return repr(dict(self.iteritems()))

##    def __cmp__(self, other):
##        if other is None:
##            return 1
##        if isinstance(other, DictMixin):
##            other = dict(other.iteritems())
##        return cmp(dict(self.iteritems()), other)

    def __len__(self):
        return len(self.keys())

    #=========================================================
    #some helpers for use by subclasses
    #=========================================================
    def _raw_get(self, key, value=None):
        """return value of underlying dictionary.

        if *key* is missing, *value* is returned, unless *value* is
        ``Undef``, in which case the KeyError propagates.

        NOTE(review): with the default of ``None``, the bare
        ``_raw_get(key)`` call can never reach the Undef/raise branch;
        the default was probably intended to be ``Undef`` -- confirm
        against callers before changing.
        """
        if value is Undef:
            return dict.__getitem__(self, key)
        else:
            try:
                return dict.__getitem__(self, key)
            except KeyError:
                return value

    def _raw_set(self, key, value):
        "set value of underlying dictionary, bypassing custom __setitem__"
        return dict.__setitem__(self, key, value)

    def _raw_del(self, key):
        "delete value of underlying dictionary, bypassing custom __delitem__"
        return dict.__delitem__(self, key)

    def _raw_clear(self):
        "clear underlying dictionary, bypassing custom __delitem__/clear"
        return dict.clear(self)

    #=========================================================
    #EOC
    #=========================================================
+
+#=========================================================
+#ordered dictionary
+#=========================================================
class OrderedDict(CustomDict):
    """dictionary that preserves order of keys.

    .. todo::
        * Document this better
        * implement reorder() method
        * compare to python's new ordereddict, implement any missing features
    """
    #=========================================================
    #instance attrs
    #=========================================================
    __keys = None #internal list object storing canonical ordering of keys

    #=========================================================
    #init
    #=========================================================
    def __init__(self, *args, **kwds):
        #TODO: support initial key order from inputs
        self.__keys = [] #list containing key ordering
        #NOTE: CustomDict.__init__ routes args through update(), which in
        # turn calls our __setitem__, so initial keys get ordered too
        CustomDict.__init__(self, *args, **kwds)

    #=========================================================
    #first level
    #=========================================================
    #getitem - we can just use default

    def __setitem__(self, key, value):
        retval = CustomDict.__setitem__(self,key,value)
        #new keys go at the end; existing keys keep their position
        if key not in self.__keys:
            self.__keys.append(key)
        return retval

    def __delitem__(self, key):
        retval = CustomDict.__delitem__(self,key)
        if key in self.__keys: #should always be true
            self.__keys.remove(key)
        return retval

    #=========================================================
    #second level
    #=========================================================
    def __iter__(self): return iter(self.__keys)
    def keys(self): return list(self.__keys)

    #=========================================================
    #custom
    #=========================================================
    #TODO: a "reorder" method for rearranging keys

    def insert(self, pos, key, value=Undef):
        """insert a key at a particular position in the key ordering.

        if *value* is Undef (the default), the key must already be
        present, and will simply be relocated to position *pos*;
        a KeyError is raised if it's missing.

        if *value* is provided, the key is placed at *pos* (relocating
        it if already present) and its value is set.
        """
        if value is Undef:
            if key in self.__keys:
                self.__keys.remove(key)
                self.__keys.insert(pos,key)
            else:
                raise KeyError, "key not found: %r" % (key,)
        else:
            if key in self.__keys:
                self.__keys.remove(key)
            self.__keys.insert(pos,key)
            self[key] = value
    #=========================================================
    #EOC
    #=========================================================
+
+#=========================================================
+#insensitive dictionary
+#=========================================================
+#this probably works, just hasn't been tested/used
+##class InsensitiveDict(CustomDict):
+## """dictionary that allows only strings for keys, and is case-insensitive-but-preserving"""
+## #CAUTION: this class hasn't been fully tested yet
+##
+## #=========================================================
+## #init
+## #=========================================================
+## def __init__(self, *args, **kwds):
+## self.__keys = {} #dict mapping lower-case key to currently preserved key
+## CustomDict.__init__(self, *args, **kwds)
+##
+## def normkey(self, key):
+## if not isinstance(key, str):
+## raise ValueError, "key must be a str: %r" % (key,)
+## return key.lower()
+##
+## #=========================================================
+## #first level
+## #=========================================================
+## def __getitem__(self, key):
+## nkey = self.normkey(key)
+## if CustomDict.__contains__(self, nkey):
+## return dict.__getitem__(self,nkey)
+## else:
+## raise KeyError, "key not found: %r" % (key,)
+##
+## def __setitem__(self, key, value):
+## nkey = self.normkey(key)
+## self.__keys.setdefault(nkey, key)
+## return CustomDict.__setitem__(self,nkey,value)
+##
+## def __delitem__(self, key):
+## nkey = self.normkey(key)
+## retval = CustomDict.__delitem__(self,nkey)
+## del self.__keys[nkey]
+## return retval
+##
+## def __contains__(self, key):
+## nkey = self.normkey(key)
+## return CustomDict.__contains__(self, nkey)
+##
+## #=========================================================
+## #second level
+## #=========================================================
+## def __iter__(self): return self.__keys.itervalues()
+## def keys(self): return self.__keys.values()
+##
+## #=========================================================
+## #third level
+## #=========================================================
+## def iteritems(self):
+## for nk, k in self.__keys.iteritems():
+## yield k, CustomDict.__getitem__(self, nk)
+##
+## def items(self):
+## return [
+## (k, CustomDict.__getitem__(self, nk))
+## for nk, k in self.__keys.iteritems()
+## ]
+##
+## #=========================================================
+## #EOC
+## #=========================================================
+
+#=========================================================
+#deprecated - to be removed 2010-04-01
+#=========================================================
+defined = relocated_function("defined", "bps.undef.defined", removal="2010-04-01")
+undefined = relocated_function("undefined", "bps.undef.undefined", removal="2010-04-01")
+
+#=========================================================
+#EOF
+#=========================================================
diff --git a/bps/undef.py b/bps/undef.py
new file mode 100644
index 0000000..94ed87b
--- /dev/null
+++ b/bps/undef.py
@@ -0,0 +1,59 @@
+"""bps.undef -- undefined singleton - import from types"""
+#=========================================================
+#imports
+#=========================================================
+#local
+__all__ = [
+ 'Undef',
+ 'defined','undefined',
+ 'strip_undefined_keys',
+]
+#=========================================================
+#"undefined" singleton
+#=========================================================
+#TODO: figure out how to play nice w/ peak's NOT_GIVEN, mako's Undefined,
+# and any other instances of this singleton
+# eg: could have bps check an env var for the name of a constant to import
+# instead of Undef=UndefType()
+
class UndefType(object):
    """Sentinel type whose single instance is the ``Undef`` object.

    The sentinel is always falsy, and deliberately compares unequal to
    everything -- including itself -- so that an identity test
    (``x is Undef``) is the only reliable way to detect it.
    """
    def __nonzero__(self):
        #python 2 truth hook: the sentinel is always falsy
        return False

    def __str__(self):
        return "Undef"

    def __repr__(self):
        return "Undef"

    def __eq__(self, other):
        #never equal to anything, not even itself
        return False

    def __ne__(self, other):
        #mirror of __eq__: unequal to everything
        return True

#the singleton instance -- always test with 'is' / 'is not'
Undef = UndefType()
+
def defined(value):
    "True unless *value* is the Undef sentinel"
    return not (value is Undef)
+
def undefined(value):
    "True when *value* is the Undef sentinel"
    if value is Undef:
        return True
    return False
+
def strip_undefined_keys(source, inplace=False):
    """remove any keys from dict whose value is Undef; returns resulting dict.

    :param source: the mapping to filter
    :param inplace:
        if True, offending keys are deleted from *source* itself and
        *source* is returned; otherwise a new filtered dict is returned
        and *source* is left untouched.
    """
    if not inplace:
        result = {}
        for key, value in source.iteritems():
            if value is not Undef:
                result[key] = value
        return result
    #collect keys first so we don't mutate the dict while iterating it
    doomed = set(key for key, value in source.iteritems() if value is Undef)
    for key in doomed:
        del source[key]
    return source
+
+#=========================================================
+#
+#=========================================================
diff --git a/bps/unstable/__init__.py b/bps/unstable/__init__.py
new file mode 100644
index 0000000..297d701
--- /dev/null
+++ b/bps/unstable/__init__.py
@@ -0,0 +1,1084 @@
+"""bps.unstable -- new, undocumented, and still developing additions to bps
+"""
+#=========================================================
+#imports
+#=========================================================
+#core
+import contextlib
+import os
+from logging import getLogger; log = getLogger(__name__)
+import os.path
+from warnings import warn
+import re
+#pkg
+from bps.fs import getcwd, filepath
+from bps.text import condense
+from bps.meta import isstr, is_iter, get_module
+from bps.basic import enum_slice
+#local
+__all__ = [
+ 'smart_list_iter',
+ 'ellipsize',
+ 'protected_env',
+]
+
+#=========================================================
+#iteration
+#=========================================================
+##class BufferedIter(object):
+## "return iterator which can be walked back, ala ungetch"
+## def __init__(self, source):
+## self._source = source
+## self._itr = iter(source)
+## self._buffer = []
+##
+## def next(self):
+## if self._buffer:
+## return self._buffer.pop()
+## return self._itr.next()
+##
+## def pushnext(self, value):
+## self._buffer.append(value)
+
+
+##def pairs(obj):
+## "an attempt at a Lua-like pairs() iterator."
+## if hasattr(obj, "iteritems"):
+## return obj.iteritems()
+## elif hasattr(obj, "__len__") and hasattr(obj,"__getitem__"):
+ ##if hasattr(obj, "_asdict") and hasattr(obj, "_fields"):
+ ## #probably a namedtuple...
+ ## return (
+ ## (k,getattr(obj,k))
+ ## for k in obj._fields
+ ## )
+ ##return enumerate(obj)
+## else:
+## return (
+## (k,getattr(obj,k))
+## for k in dir(obj)
+## if not k.startswith("_")
+## )
+##TODO: maybe also an ipairs()
+
def filter_in_place(func, target, invert=False):
    """perform in-place filtering on a container, removing elements if they don't pass filter func.

    :arg func: predicate applied per element (per *key* for mappings)
    :arg target: mutable container (list/array, dict-like, set) modified in place
    :param invert: if True, elements *passing* func are removed instead
    :raises TypeError: for container types that can't be filtered in place
    :returns: ``None`` -- target is modified in place
    """
    #NOTE: length check delayed til after type identified,
    # so that type errors won't get hidden for empty instances
    # of unsupported types.
    if not hasattr(target, "__len__"):
        #target is iter, or something more obscure
        raise TypeError, "cannot support types without length: %r" % (type(target),)
    if not hasattr(target, "__iter__"):
        #not sure what types could get here. but code relies on iteration
        raise TypeError, "cannot support non-iterable types: %r" % (type(target),)
    if hasattr(target, "__getitem__"):
        if not hasattr(target, "__delitem__"):
            #target is frozenset, str, tuple, etc
            raise TypeError, "cannot support types without del: %r" % (type(target),)
        if hasattr(target, "keys"): #xxx: is there better way to identifier mapping?
            #target is dict-like
            #NOTE: we build set of keys to delete rather than deleting
            #as we go, since that set will always be smaller than set of all keys
            #NOTE(review): the xor below assumes func returns a real bool;
            # a truthy non-bool (eg 2) would xor numerically -- confirm callers.
            pos = not invert
            remove = set(
                key for key in target
                if pos ^ func(key) #NOTE: operates on key, not value
            )
            for key in remove:
                del target[key]
            return
        else:
            #target is list-like (eg: list, array)
            #FIXME: this assumes __len__ + __getitem__ - keys implies int indexes.
            # might need better check of this assumption ('index' is reliable for list & array)
            end = len(target)
            if not end:
                return
            pos = 0
            while pos < end:
                if invert ^ func(target[pos]):
                    #element passes the filter -- keep it, advance
                    pos += 1
                else:
                    #element fails -- remove it, recheck same index
                    del target[pos]
                    end -= 1
            return
    elif hasattr(target, "difference_update"):
        #assume it's a set
        pos = not invert
        remove = set(
            elem for elem in target
            if pos ^ func(elem)
        )
        if remove:
            target.difference_update(remove)
        return
    else:
        #probably a frozenset
        raise TypeError, "unsupported type: %r" % (type(target),)
+
+#rename to mutable_list_iter ? also, might be too complex, when filter_in_place might be better
class smart_list_iter(object):
    """list iterator which handles certain list operations without disrupting iteration.

    The typical usage of this class is when you need to iterate
    over a list, and delete selected elements as you go.

    Usage example::

        >>> from bps.unstable import smart_list_iter
        >>> a=[5,6,100,2,3,40,8] #given a list
        >>> itr = smart_list_iter(a) #create the iterator
        >>> for elem in itr: #iterate over it as normal
        >>>     print elem
        >>>     if elem > 30: #remove all elements over 30
        >>>         itr.delete() #delete via the iterator, allowing it to continue w/o losing sync
        >>> #all elements will be scanned
        5
        6
        100
        2
        3
        40
        8
        >>> a #but all ones called for delete will be removed
        [5, 6, 2, 3, 8]

    Public Attributes
    =================

    .. attribute:: pos

        Current position in list (-1 before iterator starts)

    .. attribute:: next_pos

        Position the next element will be read from
        (clamped, so it may equal ``len(target)`` once exhausted).

    .. attribute:: target

        The list we're iterating over.

    Public Methods
    ==============

    .. automethod:: delete

    .. automethod:: pop

    .. automethod:: append

    .. automethod:: insert
    """
    def __init__(self, target, enum=False):
        """
        :arg target: the list to iterate over
        :param enum: if True, next() yields ``(index, value)`` pairs ala enumerate()
        """
        #XXX: reverse option?
        self.target = target
        self._enum = enum
        self.pos = -1
        #set when the element at ``pos`` was just deleted: the *next*
        #element now lives at ``pos``, so the cursor must not advance.
        self._deleted = False

    def _get_next_pos(self):
        "index the next element will be read from, clamped to len(target)"
        pos = self.pos
        if not self._deleted:
            pos += 1
        #FIXED: was max(pos, len), which always returned len(target)
        return min(pos, len(self.target))
    next_pos = property(_get_next_pos)

    def __iter__(self):
        return self

    def __len__(self):
        "number of elements left to yield"
        #FIXED: was misspelled ``__length__``, so len() never worked
        pos = self.pos
        if not self._deleted:
            pos += 1
        return max(0, len(self.target)-pos)

    def next(self):
        "return next item (py2 iterator protocol)"
        pos, deleted = self.pos, self._deleted
        if not deleted:
            pos += 1
        assert pos >= 0
        end = len(self.target)
        if pos >= end:
            raise StopIteration
        if deleted:
            #cursor already points at the next element; just clear the flag
            self._deleted = False
        else:
            self.pos = pos
        if self._enum:
            return pos, self.target[pos]
        else:
            return self.target[pos]

    def delete(self):
        "delete current entry"
        pos = self.pos
        if pos == -1:
            raise IndexError("not currently pointing to an element")
        del self.target[pos]
        self._deleted = True

    def pop(self, idx=None, relative=False):
        """pop entry from list. if index not specified, pops current entry in iterator.

        :param idx:
            index to pop; ``None`` (the default) means the iterator's
            current element. negative values count from the end of the
            list unless ``relative`` is set.
        :param relative: if True, ``idx`` is an offset from the current position.
        :returns: the removed value
        :raises IndexError: if the resolved index is invalid
        """
        pos = self.pos
        if idx is None:
            #NEW: default to current element, per the documented contract
            if pos == -1:
                raise IndexError("not currently pointing to an element")
            idx = pos
        elif relative:
            idx += pos
        elif idx < 0:
            idx += len(self.target)
            if idx < 0:
                raise IndexError("index too small")
        value = self.target.pop(idx)
        #keep cursor in sync with the shifted elements
        if idx < pos:
            self.pos = pos-1
        elif idx == pos:
            self._deleted = True
        return value

    def append(self, value):
        "quickly append to end of list"
        return self.insert(len(self.target), value)

    def insert(self, idx, value, relative=False):
        """insert entry into list. if relative=True, idx is relative to current entry in iterator.

        :arg idx: insertion index; negative values count from end of list
        :arg value: the value to insert
        :param relative: if True, ``idx`` is an offset from the current position
        :returns: the (absolute) index the value was inserted at
        """
        pos = self.pos
        if relative:
            idx += pos
        elif idx < 0:
            idx += len(self.target)
            if idx < 0:
                raise IndexError("index too small")
        self.target.insert(idx, value)
        #an insert before the cursor shifts the current element right
        if idx < pos:
            self.pos = pos+1
        return idx
+
+#=========================================================
+#text related
+#=========================================================
def get_textblock_size(text, rstrip=False):
    """return ``(rows, cols)`` used by a block of text.

    :arg text: the (possibly multi-line) string to measure
    :param rstrip:
        if True, trailing whitespace on each line is ignored
        when computing the column count.
    """
    if not text:
        return 0, 0
    lines = text.split("\n")
    if rstrip:
        widths = [len(line.rstrip()) for line in lines]
    else:
        widths = [len(line) for line in lines]
    return len(lines), max(widths)
+
def ellipsize(text, width, align=">", ellipsis="...", mode="plain", window=None):
    """attempt to ellipsize text so it fits within *width* characters.

    :arg text: the string to ellipsize (non-strings are converted via ``str()``)
    :arg width: the maximum allowed width
    :param align:
        Where the ellipsis should be inserted.

        ============== ============================================
        Value          Location
        -------------- --------------------------------------------
        "<", "left"    ellipsis inserted at left side of text
        "^", "center"  ellipsis inserted in center of text
        ">", "right"   ellipsis inserted at right side of text
                       (the default).
        ============== ============================================

    :param mode:
        Select which algorithm is used to ellipsize.
        Defaults to "plain".

        ============== ================================================
        Value          Behavior
        -------------- ------------------------------------------------
        "plain"        Text will be clipped as needed,
                       regardless of spaces or other boundaries,
                       and the text will otherwise be left alone.
        "smart"        This function will attempt to remove extraneous
                       spaces and similar characters from the string,
                       and if ellipsis are needed, will prefer splitting
                       text at a space.
        "filepath"     Variant of the smart algorithm, this assumes
                       it's working with a local filepath,
                       and attempts to break on directory boundaries.
        ============== ================================================

    :param window:
        For smart / filepath modes, this specifies
        the maximum number of characters to search for a good break point
        before giving up (default 8 for "smart", 32 for "filepath").
        Pass ``-1`` to search without limit.

    :param ellipsis:
        Optionally override the text used as the ellipsis.

    :returns: string of at most *width* characters.

    Usage Example::

        >>> from bps.unstable import ellipsize
        >>> ellipsize("abc", 6) #this does nothing
        'abc'
        >>> ellipsize("abcdefghi", 8) #suddenly, ellipsized
        'abcde...'
        >>> ellipsize("abcdefghi", 8, "<") #other side
        '...efghi'
    """
    #XXX: write code to break up string into atomic parts,
    #and override len() to handle them,
    #so this can deal with VT100 codes and HTML

    #convert to string
    if not isstr(text):
        text = str(text)

    #pre-process string
    if mode == "smart":
        #smart mode will ALWAYS try to shrink (collapse runs of spaces/tabs)
        text = condense(text, " \t")

    #check if string fits w/in alloted space
    tsize = len(text)
    if tsize <= width:
        return text

    #figure out how much of the original text we can keep
    chars = width-len(ellipsis)
    if chars < 0:
        raise ValueError, "width must be larger than ellipsis!"
    elif chars == 0:
        return ellipsis

    #select boundary finder function
    if mode in ("smart", "filepath"):
        if mode == "smart":
            bc = " \t" #break on whitespace
            if window is None:
                window = 8
        else:
            assert mode == "filepath"
            bc = os.path.sep #break on directory separators
            if window is None:
                window = 32
        def find_boundary(start, fwd):
            "locate next boundary character in string"
            #nested vars: 'text', 'tsize', 'bc', and 'window'
            if fwd:
                #scan forward up to 'window' chars for last char of a boundary run
                if window == -1:
                    end = None
                else:
                    end = min(start+window, tsize)
                last = None
                for idx, c in enum_slice(text, start, end):
                    if c in bc:
                        last = idx
                    elif last is not None:
                        return last
                return last
            else:
                #scan backward up to 'window' chars
                if window == -1:
                    end = None
                else:
                    end = start-window-1
                    if end < 0:
                        end = None
                log.debug("find_boundary rev: %r %r %r", text, start, end)
                last = None
                for idx, c in enum_slice(text, start, end, -1):
                    log.debug("checking %r %r last=%r", idx, c, last)
                    if c in bc:
                        last = idx
                    elif last is not None:
                        return last
                return last
    else:
        #plain mode: never search for a boundary
        def find_boundary(start, fwd):
            return None

    #chop according to alignment
    if align == "<" or align == "left":
        #put ellipsis on left side, so pick at most {chars}
        #characters from the right side of the string.
        left = tsize-chars
        b = find_boundary(left, True)
        if b is not None:
            left = b #want text[left] to be the boundary char
        result = ellipsis + text[left:]

    elif align == "^" or align == "center":
        #'left' is left end-point
        #'right' is right start-point
        #result is [0:left] ... [right:tsize]
        if mode == "plain":
            #quick simple centering
            left = chars//2
            right = tsize-chars+left
        else:
            #much more tricky... start in center of string,
            #and move left/right until boundaries are large enough.
            left = center = tsize//2
            right = center-1
            left_active = right_active = True
            first = True
            diff = tsize+2-chars #gap size needed between left & right
            while left_active and right_active:
                #move left to next boundary
                if left_active:
                    assert left > 0
                    b = find_boundary(left-1, False)
                    if b is None:
                        left_active = False
                    else:
                        left = b
                        left_active = (left > 0)
                #check if we're done
                if not first and right-left<diff:
                    break
                #move right to next boundary
                if right_active:
                    assert right < tsize-1
                    b = find_boundary(right+1, True)
                    if b is None:
                        right_active = False
                    else:
                        right = b
                        right_active = (right < tsize-1)
                #check if we're done
                if right-left < diff:
                    break
                #move boundaries outward again
                first = False
            #check if we failed.
            if right-left < diff:
                #moved out to best boundary we could find,
                #and nothing doing. so fall back to
                #quick simple centering
                left = chars//2
                right = tsize-chars+left
            else:
                left += 1 #so we include left char
        assert left>=0 and left < tsize
        assert right>left and right <= tsize
        assert left+tsize-right <= chars
        result = text[:left] + ellipsis + text[right:]

    #TODO: could have "edge"/"<>" alignment, where center is kept
    else:
        assert align == ">" or align == "right"
        #keep left side, put ellipsis on the right
        right = chars-1
        b = find_boundary(right, False)
        log.debug("text=%r right=%r b=%r", text, right, b)
        if b is not None:
            right = b
        result = text[:right+1] + ellipsis

    #just a final check
    assert len(result) <= width
    return result
+
+#=========================================================
+#text - email
+#=========================================================
+
#regexp for strict checking of local part
#NOTE: \w with re.U admits any unicode alphanumeric plus underscore;
# dot-placement rules ("..", leading/trailing '.') are enforced separately.
#TODO: support backslash escapes, quoted mode
_re_email_local = re.compile(r"""
    ^(
        (
            \w | [-!#$%&'*+/=?^`{|}~.]
        )+
    )$
    """, re.X|re.U)

#regexp for strict checking of domain part
#NOTE(review): each IP octet below requires 2-3 digits, so "[1.2.3.4]"
# would not match -- confirm whether single-digit octets were intended.
#TODO: support ipv6 in quotes too
_re_email_domain = re.compile(r"""
    ^(
        ## match ip address within brackets (rare but in std)
        \[ [0-2]?\d\d \. [0-2]?\d\d \. [0-2]?\d\d \. [0-2]?\d\d \]
        |
        ## match domain name
        (
            [.-]
            |
            ## note: since \w matches underscore along with alphanum,
            ## we have to use neg-lookahead to prevent underscore from matching
            (?!_) \w
        )+
    )$
    """, re.X|re.U)

#matches runs of 2+ whitespace chars; used to collapse them in display names
_dws_re = re.compile(r"\s{2,}")
+
def parse_email_addr(value, strict=True, strip=True, allow_empty=False, unquote_name=True, clarify=False):
    """parse email address into constituent parts.

    This function takes a provided email address,
    and splits it into the display name, the local name, and the domain name.
    While this function has a lot of options for controlling precisely
    how it parses email addresses, the basic usage is::

        >>> from bps.unstable import parse_email_addr
        >>> parse_email_addr("joe@bob.com")
        (None, 'joe', 'bob.com')
        >>> parse_email_addr("Joe Smith <joe@bob.com>")
        ('Joe Smith', 'joe', 'bob.com')
        >>> parse_email_addr("joe@")
        ValueError: empty domain part in email address: 'joe@'

    :arg value:
        This should be a string containing the email address to be parsed.
        Extraneous spaces around the address will be automatically stripped.

    :param strict:
        By default this function is strict, and raises :exc:`ValueError`
        if the local name or domain name violates various email address rules
        (see :func:`validate_email_parts`).

        If ``strict=False``, this function will raise :exc:`ValueError`
        only for mismatched ``<>`` around an email, or if the ``@`` is missing.

    :param strip:
        By default, extraneous white space is stripped from the address
        before parsing, and from the parts after they have been parsed,
        to help normalize unpredictable user input. Set ``strip=False`` to disable.

    :param allow_empty:
        By default, an empty string is considered an invalid email.
        If ``allow_empty=True``, passing in an empty string
        will result in the tuple ``(None,None,None)`` being returned.
        This can be detected easily because in all other cases,
        the domain part will be a non-empty string.

    :param unquote_name:
        A common convention is to surround display names in quotes
        (eg ``"John Doe" <jdoe@foo.com>``). By default, this function
        will strip the quotes out, and report the raw name.
        To disable this, set ``unquote_name=False``,
        and the raw name string will be returned.

    :param clarify:
        If enabled via ``clarify=True``, and the address cannot be parsed
        as provided, parse_email_addr will search for obfuscated email address
        features, such as ``@`` being written as ``(at)``, and attempt
        to restore and parse the original address. This is particularly useful
        when standardizing user input.

        This feature is disabled by default, since it may not always
        return the right results.

    :returns:
        This returns a tuple ``(name, local, domain)``:

        * ``name`` contains the display name, or ``None`` if the display name was empty / missing.
        * ``local`` contains the local part of the address (or ``None`` if allow_empty is ``True``).
        * ``domain`` contains the domain part of the address (or ``None`` if allow_empty is ``True``).

    :raises ValueError:
        if the address cannot be parsed as an email address,
        or if the components of the address violate rfc specs
        (see the ``strict`` parameter for more).

    .. note::
        This function (mostly) complies with the relevant rfcs, such as http://tools.ietf.org/html/rfc3696.
        Deviations include:

        * it doesn't support quoted local names (eg ``"John Doe"@foo.com``)
        * it doesn't support backslash escaping in the local name (eg ``User\<\>Name@foo.com``).
        * it allows any alphanumeric unicode/locale defined character, not just a-z, 0-9.

    """
    #initial setup
    if value is None:
        if allow_empty:
            return (None,None,None)
        else:
            raise ValueError, "not a valid email address: %r" % (value,)
    if strip:
        addr = value.strip()
    else:
        addr = value

    #extract name part (everything before the last '<')
    if '<' in addr:
        if addr[-1] != '>':
            raise ValueError, "malformed braces in email address: %r" % (value,)
        name, addr = addr[:-1].rsplit("<",1)
        if strip:
            name = name.strip()
        #NOTE(review): with strip=False and an empty name (eg "<x@y>"),
        # name[-1] below raises IndexError rather than ValueError -- confirm.
        elif name[-1] == ' ': #at least strip right most space
            name = name[:-1]
        if unquote_name:
            #strip one matching layer of double or single quotes
            if name.startswith('"') and name.endswith('"'):
                name = name[1:-1]
                if strip:
                    name = name.strip()
            elif name.startswith("'") and name.endswith("'"):
                name = name[1:-1]
                if strip:
                    name = name.strip()
        if not name:
            name = None
        elif strip and ' ' in name:
            #collapse internal runs of whitespace
            name = _dws_re.sub(" ", name)
        if strip:
            addr = addr.strip()
    elif '>' in addr:
        raise ValueError, "malformed braces in email address: %r" % (value,)
    else:
        name = None

    #split local & domain parts
    if not addr and allow_empty:
        return None, None, None
    elif '@' in addr:
        local, domain = addr.rsplit('@',1)
        if strip:
            local = local.rstrip()
            domain = domain.lstrip()
    elif clarify:
        #let's try some alternates: "(at)", "[at]", " at " obfuscations
        def helper(addr):
            "re-parse candidate; return (local,domain) or (None,None) on failure"
            try:
                result = parse_email_addr(addr, strict=False, strip=True, clarify=False)
                return result[1:3]
            except ValueError:
                return None, None
        while True:
            if '(at)' in addr:
                tmp = re.sub(r"\s*\(at\)\s*","@",addr)
                tmp = re.sub(r"\s*\(dot\)\s*",".",tmp)
                local, domain = helper(tmp)
                if domain:
                    break
            if '[at]' in addr:
                tmp = re.sub(r"\s*\[at\]\s*","@",addr)
                tmp = re.sub(r"\s*\[dot\]\s*",".",tmp)
                local, domain = helper(tmp)
                if domain:
                    break
            if ' at ' in addr:
                tmp = re.sub(r"\s* at \s*","@",addr)
                tmp = re.sub(r"\s* dot \s*",".",tmp)
                local, domain = helper(tmp)
                if domain:
                    break
            raise ValueError, "not a valid email address: %r" % (value,)
    else:
        raise ValueError, "not a valid email address: %r" % (value,)

    #validate parts and return
    validate_email_parts(name, local, domain, strict=strict, _value=value)
    return name, local, domain
+
def validate_email_parts(name, local, domain, strict=True, _value=None):
    """check the pieces of an email address (``Name <local @ domain>``) for validity.

    :arg name: the display name component, or ``None`` (currently not checked)
    :arg local: the local part component
    :arg domain: the domain part component

    :param strict:
        When True (the default), the local & domain parts are also checked
        against rfc-style character, sequence ("..", hyphen placement),
        and length rules. When False, the only requirement is that both
        local & domain are non-empty.

    :param _value:
        Override the value displayed in error messages
        (used internally by :func:`parse_email_addr`).

    :returns: ``True`` on success
    :raises ValueError: describing the first rule that was violated
    """
    if _value is None:
        _value = (name,local,domain)

    #both core parts must be present, even in lax mode
    if not local:
        raise ValueError("empty local part in email address: %r" % (_value,))
    if not domain:
        raise ValueError("empty domain part in email address: %r" % (_value,))
    if not strict:
        return True

    #local part: charset check, then dot-placement rules
    if not _re_email_local.match(local):
        raise ValueError("invalid characters in local part of email address: %r" % (_value,))
    if '..' in local or local.startswith('.') or local.endswith('.'):
        raise ValueError("invalid periods in local part of email address: %r" %(_value,))

    #domain part: charset, dot, and hyphen rules
    #XXX: split into is_valid_hostname?
    if not _re_email_domain.match(domain):
        raise ValueError("invalid characters in domain part of email address: %r" % (_value,))
    if '..' in domain or domain.startswith('.'):
        raise ValueError("invalid periods in domain part of email address: %r" %(_value,))
    if domain.startswith('-') or domain.endswith('-') or '-.' in domain or '.-' in domain:
        raise ValueError("invalid hyphens in domain part of email address: %r" %(_value,))
    ##if len(domain) < (3 if domain[-1] == '.' else 2):
    ##    raise ValueError, "domain part of email address is too small: %r" % (value,)

    #rfc3696 size limits
    if len(local) > 64:
        raise ValueError("local part of email address is too long: %r" % (_value,))
    if len(domain) > 255:
        raise ValueError("domain part of email address is too long: %r" % (_value,))

    return True
+
def compile_email_addr(name, local, domain, strict=True, quote_name=True):
    """validate the components of an email address and format them as one string.

    :arg name: the display name component, or ``None``.
    :arg local: the local part component
    :arg domain: the domain part component

    :param strict:
        whether strict validation is enabled
        (see :func:`validate_email_parts`)

    :param quote_name:
        whether the name part is automatically
        put inside double-quotes when formatting.

    :returns: email address as a single string.
    :raises ValueError: if the parts fail validation.
    """
    validate_email_parts(name, local, domain, strict=strict)
    if not name:
        return '%s@%s' % (local,domain)
    if quote_name:
        fmt = '"%s" <%s@%s>'
    else:
        fmt = '%s <%s@%s>'
    return fmt % (name,local,domain)
+
def norm_email_addr(value, strict=True, allow_empty=False, quote_name=True, clarify=False):
    """normalize an email address string.

    This uses :func:`parse_email_addr` and :func:`compile_email_addr`
    to parse, validate, normalize, and reassemble
    any email address passed into it.

    :arg value: raw email address
    :param strict:
        whether strict checking of email format is enabled
        (see :func:`validate_email_parts`).
    :param allow_empty:
        By default, empty strings will cause a :exc:`ValueError`.
        If ``True``, empty strings will be returned as ``None``.
    :param quote_name:
        By default, the name portion will have double-quotes
        added around it if they are missing.
        Set to ``False`` to preserve original name.

    :returns:
        normalized email address, with extraneous spaces removed;
        or raises ValueError if address was invalid.
    """
    name, local, domain = parse_email_addr(value,
        strict=strict,
        strip=True,
        allow_empty=allow_empty,
        unquote_name=quote_name,
        clarify=clarify,
    )
    if domain is None:
        #parse_email_addr only returns an empty domain when allow_empty is set
        assert allow_empty
        return None
    return compile_email_addr(name, local, domain, strict=False, quote_name=quote_name)
+
+#=========================================================
+#functional code
+#=========================================================
+
+##class compose(object):
+## """
+## function composition
+##
+## usage:
+## fc = compose(f0, f1, ... fn)
+## assert fc(*a,**k) == f0(f1(...fn(*a,**k))
+## """
+## def __new__(cls, *funcs):
+## #calc true content from funcs
+## assert isinstance(funcs, tuple)
+## content = []
+## for func in funcs:
+## assert callable(func)
+## if func == IdentityFunc:
+## #this one should be ignored.
+## continue
+## if isinstance(compose):
+## content += compose.func_seq
+## else:
+## content.append(func)
+## #return appropiate object based on content
+## if len(content) == 0:
+## return IdentityFunc
+## elif len(content) == 1:
+## #no need to compose
+## return content[0]
+## else:
+## #build compose object
+## self = object.__new__(cls)
+## content.reverse()
+## self.func_seq = tuple(content)
+## return self
+##
+## def __call__(self, *args, **kwds):
+## gen = iter(self.func_seq)
+## result = gen.next()(*args,**kwds)
+## for fn in gen:
+## result = fn(result)
+## return result
+
+##_NameCount = {}
+##def composeClass(bases, name=Undef, kwds=None):
+## """
+## returns a new class built out of the bases provided.
+## given bases = [b1, b2, b3],
+## the resulting class expects arguments in the form of...
+##
+## cls( a1, a2, a3)
+## where aN are all tuples, dicts, or ArgKwds.
+## only the first contructor b1.__new__(*args,**kwds) from a1
+## each bN.__new__(*args,**kwds) from aN
+##
+## xxx: not done with this!
+## """
+## global _NameCount
+##
+## initseq = []
+## for pos, base in enumerate(bases):
+## for i, cls in initseq:
+## if issubclass(base,cls):
+## break
+## else:
+## initseq.append((pos,base))
+##
+## def newfn(cls, *aks):
+## return cls.__bases__[0].__new__(aks[0])
+##
+## def initfn(self, *aks):
+## for pos, cls in initseq:
+## if pos > len(aks):
+## args = []
+## kwds = {}
+## else:
+## ak = aks[pos]
+## if isinstance(ak, tuple):
+## args = ak
+## kwds = {}
+## elif isinstance(ak, dict):
+## args = []
+## kwds = ak
+## else:
+## args, kwds = ak.args, ak.kwds
+## cls.__init__(*args, **kwds)
+## outkwds = {"__new__":newfn, "__init__": initfn}
+## if kwds:
+## outkwds.update(kwds)
+##
+## if name is Undef:
+## name = "_comp_".join([cls.__name__ for cls in bases])
+## count = _NameCount[name] = _NameCount.get(name,0)+1
+## if count > 1:
+## name += "_%d" % (count,)
+##
+## return type(name,bases,outkwds)
+
+#=========================================================
+#fs/env related
+#=========================================================
@contextlib.contextmanager
def protected_env(*keys, **opts):
    """context manager which restores cwd & specified environment keys on exit.

    :arg keys: names of os.environ entries to snapshot and restore afterwards
    :param cwd:
        keyword-only flag (emulated via **opts, since py2 lacks the syntax);
        if True, the working directory is also restored on exit.
    :returns:
        yields a list; any filenames the caller appends to it are
        removed (via ``filepath(...).discard()``) when the block exits.
    """
    cwd = opts.pop("cwd", False)
    if opts:
        raise TypeError, "unknown kwd options: %r" % (opts,)
    if cwd:
        c = getcwd()
        #NOTE(review): if bps.fs exposes ``isabs`` as a method rather than a
        # property, this assert only checks a bound method is truthy -- confirm.
        assert c.isabs
    if keys:
        env = os.environ
        #snapshot current values; None records "key was absent"
        o = dict((k,env.get(k)) for k in keys)
    f = [] #list of files to purge
    try:
        yield f
    finally:
        for name in f:
            filepath(name).discard()
        if cwd:
            c.chdir()
        if keys:
            #restore snapshot: delete keys that didn't exist, reset the rest
            for k, v in o.iteritems():
                if v is None:
                    if k in env:
                        del env[k]
                else:
                    env[k] = v
+
+#=========================================================
+#unused fs code that might be useful in the future
+#=========================================================
+
+##def getDir(path, separator="\x00"):
+## return join(separator, os.path.listdir(path))
+
+##def getDirHash(path):
+## return sha.new(getDir(path)).digest()
+
+##def getUrl(url, **kwds):
+## """getUrl(url, **kwds) -> str
+## wrapper for urllib, behaves like getFile.
+## keyword args translated to cgi params.
+## uses 'post' method.
+## xxx: swallows all exceptions
+## """
+## try:
+## if len(kwds):
+## fh = urllib.urlopen(url, urllib.urlencode(kwds))
+## else:
+## fh = urllib.urlopen(url)
+## except:
+## return None
+## try:
+## return fh.read()
+## finally:
+## fh.close()
+
+##def getModUrl(srcUrl, tick=None, rawDate=False, dateFmt="%a, %d %b %Y %H:%M:%S %Z"):
+## """
+## ok, data, tick = getModUrl(url,tick=None)
+## """
+## print srcUrl, tick
+## if tick is None:
+## fh = urllib2.urlopen(srcUrl)
+## else:
+## if isinstance(tick, (int,float,long)):
+## tick = time.strftime(dateFmt,time.gmtime(tick))
+## try:
+## fh = urllib2.urlopen(
+## urllib2.Request(srcUrl,None,{'If-Modified-Since': tick})
+## )
+## except urllib2.HTTPError, e:
+## if e.code == 304: # not modified
+## return False, None, tick
+## else:
+## raise e
+## data = fh.read()
+## tick = fh.headers['Last-Modified']
+## if not rawDate:
+## tick = time.mktime(time.strptime(tick, dateFmt))
+## fh.close()
+## return True, data, tick
+
+#=========================================================
+#version string parsing - stopgap until PEP386 verlib is in stdlib
+#=========================================================
#marker substituted for a missing prerelease / postdev segment by
#parse_version(), so final releases compare after 'a'/'b'/'c' prereleases.
#NOTE(review): an un-normalized 'rc' prerelease tuple would sort *after*
# ('f',) -- verlib normalizes 'rc' to 'c'; confirm whether that's intended here.
FINAL_MARKER = ('f',)

#regexp matching PEP386-style version strings; the named groups
#are consumed by parse_version() below.
VERSION_RE = re.compile(r'''
    ^
    (?P<release>
        (?P<version>\d+\.\d+) # minimum 'N.N'
        (?P<extraversion>(?:\.\d+)*) # any number of extra '.N' segments
    )
    (?:
        (?P<prerel>[abc]|rc) # 'a'=alpha, 'b'=beta, 'c'=release candidate
        # 'rc'= alias for release candidate
        (?P<prerelversion>\d+(?:\.\d+)*)
    )?
    (?P<postdev>
        (?: \.post (?P<post>\d+) )?
        (?: \.dev (?P<dev>\d+) )?
    )?
    $''', re.VERBOSE)
+
def main_version(verstr, str=False):
    """return version as ``(major,minor)`` tuple, or ``'N.N'`` string if str=True.

    :arg verstr: PEP386-style version string
    :param str: if True, return a formatted string instead of a tuple
    """
    major_minor = parse_version(verstr)[0][:2]
    if not str:
        return major_minor
    return "%d.%d" % major_minor
+
def release_version(verstr, str=False):
    """return version+extraversion as ``(major,minor,...)`` tuple, or dotted string if str=True.

    :arg verstr: PEP386-style version string
    :param str: if True, return the release formatted as a dotted string
    """
    version = parse_version(verstr)[0]
    if str:
        #FIXED: the ``str`` flag shadows the builtin, so the original
        # ``str(n)`` called the boolean True and raised TypeError;
        # use %-formatting instead (can't rename the flag w/o breaking callers).
        return ".".join("%d" % (n,) for n in version)
    else:
        return version
+
def get_module_release(modname, str=False):
    """return release version of the named module (read from its ``__version__``).

    :arg modname: name of module to inspect
    :param str: forwarded to :func:`release_version` (string vs tuple result)
    """
    module = get_module(modname)
    return release_version(module.__version__, str=str)
+
def parse_version(verstr):
    """parse a PEP386 version string into a comparable tuple of parts.

    :arg verstr: version string, e.g. ``"1.2a3"`` or ``"1.0.post2.dev5"``
    :returns:
        a tuple ``(release, prerelease, postdev)`` where:

        * ``release`` is a tuple of ints, normalized to at least ``(major, minor)``
        * ``prerelease`` is ``(marker, N, ...)`` with marker in 'a'/'b'/'c'/'rc',
          or ``FINAL_MARKER`` when absent
        * ``postdev`` interleaves ``FINAL_MARKER`` with ``'post'``/``'dev'``
          entries, or is ``FINAL_MARKER`` when absent

    :raises ValueError:
        if the string doesn't conform to PEP386, or a numeric
        segment has a leading zero.
    """
    match = VERSION_RE.search(verstr)
    if not match:
        raise ValueError("version string doesn't conform to PEP386: %r" % (verstr,))
    groups = match.groupdict()

    def parse_numdots(s, minsize=0):
        "split dotted number string into int list, trimmed/padded to minsize elements"
        assert minsize >= 0
        nums = []
        for n in s.split("."):
            if len(n) > 1 and n.startswith("0"):
                raise ValueError("cannot have leading zero in version string segment: %r in %r" % (n, verstr))
            nums.append(int(n))
        #trim trailing zeroes down to minsize, else pad with zeroes up to it
        while len(nums) > minsize and nums[-1] == 0:
            nums.pop()
        if len(nums) < minsize:
            nums.extend([0] * (minsize-len(nums)))
        assert len(nums) >= minsize
        return nums

    # main version: 'N.N' plus any extra '.N' segments
    block = tuple(parse_numdots(groups['release'], 2))
    parts = [block]

    # prerelease: marker + its version, or FINAL_MARKER when absent
    prerel = groups.get('prerel')
    if prerel:
        block = [prerel] + parse_numdots(groups['prerelversion'], 1)
        parts.append(tuple(block))
    else:
        parts.append(FINAL_MARKER)

    # postdev tail: '.postN' and/or '.devN'
    if groups.get('postdev'):
        post = groups.get('post')
        dev = groups.get('dev')
        postdev = []
        if post:
            postdev.extend(FINAL_MARKER)
            postdev.extend(['post', int(post)])
        if dev:
            #NOTE: original split this across two identical ``if dev:``
            #blocks; merged here -- behavior is unchanged.
            postdev.extend(FINAL_MARKER)
            postdev.extend(['dev', int(dev)])
        parts.append(tuple(postdev))
    else:
        parts.append(FINAL_MARKER)
    return tuple(parts)
+
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/unstable/ansi.py b/bps/unstable/ansi.py
new file mode 100644
index 0000000..3df3ed1
--- /dev/null
+++ b/bps/unstable/ansi.py
@@ -0,0 +1,683 @@
+"""ansi (aka vt100) control code handling
+
+this module contains some attempts at ANSI control code parsing,
+with a focus on the color control codes. It also contains
+some ctypes code for rendering said color codes onto windows consoles.
+"""
+#=========================================================
+#imports
+#=========================================================
+#core
+import sys
+import os
+import re
+#pkg
+from bps.logs.proxy_logger import log
+from bps.meta import Params
+#local
+__all__ = [
+ 'AnsiCode', 'is_ansi_code',
+ 'parse_ansi_string',
+ 'len_ansi_string',
+]
+
+#=========================================================
+#constants
+#=========================================================
+colors = ("black","red","green","yellow", "blue","magenta","cyan","white","default")
+
def write_test_text(stream=sys.stdout):
    """write a visual test pattern to *stream*, exercising ansi escape codes.

    emits labelled swatch rows for the SGR color/style combinations this
    package cares about, followed by a cursor-movement test grid; the output
    must be inspected visually on the target terminal (or emulation layer).
    """
    def write(msg, *a, **k):
        #tiny printf-style helper; callers pass either positional
        #or keyword format args, never both at once
        if a: msg %= a
        if k: msg %= k
        stream.write(msg)

    r8 = range(8) + [9] #color indexes 0-7, plus 9 ("default")
    hs = 16 #width of the row-label column
    hfmt = "\x1b[0m%16s: " #row label: reset attrs, then right-padded name
    #NOTE: removed unused local `fmt` ("%-8s " * 8) - it was never referenced
    x = "-" * 7 + " "

    #clear screen, stamp "BR" at the bottom-right corner, then home the cursor
    write("\x1b[2J\x1b[9999;9999H\x1b[3DBR\x1b[H")

    write("COLOR/STYLE CODES...\n")
    write(" " * hs + " ")
    for c in colors:
        if not c:
            continue
        write("%-8s", c)
    write("\n")

    write("-" * (hs+1) + " " + x * 9 + "\n")

    def write_row(name, seq):
        #write one labelled row of "TEST" swatches, one per SGR param string
        write(hfmt % name)
        for v in seq:
            write("\x1b[%smTEST \x1b[0m",v)
        write("\n")

    write_row("fg", ("3%s" % i for i in r8))
    #NOTE: 37 should basically be ignored

    write("\n")
    write_row("unbold", ("1;3%s;22" % i for i in r8))
    write_row("bold", ("1;3%s" % i for i in r8))

## write("\n")
## write_row("unitalic", ("3;3%s;23" % i for i in r8))
## write_row("italic", ("3;3%s" % i for i in r8))

    #4/24 - underlined

    write("\n")
    write_row("bg", ("4%s" % i for i in r8))
    write_row("bg+bold", ("1;4%s" % i for i in r8))
# write("\n")
# write_row("under", ("4;3%s" % i for i in r8))

    write("\n")
    write_row("unblink", ("6;3%s;25" % i for i in r8))
    write_row("blink", ("5;3%s" % i for i in r8))
    write_row("blink+bold", ("6;1;3%s" % i for i in r8))

    write("\n")
    write_row("normal", ("7;3%s;27" % i for i in r8))
    write_row("reverse", ("7;3%s" % i for i in r8))
    write_row("bold+reverse", ("1;7;3%s" % i for i in r8)) #effective fg should be bold
    write_row("blink+reverse", ("5;7;3%s" % i for i in r8))

    write("\n")
    write_row("visible", ("8;3%s;28" % i for i in r8))
    write_row("conceal", ("8;3%s" % i for i in r8)) #should display as solid BG block

    write("-" * (hs+1) + " " + x * 9 + "\n")

    write("\nCURSOR CONTROL CODES...\n")
    write("1. UP-> <--\n")
    write(" ERROR\x1b[1ATEST-UP\n2. ^ ^\n")
    write("3. RIGHT ------\\/ \\/\n")
    write("4. \x1b[13CTEST-RIGHT\n")
    write("5. LEFT----> ERROR<--\x1b[12DTEST-LEFT\n")
    write("6. DOWN----\\/ \\/\n7.\n ERROR \x1b[2A\x1b[1BTEST-DOWN\n8. /\\ /\\\n")
    write("9. Far Right... \x1b[999C\x1b[13DTEST-FAR-RIGHT\nERROR\x1b[999D\x1b[1AA.\nB. \n")
+
+#=========================================================
+#main classes
+#=========================================================
+#XXX: could remove the "CODESET." prefix from everything
class CODESET:
    """enumeration of the ansi (ecma-48) code families recognized by this module"""

    #the 'c0' set: single raw control bytes
    #NOTE: this library ignores 0x09, 0x0A, 0x0D, and handles 0x1B specially
    # xxx: could make this a special policy
    C0 = "c0"
    #range(0x00,0x20)
    #identified by ESC + 0x21 + 0x40

    #the 'c1' set: two-byte ESC-prefixed codes
    #NOTE: until CSTR support is added, this will parse CSTRs incorrectly
    C1 = "c1"
    # #ESC + range(0x40,0x60)
    # #identified by ESC + 0x26 0x40
    # #8bit: 7bit identified by ESC + 0x20 + 0x46
    # #8bit: raw bytes in range(0x80,0xA0)
    # #8bit: identified by ESC + 0x22 0x46

    #control sequences (CSI codes)
    CSEQ = "cseq"
    #ESC + 0x5b + P=range(0x30,0x40)? + I=range(0x20,0x30)? + F=range(0x40,0x7F)
    # note F=range(0x70,0x7F) reserved for private/experimental use
    #8bit: 0x9b instead of ESC + 0x5b

    #independent control functions
    ICF = "icf"
    #ESC + range(0x60,0x7F)

    #code currently doesn't support parsing these,
    ###control strings - meaning dependent on sender & receiver
    ##CSTR = "cstr"
    ## #start + cstr + end + ST
    ## #start: APC, DCS, OSC, PM, SOS - all defined in C1
    ## #cstr: (range(0x08,0x0D)|range(0x20,0x7F))?
    ## # or any bitseq but SOS / ST

    #all valid codeset identifiers
    values = (C0, C1, CSEQ, ICF)
+
class AnsiError(ValueError):
    "base class for all errors raised by this module's ansi parsing code"
+
class AnsiParseError(AnsiError):
    "raised when a control code's overall structure is invalid (bad prefix/terminator/length)"
+
class AnsiCommandError(AnsiError):
    "raised when a structurally valid command contains invalid arguments"
+
class AnsiCode(object):
    """base class representing a single vt100/ansi control code.

    instances are normally obtained via :meth:`parse` / :meth:`try_parse`,
    or from :func:`parse_ansi_string`, rather than constructed directly.
    """
    #=========================================================
    #instance attrs
    #=========================================================

    #general
    malformed = None #None if well-formed; else non-empty string describing why it's malformed
    source = None #source string if set explicitly - use 'source' property
    codeset = None #codeset this belongs to (one of CODESET.values)
        #NOTE: this will be None for an instance IFF it's a "malformed" code
    code = None #always contains code string w/ CODESET specific prefix & params removed
        #see also <{codeset}_code>

    argstr = None #raw parameter string for CSEQ codes (empty string if no parms)

    #command specific attrs
    args = None #generic tuple of parsed args
    mode = None #used by some cseq commands which have a "mode" parameter
    offset = None #used by some cseq commands which encode a single relative offset
    row = None #used by some cseq commands which encode an absolute row
    col = None #used by some cseq commands which encode an absolute col

+ #=========================================================
+ #init
+ #=========================================================
    def __init__(self, codeset, code, argstr=None, source=None, malformed=None):
        """create a code directly; most callers should use :meth:`parse` instead.

        :arg codeset: one of CODESET.values, or None for a malformed placeholder
        :arg code: the code string (required unless codeset is None)
        :param argstr: raw parameter string (CSEQ codes only)
        :param source: original source text, if known
        :param malformed: error message (or True) flagging this code as malformed
        """
        if codeset is None:
            #placeholder for a malformed code (see create_malformed)
            assert code is None
            assert argstr is None
        else:
            if codeset not in CODESET.values:
                raise ValueError, "invalid codeset: %r" % (codeset,)
            if not code:
                raise ValueError, "code must be specified"
        if malformed is True:
            malformed = "<unknown reason>"
        if malformed:
            #malformed codes must carry the original text they came from
            assert source
            assert isinstance(malformed,str),"bad value: %r" % (malformed,)
        self.malformed = malformed
        self.codeset = codeset
        self.code = code
        self.source = source
        if argstr is None and codeset == CODESET.CSEQ:
            #CSEQ codes always carry an argstr, even if empty
            argstr = ""
        self.argstr = argstr

        #run code-specific init func if present
        if code:
            func = self._get_init_func()
            #XXX: not sure about this policy
            if malformed:
                #already malformed: append (rather than raise) any further errors
                try:
                    func()
                except AnsiError, err:
                    self.malformed = "%s; %s" % (self.malformed, str(err))
            else:
                func()
+
+ def _get_init_func(self):
+ "retrieve code-specific init function"
+ codeset, code = self.codeset, self.code
+ name = "init_" + codeset + "_" + "_".join(
+ c if c.isalnum()
+ else "%02x" % (ord(c),)
+ for c in code
+ )
+ func = getattr(self, name, None)
+ if func:
+ return func
+ name = "init_" + codeset + "_default"
+ func = getattr(self, name, None)
+ if func:
+ return func
+ return self.init_default
+
    @classmethod
    def try_parse(cls, source):
        """wrapper for :meth:`parse` which catches AnsiErrors.

        returns ``(True, code)`` on success, or ``(False, error)`` on failure.
        """
        try:
            return True, cls.parse(source)
        except AnsiError, err:
            return False, err
+
+ #XXX: flag controlling if argstr-related errors should be raised vs ignored vs turned into malformed?
+
    @classmethod
    def parse(cls, source):
        """parse control sequence; raises AnsiParseError (a ValueError) if format isn't right.

        *source* must contain exactly one complete control code:
        a single c0 byte, an ESC-prefixed c1/icf code, or a full control sequence.
        """
        if not source:
            raise AnsiParseError, "empty string is not a code"
        elif source.startswith("\x1b"):
            if len(source) < 2:
                raise AnsiParseError, "too few characters in control code"
            s1 = source[1]
            if s1 == "[":
                #parse cseq: ESC [ <params> <intermediates> <final>
                if len(source) < 3:
                    raise AnsiParseError, "too few characters in control sequence"
                code = source[-1]
                if code < '\x40' or code >= '\x7F':
                    raise AnsiParseError, "invalid final character in control sequence"
                #scan backwards over intermediate bytes (0x20-0x2F);
                #they are kept as part of 'code', not the params
                idx = len(source)-2
                while idx > 1 and '\x20' <= source[idx] < '\x30':
                    idx -= 1
                code = source[idx+1:-1] + code
                argstr = source[2:idx+1]
                return cls(codeset=CODESET.CSEQ, code=code,
                    argstr=argstr, source=source)
            elif s1 < '\x40':
                #non-standard, but some legacy codes exist.
                #TODO: should have init_c1_default issue warning
                ##raise ValueError, "invalid control code"
                if len(source) > 2:
                    #TODO: could be cstr instead
                    raise AnsiParseError, "too many characters in (c1) control code"
                return cls(codeset=CODESET.C1, code=s1, source=source)
            elif s1 < '\x60':
                #parse c1 (ESC + byte in 0x40-0x5F)
                if len(source) > 2:
                    #TODO: could be cstr instead
                    raise AnsiParseError, "too many characters in (c1) control code"
                return cls(codeset=CODESET.C1, code=s1, source=source)
            elif s1 < '\x7F':
                #parse icf (ESC + byte in 0x60-0x7E)
                if len(source) > 2:
                    raise AnsiParseError, "too many characters in (icf) control code"
                return cls(codeset=CODESET.ICF, code=s1, source=source)
            else:
                raise AnsiParseError, "invalid control code"
        elif len(source) == 1 and source < '\x20':
            #a bare c0 control byte
            return cls(codeset=CODESET.C0, code=source, source=source)
        else:
            raise AnsiParseError, "unknown control code"
+
+ #=========================================================
+ #python protocol
+ #=========================================================
+ def __str__(self):
+ "use source code came from, or render it as ansi string"
+ if self.source is None:
+ return self.render()
+ else:
+ return self.source
+
+ def render(self):
+ "render string from components"
+ cs = self.codeset
+ if cs == CODESET.CSEQ:
+ return "\x1b[" + self.argstr + self.code
+ elif self.codeset == CODESET.C1 or self.codeset == CODESET.ICF:
+ return "\x1b" + self.code
+ elif not self.codeset:
+ return ""
+ else:
+ assert self.codeset == CODESET.C0
+ return self.code
+
    def __repr__(self):
        "debugging repr; includes argstr, source, and malformed flag only when informative"
        p = Params(self.codeset, self.code)
        if self.codeset == CODESET.CSEQ and self.argstr:
            p.append(self.argstr)
        if self.source is not None and self.source != self.render():
            p.append(source=self.source)
        malformed = self.malformed
        if malformed:
            ##if ';' in malformed:
            ## #strip out init_xxx level errors that were added
            ## malformed = malformed[:malformed.index(";")]
            p.append(malformed=malformed)
        return "AnsiCode(%s)" % p
+
    def __eq__(self, other):
        "codes compare equal when codeset, code, and argstr all match"
        if is_ansi_code(other):
            #XXX: deal w/ malformed - probably should compare 'source' attrs
            return self.codeset == other.codeset and \
                self.code == other.code and self.argstr == other.argstr
        return False
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ #=========================================================
+ #malformed helpers
+ #=========================================================
    @classmethod
    def create_malformed(cls, source, reason=None):
        "create a placeholder AnsiCode (codeset=None) recording a malformed *source*"
        return cls(None, None, malformed=reason or True, source=source)
+
    def is_malformed(self):
        "True if this code was flagged as malformed when created"
        return bool(self.malformed)
+
    def get_malformed_reasons(self):
        "return tuple of reasons this code is malformed (empty tuple if well-formed)"
        if self.malformed:
            return (self.malformed,)
##            return self.malformed.split(";")
        else:
            return ()
+
+ #=========================================================
+ #codeset & code examination
+ #=========================================================
    def get_c0_code(self):
        "return the code string if this is a C0 code, else None"
        return self.code if self.codeset == CODESET.C0 else None
    c0_code = property(get_c0_code)
+
    def get_c1_code(self):
        "return the code string if this is a C1 code, else None"
        return self.code if self.codeset == CODESET.C1 else None
    c1_code = property(get_c1_code)
+
    def get_cseq_code(self):
        "return the code string if this is a control sequence, else None"
        return self.code if self.codeset == CODESET.CSEQ else None
    cseq_code = property(get_cseq_code)
+
    def get_icf_code(self):
        "return the code string if this is an ICF code, else None"
        return self.code if self.codeset == CODESET.ICF else None
    icf_code = property(get_icf_code)
+
+ ##def get_cstr_code(self):
+ ## return self.code if self.codeset == CODESET.CSTR else None
+ ##cstr_code = property(get_cstr_code)
+
+ #=========================================================
+ #argstr parsing
+ #=========================================================
    def parse_cseq_int_args(self):
        """parse argstr as semicolon-separated ints per the CSEQ convention.

        returns an empty tuple when argstr is empty; raises AnsiParseError
        if any segment is not an integer.
        """
        if not self.argstr:
            return ()
        def cast_int(x):
            try:
                return int(x)
            except ValueError:
                #NOTE(review): arguably this should be AnsiCommandError,
                #since the structure parsed fine - kept as-is.
                raise AnsiParseError("argstr contains non-integer: %r" % (self.argstr,))
        return tuple(cast_int(x) for x in self.argstr.split(";"))
+
    def _tma_error(self):
        "build (not raise) the 'too many arguments' error for this command"
        return AnsiCommandError("too many arguments for command sequence: %r" % (str(self),))
+
    def _wna_error(self):
        "build (not raise) the 'wrong number of arguments' error for this command"
        return AnsiCommandError("wrong number of arguments for command sequence: %r" % (str(self),))
+
+ #=========================================================
+ #c0, c1 init helpers
+ #=========================================================
    def init_default(self):
        "generic fallback for codes with no codeset- or code-specific init handler"
        pass
+
    def init_c0_1b(self):
        "reject a bare ESC byte - it only ever introduces a c1/icf/cseq code"
        raise AnsiParseError, "raw 'ESC' is not a valid control code"
+
    def init_c1_5b(self):
        "reject ESC+'[' as a c1 code - it's the start of a control sequence"
        raise AnsiParseError, "raw 'ESC' + '[' is not a valid control code"
+
+ #=========================================================
+ #cseq init helpers
+ #=========================================================
    def init_cseq_default(self):
        "default for unrecognized cseq commands: just parse argstr as ints (if any)"
        self.args = self.parse_cseq_int_args()
+
    def init_cseq_A(self):
        "cursor movement (CUU/CUD/CUF/CUB): one optional offset arg, defaulting to 1"
        args = self.args = self.parse_cseq_int_args()
        if not args:
            self.offset = 1
        elif len(args) == 1:
            self.offset, = args
        else:
            raise self._tma_error()
    #B/C/D (down/forward/back) share the same single-offset argument shape
    init_cseq_D = init_cseq_C = init_cseq_B = init_cseq_A
+
    def init_cseq_f(self):
        "HVP ('f'): same semantics as CUP; normalized to code 'H' for consumers"
        self.code = "H"
        self.init_cseq_H()
+
+ def init_cseq_H(self):
+ #TODO: support row or col being None
+ args = self.args = self.parse_cseq_int_args()
+ if not args:
+ self.col = self.row = 0
+ elif len(args) == 2:
+ self.col, self.row = args
+ else:
+ raise self._wna_error()
+
    def init_cseq_J(self):
        "ED (erase in display): one optional mode arg, defaulting to 0"
        args = self.args = self.parse_cseq_int_args()
        if not args:
            self.mode = 0
        elif len(args) == 1:
            self.mode, = args
            ##if not (0 <= self.mode < 3):
            ## raise AnsiCommandError, "unknown clear-screen mode: %r" % (str(self),)
        else:
            raise self._tma_error()
+
    def init_cseq_K(self):
        "EL (erase in line): one optional mode arg, defaulting to 0"
        args = self.args = self.parse_cseq_int_args()
        if not args:
            self.mode = 0
        elif len(args) == 1:
            self.mode, = args
            ##if not (0 <= self.mode < 3):
            ## raise AnsiCommandError, "unknown clear-line mode: %r" % (str(self),)
        else:
            raise self._tma_error()
+
    def init_cseq_m(self):
        "SGR (select graphic rendition): just verify the args parse as ints"
        args = self.args = self.parse_cseq_int_args()
        ##if not args:
        ## raise AnsiCommandError, "no styles listed: %r" % (str(self),)
        ##if any(x < 0 or x > 100 for x in args):
        ## raise AnsiCommandError, "style value out of bounds: %r" % (str(self),)
+
+ #=========================================================
+ #eoc
+ #=========================================================
+
+#=========================================================
+#utilities
+#=========================================================
+
def is_ansi_code(obj):
    "duck-typed check: treat anything with 'codeset' and 'code' attributes as an AnsiCode"
    return all(hasattr(obj, name) for name in ("codeset", "code"))
+
def is_malformed_ansi_code(obj):
    "True if *obj* is an AnsiCode-like object flagged as malformed"
    if not is_ansi_code(obj):
        return False
    return obj.is_malformed()
+
def len_ansi_string(source):
    """return effective text length of an ansi string.

    .. todo::
        decide on whether cursor control codes should result in error,
        warning, or be ignored. for now, naively counting chars.
    """
    #sum the lengths of the raw-text chunks, skipping control codes
    return sum(
        len(elem)
        for elem in parse_ansi_string(source, rtype=iter)
        if not is_ansi_code(elem)
    )
+
def strip_ansi_string(source):
    "remove ansi escape codes from *source*, returning only the raw text"
    #"parse" policy so malformed codes become AnsiCode objects and get dropped too
    chunks = []
    for elem in parse_ansi_string(source, rtype=iter,
                                  malformed_codes="parse"):
        if not is_ansi_code(elem):
            chunks.append(elem)
    return "".join(chunks)
+
def parse_ansi_string(source, rtype=list, malformed_codes="ignore"):
    """parse string, yielding chunks of raw text and ansi control codes.

    :arg source:
        source string to parse

    :param rtype:
        optionally you can specify the return type of this function;
        the common values are ``list``, and ``iter``.

    :param malformed_codes:
        this sets the policy for how this function
        handles malformed command codes.

        * ``ignore`` (the default) -- malformed codes are ignored, and kept as literal text.
        * ``parse`` -- malformed codes are parsed and returned
          in :class:`AnsiCode` instances which have no code or codeset specified.
        * ``strip`` -- malformed codes are removed entirely.
        * ``raise`` -- malformed codes cause a ValueError to be raised.

    :returns:
        a sequence (of type *rtype*) of 1 or more elements,
        which are either raw strings, or :class:`AnsiCode` instances.

    """
    assert malformed_codes in ("ignore","parse","strip","raise")
    if malformed_codes == "strip":
        #implemented on top of "parse": drop the malformed placeholders
        result = (
            elem
            for elem in _parse_ansi_helper(source, "parse")
            if not is_malformed_ansi_code(elem)
        )
    else:
        result = _parse_ansi_helper(source, malformed_codes)
    if rtype is iter:
        return result
    else:
        return rtype(result)
+
+def _parse_ansi_helper(source, malformed_codes):
+ if not source:
+ yield ""
+ return
+ if malformed_codes == "raise":
+ def create_bad(source, reason):
+ raise ValueError, "%s: %r" % (reason, source)
+ create = AnsiCode.parse
+ elif malformed_codes == "ignore":
+ def create_bad(source,reason):
+ log.warning("ignoring malformed control code: %r: %r", reason, source)
+ return source
+ def create(source):
+ ok, result = AnsiCode.try_parse(source)
+ if ok:
+ return result
+ else:
+ log.warning("ignoring malformed control code: %r: %r", result, source)
+ return source
+ else:
+ assert malformed_codes == "parse"
+ create_bad = AnsiCode.create_malformed
+ def create(source):
+ ok, result = AnsiCode.try_parse(source)
+ if ok:
+ return result
+ else:
+ result = str(result)
+ log.warning("encounterd malformed control code: %r: %r", result, source)
+ return create_bad(source, result)
+ state = 0
+ #0 - scanning raw text into buffer
+ #1 - saw ESC -- looking for next char
+ #2 - saw ESC+[ -- scanning cseq into buffer
+ buf = ""
+ for c in source:
+ if state == 1:
+ #parsing escape code
+ assert buf == "\x1b"
+ if c == '[':
+ #it's a cseq
+ buf += c
+ state = 2
+ continue
+ else:
+ #assume it's a 2 char escape code (c1, icf)
+ buf += c
+ yield create(buf)
+ state = 0
+ buf = ""
+ continue
+
+ elif state == 2:
+ assert buf.startswith("\x1b[")
+
+ if '\x20' <= c < '\x40':
+ #parse cseq param or intermediate byte
+ buf += c
+ continue
+ elif '\x40' <= c < '\x7F':
+ #parse cseq final byte
+ buf += c
+ yield create(buf)
+ buf = ""
+ state = 0
+ continue
+ else:
+ #cseq should contain no other bytes,
+ #so something's invalid here
+ yield create_bad("\x1b[" + buf, "string contains unterminated control code")
+ #fall through to state 0, below
+ state = 0
+
+ #this is down here in case a higher state finishes early
+ if state == 0:
+ #parsing raw text
+ if c < '\x20':
+ #it's a c0 code...
+ if c == "\x1b":
+ #jump to escape handling (c1,icf,cseq)
+ if buf:
+ yield buf
+ buf = c
+ state = 1
+ continue
+ elif c in '\r\n\t':
+ #treat these codes like regular characters.
+ #XXX: should caller be able to set policy?
+ buf +=c
+ continue
+ else:
+ #all others, yeild c0 code
+ if buf:
+ yield buf
+ buf = ""
+ yield create(c)
+ continue
+ else:
+ buf += c
+ continue
+
+ if state == 0:
+ if buf:
+ yield buf
+ else:
+ yield create_bad(buf, "string ends with unterminated control code")
+
+#=========================================================
+#streams
+#=========================================================
class AnsiStripper(object):
    """wraps another stream, removing ansi escape codes from all text
    written through it before passing it on to the original stream.

    all attributes other than write/writelines are proxied to the
    wrapped stream unchanged.
    """
    stream = None #the wrapped stream object

    def __init__(self, stream):
        self.stream = stream

    def __getattr__(self, attr):
        #delegate everything not defined here to the wrapped stream
        return getattr(self.stream, attr)

    def write(self, text):
        "write *text* to the wrapped stream with ansi codes removed"
        write = self.stream.write
        for elem in parse_ansi_string(text, rtype=iter, malformed_codes="parse"):
            if not is_ansi_code(elem):
                write(elem)

    def writelines(self, seq):
        "write each element of *seq*, stripping ansi codes from each"
        for elem in seq:
            #BUGFIX: was self.write(seq), which wrote the whole sequence
            #once per element (and broke for non-string sequences)
            self.write(elem)
+
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/unstable/bpsdoc/__init__.py b/bps/unstable/bpsdoc/__init__.py
new file mode 100644
index 0000000..3048668
--- /dev/null
+++ b/bps/unstable/bpsdoc/__init__.py
@@ -0,0 +1,8 @@
"""
This module contains a few small sphinx extensions.
They are mainly used to help with the generation
of BPS's own documentation, but some other projects
use them as well, so they are kept here.
"""
import os.path

#absolute path of the directory containing this package,
#used as the sphinx html_theme_path entry
theme_path = os.path.dirname(os.path.abspath(__file__))
diff --git a/bps/unstable/bpsdoc/ast/layout.html b/bps/unstable/bpsdoc/ast/layout.html
new file mode 100644
index 0000000..3384e90
--- /dev/null
+++ b/bps/unstable/bpsdoc/ast/layout.html
@@ -0,0 +1,29 @@
+{% extends "basic/layout.html" %}
+
+{# include release in root breadcrumb #}
+{% block rootrellink %}
+ <li><a href="{{ pathto("index") }}"><b>{{project}} v{{release}}</b> Documentation</a> &raquo; </li>
+{% endblock %}
+
+{# have logo go to index, not TOC #}
+{# FIXME: is there a way to read conf.py setting, so user can set index_doc or we can read master_doc? #}
+{%- block sidebarlogo %}
+ {%- if logo %}
+ <p class="logo"><a href="{{ pathto("index") }}">
+ <img class="logo" src="{{ pathto('_static/' + logo, 1) }}" alt="Logo"/>
+ </a></p>
+ {%- endif %}
+{%- endblock %}
+
{# give relbars separate classes for theming #}
+{%- block relbar1 %}
+ <div class="relbar-top">
+ {{ super() }}
+ </div>
+{% endblock %}
+
+{%- block relbar2 %}
+ <div class="relbar-bottom">
+ {{ super() }}
+ </div>
+{% endblock %}
diff --git a/bps/unstable/bpsdoc/ast/static/ast-website.css b/bps/unstable/bpsdoc/ast/static/ast-website.css
new file mode 100644
index 0000000..d720321
--- /dev/null
+++ b/bps/unstable/bpsdoc/ast/static/ast-website.css
@@ -0,0 +1,733 @@
+/**
+ * Sphinx Doc Design
+ */
+
+body {
+ font-family: sans-serif;
+ font-size: 100%;
+ background: #D4BCA5 url(bg_top.jpg) repeat-x;
+ color: #000;
+ margin: 0;
+ padding: 0;
+}
+
+/* :::: LAYOUT :::: */
+
+div.document {
+ background-color: #FBF8F4;
+ margin: auto;
+ max-width: 1024px;
+}
+
+div.documentwrapper {
+ float: left;
+ width: 100%;
+}
+
+div.bodywrapper {
+ margin: 0 0 0 230px;
+}
+
+div.body {
+ background-color: white;
+ padding: 20px 20px 30px 20px;
+ border-left: 2px dotted #F5F2EE;
+ min-height: 400px;
+}
+
+div.sphinxsidebarwrapper {
+ padding: 10px 5px 0 10px;
+}
+
+div.sphinxsidebar {
+ float: left;
+ width: 230px;
+ margin-left: -100%;
+ font-size: 90%;
+}
+
+div.clearer {
+ clear: both;
+}
+
+div.footer {
+ color: #fff;
+ width: 100%;
+ padding: 9px 0 9px 0;
+ text-align: center;
+ font-size: 75%;
+}
+
+div.footer a {
+ color: #fff;
+ text-decoration: underline;
+}
+
+div.footer sep
+{
+ font-weight: bold;
+ color: #854D15;
+}
+
+div.relbar-top
+{
+ background-color: #FFFEFC;
+ border-bottom:5px solid #854D15;
+}
+
+div.relbar-bottom div.related
+{
+ background-color: #FFFEFC;
+ border-bottom:5px solid #854D15;
+}
+
+div.related {
+/* background-color: #FFFEFC;
+ border-bottom:5px solid #854D15;*/
+/* color: #fff;*/
+ line-height: 30px;
+ font-size: 90%;
+ margin: auto;
+ max-width: 1024px;
+}
+
+div.related h3 {
+ display: none;
+}
+
+div.related ul {
+ margin: 0;
+ padding: 0 0 0 10px;
+ list-style: none;
+}
+
+div.related li {
+ display: inline;
+}
+
+div.related li.right {
+ float: right;
+ margin-right: 5px;
+}
+
+div.related a {
+/* color: white;*/
+}
+
+/* ::: TOC :::: */
+div.sphinxsidebar h3 {
+ font-family: 'Trebuchet MS', sans-serif;
+/* color: white; */
+ font-size: 1.3em;
+ font-weight: normal;
+ margin: 0;
+ padding: 0;
+ border-bottom:2px dotted #E3D4C5;
+ color: #D4BCA5;
+}
+
+div.sphinxsidebar h3 a {
+ color: #D4BCA5;
+/* color: white; */
+}
+
+div.sphinxsidebar h4 {
+ font-family: 'Trebuchet MS', sans-serif;
+/* color: white; */
+ font-size: 1.2em;
+ font-weight: normal;
+ margin: 5px 0 0 0;
+ padding: 0;
+ border-bottom:2px dotted #E3D4C5;
+ color: #D4BCA5;
+}
+
+div.sphinxsidebar p {
+/* color: white; */
+}
+
+div.sphinxsidebar p.topless {
+ margin: 5px 10px 10px 10px;
+}
+
+div.sphinxsidebar ul {
+ margin: 10px;
+ padding: 0;
+ list-style: none;
+/* color: white; */
+}
+
+div.sphinxsidebar ul ul,
+div.sphinxsidebar ul.want-points {
+ margin-left: 20px;
+ list-style: square;
+}
+
+div.sphinxsidebar ul ul {
+ margin-top: 0;
+ margin-bottom: 0;
+}
+
+div.sphinxsidebar a {
+/* color: #98dbcc; */
+}
+
+div.sphinxsidebar form {
+ margin-top: 10px;
+}
+
+div.sphinxsidebar input {
+ border: 1px solid #D4BCA5;
+ font-family: sans-serif;
+ font-size: 1em;
+}
+
+/* :::: MODULE CLOUD :::: */
+div.modulecloud {
+ margin: -5px 10px 5px 10px;
+ padding: 10px;
+ line-height: 160%;
+ border: 1px solid #cbe7e5;
+ background-color: #f2fbfd;
+}
+
+div.modulecloud a {
+ padding: 0 5px 0 5px;
+}
+
+/* :::: SEARCH :::: */
+ul.search {
+ margin: 10px 0 0 20px;
+ padding: 0;
+}
+
+ul.search li {
+ padding: 5px 0 5px 20px;
+ background-image: url(file.png);
+ background-repeat: no-repeat;
+ background-position: 0 7px;
+}
+
+ul.search li a {
+ font-weight: bold;
+}
+
+ul.search li div.context {
+ color: #888;
+ margin: 2px 0 0 30px;
+ text-align: left;
+}
+
+ul.keywordmatches li.goodmatch a {
+ font-weight: bold;
+}
+
+/* :::: COMMON FORM STYLES :::: */
+
+div.actions {
+ padding: 5px 10px 5px 10px;
+ border-top: 1px solid #cbe7e5;
+ border-bottom: 1px solid #cbe7e5;
+ background-color: #e0f6f4;
+}
+
+form dl {
+ color: #333;
+}
+
+form dt {
+ clear: both;
+ float: left;
+ min-width: 110px;
+ margin-right: 10px;
+ padding-top: 2px;
+}
+
+input#homepage {
+ display: none;
+}
+
+div.error {
+ margin: 5px 20px 0 0;
+ padding: 5px;
+ border: 1px solid #d00;
+ font-weight: bold;
+}
+
+/* :::: INDEX PAGE :::: */
+
+table.contentstable {
+ width: 90%;
+}
+
+table.contentstable p.biglink {
+ line-height: 150%;
+}
+
+a.biglink {
+ font-size: 1.3em;
+}
+
+span.linkdescr {
+ font-style: italic;
+ padding-top: 5px;
+ font-size: 90%;
+}
+
+/* :::: INDEX STYLES :::: */
+
+table.indextable td {
+ text-align: left;
+ vertical-align: top;
+}
+
+table.indextable dl, table.indextable dd {
+ margin-top: 0;
+ margin-bottom: 0;
+}
+
+table.indextable tr.pcap {
+ height: 10px;
+}
+
+table.indextable tr.cap {
+ margin-top: 10px;
+ background-color: #f2f2f2;
+}
+
+img.toggler {
+ margin-right: 3px;
+ margin-top: 3px;
+ cursor: pointer;
+}
+
+form.pfform {
+ margin: 10px 0 20px 0;
+}
+
+/* :::: GLOBAL STYLES :::: */
+
+.docwarning {
+ background-color: #ffe4e4;
+ padding: 10px;
+ margin: 0 -20px 0 -20px;
+ border-bottom: 1px solid #f66;
+}
+
+p.subhead {
+ font-weight: bold;
+ margin-top: 20px;
+}
+
+a {
+ color: #739BD5;
+ text-decoration: none;
+}
+
+a:hover {
+ text-decoration: underline;
+}
+
+div.body h1,
+div.body h2,
+div.body h3,
+div.body h4,
+div.body h5,
+div.body h6 {
+ font-family: 'Trebuchet MS', sans-serif;
+ background-color:#F5F0E9;
+ font-weight: normal;
+ color: black;
+ border-bottom:1px solid #D4BCA5;
+ margin: 20px 0 10px;
+ padding: 3px 0 3px 10px;
+}
+
+div.body h1 { margin: -10px -10px 0; font-size: 200%; border: 1px solid #D4BCA5; }
+div.body h2 { font-size: 160%; }
+div.body h3 { font-size: 140%; border-color: #E3D4C5; }
+div.body h4 { font-size: 120%; border-color: #E3D4C5; }
+div.body h5 { font-size: 110%; border-color: #E3D4C5; }
+div.body h6 { font-size: 100%; border-color: #E3D4C5; }
+
+a.headerlink {
+ color: #c60f0f;
+ font-size: 0.8em;
+ padding: 0 4px 0 4px;
+ text-decoration: none;
+ visibility: hidden;
+}
+
+h1:hover > a.headerlink,
+h2:hover > a.headerlink,
+h3:hover > a.headerlink,
+h4:hover > a.headerlink,
+h5:hover > a.headerlink,
+h6:hover > a.headerlink,
+dt:hover > a.headerlink {
+ visibility: visible;
+}
+
+a.headerlink:hover {
+ background-color: #c60f0f;
+ color: white;
+}
+
+div.body p, div.body dd, div.body li {
+ text-align: justify;
+ line-height: 130%;
+}
+
+div.body p.caption {
+ text-align: inherit;
+}
+
+div.body td {
+ text-align: left;
+}
+
+ul.fakelist {
+ list-style: none;
+ margin: 10px 0 10px 20px;
+ padding: 0;
+}
+
+.field-list ul {
+ padding-left: 1em;
+}
+
+.first {
+ margin-top: 0 !important;
+}
+
+/* "Footnotes" heading */
+p.rubric {
+ margin-top: 30px;
+ font-weight: bold;
+}
+
+/* Sidebars */
+
+div.sidebar {
+ margin: 0 0 0.5em 1em;
+ border: 1px solid #ddb;
+ padding: 7px 7px 0 7px;
+ background-color: #ffe;
+ width: 40%;
+ float: right;
+}
+
+p.sidebar-title {
+ font-weight: bold;
+}
+
+/* "Topics" */
+
+div.topic {
+ background-color: #eee;
+ border: 1px solid #ccc;
+ padding: 7px 7px 0 7px;
+ margin: 10px 0 10px 0;
+}
+
+p.topic-title {
+ font-size: 1.1em;
+ font-weight: bold;
+ margin-top: 10px;
+}
+
+/* Admonitions */
+
+div.admonition {
+ margin-top: 10px;
+ margin-bottom: 10px;
+ padding: 7px;
+}
+
+div.admonition dt {
+ font-weight: bold;
+}
+
+div.admonition dl {
+ margin-bottom: 0;
+}
+
+div.admonition p.admonition-title + p {
+ display: inline;
+}
+
+div.seealso {
+ background-color: #ffc;
+ border: 1px solid #ff6;
+}
+
+div.warning {
+ background-color: #ffe4e4;
+ border: 1px solid #f66;
+}
+
+div.note,
+div.admonition-todo
+{
+ background-color: #fafafa;
+ border: 1px solid #eee;
+}
+
+p.admonition-title {
+ margin: 0px 10px 5px 0px;
+ font-weight: bold;
+ display: inline;
+}
+
+p.admonition-title:after {
+ content: ":";
+}
+
+div.body p.centered {
+ text-align: center;
+ margin-top: 25px;
+}
+
+table.docutils {
+ border: 0;
+}
+
+table.docutils td, table.docutils th {
+ padding: 1px 8px 1px 0;
+ border-top: 0;
+ border-left: 0;
+ border-right: 0;
+ border-bottom: 1px solid #aaa;
+}
+
+table.docutils th.field-name
+{
+ position: absolute;
+ font-family: sans-serif;
+ font-weight: normal;
+}
+
+table.docutils td.field-body
+{
+ padding: 1.5em 0 0 1.5em;
+}
+
+table.docutils td.field-body dt
+{
+ font-style: italic;
+}
+
+table.field-list td, table.field-list th {
+ border: 0 !important;
+}
+
+table.footnote td, table.footnote th {
+ border: 0 !important;
+}
+
+.field-list ul {
+ margin: 0;
+ padding-left: 1em;
+}
+
+.field-list p {
+ margin: 0;
+}
+
+dl {
+ margin-bottom: 15px;
+ clear: both;
+}
+
+dl.function > dt,
+dl.attribute > dt,
+dl.class > dt,
+dl.exception > dt
+{
+ border-bottom: 3px dotted #E7EEF3;
+ padding: 1px 1px 1px 3px;
+}
+
+dl.attribute > dt { border-color: #E9E4F3; }
+dl.attribute > dt tt.descname { font-style: italic; }
+dl.class > dt { border-color: #F3EAE7; }
+dl.exception > dt { border-color: #F3EBDB; }
+
+tt.descclassname, tt.descname
+{
+ font-size: 120%;
+}
+
+dl.exception > dd,
+dl.class > dd,
+dl.function > dd,
+dl.attribute > dd
+{
+ padding-top: .1em;
+ padding-bottom: .75em;
+}
+
+dd p {
+ margin-top: 0px;
+}
+
+dd ul, dd table {
+ margin-bottom: 10px;
+}
+
+dd {
+ margin-top: 3px;
+ margin-bottom: 10px;
+ margin-left: 30px;
+}
+
+.refcount {
+ color: #060;
+}
+
+dt:target,
+.highlight {
+ background-color: #fbe54e;
+}
+
+dl.glossary dt {
+ font-weight: bold;
+ font-size: 1.1em;
+}
+
+th {
+ text-align: left;
+ padding-right: 5px;
+}
+
+pre {
+ padding: 5px;
+ background-color: #F1FFD4;
+ color: #333;
+ border: 1px solid #D5E6B3;
+ border-left: none;
+ border-right: none;
+ overflow: auto;
+}
+
+td.linenos pre {
+ padding: 5px 0px;
+ border: 0;
+ background-color: transparent;
+ color: #aaa;
+}
+
+table.highlighttable {
+ margin-left: 0.5em;
+}
+
+table.highlighttable td {
+ padding: 0 0.5em 0 0.5em;
+}
+
+tt {
+ background-color: #ECF0F3;
+ padding: 1px 2px 1px 2px;
+ font-size: 0.95em;
+}
+
+tt.descname {
+ background-color: transparent;
+ font-weight: bold;
+ font-size: 1.2em;
+}
+
+tt.descclassname {
+ background-color: transparent;
+}
+
+tt.xref, a tt {
+ background-color: transparent;
+ font-weight: bold;
+}
+
+.footnote:target { background-color: #ffa }
+
+h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt {
+ background-color: transparent;
+}
+
+.optional {
+ font-size: 1.3em;
+}
+
+.versionmodified {
+ font-style: italic;
+}
+
+form.comment {
+ margin: 0;
+ padding: 10px 30px 10px 30px;
+ background-color: #eee;
+}
+
+form.comment h3 {
+ background-color: #326591;
+ color: white;
+ margin: -10px -30px 10px -30px;
+ padding: 5px;
+ font-size: 1.4em;
+}
+
+form.comment input,
+form.comment textarea {
+ border: 1px solid #ccc;
+ padding: 2px;
+ font-family: sans-serif;
+ font-size: 100%;
+}
+
+form.comment input[type="text"] {
+ width: 240px;
+}
+
+form.comment textarea {
+ width: 100%;
+ height: 200px;
+ margin-bottom: 10px;
+}
+
+.system-message {
+ background-color: #fda;
+ padding: 5px;
+ border: 3px solid red;
+}
+
+img.math {
+ vertical-align: middle;
+}
+
+div.math p {
+ text-align: center;
+}
+
+span.eqno {
+ float: right;
+}
+
+img.logo {
+ border: 0;
+}
+
+/* :::: PRINT :::: */
+@media print {
+ div.document,
+ div.documentwrapper,
+ div.bodywrapper {
+ margin: 0;
+ width : 100%;
+ }
+
+ div.sphinxsidebar,
+ div.related,
+ div.footer,
+ div#comments div.new-comment-box,
+ #top-link {
+ display: none;
+ }
+}
diff --git a/bps/unstable/bpsdoc/ast/static/ast.css b/bps/unstable/bpsdoc/ast/static/ast.css
new file mode 100644
index 0000000..2734b35
--- /dev/null
+++ b/bps/unstable/bpsdoc/ast/static/ast.css
@@ -0,0 +1,153 @@
+/*****************************************************
+ * Additional styles laid on top of default theme
+ *****************************************************/
+
+@import url("default.css");
+
+/*****************************************************
+ * limit page width to 1024px
+ *****************************************************/
+
+div.related, div.document
+{
+ margin: 0 auto;
+ width: 1024px;
+}
+
+div.relbar-top
+{
+ margin-top: 1em;
+}
+
+/*****************************************************
+ * make sure small pages lay out right
+ *****************************************************/
+div.body
+{
+ min-height: 550px;
+}
+
+/*****************************************************
+ * provide styling for TODO
+ *****************************************************/
+div.admonition.caution {
+ background-color: #FFF0E4;
+ border: 1px solid #FF9966;
+}
+
+div.admonition-todo {
+ background-color: #EEE6E0;
+ border: 1px solid #D4CDC7;
+}
+
+div#todos p.admonition-title
+{
+ font-weight: normal;
+ color: #AAA;
+ font-size: 70%;
+}
+
+div#todos div.admonition-todo + p
+{
+ font-size: 70%;
+ text-align: right;
+ margin-top: -.5em;
+ margin-bottom: 1.5em;
+ color: #AAA;
+}
+
+div#todos div.admonition-todo + p a
+{
+ font-size: 130%;
+}
+
+/*****************************************************
+ * add more whitespace to definitions
+ *****************************************************/
+dl.function,
+dl.method,
+dl.attribute,
+dl.class,
+dl.exception,
+dl.data
+{
+ margin-bottom: 1.5em;
+}
+
+/*****************************************************
+ * add more whitespace to parameter lists
+ *****************************************************/
+td.field-body > ul.first.simple > li,
+td.field-body > p.first
+{
+ margin-bottom: 1em;
+}
+
+td.field-body > ul.first.simple > li > em,
+td.field-body > em
+{
+ border-bottom: 1px solid #DEE6ED;
+ padding: 2px 4px;
+ background: #ECF0F3;
+}
+
+/*****************************************************
+ * css colorization of object definitions,
+ * adds colored line underneath definition title.
+ * color scheme used:
+ * callables (function, method) - green
+ * attributes - purple
+ * classes - red
+ * exceptions - orange
+ * data - blue
+ *****************************************************/
+
+dl.function > dt,
+dl.method > dt,
+dl.attribute > dt,
+dl.class > dt,
+dl.exception > dt,
+dl.data > dt
+{
+ border-bottom: 2px solid #9AB9CE;
+ background: #E2ECF3;
+ padding: .1em 1px 1px 3px;
+}
+
+dl.function > dt { border-color: #8BC38B; background: #D8E8D8; }
+dl.method > dt { border-color: #AA96C2; background: #E8E1F2; }
+dl.attribute > dt { border-color: #9996C2; background: #E7E6F6; }
+dl.attribute > dt tt.descname { font-style: italic; }
+dl.class > dt { border-color: #C8A69A; background: #E8DCD8; }
+dl.exception > dt { border-color: #F8A69A; background: #F2E3E1; }
+
+/*****************************************************
+ * css colorization for index page, using styles
+ * provided by customize.py extension
+ *****************************************************/
+
+table.indextable span.category
+{
+ font-size: 80%;
+ color: #84ADBE;
+}
+
+table.indextable span.category.function { color: #8BC38B; }
+table.indextable span.category.method { color: #AA96C2; }
+table.indextable span.category.attribute { color: #9996C2; }
+table.indextable span.category.class { color: #C8A69A; }
+table.indextable span.category.module { color: #9AB9CE; }
+
+table.indextable span.subject
+{
+ font-weight: bold;
+}
+
+table.indextable td > dl > dt
+{
+ margin-bottom: .5em;
+}
+
+/*****************************************************
+ * EOF
+ *****************************************************/
diff --git a/bps/unstable/bpsdoc/ast/static/bg_top.jpg b/bps/unstable/bpsdoc/ast/static/bg_top.jpg
new file mode 100644
index 0000000..c1e5775
--- /dev/null
+++ b/bps/unstable/bpsdoc/ast/static/bg_top.jpg
Binary files differ
diff --git a/bps/unstable/bpsdoc/ast/theme.conf b/bps/unstable/bpsdoc/ast/theme.conf
new file mode 100644
index 0000000..33a4bff
--- /dev/null
+++ b/bps/unstable/bpsdoc/ast/theme.conf
@@ -0,0 +1,3 @@
+[theme]
+inherit = default
+stylesheet = ast.css
diff --git a/bps/unstable/bpsdoc/cloud/layout.html b/bps/unstable/bpsdoc/cloud/layout.html
new file mode 100644
index 0000000..597732b
--- /dev/null
+++ b/bps/unstable/bpsdoc/cloud/layout.html
@@ -0,0 +1,51 @@
+{% extends "basic/layout.html" %}
+
+{%- set theme_roottarget = (theme_roottarget == "<master>" and master_doc or theme_roottarget) %}
+{%- set theme_logotarget = (theme_logotarget == "<root>" and theme_roottarget or (theme_logotarget == "<master>" and master_doc or theme_logotarget)) %}
+{%- set footdelim = footdelim is not defined and ' &nbsp; / &nbsp; ' or footdelim %}
+
+{# make root link redirectable #}
+{% block rootrellink %}
+ <li><a href="{{ pathto(theme_roottarget) }}">{{project}} Documentation</a> &raquo; </li>
+{% endblock %}
+
+{# make logo link redirectable #}
+{%- block sidebarlogo %}
+ {%- if logo %}
+ <p class="logo"><a href="{{ pathto(theme_logotarget) }}">
+ <img class="logo" src="{{ pathto('_static/' + logo, 1) }}" alt="Logo"/>
+ </a></p>
+ {%- endif %}
+{%- endblock %}
+
+{# give relbars separate classes for theming #}
+{%- block relbar1 %}
+ <div class="relbar-top">
+ {{ super() }}
+ </div>
+{% endblock %}
+
+{%- block relbar2 %}
+ <div class="relbar-bottom">
+ {{ super() }}
+ </div>
+{% endblock %}
+
+{# add separators into footer #}
+{%- block footer %}
+ <div class="footer">
+ {%- if hasdoc('copyright') %}
+ {% trans path=pathto('copyright'), copyright=copyright|e %}&copy; <a href="{{ path }}">Copyright</a> {{ copyright }}.{% endtrans %}
+ {%- else %}
+ {% trans copyright=copyright|e %}&copy; Copyright {{ copyright }}.{% endtrans %}
+ {%- endif %}
+ {%- if last_updated %}
+ {{ footdelim }}
+ {% trans last_updated=last_updated|e %}Last updated on {{ last_updated }}.{% endtrans %}
+ {%- endif %}
+ {%- if show_sphinx %}
+ {{ footdelim }}
+ {% trans sphinx_version=sphinx_version|e %}Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> {{ sphinx_version }}.{% endtrans %}
+ {%- endif %}
+ </div>
+{%- endblock %}
diff --git a/bps/unstable/bpsdoc/cloud/static/ast-website.css b/bps/unstable/bpsdoc/cloud/static/ast-website.css
new file mode 100644
index 0000000..d720321
--- /dev/null
+++ b/bps/unstable/bpsdoc/cloud/static/ast-website.css
@@ -0,0 +1,733 @@
+/**
+ * Sphinx Doc Design
+ */
+
+body {
+ font-family: sans-serif;
+ font-size: 100%;
+ background: #D4BCA5 url(bg_top.jpg) repeat-x;
+ color: #000;
+ margin: 0;
+ padding: 0;
+}
+
+/* :::: LAYOUT :::: */
+
+div.document {
+ background-color: #FBF8F4;
+ margin: auto;
+ max-width: 1024px;
+}
+
+div.documentwrapper {
+ float: left;
+ width: 100%;
+}
+
+div.bodywrapper {
+ margin: 0 0 0 230px;
+}
+
+div.body {
+ background-color: white;
+ padding: 20px 20px 30px 20px;
+ border-left: 2px dotted #F5F2EE;
+ min-height: 400px;
+}
+
+div.sphinxsidebarwrapper {
+ padding: 10px 5px 0 10px;
+}
+
+div.sphinxsidebar {
+ float: left;
+ width: 230px;
+ margin-left: -100%;
+ font-size: 90%;
+}
+
+div.clearer {
+ clear: both;
+}
+
+div.footer {
+ color: #fff;
+ width: 100%;
+ padding: 9px 0 9px 0;
+ text-align: center;
+ font-size: 75%;
+}
+
+div.footer a {
+ color: #fff;
+ text-decoration: underline;
+}
+
+div.footer .sep
+{
+ font-weight: bold;
+ color: #854D15;
+}
+
+div.relbar-top div.related
+{
+ background-color: #FFFEFC;
+ border-bottom:5px solid #854D15;
+}
+
+div.relbar-bottom div.related
+{
+ background-color: #FFFEFC;
+ border-bottom:5px solid #854D15;
+}
+
+div.related {
+/* background-color: #FFFEFC;
+ border-bottom:5px solid #854D15;*/
+/* color: #fff;*/
+ line-height: 30px;
+ font-size: 90%;
+ margin: auto;
+ max-width: 1024px;
+}
+
+div.related h3 {
+ display: none;
+}
+
+div.related ul {
+ margin: 0;
+ padding: 0 0 0 10px;
+ list-style: none;
+}
+
+div.related li {
+ display: inline;
+}
+
+div.related li.right {
+ float: right;
+ margin-right: 5px;
+}
+
+div.related a {
+/* color: white;*/
+}
+
+/* ::: TOC :::: */
+div.sphinxsidebar h3 {
+ font-family: 'Trebuchet MS', sans-serif;
+/* color: white; */
+ font-size: 1.3em;
+ font-weight: normal;
+ margin: 0;
+ padding: 0;
+ border-bottom:2px dotted #E3D4C5;
+ color: #D4BCA5;
+}
+
+div.sphinxsidebar h3 a {
+ color: #D4BCA5;
+/* color: white; */
+}
+
+div.sphinxsidebar h4 {
+ font-family: 'Trebuchet MS', sans-serif;
+/* color: white; */
+ font-size: 1.2em;
+ font-weight: normal;
+ margin: 5px 0 0 0;
+ padding: 0;
+ border-bottom:2px dotted #E3D4C5;
+ color: #D4BCA5;
+}
+
+div.sphinxsidebar p {
+/* color: white; */
+}
+
+div.sphinxsidebar p.topless {
+ margin: 5px 10px 10px 10px;
+}
+
+div.sphinxsidebar ul {
+ margin: 10px;
+ padding: 0;
+ list-style: none;
+/* color: white; */
+}
+
+div.sphinxsidebar ul ul,
+div.sphinxsidebar ul.want-points {
+ margin-left: 20px;
+ list-style: square;
+}
+
+div.sphinxsidebar ul ul {
+ margin-top: 0;
+ margin-bottom: 0;
+}
+
+div.sphinxsidebar a {
+/* color: #98dbcc; */
+}
+
+div.sphinxsidebar form {
+ margin-top: 10px;
+}
+
+div.sphinxsidebar input {
+ border: 1px solid #D4BCA5;
+ font-family: sans-serif;
+ font-size: 1em;
+}
+
+/* :::: MODULE CLOUD :::: */
+div.modulecloud {
+ margin: -5px 10px 5px 10px;
+ padding: 10px;
+ line-height: 160%;
+ border: 1px solid #cbe7e5;
+ background-color: #f2fbfd;
+}
+
+div.modulecloud a {
+ padding: 0 5px 0 5px;
+}
+
+/* :::: SEARCH :::: */
+ul.search {
+ margin: 10px 0 0 20px;
+ padding: 0;
+}
+
+ul.search li {
+ padding: 5px 0 5px 20px;
+ background-image: url(file.png);
+ background-repeat: no-repeat;
+ background-position: 0 7px;
+}
+
+ul.search li a {
+ font-weight: bold;
+}
+
+ul.search li div.context {
+ color: #888;
+ margin: 2px 0 0 30px;
+ text-align: left;
+}
+
+ul.keywordmatches li.goodmatch a {
+ font-weight: bold;
+}
+
+/* :::: COMMON FORM STYLES :::: */
+
+div.actions {
+ padding: 5px 10px 5px 10px;
+ border-top: 1px solid #cbe7e5;
+ border-bottom: 1px solid #cbe7e5;
+ background-color: #e0f6f4;
+}
+
+form dl {
+ color: #333;
+}
+
+form dt {
+ clear: both;
+ float: left;
+ min-width: 110px;
+ margin-right: 10px;
+ padding-top: 2px;
+}
+
+input#homepage {
+ display: none;
+}
+
+div.error {
+ margin: 5px 20px 0 0;
+ padding: 5px;
+ border: 1px solid #d00;
+ font-weight: bold;
+}
+
+/* :::: INDEX PAGE :::: */
+
+table.contentstable {
+ width: 90%;
+}
+
+table.contentstable p.biglink {
+ line-height: 150%;
+}
+
+a.biglink {
+ font-size: 1.3em;
+}
+
+span.linkdescr {
+ font-style: italic;
+ padding-top: 5px;
+ font-size: 90%;
+}
+
+/* :::: INDEX STYLES :::: */
+
+table.indextable td {
+ text-align: left;
+ vertical-align: top;
+}
+
+table.indextable dl, table.indextable dd {
+ margin-top: 0;
+ margin-bottom: 0;
+}
+
+table.indextable tr.pcap {
+ height: 10px;
+}
+
+table.indextable tr.cap {
+ margin-top: 10px;
+ background-color: #f2f2f2;
+}
+
+img.toggler {
+ margin-right: 3px;
+ margin-top: 3px;
+ cursor: pointer;
+}
+
+form.pfform {
+ margin: 10px 0 20px 0;
+}
+
+/* :::: GLOBAL STYLES :::: */
+
+.docwarning {
+ background-color: #ffe4e4;
+ padding: 10px;
+ margin: 0 -20px 0 -20px;
+ border-bottom: 1px solid #f66;
+}
+
+p.subhead {
+ font-weight: bold;
+ margin-top: 20px;
+}
+
+a {
+ color: #739BD5;
+ text-decoration: none;
+}
+
+a:hover {
+ text-decoration: underline;
+}
+
+div.body h1,
+div.body h2,
+div.body h3,
+div.body h4,
+div.body h5,
+div.body h6 {
+ font-family: 'Trebuchet MS', sans-serif;
+ background-color:#F5F0E9;
+ font-weight: normal;
+ color: black;
+ border-bottom:1px solid #D4BCA5;
+ margin: 20px 0 10px;
+ padding: 3px 0 3px 10px;
+}
+
+div.body h1 { margin: -10px -10px 0; font-size: 200%; border: 1px solid #D4BCA5; }
+div.body h2 { font-size: 160%; }
+div.body h3 { font-size: 140%; border-color: #E3D4C5; }
+div.body h4 { font-size: 120%; border-color: #E3D4C5; }
+div.body h5 { font-size: 110%; border-color: #E3D4C5; }
+div.body h6 { font-size: 100%; border-color: #E3D4C5; }
+
+a.headerlink {
+ color: #c60f0f;
+ font-size: 0.8em;
+ padding: 0 4px 0 4px;
+ text-decoration: none;
+ visibility: hidden;
+}
+
+h1:hover > a.headerlink,
+h2:hover > a.headerlink,
+h3:hover > a.headerlink,
+h4:hover > a.headerlink,
+h5:hover > a.headerlink,
+h6:hover > a.headerlink,
+dt:hover > a.headerlink {
+ visibility: visible;
+}
+
+a.headerlink:hover {
+ background-color: #c60f0f;
+ color: white;
+}
+
+div.body p, div.body dd, div.body li {
+ text-align: justify;
+ line-height: 130%;
+}
+
+div.body p.caption {
+ text-align: inherit;
+}
+
+div.body td {
+ text-align: left;
+}
+
+ul.fakelist {
+ list-style: none;
+ margin: 10px 0 10px 20px;
+ padding: 0;
+}
+
+.field-list ul {
+ padding-left: 1em;
+}
+
+.first {
+ margin-top: 0 !important;
+}
+
+/* "Footnotes" heading */
+p.rubric {
+ margin-top: 30px;
+ font-weight: bold;
+}
+
+/* Sidebars */
+
+div.sidebar {
+ margin: 0 0 0.5em 1em;
+ border: 1px solid #ddb;
+ padding: 7px 7px 0 7px;
+ background-color: #ffe;
+ width: 40%;
+ float: right;
+}
+
+p.sidebar-title {
+ font-weight: bold;
+}
+
+/* "Topics" */
+
+div.topic {
+ background-color: #eee;
+ border: 1px solid #ccc;
+ padding: 7px 7px 0 7px;
+ margin: 10px 0 10px 0;
+}
+
+p.topic-title {
+ font-size: 1.1em;
+ font-weight: bold;
+ margin-top: 10px;
+}
+
+/* Admonitions */
+
+div.admonition {
+ margin-top: 10px;
+ margin-bottom: 10px;
+ padding: 7px;
+}
+
+div.admonition dt {
+ font-weight: bold;
+}
+
+div.admonition dl {
+ margin-bottom: 0;
+}
+
+div.admonition p.admonition-title + p {
+ display: inline;
+}
+
+div.seealso {
+ background-color: #ffc;
+ border: 1px solid #ff6;
+}
+
+div.warning {
+ background-color: #ffe4e4;
+ border: 1px solid #f66;
+}
+
+div.note,
+div.admonition-todo
+{
+ background-color: #fafafa;
+ border: 1px solid #eee;
+}
+
+p.admonition-title {
+ margin: 0px 10px 5px 0px;
+ font-weight: bold;
+ display: inline;
+}
+
+p.admonition-title:after {
+ content: ":";
+}
+
+div.body p.centered {
+ text-align: center;
+ margin-top: 25px;
+}
+
+table.docutils {
+ border: 0;
+}
+
+table.docutils td, table.docutils th {
+ padding: 1px 8px 1px 0;
+ border-top: 0;
+ border-left: 0;
+ border-right: 0;
+ border-bottom: 1px solid #aaa;
+}
+
+table.docutils th.field-name
+{
+ position: absolute;
+ font-family: sans-serif;
+ font-weight: normal;
+}
+
+table.docutils td.field-body
+{
+ padding: 1.5em 0 0 1.5em;
+}
+
+table.docutils td.field-body dt
+{
+ font-style: italic;
+}
+
+table.field-list td, table.field-list th {
+ border: 0 !important;
+}
+
+table.footnote td, table.footnote th {
+ border: 0 !important;
+}
+
+.field-list ul {
+ margin: 0;
+ padding-left: 1em;
+}
+
+.field-list p {
+ margin: 0;
+}
+
+dl {
+ margin-bottom: 15px;
+ clear: both;
+}
+
+dl.function > dt,
+dl.attribute > dt,
+dl.class > dt,
+dl.exception > dt
+{
+ border-bottom: 3px dotted #E7EEF3;
+ padding: 1px 1px 1px 3px;
+}
+
+dl.attribute > dt { border-color: #E9E4F3; }
+dl.attribute > dt tt.descname { font-style: italic; }
+dl.class > dt { border-color: #F3EAE7; }
+dl.exception > dt { border-color: #F3EBDB; }
+
+tt.descclassname, tt.descname
+{
+ font-size: 120%;
+}
+
+dl.exception > dd,
+dl.class > dd,
+dl.function > dd,
+dl.attribute > dd
+{
+ padding-top: .1em;
+ padding-bottom: .75em;
+}
+
+dd p {
+ margin-top: 0px;
+}
+
+dd ul, dd table {
+ margin-bottom: 10px;
+}
+
+dd {
+ margin-top: 3px;
+ margin-bottom: 10px;
+ margin-left: 30px;
+}
+
+.refcount {
+ color: #060;
+}
+
+dt:target,
+.highlight {
+ background-color: #fbe54e;
+}
+
+dl.glossary dt {
+ font-weight: bold;
+ font-size: 1.1em;
+}
+
+th {
+ text-align: left;
+ padding-right: 5px;
+}
+
+pre {
+ padding: 5px;
+ background-color: #F1FFD4;
+ color: #333;
+ border: 1px solid #D5E6B3;
+ border-left: none;
+ border-right: none;
+ overflow: auto;
+}
+
+td.linenos pre {
+ padding: 5px 0px;
+ border: 0;
+ background-color: transparent;
+ color: #aaa;
+}
+
+table.highlighttable {
+ margin-left: 0.5em;
+}
+
+table.highlighttable td {
+ padding: 0 0.5em 0 0.5em;
+}
+
+tt {
+ background-color: #ECF0F3;
+ padding: 1px 2px 1px 2px;
+ font-size: 0.95em;
+}
+
+tt.descname {
+ background-color: transparent;
+ font-weight: bold;
+ font-size: 1.2em;
+}
+
+tt.descclassname {
+ background-color: transparent;
+}
+
+tt.xref, a tt {
+ background-color: transparent;
+ font-weight: bold;
+}
+
+.footnote:target { background-color: #ffa }
+
+h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt {
+ background-color: transparent;
+}
+
+.optional {
+ font-size: 1.3em;
+}
+
+.versionmodified {
+ font-style: italic;
+}
+
+form.comment {
+ margin: 0;
+ padding: 10px 30px 10px 30px;
+ background-color: #eee;
+}
+
+form.comment h3 {
+ background-color: #326591;
+ color: white;
+ margin: -10px -30px 10px -30px;
+ padding: 5px;
+ font-size: 1.4em;
+}
+
+form.comment input,
+form.comment textarea {
+ border: 1px solid #ccc;
+ padding: 2px;
+ font-family: sans-serif;
+ font-size: 100%;
+}
+
+form.comment input[type="text"] {
+ width: 240px;
+}
+
+form.comment textarea {
+ width: 100%;
+ height: 200px;
+ margin-bottom: 10px;
+}
+
+.system-message {
+ background-color: #fda;
+ padding: 5px;
+ border: 3px solid red;
+}
+
+img.math {
+ vertical-align: middle;
+}
+
+div.math p {
+ text-align: center;
+}
+
+span.eqno {
+ float: right;
+}
+
+img.logo {
+ border: 0;
+}
+
+/* :::: PRINT :::: */
+@media print {
+ div.document,
+ div.documentwrapper,
+ div.bodywrapper {
+ margin: 0;
+ width : 100%;
+ }
+
+ div.sphinxsidebar,
+ div.related,
+ div.footer,
+ div#comments div.new-comment-box,
+ #top-link {
+ display: none;
+ }
+}
diff --git a/bps/unstable/bpsdoc/cloud/static/ast.css_t b/bps/unstable/bpsdoc/cloud/static/ast.css_t
new file mode 100644
index 0000000..d12a11f
--- /dev/null
+++ b/bps/unstable/bpsdoc/cloud/static/ast.css_t
@@ -0,0 +1,248 @@
+/*****************************************************
+ * Additional styles laid on top of default theme
+ *****************************************************/
+
+@import url("default.css");
+
+/*****************************************************
+ * enforce max width and min height, and center document
+ *****************************************************/
+
+div.related, div.document
+{
+ margin: 0 auto;
+ max-width: {{ theme_docwidth }};
+}
+
+div.relbar-top
+{
+ margin-top: 1em;
+}
+
+div.body
+{
+ /* note: this is just a hack to prevent body from being shorter than sidebar */
+ min-height: {{ theme_docheight }};
+}
+
+/*****************************************************
+ * restyle relbar & doc
+ *****************************************************/
+
+div.relbar-top div.related
+{
+ border-radius: 8px 8px 0 0;
+ -moz-border-radius: 8px 8px 0 0;
+ -webkit-border-radius: 8px 8px 0 0;
+}
+
+div.relbar-bottom div.related
+{
+ border-radius: 0 0 8px 8px;
+ -moz-border-radius: 0 0 8px 8px;
+ -webkit-border-radius: 0 0 8px 8px;
+}
+
+/*****************************************************
+ * restyle the sidebar
+ *****************************************************/
+p.logo
+{
+ margin: 8px 0 0 0;
+ text-align: center;
+}
+
+div.sphinxsidebar h3,
+div.sphinxsidebar h4
+{
+ font-size: 80%;
+ border-bottom: 1px dashed {{ theme_sidebartrimcolor }};
+ margin-top: 24px;
+ margin-right: 16px;
+}
+
+div.sphinxsidebar input
+{
+ border-color: {{ theme_sidebartrimcolor }};
+}
+
+div.sphinxsidebar .searchtip
+{
+ color: {{ theme_sidebartrimcolor }};
+}
+
+/*****************************************************
+ * restyle headers
+ *****************************************************/
+
+div.body
+{
+ border-left: 1px solid {{theme_bodytrimcolor}};
+}
+
+div.body h1,
+div.body h2,
+div.body h3,
+div.body h4,
+div.body h5,
+div.body h6
+{
+ border: 1px solid {{ theme_bodytrimcolor }};
+ border-bottom: 1px solid {{ theme_headtrimcolor }};
+
+ margin-left: -15px;
+ margin-right: -15px;
+
+ padding-top: 10px;
+ padding-bottom: 10px;
+
+ border-radius: 10px;
+ -moz-border-radius: 10px;
+ -webkit-border-radius: 10px;
+}
+
+div.body h3,
+div.body h4,
+div.body h5,
+div.body h6
+{
+ margin-left: -5px;
+ margin-right: -5px;
+}
+
+div.body h1
+{
+ border-top: 0;
+ padding: 20px;
+ margin-left: -20px;
+ margin-right: -20px;
+ text-align: center;
+ border-radius: 0 0 10px 10px;
+ -moz-border-radius: 0 0 10px 10px;
+ -webkit-border-radius: 0 0 10px 10px;
+}
+
+/*****************************************************
+ * provide styling for TODO
+ *****************************************************/
+div.admonition.caution {
+ background-color: #FFF0E4;
+ border: 1px solid #FF9966;
+}
+
+div.admonition-todo {
+ background-color: #EEE6E0;
+ border: 1px solid #D4CDC7;
+}
+
+div#todos p.admonition-title
+{
+ font-weight: normal;
+ color: #AAA;
+ font-size: 70%;
+}
+
+div#todos div.admonition-todo + p
+{
+ font-size: 70%;
+ text-align: right;
+ margin-top: -.5em;
+ margin-bottom: 1.5em;
+ color: #AAA;
+}
+
+div#todos div.admonition-todo + p a
+{
+ font-size: 130%;
+}
+
+/*****************************************************
+ * add more whitespace to definitions
+ *****************************************************/
+dl.function,
+dl.method,
+dl.attribute,
+dl.class,
+dl.exception,
+dl.data
+{
+ margin-bottom: 1.5em;
+}
+
+/*****************************************************
+ * add more whitespace to parameter lists
+ *****************************************************/
+td.field-body > ul.first.simple > li,
+td.field-body > p.first
+{
+ margin-bottom: 1em;
+}
+
+td.field-body > ul.first.simple > li > em,
+td.field-body > em
+{
+ border-bottom: 1px solid #DEE6ED;
+ padding: 2px 4px;
+ background: #ECF0F3;
+}
+
+/*****************************************************
+ * css colorization of object definitions,
+ * adds colored line underneath definition title.
+ * color scheme used:
+ * callables (function, method) - green
+ * attributes - purple
+ * classes - red
+ * exceptions - orange
+ * data - blue
+ *****************************************************/
+
+dl.function > dt,
+dl.method > dt,
+dl.attribute > dt,
+dl.class > dt,
+dl.exception > dt,
+dl.data > dt
+{
+ border-bottom: 2px solid #9AB9CE;
+ background: #E2ECF3;
+ padding: .1em 1px 1px 3px;
+}
+
+dl.function > dt { border-color: #8BC38B; background: #D8E8D8; }
+dl.method > dt { border-color: #AA96C2; background: #E8E1F2; }
+dl.attribute > dt { border-color: #9996C2; background: #E7E6F6; }
+dl.attribute > dt tt.descname { font-style: italic; }
+dl.class > dt { border-color: #C8A69A; background: #E8DCD8; }
+dl.exception > dt { border-color: #F8A69A; background: #F2E3E1; }
+
+/*****************************************************
+ * css colorization for index page, using styles
+ * provided by customize.py extension
+ *****************************************************/
+
+table.indextable span.category
+{
+ font-size: 80%;
+ color: #84ADBE;
+}
+
+table.indextable span.category.function { color: #8BC38B; }
+table.indextable span.category.method { color: #AA96C2; }
+table.indextable span.category.attribute { color: #9996C2; }
+table.indextable span.category.class { color: #C8A69A; }
+table.indextable span.category.module { color: #9AB9CE; }
+
+table.indextable span.subject
+{
+ font-weight: bold;
+}
+
+table.indextable td > dl > dt
+{
+ margin-bottom: .5em;
+}
+
+/*****************************************************
+ * EOF
+ *****************************************************/
diff --git a/bps/unstable/bpsdoc/cloud/static/bg_top.jpg b/bps/unstable/bpsdoc/cloud/static/bg_top.jpg
new file mode 100644
index 0000000..c1e5775
--- /dev/null
+++ b/bps/unstable/bpsdoc/cloud/static/bg_top.jpg
Binary files differ
diff --git a/bps/unstable/bpsdoc/cloud/static/header.png b/bps/unstable/bpsdoc/cloud/static/header.png
new file mode 100644
index 0000000..c4f729e
--- /dev/null
+++ b/bps/unstable/bpsdoc/cloud/static/header.png
Binary files differ
diff --git a/bps/unstable/bpsdoc/cloud/static/header.svg b/bps/unstable/bpsdoc/cloud/static/header.svg
new file mode 100644
index 0000000..b78c0a1
--- /dev/null
+++ b/bps/unstable/bpsdoc/cloud/static/header.svg
@@ -0,0 +1,101 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:xlink="http://www.w3.org/1999/xlink"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ width="256"
+ height="16"
+ id="svg4248"
+ version="1.1"
+ inkscape:version="0.47pre4 r22446"
+ inkscape:export-filename="/home/biscuit/dev/libs/bps/trunk/bps/unstable/bpsdoc/ast/static/header.png"
+ inkscape:export-xdpi="90"
+ inkscape:export-ydpi="90"
+ sodipodi:docname="New document 5">
+ <defs
+ id="defs4250">
+ <linearGradient
+ id="linearGradient4770">
+ <stop
+ style="stop-color:#f2f2f2;stop-opacity:1;"
+ offset="0"
+ id="stop4772" />
+ <stop
+ id="stop4780"
+ offset="0.49901354"
+ style="stop-color:#f2f2f2;stop-opacity:0.49803922;" />
+ <stop
+ style="stop-color:#f2f2f2;stop-opacity:0;"
+ offset="1"
+ id="stop4774" />
+ </linearGradient>
+ <inkscape:perspective
+ sodipodi:type="inkscape:persp3d"
+ inkscape:vp_x="0 : 526.18109 : 1"
+ inkscape:vp_y="0 : 1000 : 0"
+ inkscape:vp_z="744.09448 : 526.18109 : 1"
+ inkscape:persp3d-origin="372.04724 : 350.78739 : 1"
+ id="perspective4256" />
+ <linearGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient4770"
+ id="linearGradient4776"
+ x1="256"
+ y1="1052.3622"
+ x2="0"
+ y2="1052.3622"
+ gradientUnits="userSpaceOnUse"
+ gradientTransform="translate(-256,0)" />
+ </defs>
+ <sodipodi:namedview
+ id="base"
+ pagecolor="#a5c2d9"
+ bordercolor="#666666"
+ borderopacity="1.0"
+ inkscape:pageopacity="1"
+ inkscape:pageshadow="2"
+ inkscape:zoom="3.959798"
+ inkscape:cx="105.7364"
+ inkscape:cy="4.5574751"
+ inkscape:document-units="px"
+ inkscape:current-layer="layer1"
+ showgrid="false"
+ inkscape:window-width="1920"
+ inkscape:window-height="1005"
+ inkscape:window-x="0"
+ inkscape:window-y="24"
+ inkscape:window-maximized="1" />
+ <metadata
+ id="metadata4253">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ <dc:title></dc:title>
+ </cc:Work>
+ </rdf:RDF>
+ </metadata>
+ <g
+ inkscape:label="Layer 1"
+ inkscape:groupmode="layer"
+ id="layer1"
+ transform="translate(0,-1036.3622)">
+ <rect
+ style="fill:url(#linearGradient4776);fill-opacity:1;stroke:none"
+ id="rect4768"
+ width="256"
+ height="16"
+ x="-256"
+ y="1036.3622"
+ transform="scale(-1,1)" />
+ </g>
+</svg>
diff --git a/bps/unstable/bpsdoc/cloud/theme.conf b/bps/unstable/bpsdoc/cloud/theme.conf
new file mode 100644
index 0000000..5b6bcfc
--- /dev/null
+++ b/bps/unstable/bpsdoc/cloud/theme.conf
@@ -0,0 +1,35 @@
+[theme]
+inherit = default
+stylesheet = ast.css
+
+[options]
+
+roottarget = <master>
+logotarget = <root>
+
+docwidth = 10.5in
+docheight = 6in
+
+footerbgcolor = #1A4162
+footertextcolor = #B0B0B0
+doctrimcolor = #5682AD
+
+sidebarbgcolor = #F2F2F2
+sidebartextcolor = #777777
+sidebarlinkcolor = #003469
+sidebartrimcolor = #C0C0C0
+
+relbarbgcolor = #5682AD
+relbartextcolor = #ffffff
+relbarlinkcolor = #ffffff
+relbartrimcolor = #777777
+
+bgcolor = #ffffff
+textcolor = #000000
+linkcolor = #003469
+
+headbgcolor = #A5C2D9
+headtextcolor = #000000
+headlinkcolor = #003469
+headtrimcolor = #A0A0A0
+bodytrimcolor = #D0D0D0
diff --git a/bps/unstable/bpsdoc/index_styles.py b/bps/unstable/bpsdoc/index_styles.py
new file mode 100644
index 0000000..af7ec44
--- /dev/null
+++ b/bps/unstable/bpsdoc/index_styles.py
@@ -0,0 +1,69 @@
+"""
+sphinx extension which intercepts & modifies the index page html.
+all entries are wrapped in <span> elements
+with class tags set to "category method" "category class", etc,
+as appropriate for each entry. This allows colorization of the index
+based on object type, making things an easier read.
+
+TODO: could improve style structure to make things more generically useful,
+eg wrapping each entry in an "entry" span, tagged by type.
+"""
+from bps.develop import dbgcon
+import re
+from bps import *
+from jinja2 import Markup as literal, escape
+
+prefix = r"^(?P<name>.*)\("
+suffix = r"\)$"
+_attr_re = re.compile(prefix + r"(?P<left>)(?P<sub>.*)(?P<right> attribute)" + suffix)
+_meth_re = re.compile(prefix + r"(?P<left>)(?P<sub>.*)(?P<right> method)" + suffix)
+_fc_re = re.compile(prefix + r"(?P<left>class in |in module )(?P<sub>.*)(?P<right>)" + suffix)
+_mod_re = re.compile(prefix + r"module" + suffix)
+
+def format_index_name(name):
+ while True:
+ m = _attr_re.match(name)
+ if m:
+ name, left, sub, right = m.group("name","left", "sub", "right")
+ type = "attribute"
+ break
+ m = _meth_re.match(name)
+ if m:
+ name, left, sub, right = m.group("name","left", "sub", "right")
+ type = "method"
+ break
+ m = _fc_re.match(name)
+ if m:
+ name, left, sub, right = m.group("name","left", "sub", "right")
+ if left.startswith("class"):
+ type = "class"
+ else:
+ type = "function"
+ break
+ m = _mod_re.match(name)
+ if m:
+ name = m.group("name")
+ left = "module"
+ sub = right = ''
+ type = "module"
+            break
+        return name
+ if sub:
+ sub = literal('<span class="subject">') + escape(sub) + literal("</span>")
+ cat = left + sub + right
+ return escape(name) + literal('<span class="category ' + type + '">(') + escape(cat) + literal(")</span>")
+
+def mangle_index(app, pagename, templatename, ctx, event_arg):
+ if pagename != "genindex":
+ return
+ fmt = format_index_name
+ for key, entries in ctx['genindexentries']:
+ for idx, entry in enumerate(entries):
+ name, (links, subitems) = entry
+ entries[idx] = fmt(name), (links, subitems)
+ for idx, entry in enumerate(subitems):
+ name, links = entry
+ subitems[idx] = fmt(name), links
+
+def setup(app):
+ app.connect('html-page-context', mangle_index)
diff --git a/bps/unstable/bpsdoc/make_helper.py b/bps/unstable/bpsdoc/make_helper.py
new file mode 100644
index 0000000..48ed2e3
--- /dev/null
+++ b/bps/unstable/bpsdoc/make_helper.py
@@ -0,0 +1,149 @@
+"""helper for quick cross-platform makefile for sphinx
+
+TODO: this was hacked up really quickly, could use a facelift.
+"""
+#===============================================================
+#imports
+#===============================================================
+import os,sys
+from bps import *
+from string import Template
+import subprocess
+def sub(fmt, **kwds):
+ if not kwds:
+ kwds = globals()
+ return Template(fmt).substitute(**kwds)
+__all__ = [
+ "SphinxMaker",
+]
+#===============================================================
+#main class
+#===============================================================
+class SphinxMaker(BaseClass):
+ #===============================================================
+ #class attrs
+ #===============================================================
+ # You can subclass these variables
+ #TODO: cmd line override support
+ SPHINXOPTS = []
+ SPHINXBUILD = "sphinx-build"
+ PAPER = "letter"
+
+ # Paths
+ BUILD = "_build"
+ STATIC = "_static"
+
+ #internal opts
+ PAPEROPT_a4 = ["-D","latex_paper_size=a4"]
+ PAPEROPT_letter = ["-D","latex_paper_size=letter"]
+ #===============================================================
+ #instance attrs
+ #===============================================================
+ root_dir = None
+ conf_file = None
+ conf = None
+
+ #===============================================================
+ #frontend
+ #===============================================================
+ def __init__(self, root=None):
+ if root is None:
+ root = sys.modules[self.__class__.__module__]
+ self.root_dir = filepath(root).abspath.dir
+ self.conf_file = self.root_dir / "conf.py"
+ if self.conf_file.ismissing:
+ raise RuntimeError, "conf file not found in root: %r" % (self.root_dir)
+ #XXX: load conf file?
+
+ self.BUILD = filepath(self.BUILD)
+ self.STATIC = filepath(self.STATIC)
+
+ @classmethod
+ def execute(cls, args=None, **kwds):
+ return cls(**kwds).run(args)
+
+ def run(self, args=None):
+ if args is None:
+ args = sys.argv[1:]
+ self.root_dir.chdir() #due to relative paths like self.BUILD
+ for arg in args:
+ getattr(self,"target_"+arg)()
+
+ #===============================================================
+ #targets
+ #===============================================================
+ def target_help(self):
+ print "Please use \`make <target>' where <target> is one of"
+ print " clean remove all compiled files"
+ print " html to make standalone HTML files"
+ print " http to serve standalone HTML files on port 8000"
+# print " pickle to make pickle files"
+# print " json to make JSON files"
+ print " htmlhelp to make HTML files and a HTML help project"
+# print " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+# print " changes to make an overview over all changed/added/deprecated items"
+# print " linkcheck to check all external links for integrity"
+
+ def target_clean(self):
+ BUILD = self.BUILD
+ if BUILD.exists:
+ BUILD.clear()
+
+ def target_html(self):
+ #just in case htmldev was run
+ (self.BUILD / "html" / "_static" / "default.css").discard()
+ self.build("html")
+
+ def target_htmlhelp(self):
+ self.build("htmlhelp")
+
+ def target_http(self):
+ self.target_html()
+ path = self.BUILD.canonpath / "html"
+ path.chdir()
+ port = 8000
+ print "Serving files from %r on port %r" % (path, port)
+ import SimpleHTTPServer as s
+ s.BaseHTTPServer.HTTPServer(('',port), s.SimpleHTTPRequestHandler).serve_forever()
+
+ ##def target_latex(self):
+ ## build("latex")
+ ## print "Run \`make all-pdf' or \`make all-ps' in that directory to" \
+ ## "run these through (pdf)latex."
+ ##
+ ##def target_pdf():
+ ## assert os.name == "posix", "pdf build support not automated for your os"
+ ## build("latex")
+ ## target = BUILD / "latex"
+ ## target.chdir()
+ ## subprocess.call(['make', 'all-pdf'])
+ ## print "pdf built"
+
+ #===============================================================
+ #helpers
+ #===============================================================
+ def build(self, name):
+ BUILD = self.BUILD
+ ALLSPHINXOPTS = self.get_sphinx_opts()
+ dt = BUILD / "doctrees"; dt.ensuredirs()
+ target = BUILD/ name; target.ensuredirs()
+ rc = subprocess.call([self.SPHINXBUILD, "-b", name] + ALLSPHINXOPTS + [ target ])
+ if rc:
+ print "Sphinx-Build returned error, exiting."
+ sys.exit(rc)
+ print "Build finished. The %s pages are in %r." % (name, target,)
+ return target
+
+ def get_paper_opts(self):
+ return getattr(self,"PAPER_" + self.PAPER, [])
+
+ def get_sphinx_opts(self):
+ return ["-d", self.BUILD / "doctrees"] + self.get_paper_opts() + self.SPHINXOPTS + [ "." ]
+
+ #===============================================================
+ #eoc
+ #===============================================================
+
+#===============================================================
+#eof
+#===============================================================
diff --git a/bps/unstable/bpsdoc/nested_sections.py b/bps/unstable/bpsdoc/nested_sections.py
new file mode 100644
index 0000000..516992e
--- /dev/null
+++ b/bps/unstable/bpsdoc/nested_sections.py
@@ -0,0 +1,95 @@
+"""
+This extension should be used in conjunction with autodoc.
+It permits docstrings to have embedded rst section headers,
+by translating them into indented paragraphs with
+italicized section headers.
+
+TODO: make this more flexible and less hackneyed
+"""
+from bps.develop import dbgcon
+import re
+from bps import *
+
+def indent_sections(lines, reference_prefix=''):
+ "replaces any section headers with indented paragraphs"
+ end = len(lines)-1
+ out = []
+
+ sections = []
+ indent_char = ' ' * 4
+ indent_level = 0
+ SCHARS = '#*=-^"'
+ def get_level(c):
+ return SCHARS.index(c)
+ #FIXME: this doesn't detect double-barred sections
+ def detect_section(idx):
+ if idx == end:
+ return None
+ line = lines[idx].rstrip()
+ if not line or line.lstrip() != line:
+ return None
+ next = lines[idx+1].rstrip()
+ if next.lstrip() != next:
+ return None
+ for c in SCHARS:
+ if next.startswith(c * len(line)):
+ return c
+ return None
+ idx = 0
+ while idx <= end:
+ line = lines[idx].rstrip()
+ if not line:
+ out.append("")
+ idx += 1
+ continue
+ new_char = detect_section(idx)
+ if new_char:
+ new_level = get_level(new_char)
+ while sections and sections[-1] > new_level:
+ sections.pop()
+ if not sections or sections[-1] < new_level:
+ sections.append(new_level)
+ name = line.lower().strip().replace(" ", "-").replace("--", "-")
+ indent = indent_char * (indent_level-1)
+ #TODO: would be nice to add a special directive instead of **%s**,
+ # so that we could render appropriate html styling to the section header
+ out.extend([
+ indent + ".. _%s:" % (reference_prefix + name),
+ "",
+ indent + "**%s**\n" % line.rstrip(),
+ ])
+ idx += 2 #skip section header
+ indent_level = max(0, len(sections))
+ continue
+ indent = indent_char * indent_level
+ out.append(indent + line)
+ idx += 1
+ return out
+
+def _remove_oneline(name, lines):
+    #remove one-line description from top of module, if present,
+    #cause we don't want it being duplicated (should already be listed in module's header)
+    #matches eg "mymod -- short description." (the "name --" prefix is optional)
+    _title_re = re.compile(r"""
+        ^ \s*
+        ( {0} \s* -- \s* )?
+        [a-z0-9 _."']*
+        $
+        """.format(re.escape(name)), re.X|re.I)
+    #only strip when the title line is followed by a blank separator line;
+    #mutates 'lines' in place (autodoc convention)
+    if len(lines) > 1 and _title_re.match(lines[0]) and lines[1].strip() == '':
+        del lines[:2]
+
+def mangle_docstrings(app, what, name, obj, options, lines):
+ if what == 'module':
+ _remove_oneline(name, lines)
+ elif what in ('class', 'exception', 'function', 'method'):
+ name = "%s.%s" % (obj.__module__, obj.__name__)
+ name = name.replace(".", "-").lower()
+ lines[:] = indent_sections(lines, reference_prefix=name + "-")
+ elif what in ('attribute',):
+ pass
+ else:
+ print "unknown what: %r %r" % (what, obj)
+ dbgcon()
+
+def setup(app):
+ app.connect('autodoc-process-docstring', mangle_docstrings)
diff --git a/bps/unstable/bpsdoc/relbar_toc.py b/bps/unstable/bpsdoc/relbar_toc.py
new file mode 100644
index 0000000..ace8730
--- /dev/null
+++ b/bps/unstable/bpsdoc/relbar_toc.py
@@ -0,0 +1,32 @@
+"""
+automatically insert a "toc" entry into relbar for all pages
+(ala old python documentation style)
+"""
+import re
+from bps import *
+
+def insert_toc(app, pagename, templatename, ctx, event_arg):
+    """html-page-context handler: ensure every page's relbar links
+    contain a "Table Of Contents" entry (ala old python documentation style)"""
+    links = ctx['rellinks']
+
+    #remove any existing toc (present on some pages)
+    for idx, elem in enumerate(links):
+        if elem[3] == "toc":
+            del links[idx]
+            break
+
+    #place toc right after "next" / "previous"
+    #NOTE(review): this inserts *before* the matched list entry; presumably
+    #rellinks are rendered in reverse order so it displays after — confirm
+    #against the theme's relbar template.
+    idx = -1
+    for idx, entry in enumerate(links):
+        if entry[3] in ("next","previous"):
+            break
+    else:
+        #no next/previous found: idx+1 points past the last entry
+        #(or to position 0 when links is empty, since idx stayed -1)
+        idx += 1
+
+    #insert our toc entry
+    path = filepath(ctx['pathto']("contents")).root
+    if path == '':
+        path = pagename
+    links.insert(idx, (path, "Table Of Contents", "C", "toc"))
+
+def setup(app):
+ app.connect('html-page-context', insert_toc)
diff --git a/bps/unstable/softref.py b/bps/unstable/softref.py
new file mode 100644
index 0000000..030d013
--- /dev/null
+++ b/bps/unstable/softref.py
@@ -0,0 +1,847 @@
+"""
+soft reference implementation
+
+this module implements something similar to Java's soft references.
+these are normal python references, but use reference count introspection
+to remove the reference only no other references to the final object
+are left, AND the object has not been used in a long enough time
+to warrant removing the object.
+
+this is mainly useful when maintaining a cache of objects
+which can be regenerated, but only through a (cpu) costly process.
+it's generally useful to keep such objects around in a cache,
+in case they are needed again, but for memory reasons,
+long running processes will generally want to free unused
+objects up after a time.
+
+this class provides ``softref()`` which provides access-time tracking,
+allowing the reference to be freed up after it's remained unused
+for an (application specified) amount of time.
+
+it also contains platform-specific code for detecting low memory conditions,
+and freeing up softrefs more aggressively in this case,
+in the hopes of staving off out-of-memory conditions.
+"""
+#=================================================================================
+#imports
+#=================================================================================
+from __future__ import with_statement
+#core
+import logging
+import sys
+import threading
+from itertools import count as itercount
+from time import time as cur_time
+import logging; log = logging.getLogger(__name__)
+import UserDict
+from weakref import ref as make_weakref
+from warnings import warn
+#site
+#pkg
+#local
+log = logging.getLogger(__name__)
+__all__ = [
+ #main entry point
+ "softref",
+
+ #collector control & configuration
+ "collect", "enable", "disable", "is_enabled",
+ "get_config", "set_config",
+
+ #introspection
+ "get_softref_count",
+ "get_softrefs",
+
+ #other helpers
+ 'SoftValueDict',
+## 'KeyedSoftRef',
+# 'get_memory_usage',
+]
+
+#=================================================================================
+#platform specific code for detecting memory levels
+#=================================================================================
+
+#NOTE: this is mainly used a helper for the collector to detect low memory conditions.
+
+if sys.platform == "linux2":
+ #use /proc/meminfo
+ import re
+
+ _memtotal_re = re.compile("^MemTotal:\s+(\d+)\s*kb$", re.I|re.M)
+ _memfree_re = re.compile("^MemFree:\s+(\d+)\s*kb$", re.I|re.M)
+ _buffers_re = re.compile("^Buffers:\s+(\d+)\s*kb$", re.I|re.M)
+ _cached_re = re.compile("^Cached:\s+(\d+)\s*kb$", re.I|re.M)
+
+ def get_memory_usage():
+ try:
+ with file("/proc/meminfo") as fh:
+ data = fh.read()
+ memtotal = int(_memtotal_re.search(data).group(1))
+ memfree = int(_memfree_re.search(data).group(1))
+ buffers = int(_buffers_re.search(data).group(1))
+ cached = int(_cached_re.search(data).group(1))
+ avail = memfree + buffers + cached
+ assert 0 <= avail <= memtotal
+ return memtotal, avail
+ except:
+ #this is a sign something has gone wrong :|
+ log.error("error reading /proc/meminfo", exc_info=True)
+ return (-1, -1)
+
+elif sys.platform == "win32":
+ #implementation taken from http://code.activestate.com/recipes/511491/
+ #TODO: check if this will work with cygwin platform
+ import ctypes
+ kernel32 = ctypes.windll.kernel32
+ c_ulong = ctypes.c_ulong
+ class MEMORYSTATUS(ctypes.Structure):
+ _fields_ = [
+ ('dwLength', c_ulong),
+ ('dwMemoryLoad', c_ulong),
+ ('dwTotalPhys', c_ulong),
+ ('dwAvailPhys', c_ulong),
+ ('dwTotalPageFile', c_ulong),
+ ('dwAvailPageFile', c_ulong),
+ ('dwTotalVirtual', c_ulong),
+ ('dwAvailVirtual', c_ulong)
+ ]
+ def get_memory_usage():
+ memoryStatus = MEMORYSTATUS()
+ memoryStatus.dwLength = ctypes.sizeof(MEMORYSTATUS)
+ kernel32.GlobalMemoryStatus(ctypes.byref(memoryStatus))
+ #XXX: does availphy correspond properly to linux's free-buffers-cache ?
+ return (memoryStatus.dwTotalPhys//1024, memoryStatus.dwAvailPhys//1024)
+else:
+ #TODO: would like to support more platforms (esp OS X)
+ warn("disabled low memory detection, not implemented for " + sys.platform + " platform")
+ def get_memory_usage():
+ return (-1,-1)
+
+get_memory_usage.__doc__ = """return memory usage
+
+Return current memory usage as tuple
+``(total physical memory, available physical memory)``.
+
+All measurments in kilobytes.
+
+Available physical memory counts
+os buffers & cache as "available",
+where possible for the implementation.
+
+Currently supports linux & win32 platforms,
+if run on other platforms, will return
+tuple where all values are -1.
+
+If an error occurs on a supported platform,
+it will be logged, and a tuple of -1 values will be returned.
+"""
+
+#=================================================================================
+#soft ref collector - implemented as singleton of private class
+#=================================================================================
+
+#log used by collector
+clog = logging.getLogger(__name__ + ".collector")
+
+#NOTE: design of this module attempts to put as much work in the collector,
+# and as little work in the creation & access of the softrefs,
+# since the collector is designed to run in another thread anyways.
+
+class _SoftRefCollector(object):
+ """tracks softref objects, handles collecting them when needed"""
+ #=================================================================================
+ #instance attrs
+ #=================================================================================
+
+ #--------------------------------------------------------------------
+ #configuration
+ #--------------------------------------------------------------------
+ default_min_age = 600 #default delay from last access time before a softref can be released
+ default_max_age = -1 #default delay from last access time before softref will be released even w/o lowmem condition
+
+ collect_frequency = 300 #how often collector should run
+
+ lowmem_abs = 25 * 1024 #lowmem level in kilobytes
+ lowmem_pct = .05 #lowmem level as % of total mem
+ #low memory is calculated as max(lowmem_abs, physmem * lowmem_pct)
+ #this way there's a floor to the lowmem level for low memory systems (eg <512mb),
+ #but it scales so lowmem doesn't get hit as often for systems with more memory (eg 4gb)
+ #values may need tuning
+
+ #--------------------------------------------------------------------
+ #softref state
+ #--------------------------------------------------------------------
+ targetmap = None #map of id(target) -> [ weakref(softref(target)) ... ]
+ #since id will only be re-used once target is dereferenced,
+ #and that won't happen as long as softrefs exist,
+ #ids should never conflict
+ last_collect = 0 #timestamp of last time collect() ran
+
+ #--------------------------------------------------------------------
+ #threading
+ #--------------------------------------------------------------------
+ lock = None #threading lock for instance state
+ thread = None #thread collector uses to run in the background
+ thread_stop = None #Event used to signal collector thread that it should halt
+
+ #=================================================================================
+ #init
+ #=================================================================================
+ def __init__(self):
+ self.targetmap = {}
+ self.lock = threading.Lock() #lock for changes
+ #XXX: if softref's onrelease tried to create a new softref,
+ # it'll block on this lock.. in that case, this should be made to an RLock.
+ # if this happens, could either do it permanently, or make an use_rlock() method
+ self.thread_stop = threading.Event()
+
+ #=================================================================================
+ #softref interface
+ #=================================================================================
+ def add(self, sref):
+ "add a new softref instance"
+ #NOTE: instances *must* be compatible with softref type,
+ # namely its' _target _atime min_age attrs
+
+ #TODO: some target types (eg: int, str, bool, None) should never have softref expire,
+ # and we shouldn't even bother tracking them. should find nice way to have fallback in that case.
+ # bucket system would be good, could just never add them to initial bucket.
+
+ target_id = id(sref._target)
+ sref_wr = make_weakref(sref)
+ targetmap = self.targetmap
+ with self.lock:
+ srlist = targetmap.get(target_id)
+ if srlist is None:
+ targetmap[target_id] = [ sref_wr ]
+ else:
+ srlist.append(sref_wr)
+
+ #=================================================================================
+ #introspection
+ #=================================================================================
+ @property
+ def next_collect(self):
+ "time next collection is scheduled"
+ return self.last_collect + self.collect_frequency
+
+ def count(self, target):
+ with self.lock:
+ srlist = self.targetmap.get(id(target))
+ count = 0
+ if srlist:
+ for sref_wr in srlist:
+ if sref_wr():
+ count += 1
+ return count
+
+ def refs(self, target):
+ with self.lock:
+ srlist = self.targetmap.get(id(target))
+ out = []
+ if srlist:
+ for sref_wr in srlist:
+ sref = sref_wr()
+ if sref:
+ out.append(sref)
+ return out
+
+ #=================================================================================
+ #collector
+ #=================================================================================
+ def collect(self):
+ #TODO: rework this scan into using generational buckets (ala gc module),
+ # keyed off of target id, that way long-lived ones don't have to be scanned as often.
+ with self.lock:
+ targetmap = self.targetmap
+ lowmem = self._check_lowmem()
+ clog.info("collecting soft refs... targets=%d lowmem=%r", len(targetmap), lowmem)
+ purge_keys = set()
+ cur = cur_time()
+ #call collect_entry for all of targetmap
+ helper = self._collect_entry
+ for target_id, srlist in targetmap.iteritems():
+ if helper(target_id, srlist, cur, lowmem):
+ purge_keys.add(target_id)
+ #purge any keys we identified previously, just to free up even more memory
+ for target_id in purge_keys:
+ del targetmap[target_id]
+ self.last_collect = cur
+ count = len(purge_keys)
+ clog.info("released %d targets", count)
+ return count
+
+    def _check_lowmem(self):
+        "check if we're running in low memory condition"
+        #TOOD:
+        # - sliding scale, doing more as lowmem approaches? ie, freeing only some eligible softrefs
+        # - have collector prefer softrefs w/ older atimes in that case
+        # - weighting for different instances / types, allowing "heavy" classes to be preferred to be freed?
+        # - generational buckets
+        # - other schemes?
+        total, free = get_memory_usage()
+        if total == -1: #not available or error occurred
+            clog.debug("disabling lowmem check")
+            #just to stop spamming, let's not call get mem again
+            #NOTE: shadows this bound method with an instance attr, so all
+            #future calls on this instance return False without ever calling
+            #get_memory_usage() again
+            self._check_lowmem = lambda : False
+            return False
+        #threshold has an absolute floor (lowmem_abs) but scales with
+        #total memory via lowmem_pct — see class attr comments above
+        threshold = int(max(self.lowmem_abs, total * self.lowmem_pct))
+        clog.debug("system memory: total=%r lowmem_threshold=%r free=%r", total, threshold, free)
+        return free <= threshold
+
+ def _collect_entry(self, target_id, srlist, cur, lowmem):
+ "run collect algorithm for specified entry, return True if it needs removing"
+
+ #NOTE: could consolidate min_age, max_age (and maybe atime)
+ #into a single record stored w/in collector,
+ #instead of storing separately in each softref.
+
+ #scan existing softrefs, working out latest atime & softrefcount
+ atime = 0
+ min_age = self.default_min_age
+ max_age = self.default_max_age
+ srefs = [] #accumulate hard refs to sref objects, so weakrefs don't vanish while in this loop
+ for sref_wr in srlist:
+ sref = sref_wr()
+ if sref is None:
+ #TODO: could purge sref_wr here for extra mem,
+ #but probably efficient enough for most cases
+ #to just purge whole targetmap entry once target_wr is gone
+ continue
+ srefs.append(sref)
+ if sref._atime > atime:
+ atime = sref._atime
+ if sref.min_age > min_age:
+ min_age = sref.min_age
+ if sref.max_age is not None and (max_age == -1 or sref.max_age < max_age):
+ max_age = sref.max_age
+
+ #check if any softrefs is still around
+ if not srefs:
+ #all references to softref objects dropped before they were purged
+ clog.debug("softrefs vanished: %r", target_id)
+ return True
+
+ #decide if this one should be released yet
+ age = cur-atime
+ if age <= min_age:
+ return False
+ if not lowmem and (max_age == -1 or age < max_age):
+ return False
+
+ #sanity check on target
+ assert all(id(sref._target) == target_id for sref in srefs), "targetmap corrupted: target=%r srefs=%r" % (target_id, srefs)
+ target = srefs[0]._target
+
+ #now check how many hardrefs are out there,
+ #after ignoring the following hard refs:
+ # +1 reference in this frame's 'target' var
+ # +1 reference in getrefcount() call
+ # +N references held by '_target' attr of srefs
+ # any more, and it's external.
+ # any less, and it's runtimeerror, cause one of the above was missing.
+ offset = 2+len(srefs)
+ rc = sys.getrefcount(target)
+ if rc < offset:
+ raise RuntimeError, "too few references to target: %r rc=%r offset=%r" % (target, rc, offset)
+ if rc > offset:
+ #(rc-offset) hardrefs still out there, so don't purge softref
+ return False
+
+ #ok, time to release softref
+ clog.info("releasing softrefs: %r", target)
+ for sref in reversed(srefs): #NOTE: reversed so handlers called LIFO, same as weakref module
+ sref._target = None #just so existing softrefs return None
+ h = sref._onrelease
+ if h is not None:
+ try:
+ h(sref)
+ except:
+ clog.error("error in softref onrelease callback: %r %r", target, onrelease)
+ sys.excepthook(*sys.exc_info())
+
+ #schedule whole entry for removal
+ return True
+
+ #=================================================================================
+ #collector thread
+ #=================================================================================
+    def is_enabled(self):
+        "return true if collector thread is running, else false"
+        if not self.thread or not self.thread.isAlive():
+            return False
+        if self.thread_stop.isSet():
+            #thread still alive but has been asked to stop: report the
+            #in-between state rather than a plain True/False
+            return None #special value indicating thread is still running but will stop soon. call disable() to ensure it's stopped.
+        return True
+
+ def enable(self):
+ "start collector thread if not running"
+ if self.thread and self.thread.isAlive():
+ if not self.thread_stop.isSet():
+ return True
+ #wait til thread has exited
+ self.thread.join()
+ self.thread_stop.clear()
+ thread = threading.Thread(target=self._collector_loop, name="[softref collector]")
+ thread.setDaemon(True)
+ clog.debug("softref collector thread launched")
+ thread.run()
+
+ def _collector_loop(self):
+ "main loop used by collector thread"
+ clog.info("softref collector thread started")
+ #XXX: should we check for errors and have a cooldown period before trying again?
+ while True:
+ #wait for stop event OR time for next collection
+ delay = max(.05, self.next_collect - cur_time())
+ self.thread_stop.wait(delay)
+
+ #check if we've been signalled to stop
+ if self.thread_stop.isSet():
+ clog.info("softref collector thread stopped")
+ return
+
+ #run collector
+ clog.info("collecting softrefs")
+ self.collect()
+
+ def disable(self):
+ "stop collector thread if running"
+ #NOTE: this shouldn't be called if self.lock is held,
+ #otherwise might deadlock if we join while
+ #other thread is trying to acquire lock.
+
+ #signal thread should stop
+ self.thread_stop.set()
+
+ #then join til it does
+ if self.thread and self.thread.isAlive():
+ self.thread.join()
+ self.thread = None
+
+ #=================================================================================
+ #eoc
+ #=================================================================================
+
+#=================================================================================
+#single collector and public interface to it
+#=================================================================================
+
+_collector = _SoftRefCollector()
+
+#----------------------------------------------------------------------
+#configuration collector
+#----------------------------------------------------------------------
+def set_config(default_max_age=None, default_min_age=None, collect_frequency=None,
+ lowmem_pct=None, lowmem_abs=None):
+ """update various collector config options.
+
+ :kwd default_min_age:
+ change minimum age (seconds since last access)
+ before a softref is eligible to be released
+ by the collector.
+
+ softrefs can specify this per-instance.
+
+ defaults to 10 minutes.
+
+ :kwd default_max_age:
+ change maximum age (seconds since last access)
+ before a softref will be purged by collector
+ even if there isn't a low memory condition.
+
+ softrefs can specify this per-instance.
+
+ defaults to -1 minutes,
+ which indicates there is no max age.
+
+ :kwd collect_frequency:
+ how often collector thread calls collect(),
+ measured in seconds.
+
+ defaults to every 5 minutes.
+
+ :kwd lowmem_pct:
+ if free memory drops below this amount (as percent of total memory),
+ the collector considers the system to be low on memory,
+ and becomes agressive in purging softrefs.
+
+ the actual low memory threshold is ``max(lowmem_abs, phymem * lowmem_pct)``,
+ providing a floor to the lowmem threshold so it will function acceptably
+ on systems with a small amount of physical memory.
+
+ defaults to .05 percent (~100 Mb on a 2Gb system).
+
+ :kwd lowmem_abs:
+ minimum free memory threshold (in kilobytes).
+ see lowmem_pct for more details.
+
+ defaults to 25 Mb (~.05 percent on a 512 Mb system).
+
+ .. note::
+ default values subject to change as internal algorithm
+ is being refined (currently doesn't account for system memory usage, etc)
+
+ most changes will take affect the next time collect() runs,
+ with the exception of collect_frequency, which will take effect
+ next time the collector thread wakes up.
+ """
+ global _collector
+ if default_max_age is not None:
+ if default_max_age <= 0 and default_max_age != -1:
+ raise ValueError, "default_max_age must be -1 or positive value"
+ _collector.default_max_age = default_max_age
+ if default_min_age is not None:
+ if default_min_age < 0:
+ raise ValueError, "default_min_age must be >= 0"
+ _collector.default_min_age = default_min_age
+ if collect_frequency is not None:
+ if collector_frequency <= 0:
+ raise ValueError, "collector frequency must be > 0"
+ _collector.collect_frequency = collect_frequency
+ if lowmem_pct is not None:
+ if lowmem_pct < 0 or lowmem_pct >= 1:
+ raise ValueError, "lowmem_pct must be between [0.0,1.0)"
+ _collector.lowmem_pct = lowmem_pct
+ if lowmem_abs is not None:
+ if lowmem_abs < 0:
+ raise ValueError, "lowmem_abs must be >= 0"
+ _collector.lowmem_abs = lowmem_abs
+
+def get_config():
+ "return dict of current collector config options, corresponding to :func:`set_config` kwds"
+ global _collector
+ return dict(
+ (k,getattr(_collector,k))
+ for k in ["default_min_age", "default_max_age", "collect_frequency",
+ "lowmem_pct", "lowmem_abs"]
+ )
+
+#----------------------------------------------------------------------
+#running collector
+#----------------------------------------------------------------------
+def is_enabled():
+ "check if softref collector thread is running"
+ return _collector.is_enabled()
+
+def enable():
+ "ensure softref collector thread is running"
+ return _collector.enable()
+
+def disable():
+ "ensure softref collector thread is not running (will block until thread terminates)"
+ return _collector.disable()
+
+def collect():
+ "force a run of the softref collector immediately"
+ return _collector.collect()
+
+#----------------------------------------------------------------------
+#introspection of softref information
+#----------------------------------------------------------------------
+def get_softref_count(target):
+ "return number of soft refs attached to target"
+ return _collector.count(target)
+
+def get_softrefs(target):
+ "return all softref instances attached to target"
+ return _collector.refs(target)
+
+def get_hardref_count(target):
+    "return number of hard refs attached to target (include 1 ref for this function call)"
+    #NOTE(review): each softref holds one hard ref via its _target attr, so
+    #subtracting the softref count removes those; the extra -1 presumably
+    #discards the temporary ref created by the getrefcount() call itself —
+    #confirm the accounting matches _collect_entry's offset logic.
+    rc = sys.getrefcount(target)
+    sc = get_softref_count(target)
+    return rc-sc-1
+
+#=================================================================================
+#softref constructor
+#=================================================================================
+class softref(object):
+ """create a softref to another object
+
+ :arg target: object this should hold softref to
+ :arg onrelease:
+ optional callback to invoke ``onrelease(sref)``
+ if softref to target is released before this object
+ is dereferenced.
+ :arg min_age:
+ override default min_age for this target
+ :arg max_age:
+ override default max_age for this target
+
+ :returns:
+ a new softref instance.
+ calling it will return original target, or ``None``,
+ same as a weakref.
+ """
+
+ #TODO: provide hook which can prevent softref from being freed
+ #(eg if object shouldn't be freed if it's in a certain state, etc)
+
+ #=================================================================================
+ #instance attrs
+ #=================================================================================
+ __slots__ = ["__weakref__", "_target", "_atime", "_onrelease", "min_age", "max_age"]
+
+ #store quick links to collector
+ _collector_lock = _collector.lock
+ _collector_add = _collector.add
+
+ #=================================================================================
+ #instance methods
+ #=================================================================================
+ #TODO: could override __new__ for cls=softref and only 'target' param,
+ # let it cache things for us.
+
+ def __init__(self, target, onrelease=None, min_age=None, max_age=None):
+ self._target = target
+ self._onrelease = onrelease
+ self.min_age = min_age
+ self.max_age = max_age
+ self._atime = cur_time()
+ self._collector_add(self) #register new softref with collector
+
+ ##@property
+ ##def atime(self): return self._atime
+
+ ##def touch(self):
+ ## self._atime = cur_time()
+
+ def __call__(self):
+ self._atime = cur_time() #NOTE: doing this outside lock cause it can't hurt, and might catch collector in it's tracks
+
+ #NOTE: have to lock collector while we're creating new hardref,
+ #or collector might see N hard refs, this thread creates hard ref N+1,
+ #and then collector purges softref, causing this thread
+ #to have a hard ref after softref was purged (which is against
+ #how this module wants softrefs to behave)
+ with self._collector_lock:
+ return self._target
+
+ def __repr__(self):
+ target = self._target
+ if target is None:
+ return "<softref at 0x%x; dead>"% (id(self),)
+ else:
+ return "<softref at 0x%x; to '%s' at 0x%x>" % (id(self), type(target).__name__, id(target))
+
+ def __eq__(self, other):
+ return self._target == other
+
+ def __ne__(self, other):
+ return self._target != other
+
+ #=================================================================================
+ #eoc
+ #=================================================================================
+
+#=================================================================================
+#soft value dict
+#=================================================================================
+#NOTE: this is cloned from py26 weakref.WeakValueDict, and adapted for softrefs...
+
class SoftValueDictionary(UserDict.UserDict):
    """Mapping class that references values using softref.

    Entries in the dictionary will be discarded when no strong
    reference to the value exists anymore
    """
    # We inherit the constructor without worrying about the input
    # dictionary; since it uses our .update() method, we get the right
    # checks (if the other dictionary is a WeakValueDictionary,
    # objects are unwrapped on the way out, and we always wrap on the
    # way in).

    #class-level defaults for the aging policy handed to each KeyedSoftRef
    min_age = None
    max_age = None

    def __init__(self, source=None, min_age=None, max_age=None):
        """:arg source: optional initial mapping / iterable of pairs
        :param min_age: per-instance override for the softref min_age policy
        :param max_age: per-instance override for the softref max_age policy
        """
        if min_age:
            self.min_age = min_age
        if max_age:
            self.max_age = max_age
        #shared release callback: holds only a weakref to self,
        #so this closure doesn't keep the dictionary itself alive
        def remove(sr, selfref=make_weakref(self)):
            self = selfref()
            if self is not None:
                del self.data[sr.key]
        self._remove = remove
        if source is None:
            args = ()
        else:
            args = (source,)
        UserDict.UserDict.__init__(self, *args)

    ##def touch(self, key):
    ##    "helper to update softref atime for value attached to key"
    ##    self.data[key].touch()

    def __getitem__(self, key):
        o = self.data[key]()
        if o is None:
            #ref was released; report key as missing (py3-compatible raise form)
            raise KeyError(key)
        else:
            return o

    def __contains__(self, key):
        try:
            o = self.data[key]()
        except KeyError:
            return False
        return o is not None

    def has_key(self, key):
        "py2 compatibility alias for ``key in self``"
        #delegate instead of duplicating __contains__'s logic
        return self.__contains__(key)

    def __repr__(self):
        return "<SoftValueDictionary at %s>" % id(self)

    def __setitem__(self, key, value):
        #wrap value in a keyed ref so the shared remove() callback can find it
        self.data[key] = KeyedSoftRef(key, value, self._remove, self.min_age, self.max_age)

    def copy(self):
        "return a shallow copy containing only the still-live entries"
        new = SoftValueDictionary()
        new.min_age = self.min_age
        new.max_age = self.max_age
        for key, sr in self.data.items():
            o = sr()
            if o is not None:
                new[key] = o
        return new

    def get(self, key, default=None):
        try:
            sr = self.data[key]
        except KeyError:
            return default
        else:
            o = sr()
            if o is None:
                #entry existed but its target has been released
                return default
            else:
                return o

    def items(self):
        "return list of (key, value) pairs for live entries only"
        L = []
        for key, sr in self.data.items():
            o = sr()
            if o is not None:
                L.append((key, o))
        return L

    def iteritems(self):
        #FIX: original iterated with 'sr' but dereferenced undefined name 'ws'
        # (stale copy from weakref.WeakValueDictionary) -> NameError on use
        for sr in self.data.itervalues():
            value = sr()
            if value is not None:
                yield sr.key, value

    def iterkeys(self):
        return self.data.iterkeys()

    def __iter__(self):
        return self.data.iterkeys()

    def itervaluerefs(self):
        """Return an iterator that yields the weak references to the values.

        The references are not guaranteed to be 'live' at the time
        they are used, so the result of calling the references needs
        to be checked before being used. This can be used to avoid
        creating references that will cause the garbage collector to
        keep the values around longer than needed.

        """
        return self.data.itervalues()

    def itervalues(self):
        for wr in self.data.itervalues():
            obj = wr()
            if obj is not None:
                yield obj

    def popitem(self):
        #loop until we pop an entry whose target is still alive;
        #raises KeyError (from data.popitem) once the dict is empty
        while 1:
            key, wr = self.data.popitem()
            o = wr()
            if o is not None:
                return key, o

    def pop(self, key, *args):
        try:
            o = self.data.pop(key)()
        except KeyError:
            if args:
                return args[0]
            raise
        if o is None:
            #entry existed but target was released (py3-compatible raise form)
            raise KeyError(key)
        else:
            return o

    def setdefault(self, key, default=None):
        try:
            wr = self.data[key]
        except KeyError:
            self.data[key] = KeyedSoftRef(key, default, self._remove, self.min_age, self.max_age)
            return default
        else:
            #NOTE(review): like old weakref.WeakValueDictionary, this may return
            # None if the existing ref was just released -- confirm callers cope
            return wr()

    def update(self, dict=None, **kwargs):
        d = self.data
        if dict is not None:
            if not hasattr(dict, "items"):
                dict = type({})(dict)
            for key, o in dict.items():
                d[key] = KeyedSoftRef(key, o, self._remove, self.min_age, self.max_age)
        if len(kwargs):
            self.update(kwargs)

    def valuerefs(self):
        """Return a list of weak references to the values.

        The references are not guaranteed to be 'live' at the time
        they are used, so the result of calling the references needs
        to be checked before being used. This can be used to avoid
        creating references that will cause the garbage collector to
        keep the values around longer than needed.

        """
        return self.data.values()

    def values(self):
        "return list of values for live entries only"
        L = []
        for wr in self.data.values():
            o = wr()
            if o is not None:
                L.append(o)
        return L
+
class KeyedSoftRef(softref):
    """Specialized reference that includes a key corresponding to the value.

    This is used in the SoftValueDictionary to avoid having to create
    a function object for each key stored in the mapping. A shared
    callback object can use the 'key' attribute of a KeyedSoftRef instead
    of getting a reference to the key from an enclosing scope.

    """

    #extra slot holding the mapping key this reference belongs to
    __slots__ = "key",

    def __new__(cls, key, target, onrelease=None, min_age=None, max_age=None):
        #NOTE(review): extra args are forwarded to softref.__new__; if softref
        # doesn't define __new__, object.__new__ will ignore them (since
        # __init__ is overridden) -- confirm against softref's definition
        self = softref.__new__(cls, target, onrelease, min_age, max_age)
        self.key = key
        return self

    def __init__(self, key, target, onrelease=None, min_age=None, max_age=None):
        #strip off 'key' (already stored by __new__) and delegate the rest
        super(KeyedSoftRef, self).__init__(target, onrelease, min_age, max_age)
+
+#=================================================================================
+#eof
+#=================================================================================
diff --git a/bps/unstable/winconsole.py b/bps/unstable/winconsole.py
new file mode 100644
index 0000000..73a2193
--- /dev/null
+++ b/bps/unstable/winconsole.py
@@ -0,0 +1,406 @@
+"""collection of ctypes-based helpers for accessing the windows console.
+
+References
+==========
+* Windows API Reference -- http://msdn.microsoft.com/library/default.asp?url=/library/en-us/winprog/winprog/windows_api_reference.asp
+* recipe to set color attr -- http://code.activestate.com/recipes/496901/
+* recipe to get color attr -- http://code.activestate.com/recipes/440694/
+"""
+#=========================================================
+#imports
+#=========================================================
+#core
+from logging import getLogger; log = getLogger(__name__)
+import sys
+import os
+import re
+if os.name == "nt":
+ #do thing conditionally so full docs can still be built under posix
+ import msvcrt
+ from ctypes import *
+ kernel32 = windll.kernel32
+else:
+ kernel32 = None
+#pkg
+from bps import *
+from bps.numeric import limit
+from bps.unstable import ansi
+#local
#public API of this module
__all__ = [
    'write_ansi_string', #FIX: was 'print_ansi_string', a name never defined here
]
+
+#=========================================================
+#misc constants
+#=========================================================
#flag for kernel32.FormatMessage: look message up in the system table
FORMAT_MESSAGE_FROM_SYSTEM = 0x00001000

#=========================================================
#constants from wincon.h
#=========================================================
##STD_INPUT_HANDLE = -10
##STD_OUTPUT_HANDLE= -11
##STD_ERROR_HANDLE = -12

FOREGROUND_BLUE = 0x01 # text color contains blue.
FOREGROUND_GREEN= 0x02 # text color contains green.
FOREGROUND_RED = 0x04 # text color contains red.
FOREGROUND_INTENSITY = 0x08 # text color is intensified.

BACKGROUND_BLUE = 0x10 # background color contains blue.
BACKGROUND_GREEN= 0x20 # background color contains green.
BACKGROUND_RED = 0x40 # background color contains red.
BACKGROUND_INTENSITY = 0x80 # background color is intensified.

#ctypes mirrors of the wincon.h console structures
class SMALL_RECT(Structure):
    #window rectangle, in character-cell coordinates
    _fields_ = [("Left", c_short),
                ("Top", c_short),
                ("Right", c_short),
                ("Bottom", c_short)]

class COORD(Structure):
    #character-cell coordinate pair
    _fields_ = [("X", c_short),
                ("Y", c_short)]

class CONSOLE_SCREEN_BUFFER_INFO(Structure):
    #NOTE(review): wAttributes is declared WORD (unsigned) in wincon.h;
    # c_short is signed -- confirm values stay below 0x8000 in practice
    _fields_ = [("dwSize", COORD),
                ("dwCursorPosition", COORD),
                ("wAttributes", c_short),
                ("srWindow", SMALL_RECT),
                ("dwMaximumWindowSize", COORD)]

#non-standard derived constants
FOREGROUND_WHITE = 0x07
BACKGROUND_WHITE = 0x70
FOREGROUND_ALL = 0x0F
BACKGROUND_ALL = 0xF0
ALL_WHITE = 0x77

#map of ansi -> dos color values
# (dos flips R & B bits)
ansi_to_dos = [0,4,2,6,1,5,3,7]
+
+#=========================================================
+#
+#=========================================================
def _get_hnd(stream):
    "return the win32 file HANDLE underlying *stream* (via its C runtime file descriptor)"
    return msvcrt.get_osfhandle(stream.fileno())
+
+##def get_console_size(file=sys.stdout):
+## "get size of console attached to stream"
+## info = CONSOLE_SCREEN_BUFFER_INFO()
+## status = kernel32.GetConsoleScreenBufferInfo(hnd, byref(info))
+##
+
def write_ansi_string(source, stream=None):
    """print string w/ embedded ansi escape codes to dos console.

    :arg source: text containing ansi escape sequences
    :param stream: target console stream; defaults to the *current*
        ``sys.stdout``. (FIX: the default was previously ``sys.stdout``
        evaluated at import time, so later rebinding of ``sys.stdout``
        was silently ignored; ``None`` is now resolved per call.)
    """
    #AnsiConsoleWriter.__init__ maps None -> sys.stdout
    return AnsiConsoleWriter(stream).write(source)
+
def _clear_bits(attr, bits):
    "return *attr* with every bit of *bits* forced off"
    return attr & ~bits
+
def _swap_bgfg(attr):
    "exchange the foreground and background color nibbles, preserving all other bits"
    fg_bits = attr & FOREGROUND_WHITE
    bg_bits = attr & BACKGROUND_WHITE
    rest = attr - fg_bits - bg_bits
    return rest | (bg_bits >> 4) | (fg_bits << 4)
+
def _pp_get_last_error():
    """return pretty-printed "[code] message" for kernel32's last error, or None if no error is set.

    NOTE: must be called promptly after the failing win32 call,
    before any other API call resets the thread's last-error value.
    """
    code = GetLastError() #ctypes wrapper for kernel32.GetLastError
    if code < 1:
        return None
    else:
        msg = FormatError(code) #ctypes wrapper for kernel32.FormatMessage
        return "[%d] %s" % (code, msg)
+
class AnsiConsoleWriter(object):
    """wraps a stream attached to a windows console,
    extracting any ansi escape codes, and implementing
    them using the Windows Console API where possible.

    :arg stream:
        open handle to console (usualy stdout / stderr).
        by default, this writes to sys.stdout.

    .. note::
        Right now, only ansi text styles (code "m") are supported.
        All others are ignored. In the future, there are plans
        to support the cursor movement codes.

    .. warning::
        This will raise a ValueError if the stream is not
        attached to a console; check ``stream.isatty()`` first
        for a graceful fallback if the stream is not a tty.

    .. warning::
        Reverse Video and Concealed Text styles utilitize
        per-Writer state, which may not work with concurrent
        changes to styles while either mode is enabled.
    """
    #=========================================================
    #instance attrs
    #=========================================================
    stream = None #stream we're wrapping
    _hnd = None #windows fhandle for stream
    _attrs = None #last attr state we read from console
    _reverse = False #flag if reverse video is enabled
    _conceal = None #flag if concealed text is enabled
    _conceal_fg = None #original fg color saved while conceal mode is active

    #=========================================================
    #init
    #=========================================================

    #XXX: classmethod such as "has_console(stream)" if isatty() isn't sufficient?

    def __init__(self, stream=None):
        if stream is None:
            stream = sys.stdout
        if not stream.isatty():
            #py3-compatible raise form (was ``raise ValueError, "..."``)
            raise ValueError("stream is not attached to a tty")
        self._hnd = _get_hnd(stream)
        assert isinstance(self._hnd, int)
        self.stream = stream

    #=========================================================
    #state management
    #=========================================================
    def _get_info(self):
        "read CONSOLE_SCREEN_BUFFER_INFO for our console, or None on failure (logged)"
        info = CONSOLE_SCREEN_BUFFER_INFO()
        ok = kernel32.GetConsoleScreenBufferInfo(self._hnd, byref(info))
        if ok:
            return info
        else:
            log.error("failed to read screen buffer info: stream=%r error=%r", self.stream, _pp_get_last_error())
            return None

    def _update_state(self):
        "update internal state from console"
        info = self._get_info()
        if info:
            self._attrs = info.wAttributes
        else:
            #couldn't read console -- fall back to plain white-on-black
            self._attrs = FOREGROUND_WHITE

    def _apply_code(self, code):
        "apply change requested by AnsiCode instance"
        cseq = code.cseq_code
        if cseq == "m": #text style
            self._apply_styles(code.args)
        elif cseq == "A": #cursor up
            self._move_cursor(0, -code.offset)
        elif cseq == "B": #cursor down
            self._move_cursor(0, code.offset)
        elif cseq == "C": #cursor right
            self._move_cursor(code.offset, 0)
        elif cseq == "D": #cursor left
            self._move_cursor(-code.offset, 0)
        elif cseq == "H": #absolute cursor position
            self._set_cursor(code.col, code.row)
        #TODO: support abs vert & horiz csr movement codes
        elif cseq == "J": #clear screen
            self._do_clear_screen(code.mode)
        ##elif cseq == "K":
        ##    self._do_clear_line(code.mode)
        else:
            #TODO: we could support the cursor repositioning commands
            log.debug("discarding unsupported ansi escape code: %r", code)

    def _do_clear_screen(self, mode):
        """implement ansi "J" code: blank out part of the screen buffer.

        :arg mode: 0 - cursor's line to bottom; 1 - top through cursor's line; 2 - whole buffer
        """
        info = self._get_info()
        if not info:
            return
        if mode == 0:
            #clear from cursor's line to bottom of buffer
            cy = info.dwCursorPosition.Y
            origin = COORD(0, cy)
            #FIX: was dwSize.Y-cy+1 rows, one row past the end of the buffer
            count = info.dwSize.X * (info.dwSize.Y - cy)
        elif mode == 1:
            #clear from top of buffer through cursor's line
            cy = info.dwCursorPosition.Y
            origin = COORD(0, 0)
            #FIX: was copy-pasted from mode 0 (dwSize.Y-cy+1 rows from origin),
            # which cleared the wrong region; rows 0..cy is cy+1 rows
            count = info.dwSize.X * (cy + 1)
        elif mode == 2:
            #clear entire buffer
            origin = COORD(0, 0)
            count = info.dwSize.X * info.dwSize.Y
        else:
            #FIX: original logged undefined name 'num' here -> NameError
            log.debug("unsupported J mode: %r", mode)
            return
        #FIX: cells-written out-param is a DWORD; was c_short (too narrow)
        written = c_ulong()
        ok = kernel32.FillConsoleOutputCharacterA(self._hnd, 32, count, origin, byref(written))
        if not ok:
            log.error("failed to clear screen: stream=%r error=%r", self.stream, _pp_get_last_error())

    def _set_cursor(self, cx, cy):
        "move cursor to absolute position, clipped to the buffer bounds"
        info = self._get_info()
        if not info:
            return
        bsize = info.dwSize
        bx, by = bsize.X, bsize.Y
        #FIXME: is windows relative to 0,0 or 1,1? cause H codes is 1,1
        #TODO: support single-dim movement when cx / cy is None
        cx = limit(cx, 0, bx-1)
        cy = limit(cy, 0, by-1)
        cpos = COORD(cx, cy)
        ok = kernel32.SetConsoleCursorPosition(self._hnd, cpos)
        if not ok:
            log.error("failed to set cursor position: stream=%r error=%r", self.stream, _pp_get_last_error())

    def _move_cursor(self, rx, ry):
        "move cursor relative to its current position, clipped to the buffer bounds"
        info = self._get_info()
        if not info:
            return
        cpos = info.dwCursorPosition
        cx, cy = cpos.X, cpos.Y
        bsize = info.dwSize
        bx, by = bsize.X, bsize.Y
        cx = limit(cx+rx, 0, bx-1)
        cy = limit(cy+ry, 0, by-1)
        cpos = COORD(cx, cy)
        ok = kernel32.SetConsoleCursorPosition(self._hnd, cpos)
        if not ok:
            log.error("failed to set cursor position: stream=%r error=%r", self.stream, _pp_get_last_error())

    def _apply_styles(self, values):
        "apply values attached to ansi 'm' code"
        clear = _clear_bits

        #load attrs, undoing any reverse/conceal rearrangement so we
        #can edit the "logical" attribute state below
        attr = self._attrs
        rev = self._reverse
        if rev: #undo attr swap if reversed
            attr = _swap_bgfg(attr)
        conceal = self._conceal
        if conceal: #restore orig fg color if concealed
            attr = attr-(attr & FOREGROUND_ALL) + self._conceal_fg

        #make changes
        for value in values:
            if value == 0:
                #reset all
                attr = FOREGROUND_WHITE
                rev = conceal = False
            elif value == 1:
                #enable bold
                attr |= FOREGROUND_INTENSITY
            #4,21 - underline
            elif value == 5 or value == 6:
                #enable blink (as background highlight)
                attr |= BACKGROUND_INTENSITY
            elif value == 7:
                #reverse text mode
                rev = True
            elif value == 8:
                #concealed text mode
                conceal = True

            elif value == 22:
                #disable bold
                attr = clear(attr, FOREGROUND_INTENSITY)
            elif value == 25:
                #disable blink
                attr = clear(attr, BACKGROUND_INTENSITY)
            #24 - undo underline
            elif value == 27:
                #undo reverse text mode
                rev = False
            elif value == 28:
                #undo concealed mode
                conceal = False

            elif 30 <= value < 38 or value == 39:
                #set fg color
                if value == 39: #treat white as default
                    value = 37
                attr = clear(attr, FOREGROUND_WHITE) | ansi_to_dos[value-30]
            elif 40 <= value < 48 or value == 49:
                #set bg color
                if value == 49: #treat black as default
                    value = 40
                attr = clear(attr, BACKGROUND_WHITE) | (ansi_to_dos[value-40]<<4)
            else:
                #we ignore all other attr codes
                log.debug("ignoring unsupported ansi style attr: %r", value)
                continue

        #rearrange attr based on flags
        if conceal:
            #hide text by making fg match bg; remember orig fg for later
            old = self._conceal_fg = attr & FOREGROUND_ALL
            new = (attr&BACKGROUND_ALL)>>4
            attr = attr-old+new
        if rev: #swap colors if reversed
            attr = _swap_bgfg(attr)

        #now that we're done, try to update
        assert isinstance(attr, int)
        ok = kernel32.SetConsoleTextAttribute(self._hnd, attr)
        if ok:
            self._attrs = attr
            self._reverse = rev
            self._conceal = conceal
        else:
            log.error("failed to write attrstate to console: stream=%r error=%r", self.stream, _pp_get_last_error())

    #=========================================================
    #methods to proxy real stream
    #=========================================================
    def __getattr__(self, attr):
        #delegate everything we don't implement to the wrapped stream
        return getattr(self.stream, attr)

    def write(self, text):
        "write *text*, translating embedded ansi escape codes to console API calls"
        self._update_state()
        raw_write = self.stream.write
        apply_code = self._apply_code
        for elem in ansi.parse_ansi_string(text, rtype=iter, malformed_codes="ignore"):
            if hasattr(elem, "code"):
                apply_code(elem)
            else:
                raw_write(elem)

    def writelines(self, seq):
        "write each string in *seq*, translating embedded ansi escape codes"
        self._update_state()
        raw_write = self.stream.write
        apply_code = self._apply_code
        for text in seq:
            for elem in ansi.parse_ansi_string(text, rtype=iter, malformed_codes="ignore"):
                if hasattr(elem, "code"):
                    apply_code(elem)
                else:
                    raw_write(elem)

    #=========================================================
    #eoc
    #=========================================================
+
+#=========================================================
+#eof
+#=========================================================
diff --git a/bps/warndep.py b/bps/warndep.py
new file mode 100644
index 0000000..1f330f0
--- /dev/null
+++ b/bps/warndep.py
@@ -0,0 +1,592 @@
+"""bps.warndep -- warning and deprecation utilities"""
+#===================================================
+#imports
+#===================================================
+#core
+import inspect
+from functools import update_wrapper
+from warnings import warn
+#needed imports
+#legacy imports
+from bps.undef import Undef
+from bps.meta import isstr, find_attribute
+from bps.error.types import ParamError
+
+__all__ = [
+ #deprecation decorators
+ 'deprecated_function',
+ 'deprecated_method',
+
+ #deprecation constructors
+ 'deprecated_property',
+ 'relocated_function',
+ 'relocated_class',
+]
+
+#=========================================================
+#deprecation decorators
+#=========================================================
def deprecated_function(use=None, name=None, removal=None, msg=None, **kwds):
    #TODO: once positional version deprecated, move "name" -> after removal, remove kwds
    """Used to indicate a function has been deprecated,
    and issues a warning telling user where to find the recommended replacement (if any)

    :type use: str|None
    :param use:
        [optional]
        name of replacement function.
        if provided, the default message will indicate this function
        should be used instead. if not provided, the default
        message will indicate the function is scheduled for removal.

    :type name: str|None
    :param name:
        [optional]
        Overrides name of original function
        (else derived from function that's being wrapped).

    :type removal: str|True|None
    :param removal:
        [optional]
        A string containing information about when this function
        will be removed. Typically, this will either be a date
        or a version number. It is inserted
        into the default message by appending the phrase:

        ``"and will be removed after %(removal)s"``

    :type msg: str|None
    :param msg:
        [optional]
        Overrides default warning message.
        This message will be passed through ``msg % opts``, where
        opts is a dictionary containing the following keys:

        name
            deprecated function name: ``name`` keyword, or name of original function
        mod
            name of module old function came from.
        use
            replacement name (``use`` keyword, or ``None``).
        removal
            value of ``removal`` keyword, or ``None``.

    .. todo:: give usage example for depfunc

    .. note::

        all options should be specified as kwd args
    """
    #XXX: should we expose stacklevel for weird invocation cases?

    #handle deprecated kwds
    if 'new_name' in kwds:
        warn("'new_name' deprecated, use 'use' instead; to be removed soon", DeprecationWarning, stacklevel=2)
        use = kwds.pop("new_name")
    if 'old_name' in kwds:
        warn("'old_name' deprecated, use 'name' instead; to be removed soon", DeprecationWarning, stacklevel=2)
        name = kwds.pop("old_name")
    if kwds:
        #FIX: py3-compatible raise form (was py2-only ``raise TypeError, ...``)
        raise TypeError("unknown kwds: %r" % (kwds,))

    #create default msg
    if not msg:
        msg = "function %(name)r is deprecated"
        if use:
            msg += ", use %(use)r instead"
        if removal:
            if removal is True:
                msg += "; it will be removed in the future"
            else:
                msg += "; it will be removed after %(removal)s"
    if '%(mod)' not in msg: #just to make sure user is oriented about warning
        msg = "%(mod)s: " + msg

    #decorator-builder
    def builder(func):
        #FIXME: old_mod currently points to *new* module,
        # which is usually the same, but not always
        text = msg % dict(
            mod=func.__module__,
            name=name or func.__name__,
            use=use,
            removal=removal,
        )
        def wrapper(*args, **kwds):
            warn(text, DeprecationWarning, stacklevel=2)
            return func(*args, **kwds)
        update_wrapper(wrapper, func)
        return wrapper
    return builder
+
def deprecated_method(use=None, name=None, removal=None, msg=None, **kwds):
    """Used to indicate a method has been deprecated, and displays msg w/ recommended replacement.

    Aside from decorating a method instead of a function,
    this is exactly the same as :func:`deprecated_function`,
    except for the addition of the following extra keywords
    available for formatting inside *msg*:

    cls
        name of class the deprecated function is stored inside.
    """
    #XXX: should we expose stacklevel for weird invocation cases?

    #handle deprecated kwds
    if 'new_name' in kwds:
        warn("'new_name' deprecated, use 'use' instead; to be removed soon", DeprecationWarning, stacklevel=2)
        use = kwds.pop("new_name")
    if 'old_name' in kwds:
        warn("'old_name' deprecated, use 'name' instead; to be removed soon", DeprecationWarning, stacklevel=2)
        name = kwds.pop("old_name")
    if kwds:
        #FIX: py3-compatible raise form (was py2-only ``raise TypeError, ...``)
        raise TypeError("unknown kwds: %r" % (kwds,))

    #create default msg
    if not msg:
        msg = "method %(name)r is deprecated"
        if use:
            msg += ", use %(use)r instead"
        if removal:
            if removal is True:
                msg += "; it will be removed in the future"
            else:
                msg += "; it will be removed after %(removal)s"
    if '%(mod)' not in msg: #just to make sure user is oriented about warning
        msg = "%(mod)s.%(cls)s: " + msg

    #decorator-builder
    def builder(func):
        #message text can't be rendered until the owning class is known,
        #so stash the format params and fill in mod/cls lazily on first call
        state = dict(
            use=use,
            name=name or func.__name__,
            removal=removal,
            #params filled in when bound to class...
            mod=None,
            cls=None,
            text=None,
        )
        def wrapper(self, *args, **kwds):
            #NOTE(review): text is cached from the *first* caller's class,
            # so later calls via a different subclass reuse that class name
            text = state['text']
            if not text:
                cls = self.__class__
                state.update(mod=cls.__module__, cls=cls.__name__)
                text = state['text'] = msg % state
            warn(text, DeprecationWarning, stacklevel=2)
            return func(self, *args, **kwds)
        wrapper._deprecated_func = func #used to let deprecated_property strip this off
        update_wrapper(wrapper, func)
        return wrapper
    return builder
+
def deprecated_property(fget=None, fset=None, fdel=None, doc=None,
                        new_name=None, old_name=None, msg=None, removal=None):
    """replacement for property() which issues deprecation warning when property is used.

    :arg fget:
        get function, same as for :func:`property()`
    :arg fset:
        set function, same as for :func:`property()`
    :arg fdel:
        delete function, same as for :func:`property()`
    :arg doc:
        alternate docstring, same as for :func:`property()`

    :param new_name:
        Name of alternate attribute that should be used.
        If not set, default message will indicate this property
        is deprecated without any alternatives.

    :param old_name:
        Name of the attribute this property will be stored in.
        If not specified, an attempt will be made to derive
        it from the name of the fget / fset methods.
        If that fails, a ParamError will be raised.

    :param removal:
        [optional]
        A string containing information about when this function
        will be removed. Typically, this will either be a date
        or a version number. It is inserted
        into the default message by appending the phrase:

        ``"and will be removed after %(removal)s"``

    :param msg:
        If this is specified, it overrides the default warning message.
        All message strings will be passed through ``msg % vars``,
        where *vars* is a dictionary containing the following keys:

        new_name
            value of new_name parameter passed into constructor

        old_name
            value of old_name parameter passed into constructor

        old_cls
            name of class that attribute is part of

        old_mod
            name of module that old_class belongs to
    """
    if not msg:
        msg = "attribute %(old_name)r is deprecated"
        if removal:
            if removal is True:
                msg += ", and will be removed in the future"
            else:
                msg += ", and will be removed after %(removal)s"
        if new_name:
            msg += ", use %(new_name)r instead"
    if '%(old_mod)' not in msg:
        msg = "%(old_mod)s.%(old_cls)s: " + msg
    if old_name is None:
        #try to derive old_name from fget's name, then fset's name
        assert fget
        name = fget.__name__.lstrip("_")
        if name.startswith("get"):
            old_name = name[3:]
        elif fset:
            name = fset.__name__.lstrip("_")
            if name.startswith("set"):
                old_name = name[3:]
        if old_name is None:
            #FIX: original silently left old_name=None when fget wasn't
            # named "get*" and fset existed but wasn't named "set*";
            # now always raises as the docstring promises
            #(also py3-compatible raise form)
            raise ParamError("old_name must be specified, can't guess from fget/fset")
    state = dict(
        new_name=new_name,
        old_name=old_name,
        removal=removal,
    )
    def builder(func):
        "wrap one accessor so it warns on use; passes None through unchanged"
        if func is None:
            return None
        if hasattr(func, "_deprecated_func"): #set by deprecated_method, so we don't do a double warning
            func = func._deprecated_func
        def wrapper(self, *args, **kwds):
            if 'text' not in state:
                #render message lazily, once the owning class is known
                cls = self.__class__
                state.update(old_mod=cls.__module__, old_cls=cls.__name__)
                state['text'] = msg % state
            warn(state['text'], DeprecationWarning, stacklevel=2)
            return func(self, *args, **kwds)
        update_wrapper(wrapper, func)
        return wrapper
    return property(builder(fget), builder(fset), builder(fdel), doc)
+
+#=========================================================
+#func/class generators
+#=========================================================
+#TODO: rename "new_func" to "handler", and swap locations with "new_name",
+# which should now be required (see relocated-method)
def relocated_function(name, use, removal=None, lazy=True, msg=None, **kwds):
    """Used to indicate a function has been deprecated:
    this generates and returns a wrapper function which acts as a proxy
    for the replacement, after issuing a suitable warning.

    The replacement can either be passed in directly,
    or lazily imported when needed.

    :type name: str
    :arg name:
        [required]
        name of old (deprecated) function we're creating a wrapper for

    :type use: str|callable
    :arg use:
        [required]
        new function to use, or string containing absolute module path + name func for importing

    :type msg: str|None
    :param msg:
        [optional, kwd only]
        overrides the entire string displayed in the warning message.

    :type lazy: bool
    :param lazy:
        If ``True`` (the default), the import of the replacement is delayed until needed.
        If False, the new function is imported immediately.

    Usage examples ::

        >>> from bps.warndep import relocated_function

        >>> #function 'old_func' has been renamed to 'new_func' with the same module
        >>> old_func = relocated_function("old_func", new_func)

        >>> #function 'old_func' has been moved to another module,
        >>> #and must be specified by name only (it will be lazily imported)
        >>> old_func = relocated_function("old_func", "othermod.new_func")

    """
    #handle deprecated kwds
    if 'new_name' in kwds:
        warn("'new_name' deprecated, use 'use' instead; to be removed soon", DeprecationWarning, stacklevel=2)
        use = kwds.pop("new_name")
    if 'new_func' in kwds:
        warn("'new_func' deprecated, use 'use' instead; to be removed soon", DeprecationWarning, stacklevel=2)
        use = kwds.pop("new_func")
    if 'old_name' in kwds:
        warn("'old_name' deprecated, use 'name' instead; to be removed soon", DeprecationWarning, stacklevel=2)
        name = kwds.pop("old_name")
    if kwds:
        #FIX: py3-compatible raise form (was py2-only ``raise TypeError, ...``)
        raise TypeError("unknown kwds: %r" % (kwds,))

    #inspect caller to determine current module
    #FIX: ``inspect.currentframe(1)`` relied on the py2 sys._getframe alias;
    # under py3 currentframe() takes no arguments -- walk f_back explicitly
    frame = inspect.currentframe()
    if frame is not None and frame.f_back is not None:
        mod = frame.f_back.f_globals.get("__name__", "???")
    else:
        mod = "???" #frame introspection unavailable on this interpreter

    #parse 'use' into use_mod, use_name, handler
    if isinstance(use, str):
        if ':' in use:
            #assume they used "a.b.c:func" syntax
            idx = use.rindex(":")
        elif '.' in use:
            #assume they used "a.b.c.func" syntax
            idx = use.rindex(".")
        else:
            #assume replacement lives in this module
            use = mod + "." + use
            idx = use.rindex(".")
        use_mod = use[:idx]
        use_name = use[idx+1:]
        handler = None
    elif callable(use):
        use_mod = use.__module__
        use_name = use.__name__
        handler = use
    else:
        raise ValueError("new function path or instance must be specified ('use' kwd)")

    #fill in some defaults
    if name is None:
        if mod == use_mod:
            raise ValueError("old function name must be specified ('name' kwd)")
        name = use_name
    old_path = mod + "." + name
    use_path = use_mod + "." + use_name

    #create default msg
    if not msg:
        #TODO: have this use name only if w/in same module.
        # this will require better test_warndep code, using external modules,
        # as well as changing the text of test_security_pwhash's legacy warnings
        msg = "function %(name)r is deprecated, use %(use_path)r instead"
        if removal:
            if removal is True:
                msg += "; it will be removed in the future"
            else:
                msg += "; it will be removed after %(removal)s"
    if '%(mod)' not in msg: #just to make sure user is oriented about warning
        msg = "%(mod)s: " + msg

    #render warning message
    text = msg % dict(
        mod=mod,
        name=name,
        use_path=use_path,
        use_name=use_name,
        removal=removal,
    )

    #resolve handler
    wrapper = None
    def resolve():
        "import the replacement, guarding against swapped name/use params"
        module = __import__(use_mod, fromlist=[use_name])
        try:
            value = getattr(module, use_name)
        except AttributeError:
            raise AttributeError("module %r has no attribute %r" % (use_mod, use_name))
        if value is wrapper:
            raise ParamError("relocated_function(%r,%r): 'name' & 'use' parameters reversed" % (old_path, use_path))
        return value
    if handler is None and not lazy:
        handler = resolve()
        assert handler

    #create appropriate wrapper
    if handler:
        #direct wrapper
        def wrapper(*args, **kwds):
            warn(text, DeprecationWarning, stacklevel=2)
            return handler(*args, **kwds)
        update_wrapper(wrapper, handler)
    else:
        #delayed importing wrapper; one-slot cache avoids re-importing per call
        cache = [None]
        def wrapper(*args, **kwds):
            warn(text, DeprecationWarning, stacklevel=2)
            func = cache[0]
            if func is None:
                func = cache[0] = resolve()
                update_wrapper(wrapper, func)
            return func(*args, **kwds)
    wrapper.__doc__ = "relocated_function wrapper for %r" % use
    wrapper.__name__ = name
    return wrapper
+
+#TODO: work out way for this to operate on class & static methods
+def relocated_method(name=None, use=None, removal=None, msg=None, **kwds):
+ """Companion to deprecated_method(): This actually returns a function
+ which proxies the replacement method after issuing a deprecation warning.
+
+ :type name: str|None
+ :arg name:
+ name of old (deprecated) method we're creating a function for.
+ (If ``None``, name will be autodetected when method is first used).
+
+ :type use: str|callable
+ :arg use:
+ [required]
+ name of new method which should be used in this method's place.
+
+ :type msg: str|None
+ :param msg:
+ optionally override the deprecation message displayed.
+
+ :type removal: str|None
+ :param removal:
+ optional indicate date or release this will be removed in.
+ """
+ #handle deprecated kwds
+ if 'new_name' in kwds:
+ warn("'new_name' deprecated, use 'use' instead; to be removed soon", DeprecationWarning, stacklevel=2)
+ use = kwds.pop("new_name")
+ if 'old_name' in kwds:
+ warn("'old_name' deprecated, use 'name' instead; to be removed soon", DeprecationWarning, stacklevel=2)
+ name = kwds.pop("old_name")
+ if kwds:
+ raise TypeError, "unknown kwds: %r" % (kwds,)
+
+ #
+ #validate inputs (let deprecated_method catch other cases)
+ #
+ if not use:
+ raise ValueError, "new method name to use must be specified ('use' kwd)"
+
+ if callable(use):
+ handler = use
+ use = handler.__name__
+ else:
+ handler = None
+
+ #
+ #build msg
+ #
+ if not msg:
+ msg = "method %(name)r is deprecated, use %(use)r instead"
+ if removal:
+ if removal is True:
+ msg += "; it will be removed in the future"
+ else:
+ msg += "; it will be removed after %(removal)s"
+ if '%(mod)' not in msg: #just to make sure user is oriented about warning
+ msg = "%(mod)s.%(cls)s: " + msg
+
+ #
+ #build wrapped handler
+ #
+ state = dict(
+ #from constructor
+ use=use,
+ name=name,
+ removal=removal,
+
+ #filled in after being bound to class
+ text=None,
+ mod=None,
+ cls=None,
+ handler=None,
+ )
+ def wrapper(self, *a, **k):
+ #FIXME: this text optimization doesn't work if 2+ subclasses call relocated method of parent.
+ # remove optimization & add a unit test
+ ##text = state['text']
+ ##handler = state['handler']
+ ##if text:
+ ## warn(text, DeprecationWarning, stacklevel=2)
+ ##else:
+ cls = self.__class__
+ if not state['name']:
+ state['name'] = find_attribute(cls, wrapper, required=True)
+ state.update(
+ mod=cls.__module__,
+ cls=cls.__name__,
+ )
+ #TODO: detect old_name here
+ text = state['text'] = msg % state
+ warn(text, DeprecationWarning, stacklevel=2)
+ handler = state['handler'] = getattr(cls, use)
+ if getattr(handler,"im_func",None) is wrapper:
+ raise ParamError, "%r: relocated_method(%r,%r): 'name' & 'use' parameters reversed" % (cls, name, use)
+ return handler(self, *a, **k)
+ wrapper.__name__ = name or "<deprecated alias for %r>" % (use,)
+ return wrapper
+
+#XXX: this func hasn't been documented yet, because it doesn't work right yet
+def relocated_class(old_name, new_class=None, new_name=None, msg=None, lazy=True, inheritable=True):
+ """equivalent to relocated_function() for classes.
+
+ :param subclass:
+ If ``False``, the stub this returns will not be suitable for subclassing.
+ This allows a cheaper implementation to be used.
+
+ .. todo::
+ Right now this doesn't actually do anything
+ besides return the named class, and just acts as a placeholder
+ till a proper implementation is devised.
+
+ Such an implementation must:
+ * issue a warning when the class is first imported OR instantiated/subclassed
+ * still act like just the real class as far as inheritance goes.
+ """
+ if not inheritable:
+ #TODO: work up better msg
+ return relocated_function(old_name, new_class, new_name, msg, lazy)
+
+ #FIXME:
+ if not old_name:
+ raise ValueError, "old function name must be specified"
+ if not new_class:
+ #new_func stored in old_name,
+ #and proper old_name should be derived
+ #from end of new_func
+ new_class = old_name
+ assert isinstance(new_class, str)
+ assert '.' in new_class or ':' in new_class
+ old_name = None
+ if isinstance(new_class, str):
+ #assume this is a full path name,
+ #which will be imported lazily w/in the wrapper
+ if ':' in new_class:
+ #assume they used "a.b.c:func" syntax
+ idx = new_class.rindex(":")
+ else:
+ #assume they used "a.b.c.func" syntax
+ idx = new_class.rindex(".")
+ new_mod = new_class[:idx]
+ new_class = new_class[idx+1:]
+
+ #and now the hack.. we resolve & return new_class
+ if isinstance(new_class, str):
+ mod = __import__(new_mod, fromlist=[new_class])
+ return getattr(mod, new_class)
+ else:
+ return new_class
+
+#=========================================================
+#
+#=========================================================
diff --git a/docs/_static/logo.ico b/docs/_static/logo.ico
new file mode 100644
index 0000000..3ac2f26
--- /dev/null
+++ b/docs/_static/logo.ico
Binary files differ
diff --git a/docs/_static/logo.png b/docs/_static/logo.png
new file mode 100644
index 0000000..b859c98
--- /dev/null
+++ b/docs/_static/logo.png
Binary files differ
diff --git a/docs/_static/logo.svg b/docs/_static/logo.svg
new file mode 100644
index 0000000..43be7d1
--- /dev/null
+++ b/docs/_static/logo.svg
@@ -0,0 +1,382 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+<svg
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:xlink="http://www.w3.org/1999/xlink"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ width="48"
+ height="48"
+ id="svg2383"
+ sodipodi:version="0.32"
+ inkscape:version="0.46"
+ sodipodi:docname="logo.svg"
+ inkscape:output_extension="org.inkscape.output.svg.inkscape"
+ inkscape:export-filename="/home/biscuit/dev/libs/bps/trunk/docs/_static/logo.png"
+ inkscape:export-xdpi="135"
+ inkscape:export-ydpi="135"
+ version="1.0"
+ style="display:inline">
+ <defs
+ id="defs2385">
+ <linearGradient
+ id="linearGradient3426">
+ <stop
+ style="stop-color:#cdcdcd;stop-opacity:1;"
+ offset="0"
+ id="stop3428" />
+ <stop
+ style="stop-color:#989898;stop-opacity:1;"
+ offset="1"
+ id="stop3430" />
+ </linearGradient>
+ <linearGradient
+ id="linearGradient3361">
+ <stop
+ style="stop-color:#d1d1d1;stop-opacity:1;"
+ offset="0"
+ id="stop3363" />
+ <stop
+ style="stop-color:#85867f;stop-opacity:0;"
+ offset="1"
+ id="stop3365" />
+ </linearGradient>
+ <linearGradient
+ id="linearGradient3343">
+ <stop
+ style="stop-color:#e7e8e7;stop-opacity:1;"
+ offset="0"
+ id="stop3345" />
+ <stop
+ id="stop3351"
+ offset="0.35526317"
+ style="stop-color:#85867f;stop-opacity:1;" />
+ <stop
+ style="stop-color:#8a8b85;stop-opacity:1;"
+ offset="0.55263162"
+ id="stop3357" />
+ <stop
+ style="stop-color:#e8e8e6;stop-opacity:1;"
+ offset="0.75"
+ id="stop3353" />
+ <stop
+ id="stop3355"
+ offset="0.875"
+ style="stop-color:#e3e3e2;stop-opacity:1;" />
+ <stop
+ style="stop-color:#85867f;stop-opacity:1;"
+ offset="1"
+ id="stop3347" />
+ </linearGradient>
+ <linearGradient
+ id="linearGradient3330">
+ <stop
+ style="stop-color:#63645e;stop-opacity:1;"
+ offset="0"
+ id="stop3332" />
+ <stop
+ style="stop-color:#d8d9d7;stop-opacity:1;"
+ offset="1"
+ id="stop3334" />
+ </linearGradient>
+ <linearGradient
+ id="linearGradient3174">
+ <stop
+ style="stop-color:#ffffff;stop-opacity:0;"
+ offset="0"
+ id="stop3176" />
+ <stop
+ id="stop3182"
+ offset="0.02577317"
+ style="stop-color:#ffffff;stop-opacity:0;" />
+ <stop
+ style="stop-color:#ffffff;stop-opacity:0.68103451;"
+ offset="0.10257728"
+ id="stop3184" />
+ <stop
+ id="stop3186"
+ offset="0.29355666"
+ style="stop-color:#ffffff;stop-opacity:0;" />
+ <stop
+ style="stop-color:#ffffff;stop-opacity:0;"
+ offset="0.49417913"
+ id="stop3188" />
+ <stop
+ id="stop3190"
+ offset="0.76791382"
+ style="stop-color:#ffffff;stop-opacity:0.68103451;" />
+ <stop
+ style="stop-color:#ffffff;stop-opacity:0.70689654;"
+ offset="0.83300149"
+ id="stop3194" />
+ <stop
+ style="stop-color:#ffffff;stop-opacity:0;"
+ offset="1"
+ id="stop3178" />
+ </linearGradient>
+ <inkscape:perspective
+ sodipodi:type="inkscape:persp3d"
+ inkscape:vp_x="0 : 24 : 1"
+ inkscape:vp_y="0 : 1000 : 0"
+ inkscape:vp_z="48 : 24 : 1"
+ inkscape:persp3d-origin="24 : 16 : 1"
+ id="perspective2391" />
+ <inkscape:perspective
+ id="perspective2511"
+ inkscape:persp3d-origin="372.04724 : 350.78739 : 1"
+ inkscape:vp_z="744.09448 : 526.18109 : 1"
+ inkscape:vp_y="0 : 1000 : 0"
+ inkscape:vp_x="0 : 526.18109 : 1"
+ sodipodi:type="inkscape:persp3d" />
+ <linearGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient3174"
+ id="linearGradient3180"
+ x1="6.2500014"
+ y1="26.857143"
+ x2="39.892857"
+ y2="26.857143"
+ gradientUnits="userSpaceOnUse" />
+ <radialGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient3330"
+ id="radialGradient3338"
+ cx="24.08217"
+ cy="6.5837455"
+ fx="24.08217"
+ fy="6.5837455"
+ r="3.3319807"
+ gradientTransform="matrix(1,0,0,0.3178702,0,4.490969)"
+ gradientUnits="userSpaceOnUse" />
+ <linearGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient3343"
+ id="linearGradient3349"
+ x1="20.156134"
+ y1="9.6145229"
+ x2="27.476151"
+ y2="9.6145229"
+ gradientUnits="userSpaceOnUse" />
+ <linearGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient3426"
+ id="linearGradient3432"
+ x1="20.619965"
+ y1="4.9160261"
+ x2="23.637569"
+ y2="12.999183"
+ gradientUnits="userSpaceOnUse" />
+ <radialGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient3361"
+ id="radialGradient3238"
+ cx="23.637569"
+ cy="8.9576044"
+ fx="23.637569"
+ fy="8.9576044"
+ r="14.501575"
+ gradientTransform="matrix(1.0932998,4.0390633e-7,-7.5505546e-8,0.3972952,-2.2053809,5.3987817)"
+ gradientUnits="userSpaceOnUse" />
+ <filter
+ inkscape:collect="always"
+ id="filter3252"
+ x="-0.15294118"
+ width="1.3058824"
+ y="-0.55714287"
+ height="2.1142857">
+ <feGaussianBlur
+ inkscape:collect="always"
+ stdDeviation="3.9395948"
+ id="feGaussianBlur3254" />
+ </filter>
+ </defs>
+ <sodipodi:namedview
+ id="base"
+ pagecolor="#ffffff"
+ bordercolor="#666666"
+ borderopacity="1.0"
+ inkscape:pageopacity="0.0"
+ inkscape:pageshadow="2"
+ inkscape:zoom="9.8994949"
+ inkscape:cx="15.162291"
+ inkscape:cy="22.03092"
+ inkscape:current-layer="layer2"
+ showgrid="true"
+ inkscape:grid-bbox="true"
+ inkscape:document-units="px"
+ inkscape:window-width="1272"
+ inkscape:window-height="723"
+ inkscape:window-x="0"
+ inkscape:window-y="25"
+ borderlayer="true" />
+ <metadata
+ id="metadata2388">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ </cc:Work>
+ </rdf:RDF>
+ </metadata>
+ <g
+ inkscape:groupmode="layer"
+ id="layer6"
+ inkscape:label="shadow"
+ style="display:inline"
+ sodipodi:insensitive="true">
+ <path
+ sodipodi:type="arc"
+ style="opacity:1;fill:#000000;fill-opacity:0.24878049;stroke:none;stroke-width:0.1;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;filter:url(#filter3252)"
+ id="path3370"
+ sodipodi:cx="24.445692"
+ sodipodi:cy="38.302536"
+ sodipodi:rx="30.910667"
+ sodipodi:ry="8.485281"
+ d="M 55.356359,38.302536 A 30.910667,8.485281 0 1 1 -6.4649754,38.302536 A 30.910667,8.485281 0 1 1 55.356359,38.302536 z"
+ transform="matrix(0.7380952,0,0,0.7380952,6.0128136,10.031617)" />
+ </g>
+ <g
+ inkscape:groupmode="layer"
+ id="layer4"
+ inkscape:label="color"
+ sodipodi:insensitive="true">
+ <path
+ style="fill:#5884bf;fill-opacity:1;stroke:none;stroke-width:0.49592856;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1"
+ d="M 6.4265364,11.283679 L 40.573462,11.283679 L 40.573462,38.430609 C 30.992581,43.227768 15.769467,43.154069 6.4265364,38.430609 L 6.4265364,11.283679 z"
+ id="rect2384"
+ sodipodi:nodetypes="ccccc" />
+ <path
+ style="fill:#e13636;fill-opacity:1;stroke:none;stroke-width:0.49592856;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1"
+ d="M 6.4630916,13.953569 L 40.610016,13.953569 L 40.610017,23.624859 C 31.029136,28.422018 15.806022,28.348319 6.4630916,23.624859 L 6.4630916,13.953569 z"
+ id="path3200"
+ sodipodi:nodetypes="ccccc" />
+ <path
+ style="fill:#f2db25;fill-opacity:1;stroke:none;stroke-width:0.49592856;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1"
+ d="M 6.1600462,11.731234 L 40.306972,11.731234 L 40.306972,18.97816 C 30.726091,23.775319 15.502976,23.70162 6.1600461,18.97816 L 6.1600462,11.731234 z"
+ id="path3198"
+ sodipodi:nodetypes="ccccc" />
+ </g>
+ <g
+ inkscape:groupmode="layer"
+ id="layer3"
+ inkscape:label="shine"
+ sodipodi:insensitive="true">
+ <path
+ style="fill:url(#linearGradient3180);fill-opacity:1;stroke:#825540;stroke-width:0.49592856;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1"
+ d="M 6.4979658,11.498535 L 40.644892,11.498535 L 40.644892,38.645465 C 31.064011,43.442624 15.840896,43.368925 6.4979658,38.645465 L 6.4979658,11.498535 z"
+ id="path3171"
+ sodipodi:nodetypes="ccccc" />
+ </g>
+ <g
+ inkscape:groupmode="layer"
+ id="layer5"
+ inkscape:label="text"
+ sodipodi:insensitive="true"
+ style="display:inline">
+ <path
+ style="font-size:40px;font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:#ffffff;stroke-width:0.1;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;font-family:Bitstream Vera Serif;-inkscape-font-specification:Bitstream Vera Serif Bold"
+ d="M 17.060881,32.56056 L 17.060881,31.386704 L 18.914033,31.386704 L 18.914033,19.250394 L 17.060881,19.250394 L 17.060881,18.076537 L 25.647474,18.076537 C 27.413294,18.076552 28.739281,18.374058 29.625443,18.969056 C 30.511579,19.564082 30.954652,20.459833 30.954668,21.656312 C 30.954652,22.516504 30.647411,23.198826 30.032943,23.703285 C 29.424914,24.20776 28.519362,24.524668 27.31628,24.654011 C 28.771623,24.789836 29.887394,25.177888 30.663596,25.818165 C 31.43977,26.458456 31.827865,27.308935 31.82788,28.369604 C 31.827865,29.805397 31.284532,30.862836 30.197883,31.541925 C 29.117674,32.221016 27.422996,32.56056 25.113844,32.56056 L 17.060881,32.56056 M 22.649443,24.12044 L 23.920453,24.12044 C 25.032982,24.120449 25.857682,23.919955 26.394555,23.51896 C 26.931409,23.111516 27.199842,22.490634 27.199852,21.656312 C 27.199842,20.815547 26.937878,20.204366 26.413961,19.82277 C 25.896492,19.441198 25.065322,19.250407 23.920453,19.250394 L 22.649443,19.250394 L 22.649443,24.12044 M 22.649443,31.386704 L 24.036882,31.386704 C 25.272306,31.386705 26.187562,31.14094 26.782651,30.649407 C 27.377717,30.157877 27.675256,29.397943 27.675268,28.369604 C 27.675256,27.334805 27.374484,26.561937 26.772949,26.050996 C 26.171391,25.540069 25.25937,25.284602 24.036882,25.284594 L 22.649443,25.284594 L 22.649443,31.386704"
+ id="text3202" />
+ <text
+ xml:space="preserve"
+ style="font-size:5.7741642px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:#f7f7f7;stroke-width:0.1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:0.71428576;font-family:Bitstream Vera Sans;-inkscape-font-specification:Bitstream Vera Sans"
+ x="17.585989"
+ y="39.817764"
+ id="text3320"
+ sodipodi:linespacing="125%"><tspan
+ sodipodi:role="line"
+ id="tspan3324"
+ x="17.585989"
+ y="39.817764">+4v</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-size:0.94746196px;font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans;-inkscape-font-specification:Bitstream Vera Sans Bold"
+ x="-38.376022"
+ y="37.053452"
+ id="text3466"
+ sodipodi:linespacing="125%"
+ transform="matrix(0,-1,1,0,0,0)"><tspan
+ sodipodi:role="line"
+ id="tspan2438"
+ x="-38.376022"
+ y="37.053452">This product adheres to all FDA </tspan><tspan
+ sodipodi:role="line"
+ id="tspan2440"
+ x="-38.376022"
+ y="38.237778"> regulations, and contains at least 50% </tspan><tspan
+ sodipodi:role="line"
+ id="tspan2442"
+ x="-38.376022"
+ y="39.422108"> bits by volume.</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-size:0.94746196px;font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans;-inkscape-font-specification:Bitstream Vera Sans Bold"
+ x="-38.067734"
+ y="12.216213"
+ id="text3482"
+ sodipodi:linespacing="125%"
+ transform="matrix(0,-1,1,0,0,0)"><tspan
+ sodipodi:role="line"
+ id="tspan2452"
+ x="-38.067734"
+ y="12.216213">Not a floatation device.</tspan></text>
+ </g>
+ <g
+ inkscape:groupmode="layer"
+ id="layer2"
+ inkscape:label="top"
+ sodipodi:insensitive="true"
+ style="display:inline">
+ <path
+ sodipodi:type="arc"
+ style="opacity:1;fill:#bf8158;fill-opacity:1;stroke:#825540;stroke-width:0.40979934;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ id="path2387"
+ sodipodi:cx="24.571428"
+ sodipodi:cy="11.071428"
+ sodipodi:rx="14.285714"
+ sodipodi:ry="3.2142856"
+ d="M 38.857142,11.071428 A 14.285714,3.2142856 0 1 1 10.285714,11.071428 A 14.285714,3.2142856 0 1 1 38.857142,11.071428 z"
+ transform="matrix(1.1947106,0,0,1.2876026,-5.7128888,-3.1127432)" />
+ <path
+ sodipodi:type="arc"
+ style="opacity:1;fill:#696a64;fill-opacity:1;stroke:none;stroke-width:0.30000001;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:0.5320197"
+ id="path3326"
+ sodipodi:cx="24.748737"
+ sodipodi:cy="11.129432"
+ sodipodi:rx="11.818785"
+ sodipodi:ry="2.0203052"
+ d="M 36.567522,11.129432 A 11.818785,2.0203052 0 1 1 12.929953,11.129432 A 11.818785,2.0203052 0 1 1 36.567522,11.129432 z"
+ transform="matrix(1.2606838,0,0,1.5072438,-7.5122541,-5.6599702)" />
+ <path
+ sodipodi:type="arc"
+ style="opacity:1;fill:url(#radialGradient3238);fill-opacity:1;stroke:url(#linearGradient3432);stroke-width:0.58742505;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ id="path3392"
+ sodipodi:cx="23.637569"
+ sodipodi:cy="8.9576044"
+ sodipodi:rx="14.142136"
+ sodipodi:ry="3.6870568"
+ d="M 37.779705,8.9576044 A 14.142136,3.6870568 0 1 1 9.4954338,8.9576044 A 14.142136,3.6870568 0 1 1 37.779705,8.9576044 z"
+ transform="matrix(0.8219163,0,0,0.5641407,4.2071579,5.9245612)" />
+ <path
+ style="fill:url(#linearGradient3349);fill-opacity:1;stroke:none;stroke-width:0.30000001;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1"
+ d="M 19.697975,8.1999896 L 27.375135,8.1999896 L 27.47615,11.230448 C 24.760736,12.324973 22.246648,12.010212 19.79899,11.230448 L 19.697975,8.1999896 z"
+ id="rect3340"
+ sodipodi:nodetypes="ccccc" />
+ <path
+ sodipodi:type="arc"
+ style="opacity:1;fill:url(#radialGradient3338);fill-opacity:1;stroke:none;stroke-width:0.08430404;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ id="path3328"
+ sodipodi:cx="23.789093"
+ sodipodi:cy="6.5837455"
+ sodipodi:rx="3.1819806"
+ sodipodi:ry="0.90913731"
+ d="M 26.971074,6.5837455 A 3.1819806,0.90913731 0 1 1 20.607112,6.5837455 A 3.1819806,0.90913731 0 1 1 26.971074,6.5837455 z"
+ transform="matrix(1.2063492,0,0,1.1135745,-5.1613985,1.0871994)" />
+ </g>
+</svg>
diff --git a/docs/_static/masthead.png b/docs/_static/masthead.png
new file mode 100644
index 0000000..154b198
--- /dev/null
+++ b/docs/_static/masthead.png
Binary files differ
diff --git a/docs/_static/masthead.svg b/docs/_static/masthead.svg
new file mode 100644
index 0000000..76d9df1
--- /dev/null
+++ b/docs/_static/masthead.svg
@@ -0,0 +1,423 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:xlink="http://www.w3.org/1999/xlink"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ width="180"
+ height="52"
+ id="svg2383"
+ sodipodi:version="0.32"
+ inkscape:version="0.47pre4 r22446"
+ sodipodi:docname="masthead.svg"
+ inkscape:output_extension="org.inkscape.output.svg.inkscape"
+ inkscape:export-filename="/home/biscuit/dev/libs/bps/trunk/docs/_static/masthead.png"
+ inkscape:export-xdpi="90"
+ inkscape:export-ydpi="90"
+ version="1.0"
+ style="display:inline">
+ <defs
+ id="defs2385">
+ <linearGradient
+ id="linearGradient3426">
+ <stop
+ style="stop-color:#cdcdcd;stop-opacity:1;"
+ offset="0"
+ id="stop3428" />
+ <stop
+ style="stop-color:#989898;stop-opacity:1;"
+ offset="1"
+ id="stop3430" />
+ </linearGradient>
+ <linearGradient
+ id="linearGradient3361">
+ <stop
+ style="stop-color:#d1d1d1;stop-opacity:1;"
+ offset="0"
+ id="stop3363" />
+ <stop
+ style="stop-color:#85867f;stop-opacity:0;"
+ offset="1"
+ id="stop3365" />
+ </linearGradient>
+ <linearGradient
+ id="linearGradient3343">
+ <stop
+ style="stop-color:#e7e8e7;stop-opacity:1;"
+ offset="0"
+ id="stop3345" />
+ <stop
+ id="stop3351"
+ offset="0.35526317"
+ style="stop-color:#85867f;stop-opacity:1;" />
+ <stop
+ style="stop-color:#8a8b85;stop-opacity:1;"
+ offset="0.55263162"
+ id="stop3357" />
+ <stop
+ style="stop-color:#e8e8e6;stop-opacity:1;"
+ offset="0.75"
+ id="stop3353" />
+ <stop
+ id="stop3355"
+ offset="0.875"
+ style="stop-color:#e3e3e2;stop-opacity:1;" />
+ <stop
+ style="stop-color:#85867f;stop-opacity:1;"
+ offset="1"
+ id="stop3347" />
+ </linearGradient>
+ <linearGradient
+ id="linearGradient3330">
+ <stop
+ style="stop-color:#63645e;stop-opacity:1;"
+ offset="0"
+ id="stop3332" />
+ <stop
+ style="stop-color:#d8d9d7;stop-opacity:1;"
+ offset="1"
+ id="stop3334" />
+ </linearGradient>
+ <linearGradient
+ id="linearGradient3174">
+ <stop
+ style="stop-color:#ffffff;stop-opacity:0;"
+ offset="0"
+ id="stop3176" />
+ <stop
+ id="stop3182"
+ offset="0.02577317"
+ style="stop-color:#ffffff;stop-opacity:0;" />
+ <stop
+ style="stop-color:#ffffff;stop-opacity:0.68103451;"
+ offset="0.10257728"
+ id="stop3184" />
+ <stop
+ id="stop3186"
+ offset="0.29355666"
+ style="stop-color:#ffffff;stop-opacity:0;" />
+ <stop
+ style="stop-color:#ffffff;stop-opacity:0;"
+ offset="0.49417913"
+ id="stop3188" />
+ <stop
+ id="stop3190"
+ offset="0.76791382"
+ style="stop-color:#ffffff;stop-opacity:0.68103451;" />
+ <stop
+ style="stop-color:#ffffff;stop-opacity:0.70689654;"
+ offset="0.83300149"
+ id="stop3194" />
+ <stop
+ style="stop-color:#ffffff;stop-opacity:0;"
+ offset="1"
+ id="stop3178" />
+ </linearGradient>
+ <inkscape:perspective
+ sodipodi:type="inkscape:persp3d"
+ inkscape:vp_x="0 : 24 : 1"
+ inkscape:vp_y="0 : 1000 : 0"
+ inkscape:vp_z="48 : 24 : 1"
+ inkscape:persp3d-origin="24 : 16 : 1"
+ id="perspective2391" />
+ <inkscape:perspective
+ id="perspective2511"
+ inkscape:persp3d-origin="372.04724 : 350.78739 : 1"
+ inkscape:vp_z="744.09448 : 526.18109 : 1"
+ inkscape:vp_y="0 : 1000 : 0"
+ inkscape:vp_x="0 : 526.18109 : 1"
+ sodipodi:type="inkscape:persp3d" />
+ <linearGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient3174"
+ id="linearGradient3180"
+ x1="6.2500014"
+ y1="26.857143"
+ x2="39.892857"
+ y2="26.857143"
+ gradientUnits="userSpaceOnUse"
+ gradientTransform="matrix(1.230234,0,0,1.230234,1.714269,-5.1906289)" />
+ <radialGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient3330"
+ id="radialGradient3338"
+ cx="24.08217"
+ cy="6.5837455"
+ fx="24.08217"
+ fy="6.5837455"
+ r="3.3319807"
+ gradientTransform="matrix(1,0,0,0.3178702,0,4.490969)"
+ gradientUnits="userSpaceOnUse" />
+ <linearGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient3343"
+ id="linearGradient3349"
+ x1="20.156134"
+ y1="9.6145229"
+ x2="27.476151"
+ y2="9.6145229"
+ gradientUnits="userSpaceOnUse"
+ gradientTransform="matrix(1.230234,0,0,1.230234,1.714269,-5.1906289)" />
+ <linearGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient3426"
+ id="linearGradient3432"
+ x1="20.619965"
+ y1="4.9160261"
+ x2="23.637569"
+ y2="12.999183"
+ gradientUnits="userSpaceOnUse" />
+ <radialGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient3361"
+ id="radialGradient3238"
+ cx="23.637569"
+ cy="8.9576044"
+ fx="23.637569"
+ fy="8.9576044"
+ r="14.501575"
+ gradientTransform="matrix(1.0932998,4.0390633e-7,-7.5505546e-8,0.3972952,-2.2053809,5.3987817)"
+ gradientUnits="userSpaceOnUse" />
+ <filter
+ inkscape:collect="always"
+ id="filter3534"
+ x="-0.11470588"
+ width="1.2294118"
+ y="-0.41785715"
+ height="1.8357143">
+ <feGaussianBlur
+ inkscape:collect="always"
+ stdDeviation="2.9546961"
+ id="feGaussianBlur3536" />
+ </filter>
+ <filter
+ inkscape:collect="always"
+ id="filter3222"
+ x="-0.18431562"
+ width="1.3686312"
+ y="-0.46863765"
+ height="1.9372753">
+ <feGaussianBlur
+ inkscape:collect="always"
+ stdDeviation="7.1308532"
+ id="feGaussianBlur3224" />
+ </filter>
+ </defs>
+ <sodipodi:namedview
+ id="base"
+ pagecolor="#1c4e63"
+ bordercolor="#666666"
+ borderopacity="1.0"
+ inkscape:pageopacity="0"
+ inkscape:pageshadow="2"
+ inkscape:zoom="2.4748737"
+ inkscape:cx="39.98405"
+ inkscape:cy="11.121273"
+ inkscape:current-layer="layer6"
+ showgrid="true"
+ inkscape:grid-bbox="true"
+ inkscape:document-units="px"
+ inkscape:window-width="1920"
+ inkscape:window-height="1005"
+ inkscape:window-x="0"
+ inkscape:window-y="24"
+ borderlayer="true"
+ inkscape:window-maximized="1" />
+ <metadata
+ id="metadata2388">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ <dc:title></dc:title>
+ </cc:Work>
+ </rdf:RDF>
+ </metadata>
+ <g
+ inkscape:groupmode="layer"
+ id="layer6"
+ inkscape:label="shadow"
+ style="display:inline">
+ <path
+ sodipodi:type="arc"
+ style="opacity:1;fill:#000000;fill-opacity:0.18811882;stroke:none;stroke-width:0.10000000000000001;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;filter:url(#filter3534)"
+ id="path3370"
+ sodipodi:cx="24.445692"
+ sodipodi:cy="38.302536"
+ sodipodi:rx="30.910667"
+ sodipodi:ry="8.485281"
+ d="M 55.356359,38.302536 A 30.910667,8.485281 0 1 1 -6.4649754,38.302536 A 30.910667,8.485281 0 1 1 55.356359,38.302536 z"
+ transform="matrix(0.9080298,0,0,0.9080298,9.111437,7.1506078)" />
+ </g>
+ <g
+ inkscape:groupmode="layer"
+ id="layer4"
+ inkscape:label="color">
+ <path
+ style="fill:#5884bf;fill-opacity:1;stroke:none;stroke-width:0.49592856;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1"
+ d="M 9.620413,8.6909368 L 51.629123,8.6909368 L 51.629123,42.088015 C 39.842397,47.989643 21.114404,47.898976 9.620413,42.088015 L 9.620413,8.6909368 z"
+ id="rect2384"
+ sodipodi:nodetypes="ccccc" />
+ <path
+ style="fill:#e13636;fill-opacity:1;stroke:none;stroke-width:0.49592856;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1"
+ d="M 9.665384,11.975527 L 51.674093,11.975527 L 51.674094,23.873477 C 39.887368,29.775105 21.159375,29.684438 9.665384,23.873477 L 9.665384,11.975527 z"
+ id="path3200"
+ sodipodi:nodetypes="ccccc" />
+ <path
+ style="fill:#f2db25;fill-opacity:1;stroke:none;stroke-width:0.49592856;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1"
+ d="M 9.292567,9.2415348 L 51.301278,9.2415348 L 51.301278,18.15695 C 39.514552,24.058578 20.786558,23.967911 9.292567,18.15695 L 9.292567,9.2415348 z"
+ id="path3198"
+ sodipodi:nodetypes="ccccc" />
+ </g>
+ <g
+ inkscape:groupmode="layer"
+ id="layer3"
+ inkscape:label="shine">
+ <path
+ style="fill:url(#linearGradient3180);fill-opacity:1;stroke:#825540;stroke-width:0.6101082;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1"
+ d="M 9.708288,8.9552598 L 51.716999,8.9552598 L 51.716999,42.352338 C 39.930273,48.253966 21.202278,48.163299 9.708288,42.352338 L 9.708288,8.9552598 z"
+ id="path3171"
+ sodipodi:nodetypes="ccccc" />
+ </g>
+ <g
+ inkscape:groupmode="layer"
+ id="layer5"
+ inkscape:label="text"
+ style="display:inline">
+ <path
+ style="font-size:40px;font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:#f9ee98;stroke-width:0.30597168;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;font-family:Bitstream Vera Serif;-inkscape-font-specification:Bitstream Vera Serif Bold"
+ d="m 22.61279,34.855594 0,-1.360485 2.147781,0 0,-14.065843 -2.147781,0 0,-1.360486 9.951762,0 c 2.046563,1.9e-5 3.583367,0.344825 4.610418,1.03442 1.027021,0.689628 1.540539,1.727793 1.540557,3.114498 -1.8e-5,0.996953 -0.356108,1.787757 -1.068268,2.372418 -0.704699,0.584681 -1.754223,0.951973 -3.148581,1.10188 1.686725,0.157419 2.979891,0.607167 3.8795,1.349241 0.899576,0.74209 1.349374,1.727785 1.349391,2.957088 -1.7e-5,1.664068 -0.629734,2.889627 -1.889147,3.676683 -1.251949,0.787058 -3.216062,1.180586 -5.892342,1.180586 l -9.33329,0 m 6.477079,-9.782 1.473085,0 c 1.289408,1.1e-5 2.245226,-0.23236 2.867455,-0.697108 0.622207,-0.472223 0.933318,-1.191818 0.933331,-2.158788 -1.3e-5,-0.974436 -0.303626,-1.682788 -0.91084,-2.125054 -0.59974,-0.442237 -1.563057,-0.663362 -2.889946,-0.663378 l -1.473085,0 0,5.644328 m 0,8.421515 1.608024,0 c 1.431843,0 2.492613,-0.284838 3.182315,-0.854519 0.689674,-0.569678 1.034518,-1.450432 1.034533,-2.642265 -1.5e-5,-1.19932 -0.348606,-2.095065 -1.045778,-2.68724 -0.697198,-0.592158 -1.754219,-0.888242 -3.17107,-0.88825 l -1.608024,0 0,7.072274"
+ id="text3202" />
+ <text
+ xml:space="preserve"
+ style="font-size:7.10357332px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:#f7f7f7;stroke-width:0.12302341;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:0.71428576;stroke-dasharray:none;font-family:Bitstream Vera Sans;-inkscape-font-specification:Bitstream Vera Sans"
+ x="23.349152"
+ y="44.400631"
+ id="text3320"
+ sodipodi:linespacing="125%"><tspan
+ sodipodi:role="line"
+ id="tspan3324"
+ x="23.349152"
+ y="44.400631"
+ style="stroke-width:0.12302341">+4v</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-size:1.16559994px;font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;font-family:Bitstream Vera Sans;-inkscape-font-specification:Bitstream Vera Sans Bold"
+ x="-42.020859"
+ y="47.298687"
+ id="text3466"
+ sodipodi:linespacing="125%"
+ transform="matrix(0,-1,1,0,0,0)"><tspan
+ sodipodi:role="line"
+ id="tspan2438"
+ x="-42.020859"
+ y="47.298687">This product adheres to all FDA </tspan><tspan
+ sodipodi:role="line"
+ id="tspan2440"
+ x="-42.020859"
+ y="48.755688"> regulations, and contains at least 50% </tspan><tspan
+ sodipodi:role="line"
+ id="tspan2442"
+ x="-42.020859"
+ y="50.212688"> bits by volume.</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-size:1.16559994px;font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;font-family:Bitstream Vera Sans;-inkscape-font-specification:Bitstream Vera Sans Bold"
+ x="-41.64159"
+ y="16.743071"
+ id="text3482"
+ sodipodi:linespacing="125%"
+ transform="matrix(0,-1,1,0,0,0)"><tspan
+ sodipodi:role="line"
+ id="tspan2452"
+ x="-41.64159"
+ y="16.743071">Not a floatation device.</tspan></text>
+ </g>
+ <g
+ inkscape:groupmode="layer"
+ id="layer2"
+ inkscape:label="top"
+ style="display:inline">
+ <path
+ sodipodi:type="arc"
+ style="opacity:1;fill:#bf8158;fill-opacity:1;stroke:#825540;stroke-width:0.40979934;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ id="path2387"
+ sodipodi:cx="24.571428"
+ sodipodi:cy="11.071428"
+ sodipodi:rx="14.285714"
+ sodipodi:ry="3.2142856"
+ d="M 38.857142,11.071428 A 14.285714,3.2142856 0 1 1 10.285714,11.071428 A 14.285714,3.2142856 0 1 1 38.857142,11.071428 z"
+ transform="matrix(1.4697737,0,0,1.5840526,-5.3139214,-9.0200316)" />
+ <path
+ sodipodi:type="arc"
+ style="opacity:1;fill:#696a64;fill-opacity:1;stroke:none;stroke-width:0.30000001;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:0.5320197"
+ id="path3326"
+ sodipodi:cx="24.748737"
+ sodipodi:cy="11.129432"
+ sodipodi:rx="11.818785"
+ sodipodi:ry="2.0203052"
+ d="M 36.567522,11.129432 A 11.818785,2.0203052 0 1 1 12.929953,11.129432 A 11.818785,2.0203052 0 1 1 36.567522,11.129432 z"
+ transform="matrix(1.5509361,0,0,1.8542626,-7.5275619,-12.153717)" />
+ <path
+ sodipodi:type="arc"
+ style="opacity:1;fill:url(#radialGradient3238);fill-opacity:1;stroke:url(#linearGradient3432);stroke-width:0.58742505;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ id="path3392"
+ sodipodi:cx="23.637569"
+ sodipodi:cy="8.9576044"
+ sodipodi:rx="14.142136"
+ sodipodi:ry="3.6870568"
+ d="M 37.779705,8.9576044 A 14.142136,3.6870568 0 1 1 9.4954338,8.9576044 A 14.142136,3.6870568 0 1 1 37.779705,8.9576044 z"
+ transform="matrix(1.0111494,0,0,0.6940251,6.890058,2.097968)" />
+ <path
+ style="fill:url(#linearGradient3349);fill-opacity:1;stroke:none;stroke-width:0.30000001;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1"
+ d="M 25.947388,4.8972774 L 35.392092,4.8972774 L 35.516364,8.6254508 C 32.175769,9.9719728 29.082853,9.5847428 26.07166,8.6254508 L 25.947388,4.8972774 z"
+ id="rect3340"
+ sodipodi:nodetypes="ccccc" />
+ <path
+ sodipodi:type="arc"
+ style="opacity:1;fill:url(#radialGradient3338);fill-opacity:1;stroke:none;stroke-width:0.08430404;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ id="path3328"
+ sodipodi:cx="23.789093"
+ sodipodi:cy="6.5837455"
+ sodipodi:rx="3.1819806"
+ sodipodi:ry="0.90913731"
+ d="M 26.971074,6.5837455 A 3.1819806,0.90913731 0 1 1 20.607112,6.5837455 A 3.1819806,0.90913731 0 1 1 26.971074,6.5837455 z"
+ transform="matrix(1.4840919,0,0,1.3699573,-4.6354593,-3.8531192)" />
+ </g>
+ <g
+ inkscape:groupmode="layer"
+ id="layer1"
+ style="display:inline">
+ <text
+ xml:space="preserve"
+ style="font-size:51.36699295000000376px;font-style:italic;font-variant:normal;font-weight:bold;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:0.31353134;stroke:none;display:inline;filter:url(#filter3222);font-family:Constantia;-inkscape-font-specification:Constantia Bold Italic;opacity:1"
+ x="55.297119"
+ y="43.612377"
+ id="text3514"
+ sodipodi:linespacing="125%"
+ transform="matrix(0.9689059,0,0,0.2882374,15.181422,33.203144)"><tspan
+ sodipodi:role="line"
+ id="tspan3516"
+ x="55.297119"
+ y="43.612377">BPS</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-size:49.76978302px;font-style:italic;font-variant:normal;font-weight:bold;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;font-family:Constantia;-inkscape-font-specification:Constantia Bold Italic"
+ x="64.488541"
+ y="41.454266"
+ id="text2740"
+ sodipodi:linespacing="125%"><tspan
+ sodipodi:role="line"
+ id="tspan2742"
+ x="64.488541"
+ y="41.454266">BPS</tspan></text>
+ </g>
+</svg>
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 0000000..782bb79
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,216 @@
+# -*- coding: utf-8 -*-
+#
+# BPS documentation build configuration file, created by
+# sphinx-quickstart on Mon Mar 2 14:12:06 2009.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# The contents of this file are pickled, so don't put values in the namespace
+# that aren't pickleable (module imports are okay, they're removed automatically).
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+from bps import *
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+##sys.path.append(filepath("_exts").abspath)
+
+# -- General configuration -----------------------------------------------------
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = [
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.todo',
+ 'bps.unstable.bpsdoc.index_styles',
+ 'bps.unstable.bpsdoc.relbar_toc',
+ 'bps.unstable.bpsdoc.nested_sections',
+ ]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+source_encoding = 'utf-8'
+
+# The master toctree document.
+master_doc = 'contents'
+index_doc = 'index'
+
+# General information about the project.
+project = u'BPS'
+copyright = u'2004-2009, Assurance Technologies, LLC'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+
+# version: The short X.Y version.
+# release: The full version, including alpha/beta/rc tags.
+from bps import __version__ as release
+from bps.unstable import main_version
+version = main_version(release, str=True)
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of documents that shouldn't be included in the build.
+unused_docs = [ ]
+
+# List of directories, relative to source directory, that shouldn't be searched
+# for source files.
+exclude_trees = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+##default_role = 'obj'
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+modindex_common_prefix = [ "bps." ]
+
+# -- Options for all output ---------------------------------------------------
+todo_include_todos = True
+keep_warnings = True
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The style sheet to use for HTML and HTML Help pages. A file of that name
+# must exist either in Sphinx' static/ path, or in one of the custom paths
+# given in html_static_path.
+##html_style = 'bps.css'
+
+# The theme to use for HTML and HTML Help pages. Major themes that come with
+# Sphinx are currently 'default' and 'sphinxdoc'.
+html_theme = 'cloud'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+html_theme_options = { "roottarget": index_doc }
+
+# Add any paths that contain custom themes here, relative to this directory.
+from bps.unstable.bpsdoc import theme_path
+html_theme_path = [theme_path]
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+html_title = project + " v" + release + " Documentation"
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+html_short_title = project + " Documentation"
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+html_logo = filepath("_static", "masthead.png")
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+html_favicon = "logo.ico"
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_use_modindex = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = ''
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = project + 'Doc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+ (index_doc, project + '.tex', project + u' Documentation',
+ u'Assurance Technologies, LLC', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_use_modindex = True
diff --git a/docs/contents.rst b/docs/contents.rst
new file mode 100644
index 0000000..1dcbd88
--- /dev/null
+++ b/docs/contents.rst
@@ -0,0 +1,35 @@
+=================
+Table Of Contents
+=================
+
+.. toctree::
+
+ Front Page <index>
+ install
+ overview
+
+ lib/bps
+ lib/bps.basic
+ lib/bps.cache
+ lib/bps.error.types
+ lib/bps.error.utils
+ lib/bps.fs
+ lib/bps.host
+ lib/bps.logs
+ lib/bps.meta
+ lib/bps.misc
+ lib/bps.numeric
+ lib/bps.rng
+ lib/bps.security
+ lib/bps.stream
+ lib/bps.text
+ lib/bps.types
+ lib/bps.refs
+ lib/bps.warndep
+
+ history
+ roadmap
+ copyright
+
+* :ref:`General Index <genindex>`
+* :ref:`Module List <modindex>`
diff --git a/docs/copyright.rst b/docs/copyright.rst
new file mode 100644
index 0000000..22d8f68
--- /dev/null
+++ b/docs/copyright.rst
@@ -0,0 +1,142 @@
+=====================
+Copyrights & Licenses
+=====================
+
+Copyright
+=========
+The BPS library is (c) 2004-2009 `Assurance Technologies, LLC <http://www.assurancetechnologies.com>`_,
+excepting any code noted below as taken from :ref:`third party sources <third-party-software>`.
+Such portions are copyright their respective owners.
+
+License
+=======
+This library is released under the BSD license; we hope you find it useful.
+
+::
+
+ The BPS Python Library
+
+ Copyright (c) 2004-2009 Assurance Technologies, LLC
+
+ Permission to use, copy, modify, and distribute this software for any
+ purpose with or without fee is hereby granted, provided that the above
+ copyright notice and this permission notice appear in all copies.
+
+ THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.. _third-party-software:
+
+Third Party Software
+====================
+BPS contains some code taken from various third-party sources, which have their
+own licenses (all of which, it should be noted, are BSD-compatible).
+The following is a list of these sources, their owners, licenses, and the parts
+of BPS derived from them.
+
+GPW
+---
+The class :class:`bps.security.pwgen.GpwGenerator`
+is a python implementation of Tom Van Vleck's phonetic
+password algorithm `GPW <http://www.multicians.org/thvv/gpw.html>`_.
+It's released under informally worded BSD-like terms.
+
+jBcrypt
+-------
+`jBCrypt <http://www.mindrot.org/projects/jBCrypt/>`_ is a pure-java
+implementation of OpenBSD's BCrypt algorithm, written by Damien Miller,
+and released under a BSD license.
+
+:mod:`bps.security._bcrypt` is a python translation of this code,
+which is used as a fallback backend for :class:`bps.security.pwhash.BCrypt`
+when the external python library `py-bcrypt <http://www.mindrot.org/projects/py-bcrypt/>`_
+is not available.
+
+This is the license and copyright for jBCrypt::
+
+ Copyright (c) 2006 Damien Miller <djm@mindrot.org>
+
+ Permission to use, copy, modify, and distribute this software for any
+ purpose with or without fee is hereby granted, provided that the above
+ copyright notice and this permission notice appear in all copies.
+
+ THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+MD5-Crypt
+---------
+The class :class:`bps.security.pwgen.Md5Crypt` is a pure-python
+implementation of the md5-crypt password hashing algorithm.
+It's derived from the FreeBSD md5-crypt implementation `<http://www.freebsd.org/cgi/cvsweb.cgi/~checkout~/src/lib/libcrypt/crypt.c?rev=1.2>`_,
+which was released under the following license::
+
+ "THE BEER-WARE LICENSE" (Revision 42):
+ <phk@login.dknet.dk> wrote this file. As long as you retain this notice you
+ can do whatever you want with this stuff. If we meet some day, and you think
+ this stuff is worth it, you can buy me a beer in return. Poul-Henning Kamp
+
+PEP 3101
+--------
+:pep:`3101` defines a new string templating system
+via the method ``string.format()``, which is built-in
+to Python 2.6 and higher. :mod:`bps.text._string_format` is a pure-python
+implementation of PEP 3101, used by BPS to backport this feature
+to Python 2.5 (see :mod:`bps.text` for usage).
+
+While the current implementation has been rewritten drastically
+(to pass the python 2.6 format() unittests), it was originally
+based on the one created by Patrick Maupin and Eric V. Smith, as found in
+the PEP 3101 sandbox at `<http://svn.python.org/view/sandbox/trunk/pep3101/>`_.
+While no license was attached, it is assumed to have been released
+under an equivalent license to the `Python source code`_.
+
+Python Source Code
+------------------
+BPS contains many small fragments taken from the Python 2.6.2 source code,
+mainly for the purpose of backporting 2.6 features to python 2.5:
+
+ * :mod:`bps.text._string_format`, contains a modified copy of
+ Python 2.6's :class:`string.Formatter`, as part of BPS's
+ Python 2.6-compatible PEP3101 implementation for Python 2.5.
+
+    * :class:`bps.types.namedtuple` is an adaptation of
+ the Python 2.6 namedtuple class, for use with Python 2.5.
+
+The Python 2.6.2 source code is licensed under the
+`Python Software Foundation License, Version 2 <http://www.python.org/download/releases/2.6.2/license/>`_.
+
+UnixCrypt.java
+--------------
+`UnixCrypt.java <http://www.dynamic.net.au/christos/crypt/UnixCrypt2.txt>`_
+is a pure-java implementation of the historic unix-crypt password hash algorithm.
+Originally written by Aki Yoshida, and modified by others,
+it was released under a BSD-like license.
+
+:mod:`bps.security._unix_crypt` is a python translation of this code,
+which is used as a fallback backend for :class:`bps.security.pwhash.UnixCrypt`
+for platforms where stdlib's :mod:`crypt` is not available.
+
+This is the license and copyright for UnixCrypt.java::
+
+ UnixCrypt.java 0.9 96/11/25
+ Copyright (c) 1996 Aki Yoshida. All rights reserved.
+ Permission to use, copy, modify and distribute this software
+ for non-commercial or commercial purposes and without fee is
+ hereby granted provided that this copyright notice appears in
+ all copies.
+
+ modified April 2001
+ by Iris Van den Broeke, Daniel Deville
+
+ modified Aug 2005
+ by Greg Wilkins (gregw)
diff --git a/docs/history.rst b/docs/history.rst
new file mode 100644
index 0000000..65b6b52
--- /dev/null
+++ b/docs/history.rst
@@ -0,0 +1,70 @@
+===============
+Release History
+===============
+
+The following is a brief account of the BPS release history,
+noting all the major releases:
+
+BPS 4.6 -- 2009-12-1
+ * added "bps.security.policy" - a lightweight access control framework
+ * refactored parts of warndep module, added unittest coverage
+ * various minor bugfixes
+ * cleaned up bps' sphinx extensions, theme, and build script
+
+BPS 4.5 -- 2009-11-6
+ Some deprecated filepath methods removed (iterdirlist, rtype, etc).
+ All code relating to "Undef" singleton now available under "bps.undef".
+ Minor bugfixes.
+
+BPS 4.4 -- 2009-9-3
+ First release with BPS 3 legacy package completely removed.
+ Minor improvements and bugfixes.
+
+BPS 4.3 -- 2009-8-4
+ Cleaned up bps.fs package, added unittests for most of bps.fs.
+
+BPS 4.2 -- 2009-7-24
+ BPS3's logging package rewritten from ground up for 4.0.
+ Very large number of bugfixes and unittests added.
+
+BPS 4.1 -- 2009-7-11
+ Hash algorithm code cleaned up.
+ Numerous bugfixes and math library improvements.
+
+BPS 4.0 -- 2009-6-5
+ Reorganized and documented whole library. Backward compatibility with 3.x
+ mostly abandoned. Removed many functions that proved over the years
+ to be too application-specific to remain here.
+
+BPS 3.9 -- 2009-6-5
+ Maintenance release of the 3.x series.
+
+BPS 3.7 -- 2009-3-23
+ Rewrote host resources module, gained command line framework
+
+BPS 3.6 -- 2009-2-24
+ Numerous deprecations and cleanups
+
+BPS 3.5 -- 2008-10-12
+ Shadow passwords and host resources modules added
+
+BPS 3.4 -- 2008-5-19
+ Many bugfixes; Rewrote logging tools completely
+
+BPS 3.2 -- 2008-2-4
+ Logging system enhancements, all kinds of decorators were added.
+ Documentation project began.
+
+BPS 3.0 -- 2006-3-9
+ This was the second major reorganization, many features such as the magic filepath
+ were introduced, though backward compatibility was attempted.
+ BPS gained main utilities and decorators harvested from in-house projects.
+
+BPS 2.2 -- 2005-8-15
+ Last edition of the 2.x branch.
+
+BPS 2.0 -- 2004-9-9
+ First cleanup, began service as Assurance Technologies' in-house collection of tools.
+
+BPS 1.0 -- 2003
+ Began life as a poor little file that was copied between friends and projects.
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 0000000..1ad69e5
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,52 @@
+==========================================
+BPS |release| documentation
+==========================================
+
+Introduction
+============
+Welcome to the documentation for BPS.
+
+BPS is a "swiss army knife"-style library,
+composed of a number of smaller interdependent modules.
+Collectively, these modules form a package which provides
+a wide array of useful tools and classes for writing
+programs under Python. There are good odds that BPS contains
+something useful for any python program, large or small.
+
+A quick sample of some of the more frequently used modules:
+
+ * :doc:`bps.fs <lib/bps.fs/filepath>` -- object-oriented filesystem access
+ * :mod:`bps.host` -- desktop and host resources
+ * :mod:`bps.logs` -- enhancements to Python's logging system
+ * :mod:`bps.text` -- text parsing and formatting
+ * :mod:`bps.security.pwhash` -- password hashing algorithms
+
+ ... see the :doc:`library overview <overview>` for a complete list.
+
+Quick Links
+===========
+
+.. raw:: html
+
+ <table class="contentstable" align="center">
+ <tr>
+ <td width="50%" valign="top">
+ <p class="biglink"><a class="biglink" href="contents.html">Table of Contents</a><br>
+ <span class="linkdescr">lists all sections and subsections</span></p>
+
+ <p class="biglink"><a class="biglink" href="overview.html">Library Overview</a><br>
+ <span class="linkdescr">describes how BPS is laid out</span></p>
+
+ </td><td width="50%">
+
+ <p class="biglink"><a class="biglink" href="genindex.html">General Index</a><br>
+ <span class="linkdescr">all functions, classes, terms</span></p>
+
+ <p class="biglink"><a class="biglink" href="modindex.html">Module List</a><br>
+ <span class="linkdescr">quick access to all modules</span></p>
+
+ <p class="biglink"><a class="biglink" href="search.html">Search Page</a><br>
+ <span class="linkdescr">search this documentation</span></p>
+
+ </td></tr>
+ </table>
diff --git a/docs/install.rst b/docs/install.rst
new file mode 100644
index 0000000..2303bdd
--- /dev/null
+++ b/docs/install.rst
@@ -0,0 +1,53 @@
+============
+Installation
+============
+
+Requirements
+============
+BPS tries to use pure-python implementations of things wherever possible,
+and to have as few dependencies as possible. The current set of requirements is:
+
+ * Python 2.5 or better is required (Python 2.6 is supported).
+
+ * BPS is no longer tested for Python 2.4.x or earlier,
+ no guarantees are made about whether BPS will work with them.
+
+ * Python 3.0 has **not** been assessed for compatibility. It probably won't work.
+
+ * The `pywin32 <http://sourceforge.net/projects/pywin32/>`_ package is required
+ when running under windows.
+
+The following libraries will be used if present, but they are not required:
+
+ * If installed, `py-bcrypt <http://www.mindrot.org/projects/py-bcrypt/>`_ will be
+ used instead of BPS's slower pure-python bcrypt implementation.
+ (see :class:`bps.security.pwhash.BCrypt`).
+
+Installing
+==========
+BPS can be installed with easy_install, linked/copied into sys.path directly
+from its source directory, or installed using "setup.py".
+BPS is pure python, there is nothing to compile or configure.
+
+Testing
+=======
+BPS contains a number of unittests (sadly, coverage is not yet complete),
+all of which are contained within the :mod:`bps.tests` module,
+and are designed to be run using the `nose <http://somethingaboutorange.com/mrl/projects/nose>`_ library.
+Once BPS and nose have been installed, you may run the following commands::
+
+ #to run the full bps test suite
+ nosetests bps.tests
+
+ #the full suite with some extra longer-running tests
+ export BPS_DEV_TESTS=true
+ nosetests bps.tests
+
+Documentation
+=============
+BPS uses Sphinx to generate its documentation.
+To create your own copy, make sure you have Sphinx 0.6.3 or better installed,
+as well as BPS, and run ``python $SOURCE/docs/make.py clean html``,
+where ``$SOURCE`` is the path to the BPS source directory.
+Once this completes, point a browser to the file at ``$SOURCE/docs/_build/html/index.html``
+to access the BPS documentation.
diff --git a/docs/lib/bps.basic.rst b/docs/lib/bps.basic.rst
new file mode 100644
index 0000000..1698f9d
--- /dev/null
+++ b/docs/lib/bps.basic.rst
@@ -0,0 +1,35 @@
+========================================================
+:mod:`bps.basic` -- Manipulation of basic Python objects
+========================================================
+
+.. module:: bps.basic
+ :synopsis: tools for manipulating basic python datatypes
+
+This module contains utilities for manipulating the basic python
+datatypes, like :class:`dict` or :class:`list`. It also
+contains functions such as would be found in :mod:`functools`
+and :mod:`itertools`, under the rationale that functions
+and generators can also be considered basic python objects.
+
+Dictionary Helpers
+==================
+.. autofunction:: invert_dict
+.. autofunction:: zip_dict
+.. autofunction:: unzip_dict
+.. autofunction:: pop_from_dict
+.. autofunction:: update_dict_defaults
+.. autofunction:: prefix_from_dict
+
+Iterator and Functional Helpers
+===============================
+.. autofunction:: iter_unique
+.. autofunction:: unique
+
+Set and Sequence Helpers
+========================
+.. autofunction:: intersects
+.. autofunction:: sameset
+
+..
+ not documented:
+ .. autofunction:: revpartial
diff --git a/docs/lib/bps.cache.rst b/docs/lib/bps.cache.rst
new file mode 100644
index 0000000..aba0e80
--- /dev/null
+++ b/docs/lib/bps.cache.rst
@@ -0,0 +1,26 @@
+=================================
+:mod:`bps.cache` -- Caching Tools
+=================================
+
+.. module:: bps.cache
+ :synopsis: caching tools
+
+This module defines a number of function decorators,
+most of which come in function- and method- specific
+variants, and aid in caching.
+
+Caching Decorators
+==================
+These decorators allow for quick "memoization" of a function.
+
+.. autofunction:: cached_function
+.. autofunction:: cached_method
+
+Stateful Decorators
+===================
+These decorators allow for quick and easy setup of callbacks,
+allowing the decorated method to alert listeners that a value has changed.
+
+.. autofunction:: stateful_function
+.. autofunction:: stateful_method
+.. autofunction:: is_stateful
diff --git a/docs/lib/bps.error.types.rst b/docs/lib/bps.error.types.rst
new file mode 100644
index 0000000..7cf9baa
--- /dev/null
+++ b/docs/lib/bps.error.types.rst
@@ -0,0 +1,73 @@
+===========================================
+:mod:`bps.error.types` -- BPS Error Classes
+===========================================
+
+.. module:: bps.error.types
+ :synopsis: All the BPS Error Classes
+
+This module contains all the exception classes which BPS
+defines, stored in one location for easy access.
+Some of these are errors raised by various parts of BPS,
+while others are helpers designed to be used inside your own code.
+Many of them exist mainly to act as pretty-printed helpers
+for specific cases of more generic Python exceptions.
+
+Attribute Errors
+=========================
+Helpers for creating explicit :exc:`AttributeError` messages.
+
+.. autoexception:: MissingAttributeError
+.. autoexception:: ReadonlyAttributeError
+.. autoexception:: PermanentAttributeError
+.. autoexception:: UnsetAttributeError
+
+Function Errors
+===============
+These errors are useful when implementing complicated
+python functions.
+
+.. autoexception:: ParamError
+.. autoexception:: NormError
+.. autoexception:: RangeError
+
+.. note::
+ BPS 3.x used to define an :exc:`InvariantError` which could be raised
+ when an internal invariant was violated in an application.
+ However, the common Python practice seems to be
+ to raise :exc:`AssertionError`.
+   Unlike ``assert`` statements, raising this error
+   directly will not be disabled when in optimized mode.
+   Thus, ``InvariantError`` was removed, in favor of :exc:`AssertionError`.
+
+Reference Errors
+================
+These errors will be raised by the :mod:`bps.refs` module.
+
+.. autoexception:: ProxyEmptyError
+.. autoexception:: ProxyNestError
+
+Meta Errors
+===========
+
+.. autoexception:: AbstractMethodError
+
+..
+ Command Line Errors:
+
+ These errors are useful when implemented code that's
+ acting as a command line frontend. They are designed
+ to integrate well with the :mod:`bps.app.command` framework,
+ see it for more details.
+
+ .. autoexception:: CommandError
+ .. autoexception:: ParseError
+ .. autoexception:: InputError
+
+
+ Command Class Errors:
+
+ These errors are useful mainly for :mod:`bps.app.command`,
+ and will not be needed otherwise.
+
+ .. autoexception:: DistTypeError
+ .. autoexception:: EnvTypeError
diff --git a/docs/lib/bps.error.utils.rst b/docs/lib/bps.error.utils.rst
new file mode 100644
index 0000000..b733a06
--- /dev/null
+++ b/docs/lib/bps.error.utils.rst
@@ -0,0 +1,17 @@
+=============================================
+:mod:`bps.error.utils` -- BPS Error Utilities
+=============================================
+
+.. module:: bps.error.utils
+   :synopsis: Utilities for dealing with errors
+
+This module contains a few utilities used by BPS
+for handling errors. :func:`format_exception` in particular
+is used by the :class:`bps.logs.formatters.FancyFormatter` to print tracebacks.
+
+.. autofunction:: format_exception
+.. autofunction:: get_sysexit_rc
+
+.. seealso::
+ :func:`bps.develop.trap`
+
diff --git a/docs/lib/bps.fs.rst b/docs/lib/bps.fs.rst
new file mode 100644
index 0000000..8b5bb55
--- /dev/null
+++ b/docs/lib/bps.fs.rst
@@ -0,0 +1,20 @@
+========================================
+:mod:`bps.fs` -- Filesystem Interaction
+========================================
+
+.. module:: bps.fs
+ :synopsis: filesystem interaction
+
+This module provides a clean object-oriented interface
+to the host filesystem, in the form of the :class:`FilePath` object,
+as well as a number of additional utilities for accessing the filesystem
+and managing permissions:
+
+* :doc:`The filepath object <bps.fs/filepath>`
+* :doc:`Other filesystem utilities <bps.fs/utils>`
+
+.. toctree::
+ :hidden:
+
+ bps.fs/filepath
+ bps.fs/utils
diff --git a/docs/lib/bps.fs/filepath.rst b/docs/lib/bps.fs/filepath.rst
new file mode 100644
index 0000000..2ce129f
--- /dev/null
+++ b/docs/lib/bps.fs/filepath.rst
@@ -0,0 +1,91 @@
+====================================
+:mod:`bps.fs` -- The filepath object
+====================================
+
+.. module:: bps.fs
+ :synopsis: filesystem interaction
+
+Overview
+========
+
+This module provides a clean object-oriented interface
+to the host filesystem, in the form of the :class:`FilePath` object.
+Objects of this class act just like strings (they are in fact a subclass),
+but they contain additional attributes and methods for manipulating
+them as paths and interacting with the local filesystem.
+The methods and attributes wrap functionality available in the :mod:`os`,
+:mod:`os.path`, and :mod:`shutil` modules, and while the full contents
+of those modules is not directly available, the common ones are,
+and more are added frequently.
+
+Usage
+=====
+
+Usage is very simple, just call the :func:`filepath` function
+with a string, and a new :class:`FilePath` object
+will be returned which will act exactly like the original
+string, but with additional methods for manipulating the filesystem.
+
+Some examples using the filepath object::
+
+ >>> #this imports the default bps3 objects,
+ >>> #you can alternately use "from bps.fs import filepath"
+ >>> from bps import *
+ >>> #this example code assumes the current directory is the bps3 source dir
+ >>> path = filepath(".")
+ >>> path #it looks like a string
+ '.'
+ >>> type(path) #but it's not
+ <class 'bps.fs.FilePath'>
+ >>> #get the absolute path (your exact path will vary)
+ >>> path.abspath
+ '/home/elic/dev/libs/bps'
+ >>> #get a directory listing
+ >>> path.listdir()
+ [ '.svn', 'bps', 'docs', 'tests', 'setup.cfg', 'setup.py', 'bps.e4p' ]
+ >>> #join paths together, equivalent of os.path.join...
+ >>> #note that this will always use the host-specific path separator
+ >>> docs = path / "docs" / "_static"
+ >>> docs
+ './docs/_static'
+ >>> #note that under windows, this would appear as '.\\docs\\_static'
+ >>> #get the absolute version of the path (your result will vary)
+ >>> docs.abspath
+ '/home/elic/dev/libs/bps/docs/_static'
+ >>> #check the filetype of a path
+ >>> docs.ftype
+ 'dir'
+ >>> #touch a path (updating its mtime & atime)
+ >>> docs.touch()
+
+
+Creating Filepaths
+==================
+.. autofunction:: filepath
+
+Using Filepaths
+===============
+.. autoclass:: FilePath
+
+ .. warning::
+
+ Relative paths will always be relative to the current working directory
+ *at the time of the method call*, so changing the cwd will usually
+ result in a different outcome when the instance in question
+ references a relative path.
+
+.. note::
+
+ :class:`FilePath` will probably *never* be extended to include urls
+ and other similar resources: that was tried in an earlier iteration
+ of this library, and it was determined there was so little
+ commonality between the two (filepaths and urls), both in terms
+ of interface, code, and use cases, that tying them together
+ would be confusing and without much benefit. A similar-but-separate
+ UrlPath may be added in the future, however.
+
+.. todo::
+
+ :class:`FilePath` needs to support unicode.
+ Currently waiting for the Python 3.x design team to provide
+ some guidance-by-example on how to handle differing OS encoding policies.
diff --git a/docs/lib/bps.fs/utils.rst b/docs/lib/bps.fs/utils.rst
new file mode 100644
index 0000000..a58c4f3
--- /dev/null
+++ b/docs/lib/bps.fs/utils.rst
@@ -0,0 +1,47 @@
+===========================================
+:mod:`bps.fs` -- Other Filesystem Utilities
+===========================================
+
+.. currentmodule:: bps.fs
+
+In addition to :func:`filepath`, this module also provides
+some additional utility functions for manipulating the filesystem.
+
+Permissions
+===========
+The following functions deal with file access permissions
+(mainly unix-centric, though they are functional under windows).
+While they are ostensibly wrappers for similar functions
+available under :mod:`os`, these versions provide some enhanced capabilities:
+
+.. autofunction:: chmod
+.. autofunction:: setumask
+.. autofunction:: getumask
+
+Mode Parsing
+------------
+To help manipulate symbolic mode strings,
+the following helper functions are available:
+
+.. autofunction:: parse_mode_mask
+.. autofunction:: repr_mode_mask
+
+Other Functions
+===============
+This module provides some additional functions for interacting with the filesystem:
+
+.. autofunction:: is_filepath
+.. autofunction:: posix_to_local
+.. autofunction:: local_to_posix
+.. autofunction:: splitsep
+.. autofunction:: is_shortcut
+.. autofunction:: read_shortcut
+
+.. data:: os_has_symlinks
+
+ This is a module-level constant set to ``True`` if the os supports symlinks.
+
+.. data:: os_has_shortcuts
+
+ This is a module-level constant set to ``True`` if the os supports windows shortcuts (aka LNK files).
+ This will only be true under windows, though :func:`read_shortcut` will work cross-platform.
diff --git a/docs/lib/bps.host.posix.rst b/docs/lib/bps.host.posix.rst
new file mode 100644
index 0000000..7f616fb
--- /dev/null
+++ b/docs/lib/bps.host.posix.rst
@@ -0,0 +1,23 @@
+=================================================
+:mod:`bps.host.posix` -- Posix-specific Utilities
+=================================================
+
+.. module:: bps.host.posix
+ :platform: posix
+ :synopsis: posix-specific utilities
+
+This contains a number of posix-specific helper functions.
+They are either very posix-specific, or simply haven't
+been rolled into a common function in :mod:`bps.host`
+along with compatriots from other OS modules...
+
+.. autofunction:: resolve_uid
+
+.. autofunction:: resolve_gid
+
+.. autofunction:: resolve_user
+
+.. autofunction:: resolve_group
+
+.. autofunction:: chown
+
diff --git a/docs/lib/bps.host.rst b/docs/lib/bps.host.rst
new file mode 100644
index 0000000..63cea1f
--- /dev/null
+++ b/docs/lib/bps.host.rst
@@ -0,0 +1,128 @@
+==========================================
+:mod:`bps.host` -- Locating Host Resources
+==========================================
+
+.. module:: bps.host
+ :synopsis: host resource discovery & desktop interaction
+
+
+This package provides methods for accessing various host resources,
+much like stdlib's ``os`` package. In fact, this package
+mainly exists to provide routines which ``os`` does not provide,
+for one reason or another.
+
+This module is broken into the following sections:
+
+* `Process Management`_ -- OS-agnostic signaling & pid management
+* `System Interaction`_ -- finding installed applications
+* `Desktop Interaction`_ -- opening, printing, executing files via desktop environment
+* `Resource Paths`_ -- helpers for locating home dir, desktop, user config directory, and more.
+* `User Accounts`_ -- retrieve basic information about the user accounts on the host system.
+
+.. toctree::
+ :maxdepth: 2
+
+ bps.host.posix
+ bps.host.windows
+ bps.host.utils
+
+.. note::
+
+ The main two reasons many of these functions probably are not included in the stdlib
+ is that this module relies on `pywin32 <http://sourceforge.net/projects/pywin32/>`_ under Windows,
+ and the fact that this module makes some arbitrary decisions
+ about path locations which work 90% of cases, but not the 100% that the stdlib requires.
+
+Usage
+=====
+The typical use of this module's core functions is to import ``bps.host`` into
+your package, and then access its various methods from the imported object::
+
+ >>> #note that while this example was written under linux, the host module interface
+ >>> #is designed to be uniform, so that you can use the *exact same calls*
+ >>> #to achieve the same effect under windows, without changing your code.
+ >>> from bps import host
+ >>>
+ >>> #check what desktop environment you're running under
+ >>> host.get_desktop_name()
+ 'gnome'
+ >>>
+ >>> #find location of an executable
+ >>> host.find_exe("meld")
+ '/usr/bin/meld'
+ >>>
+ >>> #tell desktop to open a file
+ >>> host.desktop_open("myfile.txt")
+ >>>
+ >>> #get current pid
+ >>> host.get_pid()
+ 12984
+ >>>
+ >>> #check if a pid is running
+ >>> host.has_pid(12984)
+ True
+ >>>
+ >>> #kill a pid
+ >>> host.term_pid(12984)
+ >>>
+
+Process Management
+==================
+
+.. function:: get_pid
+
+ Returns current PID.
+ Alias for ``os.getpid()``, included just for symmetry with the other pid functions.
+
+.. autofunction:: has_pid
+.. autofunction:: term_pid
+.. autofunction:: kill_pid
+
+System Interaction
+==================
+.. autofunction:: find_exe
+
+.. attribute:: exe_exts
+
+ This should be a tuple of all the extensions that will be searched
+ when trying to find an exe. For example, under posix, the list will be ``('',)``,
+ but under windows the tuple will contain ``('.exe','.bat')``.
+
+.. todo::
+ Would like to add database for detecting & locating applications via windows registry,
+ or other methods.
+
+Desktop Interaction
+===================
+
+.. autofunction:: get_desktop_name
+.. autofunction:: desktop_open
+.. autofunction:: desktop_compose_email
+
+Resource Paths
+==============
+All the resource path functions are designed to quickly
+locate the directories that are important to a cross-platform
+desktop application, without having to know os-specific details...
+
+.. autofunction:: get_env_path
+
+.. autoclass:: EnvPaths
+
+----
+
+The following functions return a :class:`ProgPaths` instance,
+with various resource paths chosen according to the default conventions
+of the OS you are currently running on, allowing quick and easy
+creation of applications which store their config in the right place
+no matter what OS you run them on...
+
+.. autofunction:: get_app_path
+.. autofunction:: get_service_path
+.. autoclass:: ProgPaths
+
+User Accounts
+=============
+.. autofunction:: find_user
+
+.. autoclass:: UserProfile
diff --git a/docs/lib/bps.host.utils.rst b/docs/lib/bps.host.utils.rst
new file mode 100644
index 0000000..b3c40b6
--- /dev/null
+++ b/docs/lib/bps.host.utils.rst
@@ -0,0 +1,20 @@
+===========================================
+:mod:`bps.host.utils` -- General Utilities
+===========================================
+
+.. module:: bps.host.utils
+
+Signals
+=======
+The signal functions provide a enhanced interface
+to stdlib's :mod:`signal` module. Much like :mod:`atexit`
+enhances the ``sys.exitfunc``, these utilities
+allow multiple handlers to be chained to a given unix signal.
+
+.. autofunction:: has_signal
+
+.. autofunction:: add_signal_handler
+
+.. autofunction:: remove_signal_handler
+
+.. autofunction:: adapt_sig_term
diff --git a/docs/lib/bps.host.windows.rst b/docs/lib/bps.host.windows.rst
new file mode 100644
index 0000000..120d3a8
--- /dev/null
+++ b/docs/lib/bps.host.windows.rst
@@ -0,0 +1,23 @@
+======================================================
+:mod:`bps.host.windows` -- Windows-specific Utilities
+======================================================
+
+.. module:: bps.host.windows
+ :platform: nt
+ :synopsis: windows-specific utilities
+
+This contains a number of windows-specific helper functions.
+They are either very windows-specific, or simply haven't
+been rolled into a common function in :mod:`bps.host`
+along with compatriots from other OS modules...
+
+.. autofunction:: regpath
+
+.. autoclass:: RegistryPath
+
+.. autofunction:: reghandle
+
+.. autoclass:: RegistryHandle
+
+.. autofunction:: detect_office_app
+
diff --git a/docs/lib/bps.logs-config.rst b/docs/lib/bps.logs-config.rst
new file mode 100644
index 0000000..521f027
--- /dev/null
+++ b/docs/lib/bps.logs-config.rst
@@ -0,0 +1,73 @@
+Overview
+========
+Python's logging system offers two levels
+of interaction when you want to configure the
+loggers: you can either interact with
+the low-level logger, handler, and formatter objects;
+or you can hand it a filepath to a separate file
+containing a monolithic configuration of the entire
+system.
+
+The :mod:`bps.logs` package attempts to fill in the
+programmatic "middle ground" between these two
+styles, through its :func:`parse_config`
+and :func:`config_logging` functions.
+These take a large number of input styles,
+including external files or strings
+containing a full configuration file,
+or fragments, all of which are normalized
+into a standard dictionary-based data structure.
+This may then be manipulated programmatically, re-normalized,
+or passed on to the the configuration function,
+allowing for complex configuration needs
+to be accomplished with a few short commands.
+
+Normalized Configuration Structure
+==================================
+The data structure which BPS uses
+to represent a set of changes to be applied
+to the logging system's configuration is a dictionary
+which contains certain predefined keys (none are required unless otherwise noted).
+The value attached to each key has a "normalized" format,
+which will be the format it is in as returned by :func:`parse_config`,
+but there are also other "input" formats, which will be accepted
+by :func:`parse_config` and returned normalized.
+The following keys are recognized:
+
+ ``"levels"``
+ If present, this should be a dictionary whose keys
+ are the names of logger objects, and the corresponding
+ values the level that logger should be set to.
+
+ formatters
+ [Optional]
+ This should be a dictionary mapping formatter names to dicts of formatter options,
+ to be passed to compile_formatter(). The names may be referred to by the handlers.
+ handlers
+ [Optional]
+ This should be a dictionary mapping handlers names to dicts of handlers options,
+ to be passed to compile_handler(). The names may be referred to by the output section.
+ outputs
+ [Optional]
+ This should be a dictionary mapping loggers to lists of handler names,
+ as specified in the handler section, or in the default handler presets.
+
+The following keywords are accepted by :func:`parse_config`,
+and will be merged into one of the above keys during normalization:
+
+ ``"level"``
+ This keyword specifies the logging level used by the root logger.
+ This is a shortcut allowing the master level to be set quickly,
+ without needing to create a dictionary.
+
+ It will be used as the default value for the "<root>" key
+ inside the "levels" dictionary (above).
+
+ ``"default_handler"``
+ This is a shortcut which allows you to specify just a keywords
+ for creating a handler, but which will result in all the commands
+ needed to create the handler and attach it as the sole output
+ for the root logger. For example, setting ``default_handler=opts``
+ will result in the following normalized options:
+ ``output="<root>=default only", handlers=dict(default=opts)``.
+
diff --git a/docs/lib/bps.logs-config_format.rst b/docs/lib/bps.logs-config_format.rst
new file mode 100755
index 0000000..5744702
--- /dev/null
+++ b/docs/lib/bps.logs-config_format.rst
@@ -0,0 +1,283 @@
+=======================
+BPS Logging File Format
+=======================
+
+The BPS Logging Format is an alternate ini-based file format
+for configuring python's builtin logging system. Both this format
+and the stdlib format are accepted (and auto-detected) by :func:`bps3.logs.config_logging`.
+
+.. warning::
+
+ This documentation currently assumes you are familiar with
+ the python logging package, its standard format,
+ and its object system. There may eventually be a rewrite to
+ correct this.
+
+Why another format?
+===================
+Python's builtin logging system specifies a format for configuring the logging
+system [#stdfmt]_. While this format offers the ability to configure every
+aspect of the logging system, the manner in which it does this is somewhat
+verbose, makes some simple tasks much more time consuming than they need to be,
+and deciphering an existing config file is not the trivial task it should be.
+
+A prime example of this issue is configuring the logging levels of a number
+of loggers at once. Under the stdlib logging format, you would need to do
+the following:
+
+.. code-block:: cfg
+
+ [loggers]
+ keys=root,app,app.model,mylib
+
+ [logger_root]
+ level = WARNING
+
+ [logger_app]
+ level = DEBUG
+
+ [logger_app.model]
+ level = INFO
+
+ [logger_mylib]
+ level = DEBUG
+
+For doing development work, where various loggers may need to be added and
+removed frequently, this format becomes incredibly cumbersome. This
+was the main motivation for creating a new format. Under the BPS Logging Format,
+the equivalent commands to achieve the above would be:
+
+.. code-block:: cfg
+
+ [logging:levels]
+ <root> = WARNING
+ app = DEBUG
+ app.model = INFO
+ mylib = DEBUG
+
+While a couple of rare features of the stdlib format have not been replicated
+in the new format, work is ongoing, and the majority of the features have been
+converted over into what is hoped to be a more concise, understandable, and
+easily editable format.
+
+Format Overview
+===============
+The BPS Logging Format is based around the ConfigParser's file format.
+It defines the following section names, all of which begin with the prefix
+``logging:``, and sections lacking this prefix will be ignored.
+None of the following sections are required, except where interdependent
+references exist. The sections are as follows:
+
+ `logging:levels`_
+ This section lets you configure the logging levels for any logger
+ in the logging system.
+
+ `logging:options`_
+ This section lets you set various global logging system options,
+ including some custom extensions provided by BPS.
+
+ `logging:output`_
+ This section maps loggers to handlers,
+ allowing you to control where the output of the logging system
+ is going.
+
+ `logging:handler:$NAME`_
+ Sections of this type (eg `logging:handler:myhandler`) define
+ the configuration to be used when a handler name is referenced
+ in the `logging:output`_ section.
+
+ `logging:formatter:$NAME`_
+ Sections of this type (eg `logging:formatter:myformatter`) define
+ the configuration to be used when a formatter name is referenced
+ in a `logging:handler:* <logging:handler:$NAME>`_ section.
+
+logging:levels
+--------------
+This section lets you configure the logging levels for any logger
+in the logging system.
+
+The keys in this section correspond to logger names,
+and the values to a predefined logging level. This logging level can
+be a predefined name (eg ``NOTSET``, ``DEBUG``, etc), or an integer value ( ``0``, ``10``, etc).
+Spaces in the logging level will be ignored, as will any text following a ``#`` symbol,
+allowing in-line comments.
+
+The logger name of ``<root>`` is interpreted as a convenient alias for the empty string,
+which corresponds to the root logger of python's logging system. All other logger names
+which start with ``<``, contain a series of letters, and end with ``>``,
+are considered reserved by this format, for use in a grouping/alias system which is still under development.
+
+A very verbose example of the ``logging:levels`` section, showing off the various options:
+
+.. code-block:: cfg
+
+ [logging:levels]
+
+ #this is an example of a full-line comment
+
+ #this will set the root logger level
+ <root> = WARNING
+
+ app = DEBUG #this is an example of a in-line comment
+
+ #note that "#WARNING" below will be ignored
+ app.model = INFO #WARNING
+
+ #this uses an integer level
+ mylib = 10
+
+A more compact example, without all the comments:
+
+.. code-block:: cfg
+
+ [logging:levels]
+ <root> = WARNING
+ app = DEBUG
+ app.model = INFO
+ mylib = 10
+
+.. note::
+ If a undefined textual logging level is specified,
+ a :exc:`KeyError` will be raised at the time this file is loaded.
+
+logging:options
+---------------
+
+This section controls global options for the python logging system.
+The following keys are currently recognized (unrecognized
+keys will be ignored):
+
+ ``capture_stdout``
+ This is a boolean keyword. If set to ``true``,
+ standard output will be captured, and re-routed to
+ a logger object named ``sys.stdout``.
+ If set to ``false``, and stdout is currently being
+ captured by BPS, the capturing of stdout will be stopped.
+
+ See :mod:`bps3.log.capture` for details.
+
+ ``capture_stderr``
+ This functions identically to ``capture_stdout``,
+ except that it operates on standard error.
+
+ ``capture_warnings``
+ This functions similarly to ``capture_stdout``,
+ except that it captures the warnings issued by the
+ python :mod:`warnings` module, and sends such messages
+ to the logger appropriate for the module which issued
+ the warning.
+
+ *Setting this option is HIGHLY recommended*, as it will
+ integrate the warnings module into the logging system
+ (how python should have had it to begin with).
+
+ ``warning_fmt``
+ When used with ``capture_warnings``, this option
+ allows you to specify a custom warning format string.
+ See :func:`capture_warnings` for details about the format
+ of this string, which corresponds to the ``fmt`` keyword.
+
+ ``warning_target``
+ When used with ``capture_warnings``, this option
+ allows you to specify a custom target for any warnings
+ sent to the logging system.
+ See :func:`capture_warnings` for details about the format
+ of this string, which corresponds to the ``target`` keyword.
+
+As an example, the following configuration snippet captures
+everything from stdout and warnings, and leaves stderr alone:
+
+.. code-block:: cfg
+
+ [logging:options]
+ capture_warnings = true
+ #if no warning_fmt is specified, the default will be used:
+ #warning_fmt = %(category)s:\n\t message: %(message)s\n\tfilename: %(filename)s\n\t lineno: %(lineno)s
+
+ capture_stderr = true
+
+ #uncomment this next to explicitly release stdout
+ #capture_stdout = false
+
+logging:output
+--------------
+This section maps loggers to handlers, allowing you to control where the output of the logging system
+is going. It consists of "name = handler1, handler2, ..." entries,
+which have the effect of attaching (one or more) handlers to the named logger.
+If a given entry ends with ``" only"``, any existing handlers attached to the logger
+will be removed before adding the specified handlers, and messages
+will not propagate past this logger.
+
+.. todo::
+ give examples
+
+logging:handler:$NAME
+---------------------
+If a handler is specified by name in `logging:output`_,
+the configuration loader will look for a section with
+the corresponding name to determine the handler's class
+and configuration. If a handler entry is present,
+but not referenced by the `logging:output`_ section
+of that file, it will be ignored.
+
+It consists of keyword arguments passed to the
+:func:`compile_handler` function, which has pretty much
+the same syntax as the `fileConfig` format.
+
+.. todo::
+ document keywords, give examples
+
+logging:formatter:$NAME
+-----------------------
+This section configures a named formatter,
+and must be present for all formatters
+referenced in a ``[logging:handler:$NAME]`` section.
+It consists of keyword arguments passed to the
+`create_formatter` function, which has pretty much
+the same syntax as the `fileConfig` format.
+
+
+Example Files
+=============
+
+An example of a full-featured logging config file,
+which is probably overkill for a typical application:
+
+.. code-block:: cfg
+
+ [logging:options]
+ capture_stdout = false
+ capture_warnings = true
+ warning_fmt = %(category)s: %(message)s
+
+ [logging:levels]
+ <root> = INFO
+ myapp = DEBUG
+ pylons = WARNING
+
+ [logging:output]
+ <root> = console
+ myapp = syslog
+
+ [logging:handler:console]
+ class = StreamHandler
+ args = (sys.stderr,)
+ level = NOTSET
+ formatter = generic
+ startup_msg = True
+
+ [logging:handler:syslog]
+ class=handlers.SysLogHandler
+ level=ERROR
+ formatter=generic
+ args=(('localhost', handlers.SYSLOG_UDP_PORT), handlers.SysLogHandler.LOG_USER)
+
+ [logging:formatter:generic]
+ format = %(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
+ datefmt = %H:%M:%S
+
+=============
+
+.. rubric:: Footnotes
+
+.. [#stdfmt] `<http://docs.python.org/library/logging.html#configuration-file-format>`_
diff --git a/docs/lib/bps.logs.rst b/docs/lib/bps.logs.rst
new file mode 100644
index 0000000..add9fad
--- /dev/null
+++ b/docs/lib/bps.logs.rst
@@ -0,0 +1,34 @@
+=====================================
+:mod:`bps.logs` -- Logging Utilities
+=====================================
+
+.. module:: bps.logs
+ :synopsis: logging utilities
+
+This module provides a number of extensions to the standard python logging module.
+Features include:
+
+ * `setup_std_logging`: a replacement for ``basicConfig()`` for initializing the logging system,
+ It offers many additional features, such as improved default heuristics,
+ the ability to capture stdout, stderr, and python warnings and rerouting them
+ through the logging system, etc.
+ * `config_logging`: a replacement for ``fileConfig()`` which supports a more compact file format.
+ * `FancyFormatter`: a formatter which supports numerous formatting options
+ * `log`: a intelligent proxy Logger, which uses module inspection to determine which logger it should invoke
+
+General usage:
+
+ Call setupLogging() to initialize logging system,
+ and/or call configLogging(path) to load configuration from a file.
+ Most of the components (log, the formatters, etc) are designed
+ to be used individually, they don't require use of any of the other
+ bps3.logs components.
+
+.. toctree::
+
+ bps.logs-config_format
+
+.. todo::
+
+ document this module
+
diff --git a/docs/lib/bps.meta.rst b/docs/lib/bps.meta.rst
new file mode 100755
index 0000000..6d11846
--- /dev/null
+++ b/docs/lib/bps.meta.rst
@@ -0,0 +1,42 @@
+===============================================================
+:mod:`bps.meta` -- Introspection & Monkeypatching
+===============================================================
+
+.. module:: bps.meta
+ :synopsis: introspection & monkeypatching
+
+This module contains various introspection utilities,
+to enhance what is already provided through
+python's :mod:`inspect` module.
+
+Interface Tests
+===============
+.. autofunction:: is_class
+.. autofunction:: is_num
+.. autofunction:: is_seq
+.. autofunction:: is_oseq
+.. autofunction:: is_str
+
+Class Inspection
+================
+.. autofunction:: is_overridden
+.. autofunction:: find_attribute
+
+Module Inspection
+=================
+.. autofunction:: get_module_exports
+
+Decorators
+===========
+.. autofunction:: abstractmethod
+.. autofunction:: decorate_per_instance
+
+Autosuper
+=========
+.. autofunction:: instrument_super
+
+Monkeypatching
+==============
+.. autofunction:: monkeypatch
+.. autofunction:: monkeypatch_mixin
+
diff --git a/docs/lib/bps.misc.rst b/docs/lib/bps.misc.rst
new file mode 100644
index 0000000..e39affb
--- /dev/null
+++ b/docs/lib/bps.misc.rst
@@ -0,0 +1,23 @@
+===============================================
+:mod:`bps.misc` -- Miscellanous Utilities
+===============================================
+
+.. module:: bps.misc
+ :synopsis: utilities which fit in no other category
+
+This module contains everything which didn't naturally
+fit in any category handled by an existing module.
+
+Entries in this module will occasionally be moved
+into other modules after an appropriate category is created.
+That is done on an as-needed basis when enough related
+functions are collected in this module.
+
+Properties
+==========
+.. autofunction:: indirect_property
+.. autofunction:: constructor_property
+
+Functions
+=========
+.. autofunction:: stepped_delay
diff --git a/docs/lib/bps.numeric.rst b/docs/lib/bps.numeric.rst
new file mode 100644
index 0000000..1fff928
--- /dev/null
+++ b/docs/lib/bps.numeric.rst
@@ -0,0 +1,55 @@
+=================================================
+:mod:`bps.numeric` -- Numeric Tools
+=================================================
+
+.. module:: bps.numeric
+ :synopsis: mathematical and numeric tools
+
+Number Theory
+=============
+.. autofunction:: factors
+.. autofunction:: gcd
+.. autofunction:: lcm
+
+Primality Testing
+=================
+.. autofunction:: is_prime
+.. autofunction:: next_prime
+.. autofunction:: prev_prime
+.. autofunction:: iter_primes
+
+Numeric Formats
+===============
+.. autofunction:: int_to_base
+.. autofunction:: float_to_base
+.. autofunction:: int_to_roman
+.. autofunction:: roman_to_int
+
+Miscellaneous Functions
+=======================
+.. autofunction:: sdivmod
+.. autofunction:: splitfrac
+.. autofunction:: avgsd
+.. autofunction:: digits
+.. autofunction:: limit
+
+Bytes Strings
+=============
+The following functions manipulate strings
+as if they were binary data, not characters.
+They allow for doing bit-wise boolean operations
+on strings, converting them to integers, etc.
+
+.. note::
+ When this module is converted to Python 3.0,
+ these will all be operations on ``bytes``, not ``str``.
+
+.. autofunction:: int_to_bytes
+.. autofunction:: bytes_to_int
+.. autofunction:: list_to_bytes
+.. autofunction:: bytes_to_list
+.. autofunction:: xor_bytes
+.. autofunction:: or_bytes
+.. autofunction:: and_bytes
+.. autofunction:: invert_bytes
+.. autofunction:: binop_bytes
diff --git a/docs/lib/bps.refs.rst b/docs/lib/bps.refs.rst
new file mode 100644
index 0000000..f23aa8a
--- /dev/null
+++ b/docs/lib/bps.refs.rst
@@ -0,0 +1,22 @@
+=================================================
+:mod:`bps.refs` -- Weak Reference and Proxy Tools
+=================================================
+
+.. module:: bps.refs
+ :synopsis: weak reference and proxy tools
+
+Properties
+==========
+.. autofunction:: weakref_property
+
+Classes
+=======
+.. autoclass:: SoftValueDict
+.. autoclass:: WeakSet
+
+Proxies
+=======
+.. autoclass:: ProxyObject
+
+.. autofunction:: is_proxy_active
+.. autofunction:: proxy_using_object
diff --git a/docs/lib/bps.rng.rst b/docs/lib/bps.rng.rst
new file mode 100644
index 0000000..5869ff4
--- /dev/null
+++ b/docs/lib/bps.rng.rst
@@ -0,0 +1,71 @@
+=================================================
+:mod:`bps.rng` -- Random Number Generation
+=================================================
+
+.. module:: bps.rng
+ :synopsis: random number generation
+
+This module is essentially just a wrapper for stdlib's
+random module. It provides a few additional
+methods for managing & getting random numbers,
+but also provides a more useful interface
+for the *type* of randomness you want.
+
+Random Number Generators
+========================
+The following random number generator
+instances are always available from this module:
+
+.. data:: random
+
+ This will be an instance of the best pseudo random number generator
+ available (currently the python builtin prng), with as good
+ an entropic source as is available for seeding via
+ the seed() and reseed() methods.
+ Use this for most non-cryptographic purposes.
+
+.. data:: srandom
+
+ This will be an instance of the strongest random number generator
+ available on your system. It will use python's SystemRandom
+ if os.urandom support is available, otherwise it will fall back
+ to the same generator as prandom. This should be used
+ for cryptographic purposes over the normal prng.
+
+ .. warning::
+ If urandom is present, this is dependent on the strength
+ of your system's urandom implementation. If urandom is missing,
+ the fallback (normal) may not have enough entropy to defend
+ from attackers. To help this somewhat, it is recommended
+ to call ``strong.reseed()`` before calls which will consume
+ randomness for critical purposes, just to help scramble things
+ as best as possible (reseed is a no-op if urandom is being used).
+
+.. data:: drandom
+
+ This is a variant of the *random* generator,
+ except that all outside entropic
+ sources are disabled, so that its state is completely
+ determined by the value passed into seed().
+
+ This is mainly useful in unit tests, when you need
+ to reliably repeat the same results over and over.
+
+Extra Methods
+=============
+In addition to the methods provided by stdlib's random module,
+all the above rngs will contain the following extra methods:
+
+.. function:: reseed()
+
+ Unlike seed(), which attempts to set the random number generator's
+ state explicitly, this method attempts to pull in outside
+ entropy sources (current rng state, time, etc) to help
+ randomize the state of your prng as much as possible.
+
+ .. todo::
+ In the future, need a way for app to add entropy to the system.
+
+.. function:: getrandbytes()
+
+.. function:: weightedchoice()
diff --git a/docs/lib/bps.rst b/docs/lib/bps.rst
new file mode 100644
index 0000000..6956ec4
--- /dev/null
+++ b/docs/lib/bps.rst
@@ -0,0 +1,79 @@
+==============
+Global Exports
+==============
+
+This page lists the objects that are imported
+when using the command ``from bps import *``.
+
+While the BPS package is generally accessed by importing
+one of the submodules, it does offer a limited list
+of exports, which are designed to be dropped into your
+module's global namespace directly.
+
+Since populating the global namespace like this usually
+causes havoc due to its implicit nature, the objects
+exported by default have been limited only to ones
+which the BPS authors felt day in and day out were
+going to be needed so often, and so unpredictably,
+that it would be nice if they were available almost like builtins.
+Thus, much of our code begins with the stanza::
+
+ >>> #import from the bps global namespace
+ >>> from bps import *
+
+This ensures a number of very useful objects
+are always available. But since this import can be abused,
+objects are very rarely added to this list.
+
+Exported Objects
+================
+The following objects will be exported by ``from bps import *``.
+While they are documented more fully elsewhere, here is a quick description:
+
+ :func:`abstractmethod() <bps.meta.abstractmethod>`
+
+ This is a very useful decorator to have around if you do a lot
+ of interface-style class creation.
+
+ .. note::
+ A native version has been introduced
+ in Python 2.6, but that is not yet used by BPS.
+
+ :class:`BaseClass <bps.types.BaseClass>`
+
+ This class can be used as a drop-in replacement for ``object``,
+ it provides features such as an intelligent ``self.__super`` method,
+ and a ``cls.__initsubclass__`` method for performing actions
+ based on the creation of inherited classes.
+
+ :func:`filepath() <bps.fs.filepath>`
+
+ This is the constructor for BPS's all-singing-all-dancing filepath object.
+ It's so useful, this was the first global export added.
+ Never use `os.path` again!
+
+ :func:`log <bps.logs.log>`
+
+ This is a magic logger object.
+ Import it into your module and call it,
+ and through introspection, it will act just like ``logging.getLogger(__name__)``,
+ and log all messages to the name of the module it was called from.
+
+ :func:`partial`
+ This is an export of stdlib's `functools.partial <http://docs.python.org/library/functools.html#functools.partial>`_,
+ since it is used a lot (at least, by the BPS developers it is).
+ An implementation of this has been exported by BPS since its inception,
+ it was only when Python 2.5 was set as a minimum requirement
+ that BPS started using the stdlib version.
+
+ :data:`Undef <bps.types.Undef>`
+
+ A companion to ``None`` which represents an undefined/missing value.
+ Same as javascript's "undefined".
+
+ :func:`warn`
+
+ This is an export of stdlib's `warnings.warn <http://docs.python.org/library/warnings.html#warnings.warn>`_.
+ Warnings should be used much more often than they are,
+ and that would be encouraged if not for the inconvenience
+ of having to add an import stanza at the top.
diff --git a/docs/lib/bps.security.policy.rst b/docs/lib/bps.security.policy.rst
new file mode 100644
index 0000000..e6135de
--- /dev/null
+++ b/docs/lib/bps.security.policy.rst
@@ -0,0 +1,106 @@
+==================================================================
+:mod:`bps.security.policy` -- Lightweight Access Control Framework
+==================================================================
+
+.. module:: bps.security.policy
+ :synopsis: lightweight access control framework
+
+Overview
+========
+This module provides a framework for applications
+to build complex permission and security policies,
+centered around the common "user -> role -> permission" pattern.
+This framework is derived from one deployed in a few web applications,
+which in turn was inspired by Roundup's `access control mechanism <http://www.roundup-tracker.org/docs/design.html#access-control>`_.
+Nevertheless, it's generic enough that it should be suitable for use
+by gui and command line applications as well.
+
+An application can make use of this framework by:
+
+ * creating a :class:`Policy` instance for the application.
+ * registering all potential roles with the policy
+ * registering all permissions, as expressed
+ in terms of actions, roles, and optional guard functions.
+ * querying the policy either to enumerate a given user's permissions,
+ or check if the user has permission to perform a specific action.
+
+.. _permission-question:
+
+Framing a Permission Question
+=============================
+
+.. todo:: finish write up the structure of the "permission question"
+
+When an application needs to test whether a user has permission
+to perform a given action, the first thing that must be done
+to use any policy framework is to encode the question in a format
+the permission system understands. This module encodes
+permission questions using the following 5 parameters:
+
+ * ``action`` - a string, usually a verb such as ``"update"``,
+ which represents the action permission is being requested for.
+
+ * ``klass`` - optional string, usually a noun such as ``"BlogEntry"``
+ which the action will be acting upon. (Some actions
+ act globally, and won't have a class specified).
+
+ * ``item`` - optional object, usually an instance of the class identified by ``klass``.
+ This is generally used when the permission applies to only certain instances
+ of the class, which must be decided on a case-by-case basis.
+
+ * ``attr`` - optional string, usually a attribute of ``klass`` such as ``"date"``.
+ This is typically used when the action is restricted on a per-attribute basis.
+
+ * ``scope`` - optional object, usually the owner of the instance
+ being acted on, or a composite object which the action is being
+ performed inside of. This is needed very rarely, but there are
+ some cases, such as when requesting permission to create
+ a new instance of class which will be stored inside a particular
+ object, and that object affects the outcome of the permission check.
+
+Combinations of 1 or more of these parameters can be put together
+in order to encode the following questions:
+
+ 1. ``Does {user} have permission to {action}?``
+
+ 2. ``Does {user} have permission to {action} an object of type {klass}?``.
+
+ 3. ``Does {user} have permission to {action} the object {item} of type {klass}?``
+
+ 4. ``Does {user} have permission to {action} the attribute {attr} of an object of type {klass}?``
+
+ 5. ``Does {user} have permission to {action} the attribute {attr} of the object {item} of type {klass}?``
+
+ 6. ``Does {user} have permission to {action} an object of type {klass} as part of {scope}?``.
+ As an example: does the user have permission to *create* an object of type
+ *entry* as part of *<a specific journal instance>*?
+
+Usage Example
+=============
+
+.. todo:: write usage example for sec policy
+
+The Policy Class
+================
+
+.. autoclass:: Policy
+
+The Support Classes
+===================
+The following classes are used internally by :class:`Policy`,
+and generally the programmer will not need to create them directly
+(though they may need to examine them if preparing a list of
+the user's permissions for display).
+
+.. autoclass:: Role
+
+.. autoclass:: Permission
+
+..
+ Not documenting these right now, the system is usable without
+ knowledge of this bit, although they could be usable by the guard func,
+ but no use-case has needed this just yet:
+
+ .. _permissions-constants:
+
+ .. autoclass:: PERM
diff --git a/docs/lib/bps.security.pwgen.rst b/docs/lib/bps.security.pwgen.rst
new file mode 100644
index 0000000..c657b12
--- /dev/null
+++ b/docs/lib/bps.security.pwgen.rst
@@ -0,0 +1,15 @@
+================================================
+:mod:`bps.security.pwgen` -- Password Generation
+================================================
+
+.. module:: bps.security.pwgen
+ :synopsis: password generation algorithms
+
+The following single function allows
+easy password generation in a number of styles:
+
+.. autofunction:: generate_secret
+
+.. todo::
+ document internal classes
+
diff --git a/docs/lib/bps.security.pwhash.rst b/docs/lib/bps.security.pwhash.rst
new file mode 100644
index 0000000..b2f0aa1
--- /dev/null
+++ b/docs/lib/bps.security.pwhash.rst
@@ -0,0 +1,44 @@
+=============================================
+:mod:`bps.security.pwhash` - Password Hashing
+=============================================
+
+.. module:: bps.security.pwhash
+ :synopsis: password hashing (unix-crypt, md5-crypt, etc)
+
+Overview
+========
+This module handles encrypting and verifying password hashes
+(such as from unix shadow files). This module contains implementations of most
+of the modern password hashing algorithms,
+as well as a complex framework for implementing
+new algorithms, managing hashes generated
+within different contexts with different supported
+algorithms, and other features.
+
+The algorithms currently supported by default in BPS:
+
+ * Unix-Crypt
+ * MD5-Crypt
+ * BCrypt
+ * SHA-Crypt (256 & 512 bit modes)
+
+ * PostgreSQL & MySQL password hashes
+
+Sections
+========
+The documentation for the pwhash module is broken into the following sections:
+
+* :doc:`Quick Start <bps.security.pwhash/quickstart>` -- frontend funcs for quickly creating / validating hashes
+* :doc:`Crypt Contexts <bps.security.pwhash/contexts>` -- for using just the algorithms your application needs
+* :doc:`Crypt Algorithms <bps.security.pwhash/algorithms>` -- details of the algorithms BPS implements
+* :doc:`Implementing a Custom Crypt Algorithm <bps.security.pwhash/implementation>` -- Roll your own
+* :doc:`Helper Functions <bps.security.pwhash/utils>`
+
+.. toctree::
+ :hidden:
+
+ bps.security.pwhash/quickstart
+ bps.security.pwhash/contexts
+ bps.security.pwhash/algorithms
+ bps.security.pwhash/implementation
+ bps.security.pwhash/utils
diff --git a/docs/lib/bps.security.pwhash/algorithms.rst b/docs/lib/bps.security.pwhash/algorithms.rst
new file mode 100644
index 0000000..6973313
--- /dev/null
+++ b/docs/lib/bps.security.pwhash/algorithms.rst
@@ -0,0 +1,49 @@
+=============================================
+:mod:`bps.security.pwhash` - Crypt Algorithms
+=============================================
+
+.. currentmodule:: bps.security.pwhash
+
+All of the crypt algorithms must inherit from :class:`CryptAlgorithm`,
+which defines a common interface all algorithms must support.
+You may use the algorithms directly, by creating
+an instance and calling it as described in :doc:`Implementing a Crypt Algorithm <implementation>`.
+However, you will normally not need to deal with the internals of the algorithms
+directly, but rather take advantage of one of the predefined algorithms,
+through the :doc:`frontend functions <quickstart>` or a
+custom :doc:`crypt context <contexts>`.
+
+Standard Algorithms
+===================
+The following algorithms are all standard password hashing algorithms
+used by various Posix operating systems over the years.
+
+.. note::
+ BPS tries to use external acceleration for these classes when possible,
+ but provides a pure-python fallback so that these algorithms will
+ ALWAYS be available for use.
+
+.. autoclass:: UnixCrypt
+.. autoclass:: Md5Crypt
+.. autoclass:: Sha256Crypt
+.. autoclass:: Sha512Crypt
+.. autoclass:: BCrypt
+
+Database Algorithms
+===================
+BPS also provides implementations of the hash
+algorithms used by MySql and PostgreSQL.
+
+.. autoclass:: Mysql10Crypt
+.. autoclass:: Mysql41Crypt
+.. autoclass:: PostgresMd5Crypt
+
+.. data:: mysql_context
+
+ This context object contains the algorithms used by MySql 4.1 and newer
+ for storing user passwords.
+
+.. data:: postgres_context
+
+ This context object should be able to read/write/verify
+ the values found in the password field of the pg_shadow table in Postgres.
diff --git a/docs/lib/bps.security.pwhash/contexts.rst b/docs/lib/bps.security.pwhash/contexts.rst
new file mode 100644
index 0000000..3312355
--- /dev/null
+++ b/docs/lib/bps.security.pwhash/contexts.rst
@@ -0,0 +1,35 @@
+=============================================
+:mod:`bps.security.pwhash` - Crypt Contexts
+=============================================
+
+.. currentmodule:: bps.security.pwhash
+
+For more complex deployment scenarios than
+the frontend functions described in :doc:`Quick Start <quickstart>`,
+the CryptContext class exists...
+
+.. autoclass:: CryptContext
+
+Predefined Contexts
+===================
+The following context objects are predefined by BPS:
+
+.. data:: default_context
+
+ This context object contains all the algorithms
+ supported by BPS, listed (mostly) in order of strength.
+ :func:`identify`, :func:`verify`, and :func:`encrypt`
+ are all merely wrappers for this object's methods
+ of the same name.
+
+.. data:: linux_context
+
+ This context object contains only the algorithms
+ in use on modern linux systems (namely:
+ unix-crypt, md5-crypt, sha512-crypt).
+
+.. data:: bsd_context
+
+ This context object contains only the algorithms
+ in use on modern BSD systems (namely:
+ unix-crypt, md5-crypt, bcrypt).
diff --git a/docs/lib/bps.security.pwhash/implementation.rst b/docs/lib/bps.security.pwhash/implementation.rst
new file mode 100644
index 0000000..2bf1661
--- /dev/null
+++ b/docs/lib/bps.security.pwhash/implementation.rst
@@ -0,0 +1,17 @@
+===================================================================
+:mod:`bps.security.pwhash` - Implementing a Custom Crypt Algorithm
+===================================================================
+
+.. currentmodule:: bps.security.pwhash
+
+New password algorithms can be implemented
+by subclassing :class:`CryptAlgorithm`,
+which provides the underlying framework used
+for all the password algorithms.
+
+To create a new one,
+you simply subclass CryptAlgorithm,
+and implement the identify, encrypt, and verify methods
+(at the very least).
+
+.. autoclass:: CryptAlgorithm
diff --git a/docs/lib/bps.security.pwhash/quickstart.rst b/docs/lib/bps.security.pwhash/quickstart.rst
new file mode 100644
index 0000000..44e3fa3
--- /dev/null
+++ b/docs/lib/bps.security.pwhash/quickstart.rst
@@ -0,0 +1,46 @@
+========================================
+:mod:`bps.security.pwhash` - Quick Start
+========================================
+
+.. currentmodule:: bps.security.pwhash
+
+Usage Example
+=============
+In order to get off the ground quickly, here's an
+example of how to quickly encrypt and verify passwords
+without having to delve too deeply into this module::
+
+ >>> from bps.security import pwhash
+
+ >>> #encrypt password using strongest algorithm defined by this module
+ >>> hash = pwhash.encrypt("too many secrets")
+ >>> hash
+ '$6$rounds=39000$DNnCxm85LEP1WXUh$IVkALQeSuhr2hcUV90Tv8forzli3K.XwX.1JzPjgwltgvCAgllN3x1jNpG9E1C8IQPm0gEIesqATDyKh/nEnh0'
+
+ >>> #verify password against hash
+ >>> pwhash.verify("mypass", hash)
+ False
+ >>> pwhash.verify("too many secrets", hash)
+ True
+
+ >>> #identify the algorithm used in a hash
+ >>> pwhash.identify(hash)
+ 'sha512-crypt'
+
+ >>> #choose a specific algorithm to use (instead of the default)
+ >>> hash2 = pwhash.encrypt("too many secrets", alg="bcrypt")
+ '$2a$11$unZuTsMEjeo5mqFX6rmRduQPBDx9t3djd2voi9W.oFhUDQu1NNMcW'
+
+ >>> #check if we used right algorithm
+ >>> pwhash.identify(hash2)
+ 'bcrypt'
+
+ >>> #the hash type is autodetected by verify
+ >>> pwhash.verify("too many secrets", hash2)
+ True
+
+Frontend Functions
+==================
+.. autofunction:: encrypt
+.. autofunction:: verify
+.. autofunction:: identify
diff --git a/docs/lib/bps.security.pwhash/utils.rst b/docs/lib/bps.security.pwhash/utils.rst
new file mode 100644
index 0000000..897349d
--- /dev/null
+++ b/docs/lib/bps.security.pwhash/utils.rst
@@ -0,0 +1,19 @@
+=============================================
+:mod:`bps.security.pwhash` - Helper Functions
+=============================================
+
+.. currentmodule:: bps.security.pwhash
+
+A couple of utility functions are available,
+mainly useful when writing custom password hash algorithms.
+The ``h64_*`` series of functions all provide
+utilities for encoding & decoding strings
+under the modified base64 system used by most
+of the standard unix hash algorithms.
+
+.. autofunction:: h64_encode
+.. autofunction:: h64_decode
+.. autofunction:: h64_gen_salt
+
+.. autofunction:: is_crypt_context
+.. autofunction:: is_crypt_alg
diff --git a/docs/lib/bps.security.rst b/docs/lib/bps.security.rst
new file mode 100644
index 0000000..c84bc8b
--- /dev/null
+++ b/docs/lib/bps.security.rst
@@ -0,0 +1,17 @@
+======================================
+:mod:`bps.security` -- Security Tools
+======================================
+
+.. module:: bps.security
+ :synopsis: security related modules
+
+This package provides nothing on its own,
+but instead is a collection of a number
+of security-related subpackages:
+
+.. toctree::
+ :maxdepth: 1
+
+ bps.security.pwgen
+ bps.security.pwhash
+ bps.security.policy
diff --git a/docs/lib/bps.stream.rst b/docs/lib/bps.stream.rst
new file mode 100644
index 0000000..9955abd
--- /dev/null
+++ b/docs/lib/bps.stream.rst
@@ -0,0 +1,24 @@
+===============================================
+:mod:`bps.stream` -- Stream & Buffer Utilities
+===============================================
+
+.. module:: bps.stream
+ :synopsis: stream (file, StringIO) helpers
+
+This module contain various stream & buffer related utilities.
+
+Non-Blocking Reads
+==================
+
+.. autofunction:: nb_read
+.. autofunction:: nb_readline_iter
+.. autoclass:: nb_readline_list
+
+Other Functions
+===============
+.. autofunction:: get_stream_size
+
+..
+ not listing this one till it's heuristic or use-case is better defined:
+
+ .. autofunction:: get_input_type
diff --git a/docs/lib/bps.text.rst b/docs/lib/bps.text.rst
new file mode 100755
index 0000000..2dabd11
--- /dev/null
+++ b/docs/lib/bps.text.rst
@@ -0,0 +1,105 @@
+=================================================
+:mod:`bps.text` -- Text parsing & formatting
+=================================================
+
+.. module:: bps.text
+ :synopsis: text parsing & formatting
+
+This module provides various methods for manipulating
+various types of strings. It includes helpers
+for cleaning user input, inflecting english words,
+and some other features.
+
+String Parsing
+===============
+
+.. autofunction:: asbool
+.. autofunction:: condense
+.. autofunction:: split_condense
+
+Filename Sanitizing
+===================
+.. autofunction:: clean_filename
+
+Extending :func:`clean_filename`
+--------------------------------
+The clean_filename function is designed to be extended
+to suit your own requirements, and yet still perform
+optimally. If you have a preset configuration
+which you frequently use, simply create an instance
+of :class:`FileCleaner`, passing in the appropriate
+options for your preset, or clone an existing preset
+using ``preset.copy()``. These instances can be called
+directly, just like the `clean_filename` function proper.
+Or, you may insert it into ``bps3.text.cfg_presets``
+under a custom name, so that it will be globally available
+through :func:`clean_filename`. See the source code for more.
+
+Language Inflection
+===================
+BPS implements a language inflector class based off of
+the one implemented in Ruby On Rails. Currently only English
+is supported (but see note below). While the system
+is class based, the following public functions
+are offered up for easy access:
+
+.. autofunction:: pluralize
+.. autofunction:: singularize
+.. autofunction:: countof
+.. autofunction:: oneof
+.. autofunction:: ordinal
+
+.. note::
+ Currently, there only exists an (American) English language inflector,
+ but if and when more Inflector subclasses are written for other languages,
+ this system will be expanded as the use cases require.
+
+..
+ Variable Renaming
+ =================
+ BPS has only the beginnings of support for variable name mangling,
+ such as converting from ``CamelCase`` to ``lower_case_with_underlines``.
+ This will hopefully be fleshed out more in the future.
+
+ .. autofunction:: lu_to_cc
+
+Format String Backport
+======================
+Python 2.6 introduced a new formatting system.
+BPS contains a pure-python implementation of this system,
+so that it is available to Python 2.5 deployments.
+Thus, the following methods are aliases for the native
+python implementations when available; otherwise
+they are backed by a pure-python implementation.
+
+.. autofunction:: render_format
+.. autofunction:: format
+.. autoclass:: Formatter
+
+.. note::
+ For Python 2.5 users who *really* want to have ``str.format()``
+ available to them directly, they may import :mod:`bps.text.patch_format`
+ somewhere in their application. By importing this module,
+ the native strings types of Python 2.5 will be monkeypatched to include
+ a format method which should be a compatible with the real thing.
+ This is not imported by default, as it's a somewhat evil thing to do.
+
+Format String Parsing
+=====================
+The following functions are available for examining
+format strings. They are rarely needed,
+but occasionally code has the need to inspect a format string template:
+
+.. autofunction:: fmt_has_field
+.. autofunction:: get_fmt_fields
+
+..
+ these are present, but not documented yet
+ .. autofunction:: parse_fmt_string
+ .. autofunction:: parse_fmt_field
+
+
+..
+ agent string parsing:
+ .. autofunction:: parse_agent_string
+ .. autofunction:: agent_string_has_product
diff --git a/docs/lib/bps.types.rst b/docs/lib/bps.types.rst
new file mode 100644
index 0000000..6375e6b
--- /dev/null
+++ b/docs/lib/bps.types.rst
@@ -0,0 +1,40 @@
+=================================================
+:mod:`bps.types` -- Useful Classes and Types
+=================================================
+
+.. module:: bps.types
+ :synopsis: useful classes and types
+
+This module contains most of the classes defined by BPS:
+
+ * `base classes`_
+ * `simple data structures`_
+ * `dictionary classes`_
+ * `other classes`_
+
+Base Classes
+============
+.. autoclass:: BaseClass
+.. autoclass:: BaseMetaClass
+
+Simple Data Structures
+======================
+.. autoclass:: stub
+
+.. class:: namedtuple
+
+ Returns a new subclass with named tuple fields
+
+ This class is just a backport from Python 2.6.
+ When BPS is loaded under 2.6 or higher,
+ the native implementation will be used instead.
+
+Dictionary Classes
+==================
+.. autoclass:: CustomDict
+.. autoclass:: OrderedDict
+
+Other Classes
+=============
+.. autoclass:: CloseableClass
+
diff --git a/docs/lib/bps.undef.rst b/docs/lib/bps.undef.rst
new file mode 100644
index 0000000..0f8441b
--- /dev/null
+++ b/docs/lib/bps.undef.rst
@@ -0,0 +1,37 @@
+==========================================
+:mod:`bps.undef` -- The "Undefined" Object
+==========================================
+
+.. module:: bps.undef
+ :synopsis: provides an "Undef" singleton (ala Javascript)
+
+Other languages like javascript (and frequently other python libraries)
+have the recurring need for a "undefined" singleton, representing
+that a value is not specified; this is opposed to ``None``
+which technically represents "no value present", but does double duty
+as meaning "undefined" as well. But sometimes, that double duty just doesn't
+cut it. BPS provides the following Undef object.
+
+.. data:: Undef
+
+ The Undef object signals that the value is not defined.
+ It has the unique property that it is never equal to anything (in a boolean sense),
+ including itself, much like the sql "NULL" object.
+
+.. function:: defined(value)
+
+ Helper for checking if a value is or is not the :data:`Undef` object.
+ This is just for completeness; it's equivalent to ``value is not Undef``,
+ which is typically faster.
+
+.. function:: undefined(value)
+
+ Inverse of :func:`defined`.
+
+.. caution::
+ Mako's "Undefined" and peak's "NOT_GIVEN" objects are other examples
+ of this singleton. Hopefully a way will be found to unify these objects
+ before it becomes a problem. Because of this, it's generally
+ useful to use Undef as an internal value inside your code,
+ usually as a default value for a function keyword,
+ and never use it as a return value.
diff --git a/docs/lib/bps.warndep.rst b/docs/lib/bps.warndep.rst
new file mode 100644
index 0000000..a234f1b
--- /dev/null
+++ b/docs/lib/bps.warndep.rst
@@ -0,0 +1,29 @@
+=======================================================
+:mod:`bps.warndep` -- Warning and deprecation Utilities
+=======================================================
+
+.. module:: bps.warndep
+ :synopsis: warning & deprecation utilities
+
+This module contains some helpful functions for
+issuing deprecation warnings about methods,
+functions, and properties which are about to
+be relocated or removed entirely.
+
+Deprecation Decorators
+======================
+These decorators automatically issue
+a deprecation warning when the decorated
+object is accessed:
+
+.. autofunction:: deprecated_function
+.. autofunction:: deprecated_method
+
+Deprecation Constructors
+========================
+These functions create an entirely new object,
+usually wrapping the old object in some manner.
+
+.. autofunction:: deprecated_property
+.. autofunction:: relocated_function
+
diff --git a/docs/make.py b/docs/make.py
new file mode 100644
index 0000000..feeff9d
--- /dev/null
+++ b/docs/make.py
@@ -0,0 +1,3 @@
+"Makefile for Sphinx documentation, adapted to python"
+from bps.unstable.bpsdoc.make_helper import SphinxMaker
+SphinxMaker.execute(root=__file__)
diff --git a/docs/overview.rst b/docs/overview.rst
new file mode 100644
index 0000000..aae91cb
--- /dev/null
+++ b/docs/overview.rst
@@ -0,0 +1,154 @@
+================
+Library Overview
+================
+
+BPS started life in 2003 as an in-house collection of small functions
+and tools which were frequently needed by the programmers at
+`Assurance Technologies <http://www.assurancetechnologies.com>`_.
+Over the years, it has accumulated more small functions,
+but it has also acquired some modules which provide major
+new features that go above and beyond simple utility functions.
+Since we have benefited greatly from open source software,
+this library was released publicly in 2009, in order
+to fill a few niches for which there is a need (password hashing,
+desktop interaction), as well as to simply give something
+back to the community.
+
+.. module:: bps
+ :synopsis: Root of all BPS modules
+
+Organization
+============
+Everything in BPS falls into two main categories:
+There are modules which contain interconnected
+functions dealing with a specific topic (the `service modules`_),
+and there are the modules which contain smaller utility
+functions which aren't really connected to each other,
+but which are grouped together for convenience based on a common
+subject (the `utility modules`_). You may read through
+the entirety of the documentation to find any functions
+which might be useful, or jump directly to a submodule
+whose services you already know you need.
+
+Service Modules
+===============
+The following modules contain tightly-knit sets of interconnected functions,
+and each module provides a unique set of services which would not be possible
+without all the functions it contains:
+
+ :mod:`bps.fs`
+
+ This provides a "magic" filepath object, as well as some other filesystem
+ related helpers. The magic filepath object is a string subclass
+ which allows you to manipulate filepaths (and interact with the filesystem)
+ in an object oriented manner.
+ *Warning: use of this module can be incredibly addictive.*
+
+ :mod:`bps.host`
+
+ This provides a wide array of functions for detecting host resource
+ paths, managing processes, and interacting with the desktop,
+ all in a os-neutral manner.
+
+ :mod:`bps.logs`
+
+ This module contains a number of helper utilities
+ for using python's built-in logging module:
+
+ * an easier-to-use logging config format for ini files.
+ * a more programmatic interface for configuring the logging system.
+ * ability to capture & redirect stdio, and the warnings module.
+
+ :mod:`bps.security`
+
+ This module contains a sophisticated system for creating & verifying
+ password hashes, supporting all the major unix password hashing schemes
+ (in native python no less).
+
+Utility Modules
+===============
+Unlike the service modules, the remaining modules in bps
+are collections of smaller standalone functions, grouped
+together by common theme:
+
+ :mod:`bps.basic`
+
+ Utility functions for manipulating
+ common python data structures, such as helpers
+ for manipulating dicts, sets, and others.
+
+ :mod:`bps.cache`
+
+ Decorators and helpers
+ for doing memoization and related activities.
+
+ :mod:`bps.error.types`
+
+ Assorted Exceptions classes which are used by BPS
+ or which may be generally useful.
+
+ :mod:`bps.meta`
+
+ Introspection tools,
+ decorators for meta-level activities (eg abstract methods),
+ and monkeypatching.
+
+ :mod:`bps.numeric`
+
+ Numeric related helpers,
+ mainly as an extension to stdlib's math module.
+
+ :mod:`bps.refs`
+
+ Weak reference helpers and proxy objects.
+
+ :mod:`bps.security`
+
+ Security tools, mainly password hashing and generation.
+
+ :mod:`bps.stream`
+
+ Buffer and stream related tools.
+
+ :mod:`bps.text`
+
+ Tool for manipulating text strings,
+ and other language related operations. This includes a noun
+ pluralization function, a function for sanitizing user-provided
+ filenames, ``asbool``, and more.
+ *For Python 2.5 users, this also provides a backport of Python 2.6's
+ "str.format()" system.*
+
+ :mod:`bps.types`
+
+ A collection of assorted classes which are frequently helpful
+ in programming, such as `bps.types.BaseClass`, which provides
+ automatic super() support.
+
+ :mod:`bps.warndep`
+
+ Decorators for easily raising deprecation warnings
+ when you move / relocate functions, methods, and properties
+ in your application.
+
+ :mod:`bps.misc`
+ This module contains any tools which don't fit into one of the other
+ categories.
+
+The things left out...
+===========================
+One other module exists which is purposely not documented:
+
+ :mod:`bps.unstable`
+ This module contains functions
+ which have been added to BPS by the developers, but aren't officially
+ included and documented for any number of reasons...
+
+ * too application specific
+ * not developed long enough
+ * not tested thoroughly enough
+ * look neat, but don't have any real world use-cases yet
+
+ Use them if you dare, they may be removed or recoded on the spur
+ of the moment. The same goes for some of the other
+ present-but-undocumented functions you may find in the BPS source.
diff --git a/docs/roadmap.rst b/docs/roadmap.rst
new file mode 100644
index 0000000..4585a9c
--- /dev/null
+++ b/docs/roadmap.rst
@@ -0,0 +1,54 @@
+=======
+Roadmap
+=======
+
+Planned Features
+================
+The following is the list of pending tasks which definitely need to be completed
+for BPS, roughly in the order they will probably get done:
+
+* Finish rewriting and documenting BPS's enhancements to the
+ standard logging system.
+
+* Clean up "bps.host" interface system, and document it.
+
+* Make sure config_parser module has been converted.
+
+* Unittests do not have good overall coverage.
+
+* The following modules have yet to be documented:
+
+ - bps.numeric
+ - bps.undef
+ - bps.logs
+
+* Release to public.
+ This is being put off until documentation and unittests are fleshed out more,
+ and some needed redesigns are done before external apps become dependant
+ on legacy behaviors.
+
+Wishlist
+========
+The following are things which it would be nice to add to BPS,
+but the need is not pressing, and no particular plans have been drawn up:
+
+* Merge into BPS the security policy framework
+ currently used by many of our projects.
+ (probably under "bps.security.policy").
+
+* Fix major bug: :func:`bps.fs.filepath` does not support unicode.
+
+* Merge in the planetbox numeric and stream routines.
+
+* Merge in the threading and dispatcher routines
+ from internal "pxhelpers" library.
+
+* Merge into BPS the user-interaction subsystem from our internal
+ "automigrate" library (probably under "bps.host.interact").
+
+* Merge in "signals", "app.locking", and "app.command"
+ packages from the internal company library "astllc".
+
+Todos
+=====
+.. todolist::
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..43cbcd9
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,10 @@
+[egg_info]
+#tag_build = .dev.1
+tag_svn_revision = true
+
+[upload]
+repository = http://dl.astllc.org/eggs/upload
+
+[aliases]
+release =
+bdist_egg = bdist_egg rotate -k1 -m.egg
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..bc01b46
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,43 @@
+"""
+bps setup script
+"""
+#=========================================================
+#init app env
+#=========================================================
+import sys,os
+from os.path import abspath, join
+root_path = abspath(join(__file__, ".."))
+os.chdir(root_path)
+lib_path = '.'
+##lib_path = abspath(join(root_path,""))
+##if lib_path not in sys.path:
+## sys.path.insert(0, lib_path)
+#=========================================================
+#imports
+#=========================================================
+from setuptools import setup, find_packages
+from bps import __version__ as version
+#=========================================================
+#setup
+#=========================================================
+setup(
+ #package info
+ packages = find_packages(where=lib_path),
+## package_data = {},
+## package_dir= { '': lib_path },
+
+ # metadata
+ name = "bps",
+ version = version,
+ author = "Eli Collins",
+ author_email = "elic@astllc.org",
+ description = "a package of helper routines for python apps",
+ license = "BSD",
+ keywords = "ast",
+ url = "http://www.astllc.org",
+ # could also include long_description, download_url, classifiers, etc.
+ zip_safe=True,
+)
+#=========================================================
+#EOF
+#=========================================================