| author | Jesús Leganés Combarro "Piranna" <piranna@gmail.com> | 2012-05-19 21:46:02 +0200 |
|---|---|---|
| committer | Jesús Leganés Combarro "Piranna" <piranna@gmail.com> | 2012-05-19 21:46:02 +0200 |
| commit | ba2294f560755dcf4ce59fa706fda80e9738131f (patch) | |
| tree | d5ce276ad5e2661f8419f2c1b8a2338f780758f3 /sqlparse | |
| parent | 1badc3a3de466f521132d0fdf235c7dc1e64e5b7 (diff) | |
| parent | 66742da10ebdc2bc485022ecbd59278d3fc96488 (diff) | |
| download | sqlparse-ba2294f560755dcf4ce59fa706fda80e9738131f.tar.gz | |
Merge branch 'master' into milestone_0.1.5
Diffstat (limited to 'sqlparse')
| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | sqlparse/filters.py | 47 |
| -rw-r--r-- | sqlparse/lexer.py | 3 |
| -rw-r--r-- | sqlparse/sql.py | 1 |
| -rw-r--r-- | sqlparse/utils.py | 46 |
4 files changed, 84 insertions, 13 deletions
```diff
diff --git a/sqlparse/filters.py b/sqlparse/filters.py
index d40c936..0fbcbda 100644
--- a/sqlparse/filters.py
+++ b/sqlparse/filters.py
@@ -5,11 +5,11 @@ import re
 from os.path import abspath, join
 from warnings import warn
 
-from sqlparse import sql
-from sqlparse import tokens as T
+from sqlparse import sql, tokens as T
 from sqlparse.engine import FilterStack
 from sqlparse.tokens import (Comment, Comparison, Keyword, Name, Punctuation,
                              String, Whitespace)
+from sqlparse.utils import memoize_generator
 
 
 # --------------------------
@@ -105,12 +105,17 @@ def StripWhitespace(stream):
 class IncludeStatement:
     """Filter that enable a INCLUDE statement"""
 
-    def __init__(self, dirpath=".", maxRecursive=10):
+    def __init__(self, dirpath=".", maxrecursive=10, raiseexceptions=False):
+        if maxrecursive <= 0:
+            raise ValueError('Max recursion limit reached')
+
         self.dirpath = abspath(dirpath)
-        self.maxRecursive = maxRecursive
+        self.maxRecursive = maxrecursive
+        self.raiseexceptions = raiseexceptions
 
         self.detected = False
 
+    @memoize_generator
     def process(self, stack, stream):
         warn("Deprecated, use callable objects. This will be removed at 0.2.0",
              DeprecationWarning)
@@ -126,30 +131,48 @@ class IncludeStatement:
             elif self.detected:
                 # Omit whitespaces
                 if token_type in Whitespace:
-                    pass
-
-                # Get path of file to include
-                path = None
+                    continue
 
+                # Found file path to include
                 if token_type in String.Symbol:
 #                if token_type in tokens.String.Symbol:
+
+                    # Get path of file to include
                     path = join(self.dirpath, value[1:-1])
 
-                # Include file if path was found
-                if path:
                     try:
                         f = open(path)
                         raw_sql = f.read()
                         f.close()
+
+                    # There was a problem loading the include file
                     except IOError, err:
+                        # Raise the exception to the interpreter
+                        if self.raiseexceptions:
+                            raise
+
+                        # Put the exception as a comment on the SQL code
                         yield Comment, u'-- IOError: %s\n' % err
 
                     else:
                         # Create new FilterStack to parse readed file
                         # and add all its tokens to the main stack recursively
-                        # [ToDo] Add maximum recursive iteration value
+                        try:
+                            filtr = IncludeStatement(self.dirpath,
+                                                     self.maxRecursive - 1,
+                                                     self.raiseexceptions)
+
+                        # Max recursion limit reached
+                        except ValueError, err:
+                            # Raise the exception to the interpreter
+                            if self.raiseexceptions:
+                                raise
+
+                            # Put the exception as a comment on the SQL code
+                            yield Comment, u'-- ValueError: %s\n' % err
+
                         stack = FilterStack()
-                        stack.preprocess.append(IncludeStatement(self.dirpath))
+                        stack.preprocess.append(filtr)
 
                         for tv in stack.run(raw_sql):
                             yield tv
diff --git a/sqlparse/lexer.py b/sqlparse/lexer.py
index dc794ab..5b0f116 100644
--- a/sqlparse/lexer.py
+++ b/sqlparse/lexer.py
@@ -179,6 +179,9 @@ class Lexer(object):
             (r'\$([a-zA-Z_][a-zA-Z0-9_]*)?\$', tokens.Name.Builtin),
             (r'\?{1}', tokens.Name.Placeholder),
             (r'[$:?%][a-zA-Z0-9_]+', tokens.Name.Placeholder),
+            # FIXME(andi): VALUES shouldn't be listed here
+            # see https://github.com/andialbrecht/sqlparse/pull/64
+            (r'VALUES', tokens.Keyword),
             (r'@[a-zA-Z_][a-zA-Z0-9_]+', tokens.Name),
             (r'[a-zA-Z_][a-zA-Z0-9_]*(?=[.(])', tokens.Name),  # see issue39
             (r'[-]?0x[0-9a-fA-F]+', tokens.Number.Hexadecimal),
diff --git a/sqlparse/sql.py b/sqlparse/sql.py
index 4941e3d..05e078d 100644
--- a/sqlparse/sql.py
+++ b/sqlparse/sql.py
@@ -97,7 +97,6 @@ class Token(object):
                     return True
             return False
 
-        print len(values)
         return self.value in values
 
     def is_group(self):
diff --git a/sqlparse/utils.py b/sqlparse/utils.py
new file mode 100644
index 0000000..fd6651a
--- /dev/null
+++ b/sqlparse/utils.py
@@ -0,0 +1,46 @@
+'''
+Created on 17/05/2012
+
+@author: piranna
+'''
+
+
+def memoize_generator(func):
+    """Memoize decorator for generators
+
+    Store `func` results in a cache according to their arguments as 'memoize'
+    does but instead this works on decorators instead of regular functions.
+    Obviusly, this is only useful if the generator will always return the same
+    values for each specific parameters...
+    """
+    cache = {}
+
+    def wrapped_func(*args, **kwargs):
+        params = (args, kwargs)
+
+        # Look if cached
+        try:
+            cached = cache[params]
+
+        # Not cached, exec and store it
+        except KeyError:
+            # Reset the cache if we have too much cached entries and start over
+            # In the future would be better to use an OrderedDict and drop the
+            # Least Recent Used entries
+            if len(cache) >= 10:
+                cache.clear()
+
+            cached = []
+
+            for item in func(*args, **kwargs):
+                cached.append(item)
+                yield item
+
+            cache[params] = cached
+
+        # Cached, yield its items
+        else:
+            for item in cached:
+                yield item
+
+    return wrapped_func
```
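The `filters.py` hunks above give `IncludeStatement` an explicit recursion limit and a `raiseexceptions` switch. For context, the filter is wired into a `FilterStack` the same way `process()` does internally when it recurses into an included file. Below is a minimal sketch of that wiring, assuming the 0.1.x API visible in this diff; the `schema.sql` file name and the example SQL are illustrative only:

```python
from sqlparse.engine import FilterStack
from sqlparse.filters import IncludeStatement

# Register IncludeStatement as a preprocess filter. With
# raiseexceptions=False, IOError/ValueError problems are injected into the
# token stream as "-- ..." SQL comments instead of being raised.
stack = FilterStack()
stack.preprocess.append(IncludeStatement('.', maxrecursive=10,
                                         raiseexceptions=False))

# With only preprocess filters configured, run() yields (token_type, value)
# pairs, and tokens from included files appear inline in the main stream.
for token_type, value in stack.run(u'INCLUDE "schema.sql"; SELECT 1'):
    print token_type, value
```

Passing `raiseexceptions=True` instead lets the same failures propagate to the caller, which is what the added `if self.raiseexceptions: raise` branches implement.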

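The new `sqlparse/utils.py` adds `memoize_generator`, which records what a decorated generator yields and replays the recorded items on later calls made with the same arguments. Below is a simplified standalone sketch of that idea, for illustration only; it is not the committed helper and, unlike the version above, it keys the cache on hashable positional arguments rather than on `(args, kwargs)`:

```python
def memoize_generator_sketch(func):
    """Cache and replay a generator's output per argument tuple."""
    cache = {}

    def wrapped(*args):
        # Replay previously recorded items for these arguments
        if args in cache:
            for item in cache[args]:
                yield item
            return

        # First call: record every item while still yielding it lazily
        recorded = []
        for item in func(*args):
            recorded.append(item)
            yield item

        # Only a fully consumed run gets stored, as in the helper above
        cache[args] = recorded

    return wrapped


@memoize_generator_sketch
def squares(n):
    for i in range(n):
        yield i * i

print list(squares(3))  # generator body runs: [0, 1, 4]
print list(squares(3))  # replayed from the cache: [0, 1, 4]
```

As the docstring in the diff notes, this kind of caching only makes sense when the generator is deterministic for a given set of parameters; the committed helper also clears its cache once it holds ten entries to bound memory use.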