author    | Jesús Leganés Combarro "Piranna" <piranna@gmail.com> | 2011-08-18 00:10:43 +0200
committer | Jesús Leganés Combarro "Piranna" <piranna@gmail.com> | 2011-08-18 00:10:43 +0200
commit    | c88aefb12aa07a2c24ec9cfcbe5b73353b5feed7 (patch)
tree      | 43a293f3c46801f18391e13a49f5b8263d05d4b7
parent    | 9906f557a7ad7ec09a19720b4ca8cbedf249b03a (diff)
download  | sqlparse-c88aefb12aa07a2c24ec9cfcbe5b73353b5feed7.tar.gz
Added pipeline support maintaining backward compatibility.
-rw-r--r-- | sqlparse/__init__.py | 6
-rw-r--r-- | sqlparse/filters.py  | 10
-rw-r--r-- | sqlparse/pipeline.py | 90
3 files changed, 105 insertions, 1 deletions
diff --git a/sqlparse/__init__.py b/sqlparse/__init__.py
index 7698e46..5ccf092 100644
--- a/sqlparse/__init__.py
+++ b/sqlparse/__init__.py
@@ -53,3 +53,9 @@ def split(sql):
     stack = engine.FilterStack()
     stack.split_statements = True
     return [unicode(stmt) for stmt in stack.run(sql)]
+
+
+from sqlparse.engine.filter import StatementFilter
+def split2(stream):
+    splitter = StatementFilter()
+    return list(splitter.process(None, stream))
\ No newline at end of file
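For context, a small usage sketch of the new split2() entry point (not part of the commit itself). It assumes the token stream comes from sqlparse.lexer.tokenize, the same lexer the rest of the package uses, and the sample SQL is purely illustrative:

    from sqlparse import split2
    from sqlparse.lexer import tokenize

    sql = "SELECT 1; SELECT 2;"

    # split2() hands the raw token stream to StatementFilter and materializes
    # the result, so this should yield one entry per statement in the input.
    statements = split2(tokenize(sql))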
diff --git a/sqlparse/filters.py b/sqlparse/filters.py
index 4d849cb..6f9b579 100644
--- a/sqlparse/filters.py
+++ b/sqlparse/filters.py
@@ -463,6 +463,14 @@ class SerializerUnicode(Filter):
             res += '\n'
         return res
 
+def Tokens2Unicode(stream):
+    result = ""
+
+    for _, value in stream:
+        result += unicode(value)
+
+    return result
+
 
 
 class OutputPythonFilter(Filter):
@@ -576,4 +584,4 @@ class Limit(Filter):
             if index and token_type in Keyword and value == 'LIMIT':
                 return stream[4 - index][1]
 
-        return -1
+        return -1
\ No newline at end of file
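The new Tokens2Unicode() helper flattens a stream of (token_type, value) pairs back into a single unicode string. A minimal sketch, again assuming the stream comes from sqlparse.lexer.tokenize:

    from sqlparse.filters import Tokens2Unicode
    from sqlparse.lexer import tokenize

    sql = "SELECT name FROM users"

    # The lexer's (token_type, value) pairs cover the whole input, so joining
    # the values should reproduce the original statement unchanged.
    print Tokens2Unicode(tokenize(sql))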
diff --git a/sqlparse/pipeline.py b/sqlparse/pipeline.py
new file mode 100644
index 0000000..167d083
--- /dev/null
+++ b/sqlparse/pipeline.py
@@ -0,0 +1,90 @@
+# Copyright (C) 2011 Jesus Leganes "piranna", piranna@gmail.com
+#
+# This module is part of python-sqlparse and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php.
+
+from types import GeneratorType
+
+
+class Pipeline(list):
+    """Pipeline to process filters sequentially"""
+
+    def __call__(self, stream):
+        """Run the pipeline
+
+        Return a static (non generator) version of the result
+        """
+
+        # Run the stream over all the filters on the pipeline
+        for filter in self:
+            # Functions and callable objects (objects with '__call__' method)
+            if callable(filter):
+                stream = filter(stream)
+
+            # Normal filters (objects with 'process' method)
+            else:
+                stream = filter.process(None, stream)
+
+        # If last filter return a generator, staticalize it inside a list
+        if isinstance(stream, GeneratorType):
+            return list(stream)
+        return stream
+
+
+if __name__ == '__main__':
+    sql = """-- type: script
+    -- return: integer
+
+    INCLUDE "Direntry.make.sql";
+
+    INSERT INTO directories(inode)
+    VALUES(:inode)
+    LIMIT 1"""
+
+    sql2 = """SELECT child_entry,asdf AS inode, creation
+    FROM links
+    WHERE parent_dir == :parent_dir AND name == :name
+    LIMIT 1"""
+
+    sql3 = """SELECT
+    0 AS st_dev,
+    0 AS st_uid,
+    0 AS st_gid,
+
+    dir_entries.type AS st_mode,
+    dir_entries.inode AS st_ino,
+    COUNT(links.child_entry) AS st_nlink,
+
+    :creation AS st_ctime,
+    dir_entries.access AS st_atime,
+    dir_entries.modification AS st_mtime,
+-- :creation AS st_ctime,
+-- CAST(STRFTIME('%s',dir_entries.access) AS INTEGER) AS st_atime,
+-- CAST(STRFTIME('%s',dir_entries.modification) AS INTEGER) AS st_mtime,
+
+    COALESCE(files.size,0) AS st_size, -- Python-FUSE
+    COALESCE(files.size,0) AS size     -- PyFilesystem
+
+    FROM dir_entries
+        LEFT JOIN files
+            ON dir_entries.inode == files.inode
+        LEFT JOIN links
+            ON dir_entries.inode == links.child_entry
+
+    WHERE dir_entries.inode == :inode
+
+    GROUP BY dir_entries.inode
+    LIMIT 1"""
+
+    from filters import ColumnsSelect
+    from lexer import tokenize
+
+    def show(args):
+        for a in args:
+            print repr(a)
+
+    pipe = Pipeline()
+    pipe.append(tokenize)
+    pipe.append(ColumnsSelect())
+
+    show(pipe(sql3))
\ No newline at end of file
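The module's own __main__ block drives the pipeline with tokenize plus ColumnsSelect. As a second, minimal sketch (assuming the package is importable as sqlparse and using only names added in this commit), a Pipeline can also chain plain callables such as Tokens2Unicode:

    from sqlparse.filters import Tokens2Unicode
    from sqlparse.lexer import tokenize
    from sqlparse.pipeline import Pipeline

    pipe = Pipeline()
    pipe.append(tokenize)        # plain callable: invoked as filter(stream)
    pipe.append(Tokens2Unicode)  # also a callable, returns a unicode string

    # Each stage runs in order; if the last stage returned a generator it
    # would be materialized into a list before being returned.
    print pipe("SELECT 1 FROM dual")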