diff options
author | Mike Bayer <mike_mp@zzzcomputing.com> | 2008-11-06 23:07:47 +0000 |
---|---|---|
committer | Mike Bayer <mike_mp@zzzcomputing.com> | 2008-11-06 23:07:47 +0000 |
commit | c3352e5542001d1a5614af260d74ad8757a59f26 (patch) | |
tree | 109a2af43b80ad3b244a3d1d8a395435d37eff3d /lib/sqlalchemy/ext/serializer.py | |
parent | 84003a8d402c5d7539cf2d53f4061cde62d04413 (diff) | |
download | sqlalchemy-c3352e5542001d1a5614af260d74ad8757a59f26.tar.gz |
- Fixed bug in Query involving order_by() in conjunction with
multiple aliases of the same class (will add tests in
[ticket:1218])
- Added a new extension sqlalchemy.ext.serializer. Provides
Serializer/Deserializer "classes" which mirror Pickle/Unpickle,
as well as dumps() and loads(). This serializer implements
an "external object" pickler which keeps key context-sensitive
objects, including engines, sessions, metadata, Tables/Columns,
and mappers, outside of the pickle stream, and can later
restore the pickle using any engine/metadata/session provider.
This is used not for pickling regular object instances, which are
pickleable without any special logic, but for pickling expression
objects and full Query objects, such that all mapper/engine/session
dependencies can be restored at unpickle time.
Diffstat (limited to 'lib/sqlalchemy/ext/serializer.py')
-rw-r--r-- | lib/sqlalchemy/ext/serializer.py | 129 |
1 file changed, 129 insertions, 0 deletions
"""Serializer/Deserializer objects for usage with SQLAlchemy structures.

Any SQLAlchemy structure, including Tables, Columns, expressions, mappers,
Query objects etc. can be serialized in a minimally-sized format,
and deserialized when given a Metadata and optional ScopedSession object
to use as context on the way out.

Usage is nearly the same as that of the standard Python pickle module::

    from sqlalchemy.ext.serializer import loads, dumps
    metadata = MetaData(bind=some_engine)
    Session = scoped_session(sessionmaker())

    # ... define mappers

    query = Session.query(MyClass).filter(MyClass.somedata == 'foo').order_by(MyClass.sortkey)

    # pickle the query
    serialized = dumps(query)

    # unpickle.  Pass in metadata + scoped_session
    query2 = loads(serialized, metadata, Session)

    print(query2.all())

Similar restrictions as when using raw pickle apply; mapped classes must
themselves be pickleable, meaning they are importable from a module-level
namespace.

Note that instances of user-defined classes do not require this extension
in order to be pickled; these contain no references to engines, sessions
or expression constructs in the typical case and can be serialized directly.
This module is specifically for ORM and expression constructs.

"""

import base64
# stdlib pickle: on Python 3 there is no separate cPickle, so the
# sqlalchemy.util alias the original used is unnecessary.
import pickle
import re
from io import BytesIO  # pickle streams are bytes; cStringIO is Python-2 only

from sqlalchemy import Column, Table
from sqlalchemy.engine import Engine
from sqlalchemy.orm import Query, class_mapper
from sqlalchemy.orm.attributes import QueryableAttribute
from sqlalchemy.orm.mapper import Mapper
from sqlalchemy.orm.session import Session

__all__ = ['Serializer', 'Deserializer', 'dumps', 'loads']


def Serializer(*args, **kw):
    """Return a ``pickle.Pickler`` that externalizes SQLAlchemy context objects.

    ``*args``/``**kw`` are passed straight through to ``pickle.Pickler``;
    the first positional argument is the writable binary file-like object
    that receives the pickle stream.

    Engines, sessions, tables, columns, mappers and instrumented
    attributes are replaced in the stream by string tokens (via the
    pickle "persistent id" protocol) and are re-resolved at load time by
    :func:`Deserializer` against a caller-supplied metadata / session /
    engine.
    """
    pickler = pickle.Pickler(*args, **kw)

    def persistent_id(obj):
        # Return a string token for objects that must stay outside the
        # pickle stream; returning None lets pickle serialize obj normally.
        if isinstance(obj, QueryableAttribute):
            cls = obj.impl.class_
            key = obj.impl.key
            # The owning class is itself pickled and base64-encoded into
            # the token.  base64's alphabet contains no ":", so splitting
            # on ":" at load time is unambiguous.  b64encode returns
            # bytes, hence the explicit ascii decode.
            ident = "attribute:" + key + ":" + \
                base64.b64encode(pickle.dumps(cls)).decode('ascii')
        elif isinstance(obj, Mapper) and not obj.non_primary:
            ident = "mapper:" + \
                base64.b64encode(pickle.dumps(obj.class_)).decode('ascii')
        elif isinstance(obj, Table):
            ident = "table:" + str(obj)
        elif isinstance(obj, Column) and isinstance(obj.table, Table):
            ident = "column:" + str(obj.table) + ":" + obj.key
        elif isinstance(obj, Session):
            ident = "session:"
        elif isinstance(obj, Engine):
            ident = "engine:"
        else:
            return None
        return ident

    pickler.persistent_id = persistent_id
    return pickler


# Matches the tokens emitted by Serializer's persistent_id, splitting them
# into (token type, payload).
our_ids = re.compile(
    r'(mapper|table|column|session|attribute|engine):(.*)')


def Deserializer(file, metadata=None, scoped_session=None, engine=None):
    """Return a ``pickle.Unpickler`` that restores externalized objects.

    :param file: binary file-like object containing the pickle stream.
    :param metadata: optional ``MetaData`` used to resolve table/column
      tokens.
    :param scoped_session: optional scoped session used to resolve
      session tokens (and, via its bind, engine tokens).
    :param engine: optional ``Engine`` used to resolve engine tokens.
    """
    unpickler = pickle.Unpickler(file)

    def get_engine():
        # Engine resolution order: explicit engine, then the scoped
        # session's bind, then the metadata's bind.
        if engine:
            return engine
        elif scoped_session and scoped_session().bind:
            return scoped_session().bind
        elif metadata and metadata.bind:
            return metadata.bind
        else:
            return None

    def persistent_load(ident):
        m = our_ids.match(ident)
        if not m:
            # not one of our tokens; let pickle raise its usual error
            return None
        type_, args = m.group(1, 2)
        if type_ == 'attribute':
            key, clsarg = args.split(":")
            cls = pickle.loads(base64.b64decode(clsarg))
            return getattr(cls, key)
        elif type_ == "mapper":
            cls = pickle.loads(base64.b64decode(args))
            return class_mapper(cls)
        elif type_ == "table":
            return metadata.tables[args]
        elif type_ == "column":
            table, colname = args.split(':')
            return metadata.tables[table].c[colname]
        elif type_ == "session":
            return scoped_session()
        elif type_ == "engine":
            return get_engine()
        else:
            raise Exception("Unknown token: %s" % type_)

    unpickler.persistent_load = persistent_load
    return unpickler


def dumps(obj, protocol=pickle.HIGHEST_PROTOCOL):
    """Serialize *obj* to bytes, externalizing SQLAlchemy context objects.

    :param protocol: pickle protocol number; defaults to the highest
      available (backward compatible — ``loads`` reads any protocol).
    """
    buf = BytesIO()
    pickler = Serializer(buf, protocol)
    pickler.dump(obj)
    return buf.getvalue()


def loads(data, metadata=None, scoped_session=None, engine=None):
    """Deserialize bytes produced by :func:`dumps`.

    ``metadata``, ``scoped_session`` and ``engine`` supply the context
    against which externalized tables, columns, sessions and engines are
    re-resolved; see :func:`Deserializer`.
    """
    buf = BytesIO(data)
    unpickler = Deserializer(buf, metadata, scoped_session, engine)
    return unpickler.load()