"""environment.py
Establish data / cache file paths, and configurations,
bootstrap fixture data if necessary.
"""
from . import caching_query
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from dogpile.cache.region import make_region
import os
from hashlib import md5
import sys
py2k = sys.version_info < (3, 0)
if py2k:
    input = raw_input
# dogpile cache regions. A home base for cache configurations.
regions = {}
# scoped_session. Apply our custom CachingQuery class to it,
# using a callable that will associate the dictionary
# of regions with the Query.
Session = scoped_session(
    sessionmaker(
        query_cls=caching_query.query_callable(regions)
    )
)
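
# Hedged usage sketch: assuming the accompanying caching_query module
# provides a FromCache query option (as in the SQLAlchemy dogpile caching
# example), a query opts into the "default" region like this; "SomeModel"
# is a placeholder for one of the application's mapped classes::
#
#   from .caching_query import FromCache
#   q = Session.query(SomeModel).options(FromCache("default"))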
# global declarative base class.
Base = declarative_base()
root = "./dogpile_data/"
if not os.path.exists(root):
    input(
        "Will create datafiles in %r.\n"
        "To reset the cache + database, delete this directory.\n"
        "Press enter to continue.\n" % root
    )
    os.makedirs(root)
dbfile = os.path.join(root, "dogpile_demo.db")
engine = create_engine('sqlite:///%s' % dbfile, echo=True)
Session.configure(bind=engine)
def md5_key_mangler(key):
    """Receive cache keys as long concatenated strings;
    distill them into an md5 hash.
    """
    return md5(key.encode('ascii')).hexdigest()
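
# For illustration: the mangler distills an arbitrarily long string key
# into a fixed-length hex digest that the dbm backend can store safely,
# e.g.:
#
#   md5_key_mangler("hello")  # -> '5d41402abc4b2a76b9719d911017c592'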
# configure the "default" cache region.
regions['default'] = make_region(
    # the "dbm" backend needs string-encoded keys
    key_mangler=md5_key_mangler
).configure(
    # the file-based "dbm" backend is used here to illustrate
    # serialized persistence.  Normally memcached or similar
    # is a better choice for caching.
    'dogpile.cache.dbm',
    expiration_time=3600,
    arguments={
        "filename": os.path.join(root, "cache.dbm")
    }
)
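
# Hedged usage sketch: the region is also usable directly through
# dogpile.cache's public API, independently of the Query integration,
# e.g. via the cache_on_arguments() decorator; expensive_lookup() is a
# hypothetical function used only for illustration::
#
#   @regions['default'].cache_on_arguments()
#   def expensive_lookup(name):
#       ...  # some expensive computation or database round trip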
# optional; call invalidate() on the region once created so that all
# data is fresh when the app is restarted.  Good for development; on a
# production system it needs to be used carefully.
# regions['default'].invalidate()
installed = False
def bootstrap():
    global installed
    from . import fixture_data
    if not os.path.exists(dbfile):
        fixture_data.install()
        installed = True
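
# Hedged usage sketch: an application entry point would typically call
# bootstrap() once at startup, before issuing queries that rely on the
# fixture data::
#
#   from .environment import Session, bootstrap
#   bootstrap()   # creates dogpile_demo.db and installs fixtures on first run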