author    Jordan Cook <jordan.cook@pioneer.com>  2021-05-26 21:25:41 -0500
committer Jordan Cook <jordan.cook@pioneer.com>  2021-06-11 11:13:37 -0500
commit    3b638c9e8956bcd4900b62fcabafd0ce528069b6 (patch)
tree      00c4ac152da3eecfda189197c4c333f5a3644cb5 /examples
parent    d1d5b00b607ee41cb44b2ec0ab683bc2e8f3e81e (diff)
download  requests-cache-3b638c9e8956bcd4900b62fcabafd0ce528069b6.tar.gz
Refactor JSONSerializer using cattrs; add support for ultrajson
Diffstat (limited to 'examples')
-rw-r--r--  examples/performance_test.py  101
1 file changed, 64 insertions(+), 37 deletions(-)
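
As a rough illustration of what this commit enables (not part of the diff below): the preconfigured cattrs JSON converter plus ujson can round-trip an attrs object as shown in this minimal sketch. The Record class is a made-up stand-in for CachedResponse, and the ISO 8601 datetime handling is assumed from the preconfigured JSON converter's hooks.

    from datetime import datetime

    import attr
    import ujson
    from cattr.preconf.json import make_converter


    @attr.s(auto_attribs=True)
    class Record:
        url: str
        created_at: datetime


    # Converter preconfigured for JSON-safe types (e.g. datetimes <-> ISO 8601 strings)
    converter = make_converter()

    record = Record('https://httpbin.org/get?x=y', datetime(2021, 5, 26))
    serialized = ujson.dumps(converter.unstructure(record))  # attrs object -> dict -> JSON string
    restored = converter.structure(ujson.loads(serialized), Record)
    assert restored == record
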
diff --git a/examples/performance_test.py b/examples/performance_test.py
index b3dc1ef..4d092b1 100644
--- a/examples/performance_test.py
+++ b/examples/performance_test.py
@@ -1,81 +1,108 @@
"""A manual test to compare performance of different serializers"""
# flake8: noqa: F401
"""
-CPU Results:
-jsonpickle.encode x10000: 5.673
-jsonpickle.decode x10000: 5.448
-pickle.dumps x10000: 0.256
-pickle.loads x10000: 0.260
-cattrs.unstructure x10000: 0.002
-cattrs.structure x10000: 0.002
-cattrs + pickle.dumps x10000: 0.251
-cattrs + pickle.loads x10000: 0.253
+CPU Results (x10000 iterations):
+jsonpickle.encode: 8.846
+jsonpickle.decode: 9.166
+pickle.dumps: 0.433
+pickle.loads: 0.734
+cattrs.unstructure: 1.124
+cattrs.structure: 1.048
+cattrs+pickle.dumps: 1.219
+cattrs+pickle.loads: 1.189
+cattrs+json.dumps: 2.005
+cattrs+json.loads: 2.312
+cattrs+ujson.dumps: 1.803
+cattrs+ujson.loads: 2.128
"""
+import json
+import os
import pickle
+import sys
+from os.path import abspath, dirname, join
from time import perf_counter as time
import jsonpickle
+import ujson
+from cattr.preconf.json import make_converter
from memory_profiler import profile
from rich import print
+# Add project path
+sys.path.insert(0, os.path.abspath('..'))
+
from requests_cache import CachedSession
-from requests_cache.serializers import PickleSerializer
+from requests_cache.serializers import BaseSerializer, JSONSerializer, PickleSerializer
ITERATIONS = 10000
session = CachedSession()
-session.cache.clear()
+r = session.get('https://httpbin.org/get?x=y')
r = session.get('https://httpbin.org/get?x=y')
def test_jsonpickle():
- start = time()
- serialized = [jsonpickle.encode(r, use_base85=True) for i in range(ITERATIONS)]
- print(f'jsonpickle.encode x{ITERATIONS}: {time() - start:.3f}')
-
- start = time()
- deserialized = [jsonpickle.decode(obj) for obj in serialized]
- print(f'jsonpickle.decode x{ITERATIONS}: {time() - start:.3f}')
+ base_test('jsonpickle', jsonpickle.encode, jsonpickle.decode)
def test_pickle():
- start = time()
- serialized = [pickle.dumps(r) for i in range(ITERATIONS)]
- print(f'pickle.dumps x{ITERATIONS}: {time() - start:.3f}')
-
- start = time()
- serialized = [pickle.dumps(r) for i in range(ITERATIONS)]
- print(f'pickle.loads x{ITERATIONS}: {time() - start:.3f}')
+ base_test('pickle', pickle.dumps, pickle.loads)
def test_cattrs():
s = PickleSerializer()
- start = time()
- serialized = [s.unstructure(r) for i in range(ITERATIONS)]
- print(f'cattrs.unstructure x{ITERATIONS}: {time() - start:.3f}')
-
- start = time()
- deserialized = [s.structure(obj) for obj in serialized]
- print(f'cattrs.structure x{ITERATIONS}: {time() - start:.3f}')
+ base_test('cattrs', s.unstructure, s.structure)
def test_cattrs_pickle():
s = PickleSerializer()
+ base_test('PickleSerializer', s.dumps, s.loads)
+
+
+def test_cattrs_json():
+ s = BaseSerializer(converter_factory=make_converter)
+ base_test(
+ 'json',
+ lambda obj: json.dumps(s.unstructure(obj)),
+ lambda obj: s.structure(json.loads(obj)),
+ )
+
+
+def test_cattrs_ujson():
+ s = BaseSerializer(converter_factory=make_converter)
+ base_test(
+ 'ujson',
+ lambda obj: ujson.dumps(s.unstructure(obj)),
+ lambda obj: s.structure(ujson.loads(obj)),
+ )
+
+
+def base_test(module, serialize, deserialize):
start = time()
- serialized = [s.dumps(r) for i in range(ITERATIONS)]
- print(f'cattrs + pickle.dumps x{ITERATIONS}: {time() - start:.3f}')
+ serialized = [serialize(r) for i in range(ITERATIONS)]
+ print(f'{module}.{serialize.__name__} x{ITERATIONS}: {time() - start:.3f}')
start = time()
- deserialized = [s.loads(obj) for obj in serialized]
- print(f'cattrs + pickle.loads x{ITERATIONS}: {time() - start:.3f}')
+ deserialized = [deserialize(obj) for obj in serialized]
+ print(f'{module}.{deserialize.__name__} x{ITERATIONS}: {time() - start:.3f}')
+
+
+def dumps(self, response: CachedResponse) -> bytes:
+ return json.dumps(super().unstructure(response), indent=2) # , cls=ResponseJSONEncoder)
+
+
+def loads(self, obj: bytes) -> CachedResponse:
+ return super().structure(json.loads(obj))
if __name__ == '__main__':
print('CPU:')
- test_jsonpickle()
+ # test_jsonpickle()
test_pickle()
test_cattrs()
test_cattrs_pickle()
+ test_cattrs_json()
+ test_cattrs_ujson()
# Memory
# print('\nMemory:')
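
For reference, here is a standalone, simplified version of the timing pattern the refactored script uses: time N serialize/deserialize round trips for any pair of callables. The names time_round_trip and sample are illustrative only and not part of the project.

    import json
    import pickle
    from time import perf_counter

    ITERATIONS = 10000
    sample = {'url': 'https://httpbin.org/get?x=y', 'status': 200}


    def time_round_trip(label, serialize, deserialize):
        # Time serialization of the same object ITERATIONS times
        start = perf_counter()
        serialized = [serialize(sample) for _ in range(ITERATIONS)]
        print(f'{label} dumps x{ITERATIONS}: {perf_counter() - start:.3f}')

        # Time deserialization of each serialized copy
        start = perf_counter()
        deserialized = [deserialize(obj) for obj in serialized]
        print(f'{label} loads x{ITERATIONS}: {perf_counter() - start:.3f}')
        return deserialized


    time_round_trip('pickle', pickle.dumps, pickle.loads)
    time_round_trip('json', json.dumps, json.loads)
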