#!/usr/bin/env python3
"""A manual test to compare performance of different serializers

Latest results:
---------------
CPU Results (x10000 iterations):
jsonpickle.encode:      8.846
jsonpickle.decode:      9.166
pickle.dumps:           0.433
pickle.loads:           0.734
cattrs.unstructure:     1.124
cattrs.structure:       1.048
cattrs+pickle.dumps:    1.219
cattrs+pickle.loads:    1.189
cattrs+json.dumps:      2.005
cattrs+json.loads:      2.312
cattrs+ujson.dumps:     1.803
cattrs+ujson.loads:     2.128
cattrs+bson.dumps:      1.550
cattrs+bson.loads:      1.322
"""
# flake8: noqa: F401
import json
import os
import pickle
import sys
from time import perf_counter as time

import ujson
from cattr.preconf.json import make_converter

try:
    from rich import print
except ImportError:
    pass

# import jsonpickle
# from memory_profiler import profile

# Add project path
sys.path.insert(0, os.path.abspath('..'))

from requests_cache import CachedSession
from requests_cache.backends.sqlite import SQLiteCache
from requests_cache.serializers import CattrStage, bson_serializer, pickle_serializer

ITERATIONS = 10000

# Make an initial request to populate the cache, then fetch it again to get a cached response
session = CachedSession(SQLiteCache(use_temp=True))
r = session.get('https://httpbin.org/json')
r = session.get('https://httpbin.org/json')
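
# Optional sanity check (sketch): requests-cache sets `from_cache` on responses
# returned by CachedSession, so the second request above should be a cache hit.
# assert r.from_cache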


# def run_jsonpickle():
#     run_serialize_deserialize('jsonpickle', jsonpickle.encode, jsonpickle.decode)


def run_pickle():
    run_serialize_deserialize('pickle', pickle)


def run_cattrs():
    run_serialize_deserialize('cattrs', CattrStage())


def run_cattrs_pickle():
    run_serialize_deserialize('cattrs+pickle', pickle_serializer)


# def run_cattrs_json():
#     s = CattrStage(converter_factory=make_converter)
#     run_serialize_deserialize(
#         'cattrs+json',
#         lambda obj: json.dumps(s.unstructure(obj)),
#         lambda obj: s.structure(json.loads(obj)),
#     )


def run_cattrs_ujson():
    s = CattrStage(factory=make_converter)
    run_serialize_deserialize('cattrs+ujson', s)
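
# Sketch: the run above only times the json-preconfigured converter itself and never
# calls ujson. To also time ujson encoding/decoding of the unstructured dict, a small
# wrapper exposing dumps()/loads() (hypothetical, not part of requests_cache) could be
# passed to run_serialize_deserialize() instead:
#
#   class UJsonStage:
#       def __init__(self, stage):
#           self.stage = stage
#
#       def dumps(self, obj):
#           return ujson.dumps(self.stage.dumps(obj))
#
#       def loads(self, obj):
#           return self.stage.loads(ujson.loads(obj))
#
#   run_serialize_deserialize('cattrs+ujson', UJsonStage(CattrStage(factory=make_converter)))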


def run_cattrs_bson():
    run_serialize_deserialize('cattrs+bson', bson_serializer)


def run_serialize_deserialize(module, serializer):
    """Time dumps() and loads() for any serializer object with a dumps/loads interface"""
    start = time()
    serialized = [serializer.dumps(r) for _ in range(ITERATIONS)]
    print(f'{module}.dumps() x{ITERATIONS}: {time() - start:.3f}')

    start = time()
    deserialized = [serializer.loads(obj) for obj in serialized]
    print(f'{module}.loads() x{ITERATIONS}: {time() - start:.3f}')


if __name__ == '__main__':
    print('CPU:')
    # run_jsonpickle()
    run_pickle()
    run_cattrs()
    run_cattrs_pickle()
    # run_cattrs_json()
    run_cattrs_ujson()
    run_cattrs_bson()

    # Memory
    # print('\nMemory:')
    # profile(run_jsonpickle)()
    # profile(run_pickle)()
    # profile(run_cattrs)()
    # profile(run_cattrs_pickle)()