summaryrefslogtreecommitdiff
path: root/turbo_hipster/cmd/analyse_historical.py
blob: a7ce33aecd676f31fd446c4792b6d19f114f1346 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
#!/usr/bin/python2
#
# Copyright 2013 Rackspace Australia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


import argparse
import datetime
import json
import logging
import os
import re
import sys
import tempfile

import MySQLdb
import swiftclient
import yaml

from turbo_hipster.task_plugins.real_db_upgrade import handle_results


def main():
    """Scan a swift container of turbo-hipster logs and record migration
    timing data in the results MySQL database.

    Reads a yaml config file (``--config``), connects to swift and MySQL,
    pages through the log container 1000 objects at a time, and inserts a
    summary row per migration for any log not already present in the
    ``summary`` table.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config',
                        default='/etc/turbo-hipster/config.yaml',
                        help='Path to yaml config file.')
    args = parser.parse_args()

    with open(args.config, 'r') as config_stream:
        config = yaml.safe_load(config_stream)
    swift_config = config['publish_logs']

    log = logging.getLogger(__name__)
    # Only create the log directory when the configured path actually has
    # one -- dirname() of a bare filename is '' and makedirs('') raises.
    debug_log_dir = os.path.dirname(config['debug_log'])
    if debug_log_dir and not os.path.isdir(debug_log_dir):
        os.makedirs(debug_log_dir)
    logging.basicConfig(format='%(asctime)s %(name)s %(message)s',
                        filename=config['debug_log'], level=logging.INFO)

    # Open a connection to swift
    connection = swiftclient.client.Connection(
        authurl=swift_config['authurl'],
        user=swift_config['user'],
        key=swift_config['password'],
        os_options={'region_name': swift_config['region']},
        tenant_name=swift_config['tenant'],
        auth_version=2.0)
    log.info('Got connection to swift')

    # Open the results database
    db = MySQLdb.connect(host=config['results']['host'],
                         port=config['results'].get('port', 3306),
                         user=config['results']['username'],
                         passwd=config['results']['password'],
                         db=config['results']['database'])
    cursor = db.cursor(MySQLdb.cursors.DictCursor)

    # Iterate through the logs and determine timing information. This probably
    # should be done in a "more cloudy" way, but this is good enough for now.
    total_items = 0
    items = connection.get_container(swift_config['container'], limit=1000)[1]
    while items:
        total_items += len(items)
        print ('%s Processing %d items, %d items total'
               % (datetime.datetime.now(), len(items), total_items))

        for item in items:
            log.info('Processing %s', item['name'])
            # Parameterized query: the object name must not be interpolated
            # into the SQL string (names containing quotes would break the
            # statement / allow injection). Note ``select count(*)`` always
            # returns exactly one row, so the previous ``rowcount == 0``
            # check never fired -- we must inspect the count value itself.
            cursor.execute('select count(*) as c from summary '
                           'where path=%s;', (item['name'],))
            if cursor.fetchone()['c'] == 0:
                for engine, dataset, migration in process(
                        connection, swift_config['container'], item['name']):
                    if 'duration' not in migration:
                        continue

                    if migration['stats']:
                        cursor.execute('insert ignore into summary'
                                       '(path, parsed_at, engine, dataset, '
                                       'migration, duration, stats_json) '
                                       'values(%s, now(), %s, '
                                       '%s, %s, %s, %s);',
                                       (item['name'], engine, dataset,
                                        '%s->%s' % (migration['from'],
                                                    migration['to']),
                                        migration['duration'],
                                        json.dumps(migration['stats'])))
                    else:
                        cursor.execute('insert ignore into summary'
                                       '(path, parsed_at, engine, dataset, '
                                       'migration, duration, stats_json) '
                                       'values(%s, now(), %s, '
                                       '%s, %s, %s, NULL);',
                                       (item['name'], engine, dataset,
                                        '%s->%s' % (migration['from'],
                                                    migration['to']),
                                        migration['duration']))

                cursor.execute('commit;')

        # Page through the container using the last-seen object name as the
        # marker for the next batch; the loop ends on an empty batch.
        items = connection.get_container(swift_config['container'],
                                         marker=item['name'], limit=1000)[1]

# Two historical log-path layouts; each captures (engine, dataset).
# Layout 1: .../real-db-upgrade_nova_<engine>_<dataset>/...
TEST_NAME1_RE = re.compile(r'.*/real-db-upgrade_nova_([^_]+)_([^/]*)/.*')
# Layout 2: .../real-db-upgrade_nova_<engine>/.../<dataset>.log
TEST_NAME2_RE = re.compile(r'.*/real-db-upgrade_nova_([^_]+)/.*/(.*).log')


def process(connection, container, name):
    """Yield (engine, dataset, migration) tuples parsed from one swift log.

    :param connection: an open swiftclient Connection.
    :param container: the swift container holding the log object.
    :param name: the object name; engine and dataset are extracted from it
        via TEST_NAME1_RE / TEST_NAME2_RE. If neither pattern matches, a
        warning is logged and nothing is yielded.

    Only migrations that recorded both a 'start' and an 'end' are yielded.
    """
    log = logging.getLogger(__name__)

    engine_name = None
    test_name = None

    m = TEST_NAME1_RE.match(name) or TEST_NAME2_RE.match(name)
    if m:
        engine_name = m.group(1)
        test_name = m.group(2)

    if not engine_name or not test_name:
        log.warning('Log name %s does not match regexp', name)
        return

    content = connection.get_object(container, name)[1]
    # Write the log to a unique temporary file rather than a fixed
    # /tmp/logcontent path, so concurrent runs don't clobber each other,
    # and always clean it up afterwards.
    tmp = tempfile.NamedTemporaryFile(mode='w', delete=False)
    try:
        tmp.write(content)
        tmp.close()
        lp = handle_results.LogParser(tmp.name, None)
        lp.process_log()
    finally:
        os.unlink(tmp.name)

    if not lp.migrations:
        log.warning('Log %s contained no migrations', name)

    for migration in lp.migrations:
        # Skip partial records that never recorded a complete time span.
        if 'start' not in migration or 'end' not in migration:
            continue
        yield (engine_name, test_name, migration)


if __name__ == '__main__':
    # NOTE(review): this path insertion executes only *after* the
    # module-level imports above (including `turbo_hipster...`) have
    # already run, so it cannot help resolve them when this file is
    # invoked directly as a script -- confirm whether it is still needed
    # or should be moved above the imports.
    sys.path.insert(0, os.path.abspath(
                    os.path.join(os.path.dirname(__file__), '../')))
    main()