summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--devstack/plugin.sh14
-rw-r--r--requirements.txt1
-rw-r--r--test-requirements.txt2
-rw-r--r--trove/common/api.py8
-rw-r--r--trove/common/apischema.py15
-rw-r--r--trove/common/cfg.py48
-rw-r--r--trove/common/exception.py5
-rw-r--r--trove/common/instance.py3
-rw-r--r--trove/common/stream_codecs.py13
-rw-r--r--trove/common/utils.py2
-rw-r--r--trove/guestagent/api.py14
-rw-r--r--trove/guestagent/common/operating_system.py45
-rw-r--r--trove/guestagent/common/sql_query.py2
-rw-r--r--trove/guestagent/datastore/experimental/postgresql/manager.py37
-rw-r--r--trove/guestagent/datastore/experimental/postgresql/service/process.py3
-rw-r--r--trove/guestagent/datastore/manager.py296
-rw-r--r--trove/guestagent/datastore/mysql_common/manager.py54
-rw-r--r--trove/guestagent/guest_log.py406
-rw-r--r--trove/instance/models.py2
-rw-r--r--trove/instance/service.py31
-rw-r--r--trove/instance/tasks.py1
-rw-r--r--trove/instance/views.py27
-rwxr-xr-xtrove/taskmanager/models.py21
-rw-r--r--trove/tests/config.py1
-rw-r--r--trove/tests/int_tests.py53
-rw-r--r--trove/tests/scenario/groups/guest_log_group.py246
-rw-r--r--trove/tests/scenario/helpers/mysql_helper.py6
-rw-r--r--trove/tests/scenario/helpers/postgresql_helper.py19
-rw-r--r--trove/tests/scenario/helpers/test_helper.py90
-rw-r--r--trove/tests/scenario/runners/backup_runners.py21
-rw-r--r--trove/tests/scenario/runners/guest_log_runners.py674
-rw-r--r--trove/tests/scenario/runners/instance_create_runners.py11
-rw-r--r--trove/tests/scenario/runners/test_runners.py51
-rw-r--r--trove/tests/unittests/guestagent/test_manager.py314
-rw-r--r--trove/tests/unittests/guestagent/test_operating_system.py10
35 files changed, 2467 insertions, 79 deletions
diff --git a/devstack/plugin.sh b/devstack/plugin.sh
index d4bcd203..d9ea6190 100644
--- a/devstack/plugin.sh
+++ b/devstack/plugin.sh
@@ -172,6 +172,20 @@ function init_trove {
# Initialize the trove database
$TROVE_MANAGE db_sync
+ # Add an admin user to the 'tempest' alt_demo tenant.
+ # This is needed to test the guest_log functionality.
+ # The first part mimics the tempest setup, so make sure we have that.
+ ALT_USERNAME=${ALT_USERNAME:-alt_demo}
+ ALT_TENANT_NAME=${ALT_TENANT_NAME:-alt_demo}
+ get_or_create_project ${ALT_TENANT_NAME} default
+ get_or_create_user ${ALT_USERNAME} "$ADMIN_PASSWORD" "default" "alt_demo@example.com"
+ get_or_add_user_project_role Member ${ALT_USERNAME} ${ALT_TENANT_NAME}
+
+ # The second part adds an admin user to the tenant.
+ ADMIN_ALT_USERNAME=${ADMIN_ALT_USERNAME:-admin_${ALT_USERNAME}}
+ get_or_create_user ${ADMIN_ALT_USERNAME} "$ADMIN_PASSWORD" "default" "admin_alt_demo@example.com"
+ get_or_add_user_project_role admin ${ADMIN_ALT_USERNAME} ${ALT_TENANT_NAME}
+
# If no guest image is specified, skip remaining setup
[ -z "$TROVE_GUEST_IMAGE_URL" ] && return 0
diff --git a/requirements.txt b/requirements.txt
index 60d7a996..b56c1f32 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -43,3 +43,4 @@ oslo.messaging!=2.8.0,!=3.1.0,>2.6.1 # Apache-2.0
osprofiler>=0.4.0 # Apache-2.0
oslo.log>=1.14.0 # Apache-2.0
oslo.db>=4.1.0 # Apache-2.0
+enum34;python_version=='2.7' or python_version=='2.6' or python_version=='3.3' # BSD
diff --git a/test-requirements.txt b/test-requirements.txt
index b0b2c2d6..ccb73848 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -6,7 +6,6 @@ hacking<0.11,>=0.10.0
sphinx!=1.2.0,!=1.3b1,<1.3,>=1.1.2 # BSD
oslosphinx!=3.4.0,>=2.5.0 # Apache-2.0
reno>=0.1.1 # Apache2
-
coverage>=3.6 # Apache-2.0
nose # LGPL
nosexcover # BSD
@@ -16,7 +15,6 @@ WebTest>=2.0 # MIT
wsgi-intercept>=0.6.1 # MIT License
proboscis>=1.2.5.3 # Apache-2.0
http://tarballs.openstack.org/python-troveclient/python-troveclient-master.tar.gz#egg=python-troveclient
-
mock>=1.2 # BSD
mox3>=0.7.0 # Apache-2.0
testtools>=1.4.0 # MIT
diff --git a/trove/common/api.py b/trove/common/api.py
index 883df0fe..88053468 100644
--- a/trove/common/api.py
+++ b/trove/common/api.py
@@ -106,6 +106,14 @@ class API(wsgi.Router):
controller=instance_resource,
action="configuration",
conditions={'method': ['GET']})
+ mapper.connect("/{tenant_id}/instances/{id}/log",
+ controller=instance_resource,
+ action="guest_log_list",
+ conditions={'method': ['GET']})
+ mapper.connect("/{tenant_id}/instances/{id}/log",
+ controller=instance_resource,
+ action="guest_log_action",
+ conditions={'method': ['POST']})
def _cluster_router(self, mapper):
cluster_resource = ClusterController().create_resource()
diff --git a/trove/common/apischema.py b/trove/common/apischema.py
index 6863ced8..a07e7fa0 100644
--- a/trove/common/apischema.py
+++ b/trove/common/apischema.py
@@ -514,6 +514,21 @@ backup = {
}
}
+guest_log = {
+ "action": {
+ "name": "guest_log:action",
+ "type": "object",
+ "required": ["name"],
+ "properties": {
+ "name": non_empty_string,
+ "enable": boolean_string,
+ "disable": boolean_string,
+ "publish": boolean_string,
+ "discard": boolean_string
+ }
+ }
+}
+
configuration = {
"create": {
"name": "configuration:create",
diff --git a/trove/common/cfg.py b/trove/common/cfg.py
index 29d3a80d..75241d1a 100644
--- a/trove/common/cfg.py
+++ b/trove/common/cfg.py
@@ -396,6 +396,13 @@ common_opts = [
cfg.IntOpt('timeout_wait_for_service', default=120,
help='Maximum time (in seconds) to wait for a service to '
'become alive.'),
+ cfg.StrOpt('guest_log_container_name',
+ default='database_logs',
+ help='Name of container that stores guest log components.'),
+ cfg.IntOpt('guest_log_limit', default=1000000,
+ help='Maximum size of a chunk saved in guest log container.'),
+ cfg.IntOpt('guest_log_expiry', default=2592000,
+ help='Expiry (in seconds) of objects in guest log container.'),
]
# Profiling specific option groups
@@ -539,6 +546,11 @@ mysql_opts = [
help='Databases to exclude when listing databases.',
deprecated_name='ignore_dbs',
deprecated_group='DEFAULT'),
+ cfg.StrOpt('guest_log_exposed_logs', default='general,slow_query',
+ help='List of Guest Logs to expose for publishing.'),
+ cfg.IntOpt('guest_log_long_query_time', default=1000,
               help='The time in milliseconds that a statement must take '
+ 'in order to be logged in the slow_query log.'),
]
# Percona
@@ -612,6 +624,11 @@ percona_opts = [
help='Databases to exclude when listing databases.',
deprecated_name='ignore_dbs',
deprecated_group='DEFAULT'),
+ cfg.StrOpt('guest_log_exposed_logs', default='general,slow_query',
+ help='List of Guest Logs to expose for publishing.'),
+ cfg.IntOpt('guest_log_long_query_time', default=1000,
               help='The time in milliseconds that a statement must take '
+ 'in order to be logged in the slow_query log.'),
]
# Percona XtraDB Cluster
@@ -689,6 +706,11 @@ pxc_opts = [
cfg.StrOpt('root_controller',
default='trove.extensions.common.service.DefaultRootController',
help='Root controller implementation for pxc.'),
+ cfg.StrOpt('guest_log_exposed_logs', default='general,slow_query',
+ help='List of Guest Logs to expose for publishing.'),
+ cfg.IntOpt('guest_log_long_query_time', default=1000,
               help='The time in milliseconds that a statement must take '
+ 'in order to be logged in the slow_query log.'),
]
# Redis
@@ -758,6 +780,8 @@ redis_opts = [
cfg.StrOpt('root_controller',
default='trove.extensions.common.service.DefaultRootController',
help='Root controller implementation for redis.'),
+ cfg.StrOpt('guest_log_exposed_logs', default='',
+ help='List of Guest Logs to expose for publishing.'),
]
# Cassandra
@@ -803,6 +827,8 @@ cassandra_opts = [
cfg.StrOpt('root_controller',
default='trove.extensions.common.service.DefaultRootController',
help='Root controller implementation for cassandra.'),
+ cfg.StrOpt('guest_log_exposed_logs', default='',
+ help='List of Guest Logs to expose for publishing.'),
]
# Couchbase
@@ -859,6 +885,8 @@ couchbase_opts = [
cfg.StrOpt('root_controller',
default='trove.extensions.common.service.DefaultRootController',
help='Root controller implementation for couchbase.'),
+ cfg.StrOpt('guest_log_exposed_logs', default='',
+ help='List of Guest Logs to expose for publishing.'),
]
# MongoDB
@@ -940,6 +968,8 @@ mongodb_opts = [
cfg.StrOpt('root_controller',
default='trove.extensions.common.service.DefaultRootController',
help='Root controller implementation for mongodb.'),
+ cfg.StrOpt('guest_log_exposed_logs', default='',
+ help='List of Guest Logs to expose for publishing.'),
]
# PostgreSQL
@@ -987,6 +1017,13 @@ postgresql_opts = [
cfg.StrOpt('root_controller',
default='trove.extensions.common.service.DefaultRootController',
help='Root controller implementation for postgresql.'),
+ cfg.StrOpt('guest_log_exposed_logs', default='general',
+ help='List of Guest Logs to expose for publishing.'),
+ cfg.IntOpt('guest_log_long_query_time', default=0,
               help="The time in milliseconds that a statement must take "
+ "in order to be logged in the 'general' log. A value of "
+ "'0' logs all statements, while '-1' turns off "
+ "statement logging."),
]
# Apache CouchDB
@@ -1030,6 +1067,8 @@ couchdb_opts = [
cfg.StrOpt('root_controller',
default='trove.extensions.common.service.DefaultRootController',
help='Root controller implementation for couchdb.'),
+ cfg.StrOpt('guest_log_exposed_logs', default='',
+ help='List of Guest Logs to expose for publishing.'),
]
# Vertica
@@ -1090,6 +1129,8 @@ vertica_opts = [
default='trove.extensions.vertica.service.'
'VerticaRootController',
help='Root controller implementation for Vertica.'),
+ cfg.StrOpt('guest_log_exposed_logs', default='',
+ help='List of Guest Logs to expose for publishing.'),
]
# DB2
@@ -1134,6 +1175,8 @@ db2_opts = [
cfg.StrOpt('root_controller',
default='trove.extensions.common.service.DefaultRootController',
help='Root controller implementation for db2.'),
+ cfg.StrOpt('guest_log_exposed_logs', default='',
+ help='List of Guest Logs to expose for publishing.'),
]
# MariaDB
@@ -1203,6 +1246,11 @@ mariadb_opts = [
help='Databases to exclude when listing databases.',
deprecated_name='ignore_dbs',
deprecated_group='DEFAULT'),
+ cfg.StrOpt('guest_log_exposed_logs', default='general,slow_query',
+ help='List of Guest Logs to expose for publishing.'),
+ cfg.IntOpt('guest_log_long_query_time', default=1000,
               help='The time in milliseconds that a statement must take '
+ 'in order to be logged in the slow_query log.'),
]
# RPC version groups
diff --git a/trove/common/exception.py b/trove/common/exception.py
index b2770df6..1787ed7e 100644
--- a/trove/common/exception.py
+++ b/trove/common/exception.py
@@ -230,6 +230,11 @@ class UnprocessableEntity(TroveError):
message = _("Unable to process the contained request.")
+class UnauthorizedRequest(TroveError):
+
+ message = _("Unauthorized request.")
+
+
class CannotResizeToSameSize(TroveError):
message = _("No change was requested in the size of the instance.")
diff --git a/trove/common/instance.py b/trove/common/instance.py
index e13a2dab..6e636e90 100644
--- a/trove/common/instance.py
+++ b/trove/common/instance.py
@@ -93,12 +93,15 @@ class ServiceStatuses(object):
BUILDING = ServiceStatus(0x09, 'building', 'BUILD')
PROMOTING = ServiceStatus(0x10, 'promoting replica', 'PROMOTE')
EJECTING = ServiceStatus(0x11, 'ejecting replica source', 'EJECT')
+ LOGGING = ServiceStatus(0x12, 'transferring guest logs', 'LOGGING')
UNKNOWN = ServiceStatus(0x16, 'unknown', 'ERROR')
NEW = ServiceStatus(0x17, 'new', 'NEW')
DELETED = ServiceStatus(0x05, 'deleted', 'DELETED')
FAILED_TIMEOUT_GUESTAGENT = ServiceStatus(0x18, 'guestagent error',
'ERROR')
INSTANCE_READY = ServiceStatus(0x19, 'instance ready', 'BUILD')
+ RESTART_REQUIRED = ServiceStatus(0x20, 'restart required',
+ 'RESTART_REQUIRED')
# Dissuade further additions at run-time.
ServiceStatus.__init__ = None
diff --git a/trove/common/stream_codecs.py b/trove/common/stream_codecs.py
index 49c86d3f..279f4ede 100644
--- a/trove/common/stream_codecs.py
+++ b/trove/common/stream_codecs.py
@@ -19,7 +19,6 @@ import csv
import json
import re
import six
-import StringIO
import yaml
from ConfigParser import SafeConfigParser
@@ -198,7 +197,7 @@ class IniCodec(StreamCodec):
def serialize(self, dict_data):
parser = self._init_config_parser(dict_data)
- output = StringIO.StringIO()
+ output = six.StringIO()
parser.write(output)
return output.getvalue()
@@ -212,8 +211,8 @@ class IniCodec(StreamCodec):
for s in parser.sections()}
def _pre_parse(self, stream):
- buf = StringIO.StringIO()
- for line in StringIO.StringIO(stream):
+ buf = six.StringIO()
+ for line in six.StringIO(stream):
# Ignore commented lines.
if not line.startswith(self._comment_markers):
# Strip leading and trailing whitespaces from each line.
@@ -285,7 +284,7 @@ class PropertiesCodec(StreamCodec):
self._unpack_singletons = unpack_singletons
def serialize(self, dict_data):
- output = StringIO.StringIO()
+ output = six.StringIO()
writer = csv.writer(output, delimiter=self._delimiter,
quoting=self.QUOTING_MODE,
strict=self.STRICT_MODE,
@@ -297,7 +296,7 @@ class PropertiesCodec(StreamCodec):
return output.getvalue()
def deserialize(self, stream):
- reader = csv.reader(StringIO.StringIO(stream),
+ reader = csv.reader(six.StringIO(stream),
delimiter=self._delimiter,
quoting=self.QUOTING_MODE,
strict=self.STRICT_MODE,
@@ -373,4 +372,4 @@ class JsonCodec(StreamCodec):
return json.dumps(dict_data)
def deserialize(self, stream):
- return json.load(StringIO.StringIO(stream))
+ return json.load(six.StringIO(stream))
diff --git a/trove/common/utils.py b/trove/common/utils.py
index 52af8156..f86b306d 100644
--- a/trove/common/utils.py
+++ b/trove/common/utils.py
@@ -200,7 +200,7 @@ def build_polling_task(retriever, condition=lambda value: value,
raise exception.PollTimeOut
return loopingcall.FixedIntervalLoopingCall(
- f=poll_and_check).start(sleep_time, True)
+ f=poll_and_check).start(sleep_time, initial_delay=False)
def poll_until(retriever, condition=lambda value: value,
diff --git a/trove/guestagent/api.py b/trove/guestagent/api.py
index ba752b32..f7403a4f 100644
--- a/trove/guestagent/api.py
+++ b/trove/guestagent/api.py
@@ -427,3 +427,17 @@ class API(object):
LOG.debug("Demoting instance %s to non-master.", self.id)
self._call("demote_replication_master", AGENT_HIGH_TIMEOUT,
self.version_cap)
+
+ def guest_log_list(self):
+ LOG.debug("Retrieving guest log list for %s.", self.id)
+ result = self._call("guest_log_list", AGENT_HIGH_TIMEOUT,
+ self.version_cap)
+        LOG.debug("guest_log_list returns %s", result)
+ return result
+
+ def guest_log_action(self, log_name, enable, disable, publish, discard):
+ LOG.debug("Processing guest log '%s' for %s.", log_name, self.id)
+ return self._call("guest_log_action", AGENT_HIGH_TIMEOUT,
+ self.version_cap, log_name=log_name,
+ enable=enable, disable=disable,
+ publish=publish, discard=discard)
diff --git a/trove/guestagent/common/operating_system.py b/trove/guestagent/common/operating_system.py
index 2174c6a9..19b8dfca 100644
--- a/trove/guestagent/common/operating_system.py
+++ b/trove/guestagent/common/operating_system.py
@@ -75,16 +75,21 @@ def exists(path, is_directory=False, as_root=False):
:param as_root: Execute as root.
:type as_root: boolean
"""
- if as_root:
+
+ found = (not is_directory and os.path.isfile(path) or
+ (is_directory and os.path.isdir(path)))
+
+ # Only check as root if we can't see it as the regular user, since
+ # this is more expensive
+ if not found and as_root:
test_flag = '-d' if is_directory else '-f'
cmd = 'test %s %s && echo 1 || echo 0' % (test_flag, path)
stdout, _ = utils.execute_with_timeout(
cmd, shell=True, check_exit_code=False,
run_as_root=True, root_helper='sudo')
- return bool(int(stdout))
+ found = bool(int(stdout))
- return (not is_directory and os.path.isfile(path) or
- (is_directory and os.path.isdir(path)))
+ return found
def _read_file_as_root(path, codec):
@@ -182,10 +187,14 @@ class FileMode(object):
"""
@classmethod
- def SET_FULL(cls):
+ def SET_ALL_RWX(cls):
return cls(reset=[stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO]) # =0777
@classmethod
+ def SET_FULL(cls):
+ return cls.SET_ALL_RWX()
+
+ @classmethod
def SET_GRP_RW_OTH_R(cls):
return cls(reset=[stat.S_IRGRP | stat.S_IWGRP | stat.S_IROTH]) # =0064
@@ -198,13 +207,37 @@ class FileMode(object):
return cls(reset=[stat.S_IRUSR | stat.S_IWUSR]) # =0600
@classmethod
- def ADD_READ_ALL(cls):
+ def ADD_ALL_R(cls):
return cls(add=[stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH]) # +0444
@classmethod
+ def ADD_READ_ALL(cls):
+ return cls.ADD_ALL_R()
+
+ @classmethod
+ def ADD_USR_RW_GRP_RW(cls):
+ return cls(add=[stat.S_IRUSR | stat.S_IWUSR |
+ stat.S_IRGRP | stat.S_IWGRP]) # +0660
+
+ @classmethod
+ def ADD_USR_RW_GRP_RW_OTH_R(cls):
+ return cls(add=[stat.S_IRUSR | stat.S_IWUSR |
+ stat.S_IRGRP | stat.S_IWGRP |
+ stat.S_IROTH]) # +0664
+
+ @classmethod
def ADD_GRP_RW(cls):
return cls(add=[stat.S_IRGRP | stat.S_IWGRP]) # +0060
+ @classmethod
+ def ADD_GRP_RX(cls):
+ return cls(add=[stat.S_IRGRP | stat.S_IXGRP]) # +0050
+
+ @classmethod
+ def ADD_GRP_RX_OTH_RX(cls):
+ return cls(add=[stat.S_IRGRP | stat.S_IXGRP |
+ stat.S_IROTH | stat.S_IXOTH]) # +0055
+
def __init__(self, reset=None, add=None, remove=None):
self._reset = list(reset) if reset is not None else []
self._add = list(add) if add is not None else []
diff --git a/trove/guestagent/common/sql_query.py b/trove/guestagent/common/sql_query.py
index 25fdd4d1..4ac55cb4 100644
--- a/trove/guestagent/common/sql_query.py
+++ b/trove/guestagent/common/sql_query.py
@@ -439,6 +439,8 @@ class SetServerVariable(object):
return "SET GLOBAL %s=%s" % (self.key, 0)
elif self.value is None:
return "SET GLOBAL %s" % (self.key)
+ elif isinstance(self.value, str):
+ return "SET GLOBAL %s='%s'" % (self.key, self.value)
else:
return "SET GLOBAL %s=%s" % (self.key, self.value)
diff --git a/trove/guestagent/datastore/experimental/postgresql/manager.py b/trove/guestagent/datastore/experimental/postgresql/manager.py
index bbd19086..ad13f5e6 100644
--- a/trove/guestagent/datastore/experimental/postgresql/manager.py
+++ b/trove/guestagent/datastore/experimental/postgresql/manager.py
@@ -24,13 +24,16 @@ from .service.install import PgSqlInstall
from .service.root import PgSqlRoot
from .service.status import PgSqlAppStatus
import pgutil
+from trove.common import cfg
from trove.common import utils
from trove.guestagent import backup
from trove.guestagent.datastore import manager
+from trove.guestagent import guest_log
from trove.guestagent import volume
LOG = logging.getLogger(__name__)
+CONF = cfg.CONF
class Manager(
@@ -54,6 +57,40 @@ class Manager(
def configuration_manager(self):
return self._configuration_manager
+ @property
+ def datastore_log_defs(self):
+ owner = 'postgres'
+ datastore_dir = '/var/log/postgresql/'
+ long_query_time = CONF.get(self.manager).get(
+ 'guest_log_long_query_time')
+ general_log_file = self.build_log_file_name(
+ self.GUEST_LOG_DEFS_GENERAL_LABEL, owner,
+ datastore_dir=datastore_dir)
+ general_log_dir, general_log_filename = os.path.split(general_log_file)
+ return {
+ self.GUEST_LOG_DEFS_GENERAL_LABEL: {
+ self.GUEST_LOG_TYPE_LABEL: guest_log.LogType.USER,
+ self.GUEST_LOG_USER_LABEL: owner,
+ self.GUEST_LOG_FILE_LABEL: general_log_file,
+ self.GUEST_LOG_ENABLE_LABEL: {
+ 'logging_collector': 'on',
+ 'log_destination': self._quote_str('stderr'),
+ 'log_directory': self._quote_str(general_log_dir),
+ 'log_filename': self._quote_str(general_log_filename),
+ 'log_statement': self._quote_str('all'),
+ 'debug_print_plan': 'on',
+ 'log_min_duration_statement': long_query_time,
+ },
+ self.GUEST_LOG_DISABLE_LABEL: {
+ 'logging_collector': 'off',
+ },
+ self.GUEST_LOG_RESTART_LABEL: True,
+ },
+ }
+
+ def _quote_str(self, value):
+ return "'%s'" % value
+
def do_prepare(self, context, packages, databases, memory_mb, users,
device_path, mount_point, backup_info, config_contents,
root_password, overrides, cluster_config, snapshot):
diff --git a/trove/guestagent/datastore/experimental/postgresql/service/process.py b/trove/guestagent/datastore/experimental/postgresql/service/process.py
index 19a09cac..1164f77e 100644
--- a/trove/guestagent/datastore/experimental/postgresql/service/process.py
+++ b/trove/guestagent/datastore/experimental/postgresql/service/process.py
@@ -21,6 +21,8 @@ from trove.common import cfg
from trove.guestagent.common import operating_system
from trove.guestagent.datastore.experimental.postgresql.service.status import (
PgSqlAppStatus)
+from trove.guestagent import guest_log
+
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
@@ -55,6 +57,7 @@ class PgSqlProcess(object):
def restart(self, context):
PgSqlAppStatus.get().restart_db_service(
self.SERVICE_CANDIDATES, CONF.state_change_wait_time)
+ self.set_guest_log_status(guest_log.LogStatus.Restart_Completed)
def start_db(self, context, enable_on_boot=True, update_db=False):
PgSqlAppStatus.get().start_db_service(
diff --git a/trove/guestagent/datastore/manager.py b/trove/guestagent/datastore/manager.py
index c4a8e1a7..61bc100a 100644
--- a/trove/guestagent/datastore/manager.py
+++ b/trove/guestagent/datastore/manager.py
@@ -16,13 +16,19 @@
import abc
+from oslo_config import cfg as oslo_cfg
from oslo_log import log as logging
from oslo_service import periodic_task
from trove.common import cfg
from trove.common import exception
from trove.common.i18n import _
+from trove.common import instance
+from trove.guestagent.common import guestagent_utils
+from trove.guestagent.common import operating_system
+from trove.guestagent.common.operating_system import FileMode
from trove.guestagent import dbaas
+from trove.guestagent import guest_log
from trove.guestagent.strategies import replication as repl_strategy
from trove.guestagent import volume
@@ -36,8 +42,23 @@ class Manager(periodic_task.PeriodicTasks):
functionality should be pulled back here from the existing managers.
"""
- def __init__(self, manager_name):
+ GUEST_LOG_TYPE_LABEL = 'type'
+ GUEST_LOG_USER_LABEL = 'user'
+ GUEST_LOG_FILE_LABEL = 'file'
+ GUEST_LOG_SECTION_LABEL = 'section'
+ GUEST_LOG_ENABLE_LABEL = 'enable'
+ GUEST_LOG_DISABLE_LABEL = 'disable'
+ GUEST_LOG_RESTART_LABEL = 'restart'
+
+ GUEST_LOG_BASE_DIR = '/var/log/trove'
+ GUEST_LOG_DATASTORE_DIRNAME = 'datastore'
+
+ GUEST_LOG_DEFS_GUEST_LABEL = 'guest'
+ GUEST_LOG_DEFS_GENERAL_LABEL = 'general'
+ GUEST_LOG_DEFS_ERROR_LABEL = 'error'
+ GUEST_LOG_DEFS_SLOW_QUERY_LABEL = 'slow_query'
+ def __init__(self, manager_name):
super(Manager, self).__init__(CONF)
# Manager properties
@@ -45,6 +66,12 @@ class Manager(periodic_task.PeriodicTasks):
self.__manager = None
self.__prepare_error = False
+ # Guest log
+ self._guest_log_context = None
+ self._guest_log_loaded_context = None
+ self._guest_log_cache = None
+ self._guest_log_defs = None
+
@property
def manager_name(self):
"""This returns the passed-in name of the manager."""
@@ -104,6 +131,104 @@ class Manager(periodic_task.PeriodicTasks):
"""
return None
+ @property
+ def datastore_log_defs(self):
+ """Any datastore-specific log files should be overridden in this dict
+ by the corresponding Manager class.
+
+ Format of a dict entry:
+
+ 'name_of_log': {self.GUEST_LOG_TYPE_LABEL:
+ Specified by the Enum in guest_log.LogType,
+ self.GUEST_LOG_USER_LABEL:
+ User that owns the file,
+ self.GUEST_LOG_FILE_LABEL:
+ Path on filesystem where the log resides,
+ self.GUEST_LOG_SECTION_LABEL:
+ Section where to put config (if ini style)
+ self.GUEST_LOG_ENABLE_LABEL: {
+ Dict of config_group settings to enable log},
+ self.GUEST_LOG_DISABLE_LABEL: {
+ Dict of config_group settings to disable log},
+
+ See guestagent_log_defs for an example.
+ """
+ return {}
+
+ @property
+ def guestagent_log_defs(self):
+ """These are log files that should be available on every Trove
+ instance. By definition, these should be of type LogType.SYS
+ """
+ log_dir = CONF.get('log_dir', '/var/log/trove/')
+ log_file = CONF.get('log_file', 'trove-guestagent.log')
+ guestagent_log = guestagent_utils.build_file_path(log_dir, log_file)
+ return {
+ self.GUEST_LOG_DEFS_GUEST_LABEL: {
+ self.GUEST_LOG_TYPE_LABEL: guest_log.LogType.SYS,
+ self.GUEST_LOG_USER_LABEL: None,
+ self.GUEST_LOG_FILE_LABEL: guestagent_log,
+ },
+ }
+
+ @property
+ def guest_log_defs(self):
+ """Return all the guest log defs."""
+ if not self._guest_log_defs:
+ self._guest_log_defs = dict(self.datastore_log_defs)
+ self._guest_log_defs.update(self.guestagent_log_defs)
+ return self._guest_log_defs
+
+ @property
+ def guest_log_context(self):
+ return self._guest_log_context
+
+ @guest_log_context.setter
+ def guest_log_context(self, context):
+ self._guest_log_context = context
+
+ @property
+ def guest_log_cache(self):
+ """Make sure the guest_log_cache is loaded and return it."""
+ self._refresh_guest_log_cache()
+ return self._guest_log_cache
+
+ def _refresh_guest_log_cache(self):
+ if self._guest_log_cache:
+ # Replace the context if it's changed
+ if self._guest_log_loaded_context != self.guest_log_context:
+ for log_name in self._guest_log_cache.keys():
+ self._guest_log_cache[log_name].context = (
+ self.guest_log_context)
+ else:
+ # Load the initial cache
+ self._guest_log_cache = {}
+ if self.guest_log_context:
+ gl_defs = self.guest_log_defs
+ try:
+ exposed_logs = CONF.get(self.manager).get(
+ 'guest_log_exposed_logs')
+ except oslo_cfg.NoSuchOptError:
+ exposed_logs = ''
+ LOG.debug("Available log defs: %s" % ",".join(gl_defs.keys()))
+ exposed_logs = exposed_logs.lower().replace(',', ' ').split()
+ LOG.debug("Exposing log defs: %s" % ",".join(exposed_logs))
+ expose_all = 'all' in exposed_logs
+ for log_name in gl_defs.keys():
+ gl_def = gl_defs[log_name]
+ exposed = expose_all or log_name in exposed_logs
+ LOG.debug("Building guest log '%s' from def: %s "
+ "(exposed: %s)" %
+ (log_name, gl_def, exposed))
+ self._guest_log_cache[log_name] = guest_log.GuestLog(
+ self.guest_log_context, log_name,
+ gl_def[self.GUEST_LOG_TYPE_LABEL],
+ gl_def[self.GUEST_LOG_USER_LABEL],
+ gl_def[self.GUEST_LOG_FILE_LABEL],
+ exposed)
+
+ self._guest_log_loaded_context = self.guest_log_context
+
################
# Status related
################
@@ -259,6 +384,175 @@ class Manager(periodic_task.PeriodicTasks):
LOG.debug("Cluster creation complete, starting status checks.")
self.status.end_install()
+ #############
+ # Log related
+ #############
+ def guest_log_list(self, context):
+ LOG.info(_("Getting list of guest logs."))
+ self.guest_log_context = context
+ gl_cache = self.guest_log_cache
+ result = filter(None, [gl_cache[log_name].show()
+ if gl_cache[log_name].exposed else None
+ for log_name in gl_cache.keys()])
+ LOG.info(_("Returning list of logs: %s") % result)
+ return result
+
+ def guest_log_action(self, context, log_name, enable, disable,
+ publish, discard):
+ if enable and disable:
+ raise exception.BadRequest("Cannot enable and disable log '%s'." %
+ log_name)
+ # Enable if we are publishing, unless told to disable
+ if publish and not disable:
+ enable = True
+ LOG.info(_("Processing guest log '%(log)s' "
+ "(enable=%(en)s, disable=%(dis)s, "
+ "publish=%(pub)s, discard=%(disc)s).") %
+ {'log': log_name, 'en': enable, 'dis': disable,
+ 'pub': publish, 'disc': discard})
+ self.guest_log_context = context
+ gl_cache = self.guest_log_cache
+ if log_name in gl_cache:
+ if ((gl_cache[log_name].type == guest_log.LogType.SYS) and
+ not publish):
+ if enable or disable:
+ if enable:
+ action_text = "enable"
+ else:
+ action_text = "disable"
+ raise exception.BadRequest("Cannot %s a SYSTEM log ('%s')."
+ % (action_text, log_name))
+ if gl_cache[log_name].type == guest_log.LogType.USER:
+ requires_change = (
+ (gl_cache[log_name].enabled and disable) or
+ (not gl_cache[log_name].enabled and enable))
+ if requires_change:
+ restart_required = self.guest_log_enable(
+ context, log_name, disable)
+ if restart_required:
+ self.set_guest_log_status(
+ guest_log.LogStatus.Restart_Required, log_name)
+ gl_cache[log_name].enabled = enable
+ log_details = gl_cache[log_name].show()
+ if discard:
+ log_details = gl_cache[log_name].discard_log()
+ if publish:
+ log_details = gl_cache[log_name].publish_log()
+ LOG.info(_("Details for log '%(log)s': %(det)s") %
+ {'log': log_name, 'det': log_details})
+ return log_details
+
+ raise exception.NotFound("Log '%s' is not defined." % log_name)
+
+ def guest_log_enable(self, context, log_name, disable):
+ """This method can be overridden by datastore implementations to
+ facilitate enabling and disabling USER type logs. If the logs
+ can be enabled with simple configuration group changes, however,
+ the code here will probably suffice.
+ Must return whether the datastore needs to be restarted in order for
+ the logging to begin.
+ """
+ restart_required = False
+ verb = ("Disabling" if disable else "Enabling")
+ if self.configuration_manager:
+ LOG.debug("%s log '%s'" % (verb, log_name))
+ gl_def = self.guest_log_defs[log_name]
+ enable_cfg_label = "%s_%s_log" % (self.GUEST_LOG_ENABLE_LABEL,
+ log_name)
+ disable_cfg_label = "%s_%s_log" % (self.GUEST_LOG_DISABLE_LABEL,
+ log_name)
+ restart_required = gl_def.get(self.GUEST_LOG_RESTART_LABEL,
+ restart_required)
+ if disable:
+ self._apply_log_overrides(
+ context, enable_cfg_label, disable_cfg_label,
+ gl_def.get(self.GUEST_LOG_DISABLE_LABEL),
+ gl_def.get(self.GUEST_LOG_SECTION_LABEL),
+ restart_required)
+ else:
+ self._apply_log_overrides(
+ context, disable_cfg_label, enable_cfg_label,
+ gl_def.get(self.GUEST_LOG_ENABLE_LABEL),
+ gl_def.get(self.GUEST_LOG_SECTION_LABEL),
+ restart_required)
+ else:
+ msg = (_("%(verb)s log '%(log)s' not supported - "
+ "no configuration manager defined!") %
+ {'verb': verb, 'log': log_name})
+ LOG.error(msg)
+ raise exception.GuestError(msg)
+
+ return restart_required
+
+ def _apply_log_overrides(self, context, remove_label,
+ apply_label, cfg_values, section_label,
+ restart_required):
+ self.configuration_manager.remove_system_override(
+ change_id=remove_label)
+ if cfg_values:
+ config_man_values = cfg_values
+ if section_label:
+ config_man_values = {section_label: cfg_values}
+ self.configuration_manager.apply_system_override(
+ config_man_values, change_id=apply_label)
+ if restart_required:
+ self.status.set_status(instance.ServiceStatuses.RESTART_REQUIRED)
+ else:
+ self.apply_overrides(context, cfg_values)
+
+ def set_guest_log_status(self, status, log_name=None):
+ """Sets the status of log_name to 'status' - if log_name is not
+ provided, sets the status on all logs.
+ """
+ gl_cache = self.guest_log_cache
+ names = [log_name]
+ if not log_name or log_name not in gl_cache:
+ names = gl_cache.keys()
+ for name in names:
+ # If we're already in restart mode and we're asked to set the
+ # status to restart, assume enable/disable has been flipped
+ # without a restart and set the status to restart done
+ if (gl_cache[name].status == guest_log.LogStatus.Restart_Required
+ and status == guest_log.LogStatus.Restart_Required):
+ gl_cache[name].status = guest_log.LogStatus.Restart_Completed
+ else:
+ gl_cache[name].status = status
+
+ def build_log_file_name(self, log_name, owner, datastore_dir=None):
+ """Build a log file name based on the log_name and make sure the
+ directories exist and are accessible by owner.
+ """
+ if datastore_dir is None:
+ base_dir = self.GUEST_LOG_BASE_DIR
+ if not operating_system.exists(base_dir, is_directory=True):
+ operating_system.create_directory(
+ base_dir, user=owner, group=owner, force=True,
+ as_root=True)
+ datastore_dir = guestagent_utils.build_file_path(
+ base_dir, self.GUEST_LOG_DATASTORE_DIRNAME)
+
+ if not operating_system.exists(datastore_dir, is_directory=True):
+ operating_system.create_directory(
+ datastore_dir, user=owner, group=owner, force=True,
+ as_root=True)
+ log_file_name = guestagent_utils.build_file_path(
+ datastore_dir, '%s-%s.log' % (self.manager, log_name))
+
+ return self.validate_log_file(log_file_name, owner)
+
+ def validate_log_file(self, log_file, owner):
+ """Make sure the log file exists and is accessible by owner.
+ """
+ if not operating_system.exists(log_file, as_root=True):
+ operating_system.write_file(log_file, '', as_root=True)
+
+ operating_system.chown(log_file, user=owner, group=owner,
+ as_root=True)
+ operating_system.chmod(log_file, FileMode.ADD_USR_RW_GRP_RW_OTH_R,
+ as_root=True)
+ LOG.debug("Set log file '%s' as readable" % log_file)
+ return log_file
+
###############
# Not Supported
###############
diff --git a/trove/guestagent/datastore/mysql_common/manager.py b/trove/guestagent/datastore/mysql_common/manager.py
index 0a31cac3..0ceaa51a 100644
--- a/trove/guestagent/datastore/mysql_common/manager.py
+++ b/trove/guestagent/datastore/mysql_common/manager.py
@@ -20,6 +20,8 @@ import os
from oslo_log import log as logging
+from trove.common import cfg
+from trove.common import configurations
from trove.common import exception
from trove.common.i18n import _
from trove.common import instance as rd_instance
@@ -27,10 +29,12 @@ from trove.guestagent import backup
from trove.guestagent.common import operating_system
from trove.guestagent.datastore import manager
from trove.guestagent.datastore.mysql_common import service
+from trove.guestagent import guest_log
from trove.guestagent import volume
LOG = logging.getLogger(__name__)
+CONF = cfg.CONF
class MySqlManager(manager.Manager):
@@ -66,6 +70,56 @@ class MySqlManager(manager.Manager):
return self.mysql_app(
self.mysql_app_status.get()).configuration_manager
+ @property
+ def datastore_log_defs(self):
+ owner = 'mysql'
+ datastore_dir = self.mysql_app.get_data_dir()
+ server_section = configurations.MySQLConfParser.SERVER_CONF_SECTION
+ long_query_time = CONF.get(self.manager).get(
+ 'guest_log_long_query_time') / 1000
+ general_log_file = self.build_log_file_name(
+ self.GUEST_LOG_DEFS_GENERAL_LABEL, owner,
+ datastore_dir=datastore_dir)
+ error_log_file = self.validate_log_file('/var/log/mysqld.log', owner)
+ slow_query_log_file = self.build_log_file_name(
+ self.GUEST_LOG_DEFS_SLOW_QUERY_LABEL, owner,
+ datastore_dir=datastore_dir)
+ return {
+ self.GUEST_LOG_DEFS_GENERAL_LABEL: {
+ self.GUEST_LOG_TYPE_LABEL: guest_log.LogType.USER,
+ self.GUEST_LOG_USER_LABEL: owner,
+ self.GUEST_LOG_FILE_LABEL: general_log_file,
+ self.GUEST_LOG_SECTION_LABEL: server_section,
+ self.GUEST_LOG_ENABLE_LABEL: {
+ 'general_log': 'on',
+ 'general_log_file': general_log_file,
+ 'log_output': 'file',
+ },
+ self.GUEST_LOG_DISABLE_LABEL: {
+ 'general_log': 'off',
+ },
+ },
+ self.GUEST_LOG_DEFS_SLOW_QUERY_LABEL: {
+ self.GUEST_LOG_TYPE_LABEL: guest_log.LogType.USER,
+ self.GUEST_LOG_USER_LABEL: owner,
+ self.GUEST_LOG_FILE_LABEL: slow_query_log_file,
+ self.GUEST_LOG_SECTION_LABEL: server_section,
+ self.GUEST_LOG_ENABLE_LABEL: {
+ 'slow_query_log': 'on',
+ 'slow_query_log_file': slow_query_log_file,
+ 'long_query_time': long_query_time,
+ },
+ self.GUEST_LOG_DISABLE_LABEL: {
+ 'slow_query_log': 'off',
+ },
+ },
+ self.GUEST_LOG_DEFS_ERROR_LABEL: {
+ self.GUEST_LOG_TYPE_LABEL: guest_log.LogType.SYS,
+ self.GUEST_LOG_USER_LABEL: owner,
+ self.GUEST_LOG_FILE_LABEL: error_log_file,
+ },
+ }
+
def change_passwords(self, context, users):
return self.mysql_admin().change_passwords(users)
diff --git a/trove/guestagent/guest_log.py b/trove/guestagent/guest_log.py
new file mode 100644
index 00000000..ba331c5a
--- /dev/null
+++ b/trove/guestagent/guest_log.py
@@ -0,0 +1,406 @@
+# Copyright 2015 Tesora Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from datetime import datetime
+import enum
+import hashlib
+import os
+
+from oslo_log import log as logging
+from swiftclient.client import ClientException
+
+from trove.common import cfg
+from trove.common import exception
+from trove.common.i18n import _
+from trove.common.remote import create_swift_client
+from trove.common import stream_codecs
+from trove.guestagent.common import operating_system
+from trove.guestagent.common.operating_system import FileMode
+
+
+LOG = logging.getLogger(__name__)
+CONF = cfg.CONF
+
+
+class LogType(enum.Enum):
+ """Represent the type of the log object."""
+
+ # System logs. These are always enabled.
+ SYS = 1
+
+ # User logs. These can be enabled or disabled.
+ USER = 2
+
+
+class LogStatus(enum.Enum):
+ """Represent the status of the log object."""
+
+ # The log is disabled and potentially no data is being written to
+ # the corresponding log file
+ Disabled = 1
+
+ # Logging is on, but no determination has been made about data availability
+ Enabled = 2
+
+ # Logging is on, but no log data is available to publish
+ Unavailable = 3
+
+ # Logging is on and data is available to be published
+ Ready = 4
+
+ # Logging is on and all data has been published
+ Published = 5
+
+ # Logging is on and some data has been published
+ Partial = 6
+
+ # Log file has been rotated, so next publish will discard log first
+ Rotated = 7
+
+ # Waiting for a datastore restart to begin logging
+ Restart_Required = 8
+
+ # Now that restart has completed, regular status can be reported again
+ # This is an internal status
+ Restart_Completed = 9
+
+
+class GuestLog(object):
+
+ MF_FILE_SUFFIX = '_metafile'
+ MF_LABEL_LOG_NAME = 'log_name'
+ MF_LABEL_LOG_TYPE = 'log_type'
+ MF_LABEL_LOG_FILE = 'log_file'
+ MF_LABEL_LOG_SIZE = 'log_size'
+ MF_LABEL_LOG_HEADER = 'log_header_digest'
+
+ def __init__(self, log_context, log_name, log_type, log_user, log_file,
+ log_exposed):
+ self._context = log_context
+ self._name = log_name
+ self._type = log_type
+ self._user = log_user
+ self._file = log_file
+ self._exposed = log_exposed
+ self._size = None
+ self._published_size = None
+ self._header_digest = 'abc'
+ self._published_header_digest = None
+ self._status = None
+ self._cached_context = None
+ self._cached_swift_client = None
+ self._enabled = log_type == LogType.SYS
+ self._file_readable = False
+ self._container_name = None
+ self._codec = stream_codecs.JsonCodec()
+
+ self._set_status(self._type == LogType.USER,
+ LogStatus.Disabled, LogStatus.Enabled)
+
+ # The directory should already exist - make sure we have access to it
+ log_dir = os.path.dirname(self._file)
+ operating_system.chmod(
+ log_dir, FileMode.ADD_GRP_RX_OTH_RX, as_root=True)
+
+ @property
+ def context(self):
+ return self._context
+
+ @context.setter
+ def context(self, context):
+ self._context = context
+
+ @property
+ def type(self):
+ return self._type
+
+ @property
+ def swift_client(self):
+ if not self._cached_swift_client or (
+ self._cached_context != self.context):
+ self._cached_swift_client = create_swift_client(self.context)
+ self._cached_context = self.context
+ return self._cached_swift_client
+
+ @property
+ def exposed(self):
+ return self._exposed or self.context.is_admin
+
+ @property
+ def enabled(self):
+ return self._enabled
+
+ @enabled.setter
+ def enabled(self, enabled):
+ self._enabled = enabled
+
+ @property
+ def status(self):
+ return self._status
+
+ @status.setter
+ def status(self, status):
+ # Keep the status in Restart_Required until we're set
+ # to Restart_Completed
+ if (self.status != LogStatus.Restart_Required or
+ (self.status == LogStatus.Restart_Required and
+ status == LogStatus.Restart_Completed)):
+ self._status = status
+ LOG.debug("Log status for '%s' set to %s" % (self._name, status))
+ else:
+ LOG.debug("Log status for '%s' *not* set to %s (currently %s)" %
+ (self._name, status, self.status))
+
+ def get_container_name(self, force=False):
+ if not self._container_name or force:
+ container_name = CONF.guest_log_container_name
+ try:
+ self.swift_client.get_container(container_name, prefix='dummy')
+ except ClientException as ex:
+ if ex.http_status == 404:
+ LOG.debug("Container '%s' not found; creating now" %
+ container_name)
+ self.swift_client.put_container(
+ container_name, headers=self._get_headers())
+ else:
+ LOG.exception(_("Could not retrieve container '%s'") %
+ container_name)
+ raise
+ self._container_name = container_name
+ return self._container_name
+
+ def _set_status(self, use_first, first_status, second_status):
+ if use_first:
+ self.status = first_status
+ else:
+ self.status = second_status
+
+ def show(self):
+ if self.exposed:
+ self._refresh_details()
+ container_name = 'None'
+ prefix = 'None'
+            if self._published_size:
+                container_name = self.get_container_name()
+                prefix = self._object_prefix()
+            pending = self._size - self._published_size
+            if self.status == LogStatus.Rotated:
+                pending = self._size
+ return {
+ 'name': self._name,
+ 'type': self._type.name,
+ 'status': self.status.name.replace('_', ' '),
+ 'published': self._published_size,
+ 'pending': pending,
+ 'container': container_name,
+ 'prefix': prefix,
+ 'metafile': self._metafile_name()
+ }
+ else:
+ raise exception.UnauthorizedRequest(_(
+ "Not authorized to show log '%s'.") % self._name)
+
+ def _refresh_details(self):
+
+ if self._published_size is None:
+ # Initializing, so get all the values
+ try:
+ meta_details = self._get_meta_details()
+ self._published_size = int(
+ meta_details[self.MF_LABEL_LOG_SIZE])
+ self._published_header_digest = (
+ meta_details[self.MF_LABEL_LOG_HEADER])
+ except ClientException as ex:
+ if ex.http_status == 404:
+ LOG.debug("No published metadata found for log '%s'" %
+ self._name)
+ self._published_size = 0
+ else:
+ LOG.exception(_("Could not get meta details for log '%s'")
+ % self._name)
+ raise
+
+ self._update_details()
+ LOG.debug("Log size for '%s' set to %d (published %d)" % (
+ self._name, self._size, self._published_size))
+
+ def _update_details(self):
+ # Make sure we can read the file
+ if not self._file_readable or not os.access(self._file, os.R_OK):
+ if not os.access(self._file, os.R_OK):
+ if operating_system.exists(self._file, as_root=True):
+ operating_system.chmod(
+ self._file, FileMode.ADD_ALL_R, as_root=True)
+ self._file_readable = True
+
+ if os.path.isfile(self._file):
+ logstat = os.stat(self._file)
+ self._size = logstat.st_size
+ self._update_log_header_digest(self._file)
+
+ if self._log_rotated():
+ self.status = LogStatus.Rotated
+ # See if we have stuff to publish
+ elif logstat.st_size > self._published_size:
+ self._set_status(self._published_size,
+ LogStatus.Partial, LogStatus.Ready)
+ # We've published everything so far
+ elif logstat.st_size == self._published_size:
+ self._set_status(self._published_size,
+ LogStatus.Published, LogStatus.Enabled)
+ # We've already handled this case (log rotated) so what gives?
+ else:
+                raise RuntimeError("Bug in _log_rotated ?")
+ else:
+ self._published_size = 0
+ self._size = 0
+
+ if not self._size or not self.enabled:
+ user_status = LogStatus.Disabled
+ if self.enabled:
+ user_status = LogStatus.Enabled
+ self._set_status(self._type == LogType.USER,
+ user_status, LogStatus.Unavailable)
+
+ def _log_rotated(self):
+ """If the file is smaller than the last reported size
+ or the first line hash is different, we can probably assume
+ the file changed under our nose.
+ """
+ if (self._published_size > 0 and
+ (self._size < self._published_size or
+ self._published_header_digest != self._header_digest)):
+ return True
+
+ def _update_log_header_digest(self, log_file):
+ with open(log_file, 'r') as log:
+ self._header_digest = hashlib.md5(log.readline()).hexdigest()
+
+ def _get_headers(self):
+ return {'X-Delete-After': CONF.guest_log_expiry}
+
+ def publish_log(self):
+ if self.exposed:
+ if self._log_rotated():
+ LOG.debug("Log file rotation detected for '%s' - "
+ "discarding old log" % self._name)
+ self._delete_log_components()
+ if os.path.isfile(self._file):
+ self._publish_to_container(self._file)
+ else:
+ raise RuntimeError(_(
+ "Cannot publish log file '%s' as it does not exist.") %
+ self._file)
+ return self.show()
+ else:
+ raise exception.UnauthorizedRequest(_(
+ "Not authorized to publish log '%s'.") % self._name)
+
+ def discard_log(self):
+ if self.exposed:
+ self._delete_log_components()
+ return self.show()
+ else:
+ raise exception.UnauthorizedRequest(_(
+ "Not authorized to discard log '%s'.") % self._name)
+
+ def _delete_log_components(self):
+ container_name = self.get_container_name(force=True)
+ prefix = self._object_prefix()
+ swift_files = [swift_file['name']
+ for swift_file in self.swift_client.get_container(
+ container_name, prefix=prefix)[1]]
+ swift_files.append(self._metafile_name())
+ for swift_file in swift_files:
+ self.swift_client.delete_object(container_name, swift_file)
+ self._set_status(self._type == LogType.USER,
+ LogStatus.Disabled, LogStatus.Enabled)
+ self._published_size = 0
+
+ def _publish_to_container(self, log_filename):
+ log_component, log_lines = '', 0
+ chunk_size = CONF.guest_log_limit
+ container_name = self.get_container_name(force=True)
+
+ def _read_chunk(f):
+ while True:
+ current_chunk = f.read(chunk_size)
+ if not current_chunk:
+ break
+ yield current_chunk
+
+ def _write_log_component():
+ object_headers.update({'x-object-meta-lines': log_lines})
+ component_name = '%s%s' % (self._object_prefix(),
+ self._object_name())
+ self.swift_client.put_object(container_name,
+ component_name, log_component,
+ headers=object_headers)
+ self._published_size = (
+ self._published_size + len(log_component))
+ self._published_header_digest = self._header_digest
+
+ self._refresh_details()
+ self._put_meta_details()
+ object_headers = self._get_headers()
+ with open(log_filename, 'r') as log:
+ LOG.debug("seeking to %s", self._published_size)
+ log.seek(self._published_size)
+ for chunk in _read_chunk(log):
+ for log_line in chunk.splitlines():
+ if len(log_component) + len(log_line) > chunk_size:
+ _write_log_component()
+ log_component, log_lines = '', 0
+ log_component = log_component + log_line + '\n'
+ log_lines += 1
+ if log_lines > 0:
+ _write_log_component()
+ self._put_meta_details()
+
+ def _put_meta_details(self):
+ metafile_name = self._metafile_name()
+ metafile_details = {
+ self.MF_LABEL_LOG_NAME: self._name,
+ self.MF_LABEL_LOG_TYPE: self._type.name,
+ self.MF_LABEL_LOG_FILE: self._file,
+ self.MF_LABEL_LOG_SIZE: self._published_size,
+ self.MF_LABEL_LOG_HEADER: self._header_digest,
+ }
+ container_name = self.get_container_name()
+ self.swift_client.put_object(container_name, metafile_name,
+ self._codec.serialize(metafile_details),
+ headers=self._get_headers())
+ LOG.debug("_put_meta_details has published log size as %s",
+ self._published_size)
+
+    def _metafile_name(self):
+        return self._object_prefix().rstrip('/') + self.MF_FILE_SUFFIX
+
+ def _object_prefix(self):
+ return '%(instance_id)s/%(datastore)s-%(log)s/' % {
+ 'instance_id': CONF.guest_id,
+ 'datastore': CONF.datastore_manager,
+ 'log': self._name}
+
+ def _object_name(self):
+ return 'log-%s' % str(datetime.utcnow()).replace(' ', 'T')
+
+ def _get_meta_details(self):
+ LOG.debug("Getting meta details for '%s'" % self._name)
+ metafile_name = self._metafile_name()
+ container_name = self.get_container_name()
+ headers, metafile_details = self.swift_client.get_object(
+ container_name, metafile_name)
+ LOG.debug("Found meta details for '%s'" % self._name)
+ return self._codec.deserialize(metafile_details)
diff --git a/trove/instance/models.py b/trove/instance/models.py
index 4ede15f6..60ea2c2a 100644
--- a/trove/instance/models.py
+++ b/trove/instance/models.py
@@ -297,6 +297,8 @@ class SimpleInstance(object):
return InstanceStatus.PROMOTE
if InstanceTasks.EJECTING.action == action:
return InstanceStatus.EJECT
+ if InstanceTasks.LOGGING.action == action:
+ return InstanceStatus.LOGGING
# Check for server status.
if self.db_info.server_status in ["BUILD", "ERROR", "REBOOT",
diff --git a/trove/instance/service.py b/trove/instance/service.py
index 3cb65dee..3b3040c6 100644
--- a/trove/instance/service.py
+++ b/trove/instance/service.py
@@ -25,6 +25,7 @@ from trove.common import exception
from trove.common.i18n import _
from trove.common.i18n import _LI
from trove.common import pagination
+from trove.common.remote import create_guest_client
from trove.common import utils
from trove.common import wsgi
from trove.datastore import models as datastore_models
@@ -331,3 +332,33 @@ class InstanceController(wsgi.Controller):
{'instance_id': id, 'config': config})
return wsgi.Result(views.DefaultConfigurationView(
config).data(), 200)
+
+ def guest_log_list(self, req, tenant_id, id):
+ """Return all information about all logs for an instance."""
+ LOG.debug("Listing logs for tenant %s" % tenant_id)
+ context = req.environ[wsgi.CONTEXT_KEY]
+ instance = models.Instance.load(context, id)
+ if not instance:
+ raise exception.NotFound(uuid=id)
+ client = create_guest_client(context, id)
+ guest_log_list = client.guest_log_list()
+ return wsgi.Result({'logs': guest_log_list}, 200)
+
+ def guest_log_action(self, req, body, tenant_id, id):
+ """Processes a guest log."""
+ LOG.info(_("Processing log for tenant %s"), tenant_id)
+ context = req.environ[wsgi.CONTEXT_KEY]
+ instance = models.Instance.load(context, id)
+ if not instance:
+ raise exception.NotFound(uuid=id)
+ log_name = body['name']
+ enable = body.get('enable', None)
+ disable = body.get('disable', None)
+ publish = body.get('publish', None)
+ discard = body.get('discard', None)
+ if enable and disable:
+ raise exception.BadRequest(_("Cannot enable and disable log."))
+ client = create_guest_client(context, id)
+ guest_log = client.guest_log_action(log_name, enable, disable,
+ publish, discard)
+ return wsgi.Result({'log': guest_log}, 200)
diff --git a/trove/instance/tasks.py b/trove/instance/tasks.py
index 885500f6..734df950 100644
--- a/trove/instance/tasks.py
+++ b/trove/instance/tasks.py
@@ -79,6 +79,7 @@ class InstanceTasks(object):
'Promoting the instance to replica source.')
EJECTING = InstanceTask(0x09, 'EJECTING',
'Ejecting the replica source.')
+ LOGGING = InstanceTask(0x0a, 'LOGGING', 'Transferring guest logs.')
BUILDING_ERROR_DNS = InstanceTask(0x50, 'BUILDING', 'Build error: DNS.',
is_error=True)
diff --git a/trove/instance/views.py b/trove/instance/views.py
index 59b11f81..fcc23f98 100644
--- a/trove/instance/views.py
+++ b/trove/instance/views.py
@@ -165,3 +165,30 @@ class DefaultConfigurationView(object):
for key, val in self.config:
config_dict[key] = val
return {"instance": {"configuration": config_dict}}
+
+
+class GuestLogView(object):
+
+ def __init__(self, guest_log):
+ self.guest_log = guest_log
+
+ def data(self):
+ return {
+ 'name': self.guest_log.name,
+ 'type': self.guest_log.type,
+ 'status': self.guest_log.status,
+ 'published': self.guest_log.published,
+ 'pending': self.guest_log.pending,
+ 'container': self.guest_log.container,
+ 'prefix': self.guest_log.prefix,
+ 'metafile': self.guest_log.metafile,
+ }
+
+
+class GuestLogsView(object):
+
+ def __init__(self, guest_logs):
+ self.guest_logs = guest_logs
+
+ def data(self):
+ return [GuestLogView(l).data() for l in self.guest_logs]
diff --git a/trove/taskmanager/models.py b/trove/taskmanager/models.py
index 1af3d548..3b0385dc 100755
--- a/trove/taskmanager/models.py
+++ b/trove/taskmanager/models.py
@@ -1263,6 +1263,27 @@ class BuiltInstanceTasks(BuiltInstance, NotifyMixin, ConfigurationMixin):
finally:
self.reset_task_status()
+ def guest_log_list(self):
+ LOG.info(_("Retrieving guest log list for instance %s.") % self.id)
+ try:
+ return self.guest.guest_log_list()
+ except GuestError:
+ LOG.error(_("Failed to retrieve guest log list for instance "
+ "%s.") % self.id)
+ finally:
+ self.reset_task_status()
+
+ def guest_log_action(self, log_name, enable, disable, publish, discard):
+ LOG.info(_("Processing guest log for instance %s.") % self.id)
+ try:
+ return self.guest.guest_log_action(log_name, enable, disable,
+ publish, discard)
+ except GuestError:
+ LOG.error(_("Failed to process guest log for instance %s.")
+ % self.id)
+ finally:
+ self.reset_task_status()
+
def refresh_compute_server_info(self):
"""Refreshes the compute server field."""
server = self.nova_client.servers.get(self.server.id)
diff --git a/trove/tests/config.py b/trove/tests/config.py
index f9ab438d..de4829b1 100644
--- a/trove/tests/config.py
+++ b/trove/tests/config.py
@@ -70,6 +70,7 @@ class TestConfig(object):
'dbaas_url': "http://localhost:8775/v1.0/dbaas",
'version_url': "http://localhost:8775/",
'nova_url': "http://localhost:8774/v2",
+ 'swift_url': "http://localhost:8080/v1/AUTH_",
'dbaas_datastore': "mysql",
'dbaas_datastore_id': "a00000a0-00a0-0a00-00a0-000a000000aa",
'dbaas_datastore_name_no_versions': "Test_Datastore_1",
diff --git a/trove/tests/int_tests.py b/trove/tests/int_tests.py
index c3d70608..da68ef90 100644
--- a/trove/tests/int_tests.py
+++ b/trove/tests/int_tests.py
@@ -35,6 +35,7 @@ from trove.tests.api import versions
from trove.tests.scenario.groups import backup_group
from trove.tests.scenario.groups import cluster_actions_group
from trove.tests.scenario.groups import database_actions_group
+from trove.tests.scenario.groups import guest_log_group
from trove.tests.scenario.groups import instance_actions_group
from trove.tests.scenario.groups import instance_create_group
from trove.tests.scenario.groups import instance_delete_group
@@ -126,6 +127,12 @@ base_groups = [
GROUP_SETUP
]
+# cluster groups
+cluster_actions_groups = list(base_groups)
+cluster_actions_groups.extend([cluster_actions_group.GROUP,
+ negative_cluster_actions_group.GROUP])
+
+# instance groups
instance_create_groups = list(base_groups)
instance_create_groups.extend([instance_create_group.GROUP,
instance_delete_group.GROUP])
@@ -139,9 +146,8 @@ user_actions_groups.extend([user_actions_group.GROUP])
database_actions_groups = list(instance_create_groups)
database_actions_groups.extend([database_actions_group.GROUP])
-cluster_actions_groups = list(base_groups)
-cluster_actions_groups.extend([cluster_actions_group.GROUP,
- negative_cluster_actions_group.GROUP])
+guest_log_groups = list(instance_create_groups)
+guest_log_groups.extend([guest_log_group.GROUP])
instance_actions_groups = list(instance_create_groups)
instance_actions_groups.extend([instance_actions_group.GROUP])
@@ -149,31 +155,40 @@ instance_actions_groups.extend([instance_actions_group.GROUP])
replication_groups = list(instance_create_groups)
replication_groups.extend([replication_group.GROUP])
+# groups common to all datastores
+common_groups = list(instance_actions_groups)
+common_groups.extend([guest_log_group.GROUP])
+
# Module based groups
register(["backup"], backup_groups)
register(["cluster"], cluster_actions_groups)
register(["database"], database_actions_groups)
-register(["instance_actions"], instance_actions_groups)
+register(["guest_log"], guest_log_groups)
+register(["instance", "instance_actions"], instance_actions_groups)
register(["instance_create"], instance_create_groups)
register(["user"], user_actions_groups)
register(["replication"], replication_groups)
# Datastore based groups - these should contain all functionality
# currently supported by the datastore
-register(["cassandra_supported"], backup_groups, instance_actions_groups)
-register(["couchbase_supported"], instance_actions_groups)
-register(["couchdb_supported"], instance_actions_groups)
-register(["postgresql_supported"], backup_groups, database_actions_groups,
- instance_actions_groups, user_actions_groups)
-register(["mongodb_supported"], backup_groups, cluster_actions_groups,
- database_actions_groups, instance_actions_groups, user_actions_groups)
+register(["cassandra_supported"], common_groups,
+ backup_groups)
+register(["couchbase_supported"], common_groups)
+register(["couchdb_supported"], common_groups)
+register(["postgresql_supported"], common_groups,
+ backup_groups, database_actions_groups, user_actions_groups)
+register(["mongodb_supported"], common_groups,
+ backup_groups, cluster_actions_groups,
+ database_actions_groups, user_actions_groups)
register(["mysql_supported", "mariadb_supported", "percona_supported"],
- backup_groups, database_actions_groups, instance_actions_groups,
+ common_groups,
+ backup_groups, database_actions_groups,
replication_groups, user_actions_groups)
-register(["redis_supported"], backup_groups, instance_actions_groups,
- replication_groups)
-register(["vertica_supported"], cluster_actions_groups,
- instance_actions_groups)
-register(["pxc_supported"], instance_actions_groups, cluster_actions_groups)
-register(["db2_supported"], database_actions_groups,
- instance_actions_groups, user_actions_groups)
+register(["redis_supported"], common_groups,
+ backup_groups, replication_groups)
+register(["vertica_supported"], common_groups,
+ cluster_actions_groups)
+register(["pxc_supported"], common_groups,
+ cluster_actions_groups)
+register(["db2_supported"], common_groups,
+ database_actions_groups, user_actions_groups)
diff --git a/trove/tests/scenario/groups/guest_log_group.py b/trove/tests/scenario/groups/guest_log_group.py
new file mode 100644
index 00000000..4d3493a3
--- /dev/null
+++ b/trove/tests/scenario/groups/guest_log_group.py
@@ -0,0 +1,246 @@
+# Copyright 2015 Tesora Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from proboscis import test
+
+from trove.tests.scenario.groups import instance_create_group
+from trove.tests.scenario.groups.test_group import TestGroup
+
+
+GROUP = "scenario.guest_log_group"
+
+
+@test(depends_on_groups=[instance_create_group.GROUP], groups=[GROUP])
+class GuestLogGroup(TestGroup):
+ """Test Guest Log functionality."""
+
+ def __init__(self):
+ super(GuestLogGroup, self).__init__(
+ 'guest_log_runners', 'GuestLogRunner')
+
+ @test
+ def test_log_list(self):
+ """Test that log-list works."""
+ self.test_runner.run_test_log_list()
+
+ @test
+ def test_admin_log_list(self):
+ """Test that log-list works for admin user."""
+ self.test_runner.run_test_admin_log_list()
+
+ @test
+ def test_log_show(self):
+ """Test that log-show works on USER log."""
+ self.test_runner.run_test_log_show()
+
+ @test
+ def test_log_enable_sys(self):
+ """Ensure log-enable on SYS log fails."""
+ self.test_runner.run_test_log_enable_sys()
+
+ @test
+ def test_log_disable_sys(self):
+ """Ensure log-disable on SYS log fails."""
+ self.test_runner.run_test_log_disable_sys()
+
+ @test
+ def test_log_show_unauth_user(self):
+ """Ensure log-show by unauth client on USER log fails."""
+ self.test_runner.run_test_log_show_unauth_user()
+
+ @test
+ def test_log_list_unauth_user(self):
+ """Ensure log-list by unauth client on USER log fails."""
+ self.test_runner.run_test_log_list_unauth_user()
+
+ @test
+ def test_log_generator_unauth_user(self):
+ """Ensure log-generator by unauth client on USER log fails."""
+ self.test_runner.run_test_log_generator_unauth_user()
+
+ @test
+ def test_log_generator_publish_unauth_user(self):
+ """Ensure log-generator by unauth client with publish fails."""
+ self.test_runner.run_test_log_generator_publish_unauth_user()
+
+ @test
+ def test_log_show_unexposed_user(self):
+ """Ensure log-show on unexposed log fails for auth client."""
+ self.test_runner.run_test_log_show_unexposed_user()
+
+ @test
+ def test_log_enable_unexposed_user(self):
+ """Ensure log-enable on unexposed log fails for auth client."""
+ self.test_runner.run_test_log_enable_unexposed_user()
+
+ @test
+ def test_log_disable_unexposed_user(self):
+ """Ensure log-disable on unexposed log fails for auth client."""
+ self.test_runner.run_test_log_disable_unexposed_user()
+
+ @test
+ def test_log_publish_unexposed_user(self):
+ """Ensure log-publish on unexposed log fails for auth client."""
+ self.test_runner.run_test_log_publish_unexposed_user()
+
+ @test
+ def test_log_discard_unexposed_user(self):
+ """Ensure log-discard on unexposed log fails for auth client."""
+ self.test_runner.run_test_log_discard_unexposed_user()
+
+ @test(runs_after=[test_log_show])
+ def test_log_enable_user(self):
+ """Test log-enable on USER log."""
+ self.test_runner.run_test_log_enable_user()
+
+ @test(runs_after=[test_log_enable_user])
+ def test_log_enable_flip_user(self):
+ """Test that flipping restart-required log-enable works."""
+ self.test_runner.run_test_log_enable_flip_user()
+
+ @test(runs_after=[test_log_enable_flip_user])
+ def test_restart_datastore(self):
+ """Test restart datastore if required."""
+ self.test_runner.run_test_restart_datastore()
+
+ @test(runs_after=[test_restart_datastore])
+ def test_wait_for_restart(self):
+ """Wait for restart to complete."""
+ self.test_runner.run_test_wait_for_restart()
+
+ @test(runs_after=[test_wait_for_restart])
+ def test_log_publish_user(self):
+ """Test log-publish on USER log."""
+ self.test_runner.run_test_log_publish_user()
+
+ @test(runs_after=[test_log_publish_user])
+ def test_add_data(self):
+ """Add data for second log-publish on USER log."""
+ self.test_runner.run_test_add_data()
+
+ @test(runs_after=[test_add_data])
+ def test_verify_data(self):
+ """Verify data for second log-publish on USER log."""
+ self.test_runner.run_test_verify_data()
+
+ @test(runs_after=[test_verify_data])
+ def test_log_publish_again_user(self):
+ """Test log-publish again on USER log."""
+ self.test_runner.run_test_log_publish_again_user()
+
+ @test(runs_after=[test_log_publish_again_user])
+ def test_log_generator_user(self):
+ """Test log-generator on USER log."""
+ self.test_runner.run_test_log_generator_user()
+
+ @test(runs_after=[test_log_generator_user])
+ def test_log_generator_publish_user(self):
+ """Test log-generator with publish on USER log."""
+ self.test_runner.run_test_log_generator_publish_user()
+
+ @test(runs_after=[test_log_generator_publish_user])
+ def test_log_generator_swift_client_user(self):
+ """Test log-generator on USER log with passed-in Swift client."""
+ self.test_runner.run_test_log_generator_swift_client_user()
+
+ @test(runs_after=[test_log_generator_swift_client_user])
+ def test_add_data_again(self):
+ """Add more data for log-generator row-by-row test on USER log."""
+ self.test_runner.run_test_add_data_again()
+
+ @test(runs_after=[test_add_data_again])
+ def test_verify_data_again(self):
+ """Verify data for log-generator row-by-row test on USER log."""
+ self.test_runner.run_test_verify_data_again()
+
+ @test(runs_after=[test_verify_data_again])
+ def test_log_generator_user_by_row(self):
+ """Test log-generator on USER log row-by-row."""
+ self.test_runner.run_test_log_generator_user_by_row()
+
+ @test(runs_after=[test_log_generator_user_by_row])
+ def test_log_save_user(self):
+ """Test log-save on USER log."""
+ self.test_runner.run_test_log_save_user()
+
+ @test(runs_after=[test_log_save_user])
+ def test_log_save_publish_user(self):
+ """Test log-save on USER log with publish."""
+ self.test_runner.run_test_log_save_publish_user()
+
+ @test(runs_after=[test_log_save_publish_user])
+ def test_log_discard_user(self):
+ """Test log-discard on USER log."""
+ self.test_runner.run_test_log_discard_user()
+
+ @test(runs_after=[test_log_discard_user])
+ def test_log_disable_user(self):
+ """Test log-disable on USER log."""
+ self.test_runner.run_test_log_disable_user()
+
+ @test(runs_after=[test_log_disable_user])
+ def test_restart_datastore_again(self):
+ """Test restart datastore again if required."""
+ self.test_runner.run_test_restart_datastore()
+
+ @test(runs_after=[test_restart_datastore_again])
+ def test_wait_for_restart_again(self):
+ """Wait for restart to complete again."""
+ self.test_runner.run_test_wait_for_restart()
+
+ @test
+ def test_log_show_sys(self):
+ """Test that log-show works for SYS log."""
+ self.test_runner.run_test_log_show_sys()
+
+ @test(runs_after=[test_log_show_sys])
+ def test_log_publish_sys(self):
+ """Test log-publish on SYS log."""
+ self.test_runner.run_test_log_publish_sys()
+
+ @test(runs_after=[test_log_publish_sys])
+ def test_log_publish_again_sys(self):
+ """Test log-publish again on SYS log."""
+ self.test_runner.run_test_log_publish_again_sys()
+
+ @test(depends_on=[test_log_publish_again_sys])
+ def test_log_generator_sys(self):
+ """Test log-generator on SYS log."""
+ self.test_runner.run_test_log_generator_sys()
+
+ @test(runs_after=[test_log_generator_sys])
+ def test_log_generator_publish_sys(self):
+ """Test log-generator with publish on SYS log."""
+ self.test_runner.run_test_log_generator_publish_sys()
+
+ @test(depends_on=[test_log_generator_publish_sys])
+ def test_log_generator_swift_client_sys(self):
+ """Test log-generator on SYS log with passed-in Swift client."""
+ self.test_runner.run_test_log_generator_swift_client_sys()
+
+ @test(depends_on=[test_log_generator_publish_sys],
+ runs_after=[test_log_generator_swift_client_sys])
+ def test_log_save_sys(self):
+ """Test log-save on SYS log."""
+ self.test_runner.run_test_log_save_sys()
+
+ @test(runs_after=[test_log_save_sys])
+ def test_log_save_publish_sys(self):
+ """Test log-save on SYS log with publish."""
+ self.test_runner.run_test_log_save_publish_sys()
+
+ @test(runs_after=[test_log_save_publish_sys])
+ def test_log_discard_sys(self):
+ """Test log-discard on SYS log."""
+ self.test_runner.run_test_log_discard_sys()
diff --git a/trove/tests/scenario/helpers/mysql_helper.py b/trove/tests/scenario/helpers/mysql_helper.py
index 0f80174c..475b826e 100644
--- a/trove/tests/scenario/helpers/mysql_helper.py
+++ b/trove/tests/scenario/helpers/mysql_helper.py
@@ -46,3 +46,9 @@ class MysqlHelper(SqlHelper):
def get_invalid_groups(self):
return [{'key_buffer_size': 4}, {"join_buffer_size": 'string_value'}]
+
+ def get_exposed_user_log_names(self):
+ return ['general', 'slow_query']
+
+ def get_unexposed_sys_log_names(self):
+ return ['guest', 'error']
diff --git a/trove/tests/scenario/helpers/postgresql_helper.py b/trove/tests/scenario/helpers/postgresql_helper.py
index aaaf8382..31b20e08 100644
--- a/trove/tests/scenario/helpers/postgresql_helper.py
+++ b/trove/tests/scenario/helpers/postgresql_helper.py
@@ -13,6 +13,8 @@
# License for the specific language governing permissions and limitations
# under the License.
+from proboscis import SkipTest
+
from trove.tests.scenario.helpers.sql_helper import SqlHelper
@@ -23,7 +25,7 @@ class PostgresqlHelper(SqlHelper):
'postgresql')
def get_helper_credentials(self):
- return {'name': 'lite', 'password': 'litepass', 'database': 'firstdb'}
+ return {'name': 'lite', 'password': 'litepass', 'database': 'lite'}
def get_valid_database_definitions(self):
return [{'name': 'db1'}, {'name': 'db2'}, {'name': 'db3'}]
@@ -35,6 +37,15 @@ class PostgresqlHelper(SqlHelper):
{'name': 'user3', 'password': 'password1',
'databases': [{'name': 'db1'}, {'name': 'db2'}]}]
+ def add_actual_data(self, *args, **kwargs):
+ raise SkipTest("Adding data to PostgreSQL is broken")
+
+ def verify_actual_data(self, *args, **kwargs):
+ raise SkipTest("Verifying data in PostgreSQL is broken")
+
+ def remove_actual_data(self, *args, **kwargs):
+ raise SkipTest("Removing data from PostgreSQL is broken")
+
def get_dynamic_group(self):
return {'max_worker_processes': 11}
@@ -45,3 +56,9 @@ class PostgresqlHelper(SqlHelper):
return [{'timezone': 997},
{"max_worker_processes": 'string_value'},
{"standard_conforming_strings": 'string_value'}]
+
+ def get_exposed_user_log_names(self):
+ return ['general']
+
+ def log_enable_requires_restart(self):
+ return True
diff --git a/trove/tests/scenario/helpers/test_helper.py b/trove/tests/scenario/helpers/test_helper.py
index bad74483..db112bd6 100644
--- a/trove/tests/scenario/helpers/test_helper.py
+++ b/trove/tests/scenario/helpers/test_helper.py
@@ -29,16 +29,20 @@ class DataType(Enum):
_fn_data dictionary defined in TestHelper.
"""
+ # micro amount of data, useful for testing datastore logging, etc.
+ micro = 1
+ # another micro dataset (also for datastore logging)
+ micro2 = 2
# very tiny amount of data, useful for testing replication
# propagation, etc.
- tiny = 1
+ tiny = 3
# another tiny dataset (also for replication propagation)
- tiny2 = 2
+ tiny2 = 4
# small amount of data (this can be added to each instance
# after creation, for example).
- small = 3
+ small = 5
# large data, enough to make creating a backup take 20s or more.
- large = 4
+ large = 6
class TestHelper(object):
@@ -98,14 +102,20 @@ class TestHelper(object):
self.DATA_START = 'start'
self.DATA_SIZE = 'size'
self._fn_data = {
+ DataType.micro.name: {
+ self.DATA_START: 100,
+ self.DATA_SIZE: 10},
+ DataType.micro2.name: {
+ self.DATA_START: 200,
+ self.DATA_SIZE: 10},
DataType.tiny.name: {
- self.DATA_START: 1,
+ self.DATA_START: 1000,
self.DATA_SIZE: 100},
DataType.tiny2.name: {
- self.DATA_START: 500,
+ self.DATA_START: 2000,
self.DATA_SIZE: 100},
DataType.small.name: {
- self.DATA_START: 1000,
+ self.DATA_START: 10000,
self.DATA_SIZE: 1000},
DataType.large.name: {
self.DATA_START: 100000,
@@ -328,3 +338,69 @@ class TestHelper(object):
"""Return a list of configuration groups with invalid values.
"""
return []
+
+ ###################
+ # Guest Log related
+ ###################
+ def get_exposed_log_list(self):
+ """Return the list of exposed logs for the datastore. This
+ method shouldn't need to be overridden.
+ """
+ logs = []
+ try:
+ logs.extend(self.get_exposed_user_log_names())
+ except SkipTest:
+ pass
+ try:
+ logs.extend(self.get_exposed_sys_log_names())
+ except SkipTest:
+ pass
+
+ return logs
+
+ def get_full_log_list(self):
+ """Return the full list of all logs for the datastore. This
+ method shouldn't need to be overridden.
+ """
+ logs = self.get_exposed_log_list()
+ try:
+ logs.extend(self.get_unexposed_user_log_names())
+ except SkipTest:
+ pass
+ try:
+ logs.extend(self.get_unexposed_sys_log_names())
+ except SkipTest:
+ pass
+
+ return logs
+
+ # Override these guest log methods if needed
+ def get_exposed_user_log_names(self):
+ """Return the names of the user logs that are visible to all users.
+ The first log name will be used for tests.
+ """
+ raise SkipTest("No exposed user log names defined.")
+
+ def get_unexposed_user_log_names(self):
+        """Return the names of the user logs that are not visible to all users.
+ The first log name will be used for tests.
+ """
+ raise SkipTest("No unexposed user log names defined.")
+
+ def get_exposed_sys_log_names(self):
+ """Return the names of SYS logs that are visible to all users.
+ The first log name will be used for tests.
+ """
+ raise SkipTest("No exposed sys log names defined.")
+
+ def get_unexposed_sys_log_names(self):
+        """Return the names of the sys logs that are not visible to all users.
+ The first log name will be used for tests.
+ """
+ return ['guest']
+
+ def log_enable_requires_restart(self):
+ """Returns whether enabling or disabling a USER log requires a
+ restart of the datastore.
+ """
+ return False
diff --git a/trove/tests/scenario/runners/backup_runners.py b/trove/tests/scenario/runners/backup_runners.py
index 2bd81779..351569fe 100644
--- a/trove/tests/scenario/runners/backup_runners.py
+++ b/trove/tests/scenario/runners/backup_runners.py
@@ -19,11 +19,8 @@ from troveclient.compat import exceptions
from trove.common.utils import generate_uuid
from trove.common.utils import poll_until
-from trove.tests.config import CONFIG
from trove.tests.scenario.helpers.test_helper import DataType
from trove.tests.scenario.runners.test_runners import TestRunner
-from trove.tests.util import create_dbaas_client
-from trove.tests.util.users import Requirements
class BackupRunner(TestRunner):
@@ -47,7 +44,6 @@ class BackupRunner(TestRunner):
self.incremental_backup_info = None
self.restore_instance_id = 0
self.restore_host = None
- self.other_client = None
def run_backup_create_instance_invalid(
self, expected_exception=exceptions.BadRequest,
@@ -235,21 +231,13 @@ class BackupRunner(TestRunner):
def run_backup_get_unauthorized_user(
self, expected_exception=exceptions.NotFound,
expected_http_code=404):
- self._create_other_client()
self.assert_raises(
expected_exception, None,
- self.other_client.backups.get, self.backup_info.id)
+ self.unauth_client.backups.get, self.backup_info.id)
# we're using a different client, so we'll check the return code
# on it explicitly, instead of depending on 'assert_raises'
self.assert_client_code(expected_http_code=expected_http_code,
- client=self.other_client)
-
- def _create_other_client(self):
- if not self.other_client:
- requirements = Requirements(is_admin=False)
- other_user = CONFIG.users.find_user(
- requirements, black_list=[self.instance_info.user.auth_user])
- self.other_client = create_dbaas_client(other_user)
+ client=self.unauth_client)
def run_restore_from_backup(self):
self.assert_restore_from_backup(self.backup_info.id)
@@ -312,14 +300,13 @@ class BackupRunner(TestRunner):
def run_delete_backup_unauthorized_user(
self, expected_exception=exceptions.NotFound,
expected_http_code=404):
- self._create_other_client()
self.assert_raises(
expected_exception, None,
- self.other_client.backups.delete, self.backup_info.id)
+ self.unauth_client.backups.delete, self.backup_info.id)
# we're using a different client, so we'll check the return code
# on it explicitly, instead of depending on 'assert_raises'
self.assert_client_code(expected_http_code=expected_http_code,
- client=self.other_client)
+ client=self.unauth_client)
def run_delete_backup(self, expected_http_code=202):
self.assert_delete_backup(self.backup_info.id, expected_http_code)
diff --git a/trove/tests/scenario/runners/guest_log_runners.py b/trove/tests/scenario/runners/guest_log_runners.py
new file mode 100644
index 00000000..29ac8937
--- /dev/null
+++ b/trove/tests/scenario/runners/guest_log_runners.py
@@ -0,0 +1,674 @@
+# Copyright 2015 Tesora Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from swiftclient.client import ClientException
+import tempfile
+
+from troveclient.compat import exceptions
+
+from trove.common import cfg
+from trove.guestagent.common import operating_system
+from trove.guestagent import guest_log
+from trove.tests.config import CONFIG
+from trove.tests.scenario.helpers.test_helper import DataType
+from trove.tests.scenario.runners.test_runners import TestRunner
+
+
+CONF = cfg.CONF
+
+
+class GuestLogRunner(TestRunner):
+
+ def __init__(self):
+ super(GuestLogRunner, self).__init__()
+
+ self.container = CONF.guest_log_container_name
+ self.prefix_pattern = '%(instance_id)s/%(datastore)s-%(log)s/'
+ self._last_log_published = {}
+ self._last_log_contents = {}
+
+ def _get_last_log_published(self, log_name):
+ return self._last_log_published.get(log_name, None)
+
+ def _set_last_log_published(self, log_name, published):
+ self._last_log_published[log_name] = published
+
+ def _get_last_log_contents(self, log_name):
+ return self._last_log_contents.get(log_name, [])
+
+ def _set_last_log_contents(self, log_name, published):
+ self._last_log_contents[log_name] = published
+
+ def _get_exposed_user_log_names(self):
+ """Returns the full list of exposed user logs."""
+ return self.test_helper.get_exposed_user_log_names()
+
+ def _get_exposed_user_log_name(self):
+ """Return the first exposed user log name."""
+ return self.test_helper.get_exposed_user_log_names()[0]
+
+ def _get_unexposed_sys_log_name(self):
+ """Return the first unexposed sys log name."""
+ return self.test_helper.get_unexposed_sys_log_names()[0]
+
+ def run_test_log_list(self):
+ self.assert_log_list(self.auth_client,
+ self.test_helper.get_exposed_log_list())
+
+ def assert_log_list(self, client, expected_list):
+ log_list = list(client.instances.log_list(self.instance_info.id))
+ log_names = list(ll.name for ll in log_list)
+ self.assert_list_elements_equal(expected_list, log_names)
+
+ def run_test_admin_log_list(self):
+ self.assert_log_list(self.admin_client,
+ self.test_helper.get_full_log_list())
+
+ def run_test_log_show(self):
+ log_pending = self._set_zero_or_none()
+ self.assert_log_show(self.auth_client,
+ self._get_exposed_user_log_name(),
+ expected_published=0,
+ expected_pending=log_pending)
+
+ def _set_zero_or_none(self):
+ """This attempts to handle the case where an existing instance
+ is used. Values that would normally be '0' are not, and must
+ be ignored.
+ """
+ value = 0
+ if self.is_using_existing_instance:
+ value = None
+ return value
+
+ def assert_log_show(self, client, log_name,
+ expected_http_code=200,
+ expected_type=guest_log.LogType.USER.name,
+ expected_status=guest_log.LogStatus.Disabled.name,
+ expected_published=None, expected_pending=None):
+ self.report.log("Executing log_show for log '%s'" % log_name)
+ log_details = client.instances.log_show(
+ self.instance_info.id, log_name)
+ self.assert_client_code(expected_http_code)
+ self.assert_log_details(
+ log_details, log_name,
+ expected_type=expected_type,
+ expected_status=expected_status,
+ expected_published=expected_published,
+ expected_pending=expected_pending)
+
+ def assert_log_details(self, log_details, expected_log_name,
+ expected_type=guest_log.LogType.USER.name,
+ expected_status=guest_log.LogStatus.Disabled.name,
+ expected_published=None, expected_pending=None):
+ """Check that the action generates the proper response data.
+ For log_published and log_pending, setting the value to 'None'
+ will skip that check (useful when using an existing instance,
+ as there may be pending things in user logs right from the get-go)
+ and setting it to a value other than '0' will verify that the actual
+ value is '>=value' (since it's impossible to know what the actual
+ value will be at any given time). '0' will still match exclusively.
+ """
+ self.report.log("Validating log details for log '%s'" %
+ expected_log_name)
+ self._set_last_log_published(expected_log_name, log_details.published)
+ self.assert_equal(expected_log_name, log_details.name,
+ "Wrong log name for '%s' log" % expected_log_name)
+ self.assert_equal(expected_type, log_details.type,
+ "Wrong log type for '%s' log" % expected_log_name)
+ current_status = log_details.status.replace(' ', '_')
+ self.assert_equal(expected_status, current_status,
+ "Wrong log status for '%s' log" % expected_log_name)
+ if expected_published is None:
+ pass
+ elif expected_published == 0:
+ self.assert_equal(0, log_details.published,
+ "Wrong log published for '%s' log" %
+ expected_log_name)
+ else:
+ self.assert_true(log_details.published >= expected_published,
+ "Missing log published for '%s' log: "
+ "expected %d, got %d" %
+ (expected_log_name, expected_published,
+ log_details.published))
+ if expected_pending is None:
+ pass
+ elif expected_pending == 0:
+ self.assert_equal(0, log_details.pending,
+ "Wrong log pending for '%s' log" %
+ expected_log_name)
+ else:
+ self.assert_true(log_details.pending >= expected_pending,
+ "Missing log pending for '%s' log: "
+ "expected %d, got %d" %
+ (expected_log_name, expected_pending,
+ log_details.pending))
+ container = self.container
+ prefix = self.prefix_pattern % {
+ 'instance_id': self.instance_info.id,
+ 'datastore': CONFIG.dbaas_datastore,
+ 'log': expected_log_name}
+ metafile = prefix.rstrip('/') + '_metafile'
+ if expected_published == 0:
+ self.assert_storage_gone(container, prefix, metafile)
+ container = 'None'
+ prefix = 'None'
+ else:
+ self.assert_storage_exists(container, prefix, metafile)
+ self.assert_equal(container, log_details.container,
+ "Wrong log container for '%s' log" %
+ expected_log_name)
+ self.assert_equal(prefix, log_details.prefix,
+ "Wrong log prefix for '%s' log" % expected_log_name)
+ self.assert_equal(metafile, log_details.metafile,
+ "Wrong log metafile for '%s' log" %
+ expected_log_name)
+
+ def assert_log_enable(self, client, log_name,
+ expected_http_code=200,
+ expected_type=guest_log.LogType.USER.name,
+ expected_status=guest_log.LogStatus.Disabled.name,
+ expected_published=None, expected_pending=None):
+ self.report.log("Executing log_enable for log '%s'" % log_name)
+ log_details = client.instances.log_enable(
+ self.instance_info.id, log_name)
+ self.assert_client_code(expected_http_code)
+ self.assert_log_details(
+ log_details, log_name,
+ expected_type=expected_type,
+ expected_status=expected_status,
+ expected_published=expected_published,
+ expected_pending=expected_pending)
+
+ def assert_log_disable(self, client, log_name, discard=None,
+ expected_http_code=200,
+ expected_type=guest_log.LogType.USER.name,
+ expected_status=guest_log.LogStatus.Disabled.name,
+ expected_published=None, expected_pending=None):
+ self.report.log("Executing log_disable for log '%s' (discard: %s)" %
+ (log_name, discard))
+ log_details = client.instances.log_disable(
+ self.instance_info.id, log_name, discard=discard)
+ self.assert_client_code(expected_http_code)
+ self.assert_log_details(
+ log_details, log_name,
+ expected_type=expected_type,
+ expected_status=expected_status,
+ expected_published=expected_published,
+ expected_pending=expected_pending)
+
+ def assert_log_publish(self, client, log_name, disable=None, discard=None,
+ expected_http_code=200,
+ expected_type=guest_log.LogType.USER.name,
+ expected_status=guest_log.LogStatus.Disabled.name,
+ expected_published=None, expected_pending=None):
+ self.report.log("Executing log_publish for log '%s' (disable: %s "
+ "discard: %s)" %
+ (log_name, disable, discard))
+ log_details = client.instances.log_publish(
+ self.instance_info.id, log_name, disable=disable, discard=discard)
+ self.assert_client_code(expected_http_code)
+ self.assert_log_details(
+ log_details, log_name,
+ expected_type=expected_type,
+ expected_status=expected_status,
+ expected_published=expected_published,
+ expected_pending=expected_pending)
+
+ def assert_log_discard(self, client, log_name,
+ expected_http_code=200,
+ expected_type=guest_log.LogType.USER.name,
+ expected_status=guest_log.LogStatus.Disabled.name,
+ expected_published=None, expected_pending=None):
+ self.report.log("Executing log_discard for log '%s'" % log_name)
+ log_details = client.instances.log_discard(
+ self.instance_info.id, log_name)
+ self.assert_client_code(expected_http_code)
+ self.assert_log_details(
+ log_details, log_name,
+ expected_type=expected_type,
+ expected_status=expected_status,
+ expected_published=expected_published,
+ expected_pending=expected_pending)
+
+ def assert_storage_gone(self, container, prefix, metafile):
+ try:
+ headers, container_files = self.swift_client.get_container(
+ container, prefix=prefix)
+ self.assert_equal(0, len(container_files),
+ "Found files in %s/%s: %s" %
+ (container, prefix, container_files))
+ except ClientException as ex:
+ if ex.http_status == 404:
+ self.report.log("Container '%s' does not exist" %
+ container)
+ pass
+ else:
+ raise
+ try:
+ self.swift_client.get_object(container, metafile)
+ self.fail("Found metafile after discard: %s" % metafile)
+ except ClientException as ex:
+ if ex.http_status == 404:
+ self.report.log("Metafile '%s' gone as expected" %
+ metafile)
+ pass
+ else:
+ raise
+
+ def assert_storage_exists(self, container, prefix, metafile):
+ try:
+ headers, container_files = self.swift_client.get_container(
+ container, prefix=prefix)
+ self.assert_true(len(container_files) > 0,
+ "No files found in %s/%s" %
+ (container, prefix))
+ except ClientException as ex:
+ if ex.http_status == 404:
+ self.fail("Container '%s' does not exist" % container)
+ else:
+ raise
+ try:
+ self.swift_client.get_object(container, metafile)
+ except ClientException as ex:
+ if ex.http_status == 404:
+ self.fail("Missing metafile: %s" % metafile)
+ else:
+ raise
+
+ def run_test_log_enable_sys(self,
+ expected_exception=exceptions.BadRequest,
+ expected_http_code=400):
+ self.assert_log_enable_fails(
+ self.admin_client,
+ expected_exception, expected_http_code,
+ self._get_unexposed_sys_log_name())
+
+ def assert_log_enable_fails(self, client,
+ expected_exception, expected_http_code,
+ log_name):
+ self.assert_raises(expected_exception, None,
+ client.instances.log_enable,
+ self.instance_info.id, log_name)
+ # we may not be using the main client, so check explicitly here
+ self.assert_client_code(expected_http_code, client)
+
+ def run_test_log_disable_sys(self,
+ expected_exception=exceptions.BadRequest,
+ expected_http_code=400):
+ self.assert_log_disable_fails(
+ self.admin_client,
+ expected_exception, expected_http_code,
+ self._get_unexposed_sys_log_name())
+
+ def assert_log_disable_fails(self, client,
+ expected_exception, expected_http_code,
+ log_name, discard=None):
+ self.assert_raises(expected_exception, None,
+ client.instances.log_disable,
+ self.instance_info.id, log_name,
+ discard=discard)
+ # we may not be using the main client, so check explicitly here
+ self.assert_client_code(expected_http_code, client)
+
+ def run_test_log_show_unauth_user(self,
+ expected_exception=exceptions.NotFound,
+ expected_http_code=404):
+ self.assert_log_show_fails(
+ self.unauth_client,
+ expected_exception, expected_http_code,
+ self._get_exposed_user_log_name())
+
+ def assert_log_show_fails(self, client,
+ expected_exception, expected_http_code,
+ log_name):
+ self.assert_raises(expected_exception, None,
+ client.instances.log_show,
+ self.instance_info.id, log_name)
+ # we may not be using the main client, so check explicitly here
+ self.assert_client_code(expected_http_code, client)
+
+ def run_test_log_list_unauth_user(self,
+ expected_exception=exceptions.NotFound,
+ expected_http_code=404):
+ self.assert_raises(expected_exception, None,
+ self.unauth_client.instances.log_list,
+ self.instance_info.id)
+ # we're not using the main client, so check explicitly here
+ self.assert_client_code(expected_http_code, self.unauth_client)
+
+ def run_test_log_generator_unauth_user(self):
+ self.assert_log_generator_unauth_user(
+ self.unauth_client, self._get_exposed_user_log_name())
+
+    def assert_log_generator_unauth_user(self, client, log_name, publish=None):
+        try:
+            client.instances.log_generator(
+                self.instance_info.id, log_name, publish=publish)
+        except Exception:
+            return
+        self.fail("Client allowed unauthorized access to log_generator")
+
+ def run_test_log_generator_publish_unauth_user(self):
+ self.assert_log_generator_unauth_user(
+ self.unauth_client, self._get_exposed_user_log_name(),
+ publish=True)
+
+ def run_test_log_show_unexposed_user(
+ self, expected_exception=exceptions.BadRequest,
+ expected_http_code=400):
+ self.assert_log_show_fails(
+ self.auth_client,
+ expected_exception, expected_http_code,
+ self._get_unexposed_sys_log_name())
+
+ def run_test_log_enable_unexposed_user(
+ self, expected_exception=exceptions.BadRequest,
+ expected_http_code=400):
+ self.assert_log_enable_fails(
+ self.auth_client,
+ expected_exception, expected_http_code,
+ self._get_unexposed_sys_log_name())
+
+ def run_test_log_disable_unexposed_user(
+ self, expected_exception=exceptions.BadRequest,
+ expected_http_code=400):
+ self.assert_log_disable_fails(
+ self.auth_client,
+ expected_exception, expected_http_code,
+ self._get_unexposed_sys_log_name())
+
+ def run_test_log_publish_unexposed_user(
+ self, expected_exception=exceptions.BadRequest,
+ expected_http_code=400):
+ self.assert_log_publish_fails(
+ self.auth_client,
+ expected_exception, expected_http_code,
+ self._get_unexposed_sys_log_name())
+
+ def assert_log_publish_fails(self, client,
+ expected_exception, expected_http_code,
+ log_name,
+ disable=None, discard=None):
+ self.assert_raises(expected_exception, None,
+ client.instances.log_publish,
+ self.instance_info.id, log_name,
+ disable=disable, discard=discard)
+ # we may not be using the main client, so check explicitly here
+ self.assert_client_code(expected_http_code, client)
+
+ def run_test_log_discard_unexposed_user(
+ self, expected_exception=exceptions.BadRequest,
+ expected_http_code=400):
+ self.assert_log_discard_fails(
+ self.auth_client,
+ expected_exception, expected_http_code,
+ self._get_unexposed_sys_log_name())
+
+ def assert_log_discard_fails(self, client,
+ expected_exception, expected_http_code,
+ log_name):
+ self.assert_raises(expected_exception, None,
+ client.instances.log_discard,
+ self.instance_info.id, log_name)
+ # we may not be using the main client, so check explicitly here
+ self.assert_client_code(expected_http_code, client)
+
+ def run_test_log_enable_user(self):
+ expected_status = guest_log.LogStatus.Ready.name
+ expected_pending = 1
+ if self.test_helper.log_enable_requires_restart():
+ expected_status = guest_log.LogStatus.Restart_Required.name
+ # if using an existing instance, there may already be something
+ expected_pending = self._set_zero_or_none()
+
+ for log_name in self._get_exposed_user_log_names():
+ self.assert_log_enable(
+ self.auth_client,
+ log_name,
+ expected_status=expected_status,
+ expected_published=0, expected_pending=expected_pending)
+
+ def run_test_log_enable_flip_user(self):
+ # for restart required datastores, test that flipping them
+ # back to disabled returns the status to 'Disabled'
+ # from 'Restart_Required'
+ if self.test_helper.log_enable_requires_restart():
+ # if using an existing instance, there may already be something
+ expected_pending = self._set_zero_or_none()
+
+ for log_name in self._get_exposed_user_log_names():
+ self.assert_log_disable(
+ self.auth_client,
+ log_name,
+ expected_status=guest_log.LogStatus.Disabled.name,
+ expected_published=0, expected_pending=expected_pending)
+ self.assert_log_enable(
+ self.auth_client,
+ log_name,
+ expected_status=guest_log.LogStatus.Restart_Required.name,
+ expected_published=0, expected_pending=expected_pending)
+
+ def run_test_restart_datastore(self, expected_http_code=202):
+ if self.test_helper.log_enable_requires_restart():
+ instance_id = self.instance_info.id
+ # we need to wait until the heartbeat flips the instance
+ # back into 'ACTIVE' before we issue the restart command
+ expected_states = ['RESTART_REQUIRED', 'ACTIVE']
+ self.assert_instance_action(instance_id, expected_states, None)
+ self.auth_client.instances.restart(instance_id)
+ self.assert_client_code(expected_http_code)
+
+ def run_test_wait_for_restart(self, expected_states=['REBOOT', 'ACTIVE']):
+ if self.test_helper.log_enable_requires_restart():
+ self.assert_instance_action(self.instance_info.id,
+ expected_states, None)
+
+ def run_test_log_publish_user(self):
+ for log_name in self._get_exposed_user_log_names():
+ self.assert_log_publish(
+ self.auth_client,
+ log_name,
+ expected_status=guest_log.LogStatus.Published.name,
+ expected_published=1, expected_pending=0)
+
+ def run_test_add_data(self):
+ self.test_helper.add_data(DataType.micro, self.get_instance_host())
+
+ def run_test_verify_data(self):
+ self.test_helper.verify_data(DataType.micro, self.get_instance_host())
+
+ def run_test_log_publish_again_user(self):
+ for log_name in self._get_exposed_user_log_names():
+ self.assert_log_publish(
+ self.admin_client,
+ log_name,
+ expected_status=guest_log.LogStatus.Published.name,
+ expected_published=self._get_last_log_published(log_name),
+ expected_pending=0)
+
+ def run_test_log_generator_user(self):
+ for log_name in self._get_exposed_user_log_names():
+ self.assert_log_generator(
+ self.auth_client,
+ log_name,
+ lines=2, expected_lines=2)
+
+ def assert_log_generator(self, client, log_name, publish=False,
+ lines=4, expected_lines=None,
+ swift_client=None):
+ self.report.log("Executing log_generator for log '%s' (publish: %s)" %
+ (log_name, publish))
+ log_gen = client.instances.log_generator(
+ self.instance_info.id, log_name,
+ publish=publish, lines=lines, swift=swift_client)
+ log_contents = "".join([chunk for chunk in log_gen()])
+ self.report.log("Returned %d lines for log '%s': %s" % (
+ len(log_contents.splitlines()), log_name, log_contents))
+ self._set_last_log_contents(log_name, log_contents)
+ if expected_lines:
+ self.assert_equal(expected_lines,
+ len(log_contents.splitlines()),
+ "Wrong line count for '%s' log" % log_name)
+ else:
+ self.assert_true(len(log_contents.splitlines()) <= lines,
+ "More than %d lines found for '%s' log" %
+ (lines, log_name))
+
+ def run_test_log_generator_publish_user(self):
+ for log_name in self._get_exposed_user_log_names():
+ self.assert_log_generator(
+ self.auth_client,
+ log_name, publish=True,
+ lines=3, expected_lines=3)
+
+ def run_test_log_generator_swift_client_user(self):
+ swift_client = self.swift_client
+ for log_name in self._get_exposed_user_log_names():
+ self.assert_log_generator(
+ self.auth_client,
+ log_name, publish=True,
+ lines=3, expected_lines=3,
+ swift_client=swift_client)
+
+ def run_test_add_data_again(self):
+ # Add some more data so we have at least 3 log data files
+ self.test_helper.add_data(DataType.micro2, self.get_instance_host())
+
+ def run_test_verify_data_again(self):
+ self.test_helper.verify_data(DataType.micro2, self.get_instance_host())
+
+ def run_test_log_generator_user_by_row(self):
+ log_name = self._get_exposed_user_log_name()
+ self.assert_log_publish(
+ self.auth_client,
+ log_name,
+ expected_status=guest_log.LogStatus.Published.name,
+ expected_published=self._get_last_log_published(log_name),
+ expected_pending=0)
+ # Now get the full contents of the log
+ self.assert_log_generator(self.auth_client, log_name, lines=100000)
+ log_lines = len(self._get_last_log_contents(log_name).splitlines())
+ # Make sure we get the right number of log lines back each time
+ for lines in range(1, log_lines):
+ self.assert_log_generator(
+ self.auth_client,
+ log_name, lines=lines, expected_lines=lines)
+
+ def run_test_log_save_user(self):
+ for log_name in self._get_exposed_user_log_names():
+ self.assert_test_log_save(self.auth_client, log_name)
+
+ def run_test_log_save_publish_user(self):
+ for log_name in self._get_exposed_user_log_names():
+ self.assert_test_log_save(self.auth_client, log_name, publish=True)
+
+ def assert_test_log_save(self, client, log_name, publish=False):
+ # generate the file
+ self.report.log("Executing log_save for log '%s' (publish: %s)" %
+ (log_name, publish))
+ with tempfile.NamedTemporaryFile() as temp_file:
+ client.instances.log_save(self.instance_info.id,
+ log_name=log_name, publish=publish,
+ filename=temp_file.name)
+ file_contents = operating_system.read_file(temp_file.name)
+ # now grab the contents ourselves
+ self.assert_log_generator(client, log_name, lines=100000)
+ # and compare them
+ self.assert_equal(self._get_last_log_contents(log_name),
+ file_contents)
+
+ def run_test_log_discard_user(self):
+ for log_name in self._get_exposed_user_log_names():
+ self.assert_log_discard(
+ self.auth_client,
+ log_name,
+ expected_status=guest_log.LogStatus.Ready.name,
+ expected_published=0, expected_pending=1)
+
+ def run_test_log_disable_user(self):
+ expected_status = guest_log.LogStatus.Disabled.name
+ if self.test_helper.log_enable_requires_restart():
+ expected_status = guest_log.LogStatus.Restart_Required.name
+ for log_name in self._get_exposed_user_log_names():
+ self.assert_log_disable(
+ self.auth_client,
+ log_name,
+ expected_status=expected_status,
+ expected_published=0, expected_pending=1)
+
+ def run_test_log_show_sys(self):
+ self.assert_log_show(
+ self.admin_client,
+ self._get_unexposed_sys_log_name(),
+ expected_type=guest_log.LogType.SYS.name,
+ expected_status=guest_log.LogStatus.Ready.name,
+ expected_published=0, expected_pending=1)
+
+ def run_test_log_publish_sys(self):
+ log_name = self._get_unexposed_sys_log_name()
+ self.assert_log_publish(
+ self.admin_client,
+ log_name,
+ expected_type=guest_log.LogType.SYS.name,
+ expected_status=guest_log.LogStatus.Partial.name,
+ expected_published=1, expected_pending=1)
+
+ def run_test_log_publish_again_sys(self):
+ log_name = self._get_unexposed_sys_log_name()
+ self.assert_log_publish(
+ self.admin_client,
+ log_name,
+ expected_type=guest_log.LogType.SYS.name,
+ expected_status=guest_log.LogStatus.Partial.name,
+ expected_published=self._get_last_log_published(log_name) + 1,
+ expected_pending=1)
+
+ def run_test_log_generator_sys(self):
+ self.assert_log_generator(
+ self.admin_client,
+ self._get_unexposed_sys_log_name(),
+ lines=4, expected_lines=4)
+
+ def run_test_log_generator_publish_sys(self):
+ self.assert_log_generator(
+ self.admin_client,
+ self._get_unexposed_sys_log_name(), publish=True,
+ lines=4, expected_lines=4)
+
+ def run_test_log_generator_swift_client_sys(self):
+ self.assert_log_generator(
+ self.admin_client,
+ self._get_unexposed_sys_log_name(), publish=True,
+ lines=4, expected_lines=4,
+ swift_client=self.swift_client)
+
+ def run_test_log_save_sys(self):
+ self.assert_test_log_save(
+ self.admin_client,
+ self._get_unexposed_sys_log_name())
+
+ def run_test_log_save_publish_sys(self):
+ self.assert_test_log_save(
+ self.admin_client,
+ self._get_unexposed_sys_log_name(),
+ publish=True)
+
+ def run_test_log_discard_sys(self):
+ self.assert_log_discard(
+ self.admin_client,
+ self._get_unexposed_sys_log_name(),
+ expected_type=guest_log.LogType.SYS.name,
+ expected_status=guest_log.LogStatus.Ready.name,
+ expected_published=0, expected_pending=1)
diff --git a/trove/tests/scenario/runners/instance_create_runners.py b/trove/tests/scenario/runners/instance_create_runners.py
index 0f482117..094683a3 100644
--- a/trove/tests/scenario/runners/instance_create_runners.py
+++ b/trove/tests/scenario/runners/instance_create_runners.py
@@ -36,8 +36,6 @@ class InstanceCreateRunner(TestRunner):
def run_empty_instance_create(
self, expected_states=['BUILD', 'ACTIVE'], expected_http_code=200):
- # TODO(pmalik): Instance create should return 202 Accepted (cast)
- # rather than 200 OK (call).
name = self.instance_info.name
flavor = self._get_instance_flavor()
trove_volume_size = CONFIG.get('trove_volume_size', 1)
@@ -79,9 +77,7 @@ class InstanceCreateRunner(TestRunner):
self, with_dbs=True, with_users=True, configuration_id=None,
expected_states=['BUILD', 'ACTIVE'], expected_http_code=200,
create_helper_user=True):
- # TODO(pmalik): Instance create should return 202 Accepted (cast)
- # rather than 200 OK (call).
- name = self.instance_info.name
+ name = self.instance_info.name + '_init'
flavor = self._get_instance_flavor()
trove_volume_size = CONFIG.get('trove_volume_size', 1)
self.init_inst_dbs = (self.test_helper.get_valid_database_definitions()
@@ -91,6 +87,9 @@ class InstanceCreateRunner(TestRunner):
if configuration_id:
self.init_config_group_id = configuration_id
+ if self.is_using_existing_instance:
+ raise SkipTest("Using existing instance.")
+
if (self.init_inst_dbs or self.init_inst_users or
self.init_config_group_id):
info = self.assert_instance_create(
@@ -231,6 +230,8 @@ class InstanceCreateRunner(TestRunner):
instances = [self.instance_info.id]
if self.init_inst_id:
instances.append(self.init_inst_id)
+ if self.is_using_existing_instance:
+ expected_states = ['ACTIVE']
self.assert_all_instance_states(instances, expected_states)
def run_add_initialized_instance_data(self):
diff --git a/trove/tests/scenario/runners/test_runners.py b/trove/tests/scenario/runners/test_runners.py
index 41ac8f95..fa4888d6 100644
--- a/trove/tests/scenario/runners/test_runners.py
+++ b/trove/tests/scenario/runners/test_runners.py
@@ -16,14 +16,15 @@
import os
import time as timer
+from oslo_config.cfg import NoSuchOptError
from proboscis import asserts
+import swiftclient
from troveclient.compat import exceptions
-from oslo_config.cfg import NoSuchOptError
from trove.common import cfg
+from trove.common import exception
from trove.common import utils
from trove.common.utils import poll_until, build_polling_task
-from trove.common import exception
from trove.tests.api.instances import instance_info
from trove.tests.config import CONFIG
from trove.tests.util import create_dbaas_client
@@ -78,7 +79,9 @@ class TestRunner(object):
instance_info.volume = None
self.auth_client = create_dbaas_client(self.instance_info.user)
- self.unauth_client = None
+ self._unauth_client = None
+ self._admin_client = None
+ self._swift_client = None
self._test_helper = None
@classmethod
@@ -148,12 +151,13 @@ class TestRunner(object):
def test_helper(self, test_helper):
self._test_helper = test_helper
- def get_unauth_client(self):
- if not self.unauth_client:
- self.unauth_client = self._create_unauthorized_client()
- return self.unauth_client
+ @property
+ def unauth_client(self):
+ if not self._unauth_client:
+ self._unauth_client = self._create_unauthorized_client()
+ return self._unauth_client
- def _create_unauthorized_client(self, force=False):
+ def _create_unauthorized_client(self):
"""Create a client from a different 'unauthorized' user
to facilitate negative testing.
"""
@@ -162,6 +166,37 @@ class TestRunner(object):
requirements, black_list=[self.instance_info.user.auth_user])
return create_dbaas_client(other_user)
+ @property
+ def admin_client(self):
+ if not self._admin_client:
+ self._admin_client = self._create_admin_client()
+ return self._admin_client
+
+ def _create_admin_client(self):
+ """Create a client from an admin user."""
+ requirements = Requirements(is_admin=True, services=["swift"])
+ admin_user = CONFIG.users.find_user(requirements)
+ return create_dbaas_client(admin_user)
+
+ @property
+ def swift_client(self):
+ if not self._swift_client:
+ self._swift_client = self._create_swift_client()
+ return self._swift_client
+
+ def _create_swift_client(self):
+ """Create a swift client from the admin user details."""
+ requirements = Requirements(is_admin=True, services=["swift"])
+ user = CONFIG.users.find_user(requirements)
+ os_options = {'region_name': os.getenv("OS_REGION_NAME")}
+ return swiftclient.client.Connection(
+ authurl=CONFIG.nova_client['auth_url'],
+ user=user.auth_user,
+ key=user.auth_key,
+ tenant_name=user.tenant,
+ auth_version='2.0',
+ os_options=os_options)
+
def assert_raises(self, expected_exception, expected_http_code,
client_cmd, *cmd_args, **cmd_kwargs):
asserts.assert_raises(expected_exception, client_cmd,
diff --git a/trove/tests/unittests/guestagent/test_manager.py b/trove/tests/unittests/guestagent/test_manager.py
new file mode 100644
index 00000000..a38d9fc2
--- /dev/null
+++ b/trove/tests/unittests/guestagent/test_manager.py
@@ -0,0 +1,314 @@
+# Copyright 2015 Tesora Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import getpass
+import os
+
+from mock import DEFAULT
+from mock import MagicMock
+from mock import patch
+from proboscis.asserts import assert_equal
+from proboscis.asserts import assert_true
+
+from trove.common.context import TroveContext
+from trove.common import exception
+from trove.guestagent.common import operating_system
+from trove.guestagent.datastore import manager
+from trove.guestagent import guest_log
+from trove.tests.unittests import trove_testtools
+
+
class MockManager(manager.Manager):
    """A concrete Manager whose collaborators are replaced with MagicMocks.

    Lets the guest-log logic in the base Manager run without a real
    datastore application, status reporter or configuration manager.
    """

    def __init__(self):
        super(MockManager, self).__init__('mysql')
        # Stub out everything a real datastore manager would construct.
        self._app = MagicMock()
        self._status = MagicMock()
        self._configuration_manager = MagicMock()

    @property
    def app(self):
        # Mocked 'app' collaborator.
        return self._app

    @property
    def configuration_manager(self):
        # Mocked configuration manager.
        return self._configuration_manager

    @property
    def status(self):
        # Mocked status reporter.
        return self._status
+
+
class ManagerTest(trove_testtools.TestCase):
    """Unit tests for the guest-log handling of datastore.manager.Manager.

    Uses MockManager so no real datastore is needed, and replaces the
    I/O-touching internals of the cached GuestLog objects with mocks so
    neither the filesystem nor swift is ever contacted.
    """

    def setUp(self):
        super(ManagerTest, self).setUp()

        # chmod would require elevated privileges; stub it for every test.
        self.chmod_patch = patch.object(operating_system, 'chmod')
        self.chmod_mock = self.chmod_patch.start()
        self.addCleanup(self.chmod_patch.stop)

        self.manager = MockManager()
        self.context = TroveContext()

        self.log_name_sys = 'guest'
        self.log_name_user = 'general'
        self.prefix = 'log_prefix'
        self.container = 'log_container'
        self.size = 1024
        self.published = 128
        self.guest_log_user = guest_log.GuestLog(
            self.context, self.log_name_user, guest_log.LogType.USER, None,
            '/tmp/gen.log', True)
        self.guest_log_sys = guest_log.GuestLog(
            self.context, self.log_name_sys, guest_log.LogType.SYS, None,
            '/tmp/guest.log', True)
        # Neutralize anything on the GuestLog objects that would touch
        # the filesystem or swift, and seed deterministic sizes.
        for gl in [self.guest_log_user, self.guest_log_sys]:
            gl._container_name = self.container
            gl._refresh_details = MagicMock()
            gl._log_rotated = MagicMock(return_value=False)
            gl._publish_to_container = MagicMock()
            gl._delete_log_components = MagicMock()
            gl._object_prefix = MagicMock(return_value=self.prefix)
            gl._size = self.size
            gl._published_size = self.published
        self.manager._guest_log_cache = {
            self.log_name_user: self.guest_log_user,
            self.log_name_sys: self.guest_log_sys}
        # Details dict the manager is expected to report for the USER log;
        # the SYS variant differs only in type, status and name.
        self.expected_details_user = {
            'status': 'Disabled',
            'prefix': self.prefix,
            'container': self.container,
            'name': self.log_name_user,
            'published': self.published,
            'metafile': self.prefix + '_metafile',
            'type': 'USER',
            'pending': self.size - self.published}
        self.expected_details_sys = dict(self.expected_details_user)
        self.expected_details_sys['type'] = 'SYS'
        self.expected_details_sys['status'] = 'Enabled'
        self.expected_details_sys['name'] = self.log_name_sys

    def tearDown(self):
        super(ManagerTest, self).tearDown()

    def test_update_status(self):
        """update_status must delegate to the status object's update()."""
        self.manager.update_status(self.context)
        self.manager.status.update.assert_any_call()

    def test_guest_log_list(self):
        """guest_log_list must report details for every cached log."""
        log_list = self.manager.guest_log_list(self.context)
        expected = [self.expected_details_sys, self.expected_details_user]
        assert_equal(self._flatten_list_of_dicts(expected),
                     self._flatten_list_of_dicts(log_list),
                     "Wrong list: %s (Expected: %s)" % (
                         self._flatten_list_of_dicts(log_list),
                         self._flatten_list_of_dicts(expected)))

    def _flatten_list_of_dicts(self, lod):
        """Canonicalize a list of dicts into a single order-independent
        string so two lists can be compared regardless of ordering."""
        value = sorted("".join("%s%s" % (k, d[k]) for k in sorted(d.keys()))
                       for d in lod)
        return "".join(sorted(value))

    def test_guest_log_action_enable_disable(self):
        """Requesting enable and disable together is rejected."""
        # NOTE: assertRaisesRegexp was a deprecated alias removed in
        # Python 3.12; use assertRaisesRegex instead.
        self.assertRaisesRegex(exception.BadRequest,
                               "Cannot enable and disable",
                               self.manager.guest_log_action,
                               self.context,
                               self.log_name_sys,
                               True, True, False, False)

    def test_guest_log_action_enable_sys(self):
        """SYSTEM logs cannot be enabled."""
        self.assertRaisesRegex(exception.BadRequest,
                               "Cannot enable a SYSTEM log",
                               self.manager.guest_log_action,
                               self.context,
                               self.log_name_sys,
                               True, False, False, False)

    def test_guest_log_action_disable_sys(self):
        """SYSTEM logs cannot be disabled."""
        self.assertRaisesRegex(exception.BadRequest,
                               "Cannot disable a SYSTEM log",
                               self.manager.guest_log_action,
                               self.context,
                               self.log_name_sys,
                               False, True, False, False)

    def test_guest_log_action_publish_sys(self):
        """Publishing a SYS log pushes its contents to the container."""
        with patch.object(os.path, 'isfile', return_value=True):
            log_details = self.manager.guest_log_action(self.context,
                                                        self.log_name_sys,
                                                        False, False,
                                                        True, False)
            assert_equal(log_details, self.expected_details_sys,
                         "Wrong details: %s (expected %s)" %
                         (log_details, self.expected_details_sys))
            assert_equal(
                1, self.guest_log_sys._publish_to_container.call_count)

    def test_guest_log_action_discard_sys(self):
        """Discarding a SYS log deletes its published components."""
        log_details = self.manager.guest_log_action(self.context,
                                                    self.log_name_sys,
                                                    False, False,
                                                    False, True)
        assert_equal(log_details, self.expected_details_sys,
                     "Wrong details: %s (expected %s)" %
                     (log_details, self.expected_details_sys))
        assert_equal(
            1, self.guest_log_sys._delete_log_components.call_count)

    def test_guest_log_action_enable_user(self):
        """Enabling a USER log routes through guest_log_enable once."""
        with patch.object(manager.Manager, 'guest_log_enable',
                          return_value=False) as mock_enable:
            log_details = self.manager.guest_log_action(self.context,
                                                        self.log_name_user,
                                                        True, False,
                                                        False, False)
            assert_equal(log_details, self.expected_details_user,
                         "Wrong details: %s (expected %s)" %
                         (log_details, self.expected_details_user))
            assert_equal(1, mock_enable.call_count)

    def test_guest_log_action_disable_user(self):
        """Disabling an enabled USER log routes through guest_log_enable."""
        with patch.object(manager.Manager, 'guest_log_enable',
                          return_value=False) as mock_enable:
            self.guest_log_user._enabled = True
            log_details = self.manager.guest_log_action(self.context,
                                                        self.log_name_user,
                                                        False, True,
                                                        False, False)
            assert_equal(log_details, self.expected_details_user,
                         "Wrong details: %s (expected %s)" %
                         (log_details, self.expected_details_user))
            assert_equal(1, mock_enable.call_count)

    def test_guest_log_action_publish_user(self):
        """Publishing a disabled USER log first enables it."""
        with patch.object(manager.Manager, 'guest_log_enable',
                          return_value=False) as mock_enable:
            with patch.object(os.path, 'isfile', return_value=True):
                log_details = self.manager.guest_log_action(self.context,
                                                            self.log_name_user,
                                                            False, False,
                                                            True, False)
                assert_equal(log_details, self.expected_details_user,
                             "Wrong details: %s (expected %s)" %
                             (log_details, self.expected_details_user))
                assert_equal(1, mock_enable.call_count)

    def test_guest_log_action_discard_user(self):
        """Discarding a USER log deletes its published components."""
        log_details = self.manager.guest_log_action(self.context,
                                                    self.log_name_user,
                                                    False, False,
                                                    False, True)
        assert_equal(log_details, self.expected_details_user,
                     "Wrong details: %s (expected %s)" %
                     (log_details, self.expected_details_user))
        assert_equal(1, self.guest_log_user._delete_log_components.call_count)

    def test_set_guest_log_status_disabled(self):
        """Status transitions honor the Restart_Required 'sticky' state."""
        data = [
            {'orig': guest_log.LogStatus.Enabled,
             'new': guest_log.LogStatus.Disabled,
             'expect': guest_log.LogStatus.Disabled},
            # Restart_Required is not cleared by a plain Enabled request...
            {'orig': guest_log.LogStatus.Restart_Required,
             'new': guest_log.LogStatus.Enabled,
             'expect': guest_log.LogStatus.Restart_Required},
            # ...but is cleared by Restart_Completed.
            {'orig': guest_log.LogStatus.Restart_Required,
             'new': guest_log.LogStatus.Restart_Completed,
             'expect': guest_log.LogStatus.Restart_Completed},
            {'orig': guest_log.LogStatus.Published,
             'new': guest_log.LogStatus.Partial,
             'expect': guest_log.LogStatus.Partial},
        ]
        for datum in data:
            self.assert_guest_log_status(datum['orig'],
                                         datum['new'],
                                         datum['expect'])

    def assert_guest_log_status(self, original_status, new_status,
                                expected_final_status):
        """Set a log's status and verify the manager's resulting state."""
        gl_cache = self.manager.guest_log_cache
        gl_cache[self.log_name_sys]._status = original_status
        self.manager.set_guest_log_status(new_status, self.log_name_sys)
        assert_equal(gl_cache[self.log_name_sys].status, expected_final_status,
                     "Unexpected status for '%s': %s' (Expected %s)" %
                     (self.log_name_sys, gl_cache[self.log_name_sys].status,
                      expected_final_status))

    def test_build_log_file_name(self):
        """Without a datastore dir the default guest-log path is built,
        creating both the base and datastore directories."""
        current_owner = getpass.getuser()
        with patch.multiple(operating_system,
                            exists=MagicMock(return_value=False),
                            write_file=DEFAULT,
                            create_directory=DEFAULT,
                            chown=DEFAULT,
                            chmod=DEFAULT) as os_mocks:
            log_file = self.manager.build_log_file_name(self.log_name_sys,
                                                        current_owner)
            expected_filename = '%s/%s/%s-%s.log' % (
                self.manager.GUEST_LOG_BASE_DIR,
                self.manager.GUEST_LOG_DATASTORE_DIRNAME,
                self.manager.manager, self.log_name_sys)
            expected_call_counts = {'exists': 1,
                                    'write_file': 1,
                                    'create_directory': 2,
                                    'chown': 1,
                                    'chmod': 1}
            self.assert_build_log_file_name(expected_filename, log_file,
                                            os_mocks, expected_call_counts)

    def assert_build_log_file_name(self, expected_filename, filename,
                                   mocks, call_counts):
        """Verify the generated filename and how often each OS helper ran."""
        assert_equal(expected_filename, filename,
                     "Unexpected filename: %s (expected %s)" %
                     (filename, expected_filename))
        for key in mocks.keys():
            assert_true(
                mocks[key].call_count == call_counts[key],
                "%s called %d time(s)" % (key, mocks[key].call_count))

    def test_build_log_file_name_with_dir(self):
        """With an explicit datastore dir only that directory is created."""
        current_owner = getpass.getuser()
        log_dir = '/tmp'
        with patch.multiple(operating_system,
                            exists=MagicMock(return_value=False),
                            write_file=DEFAULT,
                            create_directory=DEFAULT,
                            chown=DEFAULT,
                            chmod=DEFAULT) as os_mocks:
            log_file = self.manager.build_log_file_name(self.log_name_sys,
                                                        current_owner,
                                                        datastore_dir=log_dir)
            expected_filename = '%s/%s-%s.log' % (
                log_dir,
                self.manager.manager, self.log_name_sys)
            expected_call_counts = {'exists': 1,
                                    'write_file': 1,
                                    'create_directory': 1,
                                    'chown': 1,
                                    'chmod': 1}
            self.assert_build_log_file_name(expected_filename, log_file,
                                            os_mocks, expected_call_counts)

    def test_validate_log_file(self):
        """A missing log file is created, chowned and chmodded once each."""
        file_name = '/tmp/non-existent-file'
        current_owner = getpass.getuser()
        with patch.multiple(operating_system,
                            exists=MagicMock(return_value=False),
                            write_file=DEFAULT,
                            chown=DEFAULT,
                            chmod=DEFAULT) as os_mocks:
            log_file = self.manager.validate_log_file(file_name, current_owner)
            assert_equal(file_name, log_file, "Unexpected filename")
            for key in os_mocks.keys():
                assert_true(os_mocks[key].call_count == 1,
                            "%s not called" % key)
diff --git a/trove/tests/unittests/guestagent/test_operating_system.py b/trove/tests/unittests/guestagent/test_operating_system.py
index b2655c43..8728baec 100644
--- a/trove/tests/unittests/guestagent/test_operating_system.py
+++ b/trove/tests/unittests/guestagent/test_operating_system.py
@@ -808,7 +808,7 @@ class TestOperatingSystem(trove_testtools.TestCase):
as_root=True)
def _assert_execute_call(self, exec_args, exec_kwargs,
- fun, return_value, *args, **kwargs):
+ func, return_value, *args, **kwargs):
"""
Execute a function with given arguments.
Assert a return value and appropriate sequence of calls to the
@@ -826,8 +826,8 @@ class TestOperatingSystem(trove_testtools.TestCase):
'utils.execute_with_timeout'.
:type exec_kwargs: list-of-dicts
- :param fun: Tested function call.
- :type fun: callable
+ :param func: Tested function call.
+ :type func: callable
:param return_value: Expected return value or exception
from the tested call if any.
@@ -844,9 +844,9 @@ class TestOperatingSystem(trove_testtools.TestCase):
return_value=('0', '')) as exec_call:
if isinstance(return_value, ExpectedException):
with return_value:
- fun(*args, **kwargs)
+ func(*args, **kwargs)
else:
- actual_value = fun(*args, **kwargs)
+ actual_value = func(*args, **kwargs)
if return_value is not None:
self.assertEqual(return_value, actual_value,
"Return value mismatch.")