-rw-r--r--  cloudinit/apport.py                     | 105
-rw-r--r--  cloudinit/cmd/devel/logs.py             | 101
-rw-r--r--  cloudinit/cmd/devel/tests/__init__.py   |   0
-rw-r--r--  cloudinit/cmd/devel/tests/test_logs.py  | 120
-rw-r--r--  cloudinit/cmd/main.py                   |  11
-rwxr-xr-x  packages/debian/rules.in                |   1
-rw-r--r--  tests/unittests/test_cli.py             |  22
7 files changed, 354 insertions(+), 6 deletions(-)
diff --git a/cloudinit/apport.py b/cloudinit/apport.py
new file mode 100644
index 00000000..221f341c
--- /dev/null
+++ b/cloudinit/apport.py
@@ -0,0 +1,105 @@
+# Copyright (C) 2017 Canonical Ltd.
+#
+# This file is part of cloud-init. See LICENSE file for license information.
+
+'''Cloud-init apport interface'''
+
+try:
+    from apport.hookutils import (
+        attach_file, attach_root_command_outputs, root_command_output)
+    has_apport = True
+except ImportError:
+    has_apport = False
+
+
+KNOWN_CLOUD_NAMES = [
+    'Amazon - Ec2', 'AliYun', 'AltCloud', 'Azure', 'Bigstep', 'CloudSigma',
+    'CloudStack', 'DigitalOcean', 'GCE - Google Compute Engine', 'MAAS',
+    'NoCloud', 'OpenNebula', 'OpenStack', 'OVF', 'Scaleway', 'SmartOS',
+    'VMware', 'Other']
+
+# Potentially clear text collected logs
+CLOUDINIT_LOG = '/var/log/cloud-init.log'
+CLOUDINIT_OUTPUT_LOG = '/var/log/cloud-init-output.log'
+USER_DATA_FILE = '/var/lib/cloud/instance/user-data.txt' # Optional
+
+
+def attach_cloud_init_logs(report, ui=None):
+    '''Attach cloud-init logs and tarfile from 'cloud-init collect-logs'.'''
+    attach_root_command_outputs(report, {
+        'cloud-init-log-warnings':
+            'egrep -i "warn|error" /var/log/cloud-init.log',
+        'cloud-init-output.log.txt': 'cat /var/log/cloud-init-output.log'})
+    root_command_output(
+        ['cloud-init', 'collect-logs', '-t', '/tmp/cloud-init-logs.tgz'])
+    attach_file(report, '/tmp/cloud-init-logs.tgz', 'logs.tgz')
+
+
+def attach_hwinfo(report, ui=None):
+    '''Optionally attach hardware info from lshw.'''
+    prompt = (
+        'Your device details (lshw) may be useful to developers when'
+        ' addressing this bug, but gathering it requires admin privileges.'
+        ' Would you like to include this info?')
+    if ui and ui.yesno(prompt):
+        attach_root_command_outputs(report, {'lshw.txt': 'lshw'})
+
+
+def attach_cloud_info(report, ui=None):
+    '''Prompt for cloud details if available.'''
+    if ui:
+        prompt = 'Is this machine running in a cloud environment?'
+        response = ui.yesno(prompt)
+        if response is None:
+            raise StopIteration  # User cancelled
+        if response:
+            prompt = ('Please select the cloud vendor or environment in which'
+                      ' this instance is running')
+            response = ui.choice(prompt, KNOWN_CLOUD_NAMES)
+            if response:
+                report['CloudName'] = KNOWN_CLOUD_NAMES[response[0]]
+            else:
+                report['CloudName'] = 'None'
+
+
+def attach_user_data(report, ui=None):
+    '''Optionally provide user-data if desired.'''
+    if ui:
+        prompt = (
+            'Your user-data or cloud-config file can optionally be provided'
+            ' from {0} and could be useful to developers when addressing this'
+            ' bug. Do you wish to attach user-data to this bug?'.format(
+                USER_DATA_FILE))
+        response = ui.yesno(prompt)
+        if response is None:
+            raise StopIteration  # User cancelled
+        if response:
+            attach_file(report, USER_DATA_FILE, 'user_data.txt')
+
+
+def add_bug_tags(report):
+    '''Add any appropriate tags to the bug.'''
+    if 'JournalErrors' in report.keys():
+        errors = report['JournalErrors']
+        if 'Breaking ordering cycle' in errors:
+            report['Tags'] = 'systemd-ordering'
+
+
+def add_info(report, ui):
+    '''This is an entry point to run cloud-init's apport functionality.
+
+    Distros which want apport support will have a cloud-init package-hook at
+    /usr/share/apport/package-hooks/cloud-init.py which defines an add_info
+    function and returns the result of cloudinit.apport.add_info(report, ui).
+    '''
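+    # As a sketch only (not part of this change), such a package hook could
+    # be as small as:
+    #
+    #     '''apport package hook for cloud-init'''
+    #     from cloudinit import apport
+    #
+    #     def add_info(report, ui):
+    #         return apport.add_info(report, ui)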
+    if not has_apport:
+        raise RuntimeError(
+            'No apport imports discovered. Apport functionality disabled')
+    attach_cloud_init_logs(report, ui)
+    attach_hwinfo(report, ui)
+    attach_cloud_info(report, ui)
+    attach_user_data(report, ui)
+    add_bug_tags(report)
+    return True
+
+# vi: ts=4 expandtab
diff --git a/cloudinit/cmd/devel/logs.py b/cloudinit/cmd/devel/logs.py
new file mode 100644
index 00000000..35ca478f
--- /dev/null
+++ b/cloudinit/cmd/devel/logs.py
@@ -0,0 +1,101 @@
+# Copyright (C) 2017 Canonical Ltd.
+#
+# This file is part of cloud-init. See LICENSE file for license information.
+
+"""Define 'collect-logs' utility and handler to include in cloud-init cmd."""
+
+import argparse
+from cloudinit.util import (
+    ProcessExecutionError, chdir, copy, ensure_dir, subp, write_file)
+from cloudinit.temp_utils import tempdir
+from datetime import datetime
+import os
+import shutil
+
+
+CLOUDINIT_LOGS = ['/var/log/cloud-init.log', '/var/log/cloud-init-output.log']
+CLOUDINIT_RUN_DIR = '/run/cloud-init'
+USER_DATA_FILE = '/var/lib/cloud/instance/user-data.txt' # Optional
+
+
+def get_parser(parser=None):
+    """Build or extend an arg parser for collect-logs utility.
+
+    @param parser: Optional existing ArgumentParser instance representing the
+        collect-logs subcommand which will be extended to support the args of
+        this utility.
+
+    @returns: ArgumentParser with proper argument configuration.
+    """
+    if not parser:
+        parser = argparse.ArgumentParser(
+            prog='collect-logs',
+            description='Collect and tar all cloud-init debug info')
+    parser.add_argument(
+        "--tarfile", '-t', default='cloud-init.tar.gz',
+        help=('The tarfile to create containing all collected logs.'
+              ' Default: cloud-init.tar.gz'))
+    parser.add_argument(
+        "--include-userdata", '-u', default=False, action='store_true',
+        dest='userdata', help=(
+            'Optionally include user-data from {0} which could contain'
+            ' sensitive information.'.format(USER_DATA_FILE)))
+    return parser
+
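+# Example invocation (a sketch; the flags are those defined in get_parser
+# above, and the output path is illustrative):
+#   cloud-init collect-logs --tarfile /tmp/cloud-init.tar.gz \
+#       --include-userdata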
+
+def _write_command_output_to_file(cmd, filename):
+    """Helper which runs a command and writes output or error to filename."""
+    try:
+        out, _ = subp(cmd)
+    except ProcessExecutionError as e:
+        write_file(filename, str(e))
+    else:
+        write_file(filename, out)
+
+
+def collect_logs(tarfile, include_userdata):
+    """Collect all cloud-init logs and tar them up into the provided tarfile.
+
+    @param tarfile: The path of the tar-gzipped file to create.
+    @param include_userdata: Boolean, true means include user-data.
+    """
+    tarfile = os.path.abspath(tarfile)
+    date = datetime.utcnow().date().strftime('%Y-%m-%d')
+    log_dir = 'cloud-init-logs-{0}'.format(date)
+    with tempdir(dir='/tmp') as tmp_dir:
+        log_dir = os.path.join(tmp_dir, log_dir)
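+        # The _write_command_output_to_file calls below rely on write_file,
+        # which is expected to create any missing parent directories, so
+        # log_dir itself does not need to be created first.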
+        _write_command_output_to_file(
+            ['dpkg-query', '--show', "-f=${Version}\n", 'cloud-init'],
+            os.path.join(log_dir, 'version'))
+        _write_command_output_to_file(
+            ['dmesg'], os.path.join(log_dir, 'dmesg.txt'))
+        _write_command_output_to_file(
+            ['journalctl', '-o', 'short-precise'],
+            os.path.join(log_dir, 'journal.txt'))
+        for log in CLOUDINIT_LOGS:
+            copy(log, log_dir)
+        if include_userdata:
+            copy(USER_DATA_FILE, log_dir)
+        run_dir = os.path.join(log_dir, 'run')
+        ensure_dir(run_dir)
+        shutil.copytree(CLOUDINIT_RUN_DIR, os.path.join(run_dir, 'cloud-init'))
+        with chdir(tmp_dir):
+            subp(['tar', 'czvf', tarfile, log_dir.replace(tmp_dir + '/', '')])
+
+
+def handle_collect_logs_args(name, args):
+    """Handle calls to 'cloud-init collect-logs' as a subcommand."""
+    collect_logs(args.tarfile, args.userdata)
+
+
+def main():
+    """Tool to collect and tar all cloud-init related logs."""
+    parser = get_parser()
+    handle_collect_logs_args('collect-logs', parser.parse_args())
+    return 0
+
+
+if __name__ == '__main__':
+    main()
+
+# vi: ts=4 expandtab
diff --git a/cloudinit/cmd/devel/tests/__init__.py b/cloudinit/cmd/devel/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/cloudinit/cmd/devel/tests/__init__.py
diff --git a/cloudinit/cmd/devel/tests/test_logs.py b/cloudinit/cmd/devel/tests/test_logs.py
new file mode 100644
index 00000000..dc4947cc
--- /dev/null
+++ b/cloudinit/cmd/devel/tests/test_logs.py
@@ -0,0 +1,120 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+from cloudinit.cmd.devel import logs
+from cloudinit.util import ensure_dir, load_file, subp, write_file
+from cloudinit.tests.helpers import FilesystemMockingTestCase, wrap_and_call
+from datetime import datetime
+import os
+
+
+class TestCollectLogs(FilesystemMockingTestCase):
+
+    def setUp(self):
+        super(TestCollectLogs, self).setUp()
+        self.new_root = self.tmp_dir()
+        self.run_dir = self.tmp_path('run', self.new_root)
+
+    def test_collect_logs_creates_tarfile(self):
+        """collect-logs creates a tarfile with all related cloud-init info."""
+        log1 = self.tmp_path('cloud-init.log', self.new_root)
+        write_file(log1, 'cloud-init-log')
+        log2 = self.tmp_path('cloud-init-output.log', self.new_root)
+        write_file(log2, 'cloud-init-output-log')
+        ensure_dir(self.run_dir)
+        write_file(self.tmp_path('results.json', self.run_dir), 'results')
+        output_tarfile = self.tmp_path('logs.tgz')
+
+        date = datetime.utcnow().date().strftime('%Y-%m-%d')
+        date_logdir = 'cloud-init-logs-{0}'.format(date)
+
+        expected_subp = {
+            ('dpkg-query', '--show', "-f=${Version}\n", 'cloud-init'):
+                '0.7fake\n',
+            ('dmesg',): 'dmesg-out\n',
+            ('journalctl', '-o', 'short-precise'): 'journal-out\n',
+            ('tar', 'czvf', output_tarfile, date_logdir): ''
+        }
+
+        def fake_subp(cmd):
+            cmd_tuple = tuple(cmd)
+            if cmd_tuple not in expected_subp:
+                raise AssertionError(
+                    'Unexpected command provided to subp: {0}'.format(cmd))
+            if cmd == ['tar', 'czvf', output_tarfile, date_logdir]:
+                subp(cmd)  # Pass through tar cmd so we can check output
+            return expected_subp[cmd_tuple], ''
+
+        wrap_and_call(
+            'cloudinit.cmd.devel.logs',
+            {'subp': {'side_effect': fake_subp},
+             'CLOUDINIT_LOGS': {'new': [log1, log2]},
+             'CLOUDINIT_RUN_DIR': {'new': self.run_dir}},
+            logs.collect_logs, output_tarfile, include_userdata=False)
+        # unpack the tarfile and check file contents
+        subp(['tar', 'zxvf', output_tarfile, '-C', self.new_root])
+        out_logdir = self.tmp_path(date_logdir, self.new_root)
+        self.assertEqual(
+            '0.7fake\n',
+            load_file(os.path.join(out_logdir, 'version')))
+        self.assertEqual(
+            'cloud-init-log',
+            load_file(os.path.join(out_logdir, 'cloud-init.log')))
+        self.assertEqual(
+            'cloud-init-output-log',
+            load_file(os.path.join(out_logdir, 'cloud-init-output.log')))
+        self.assertEqual(
+            'dmesg-out\n',
+            load_file(os.path.join(out_logdir, 'dmesg.txt')))
+        self.assertEqual(
+            'journal-out\n',
+            load_file(os.path.join(out_logdir, 'journal.txt')))
+        self.assertEqual(
+            'results',
+            load_file(
+                os.path.join(out_logdir, 'run', 'cloud-init', 'results.json')))
+
+    def test_collect_logs_includes_optional_userdata(self):
+        """collect-logs includes user-data when --include-userdata is set."""
+        log1 = self.tmp_path('cloud-init.log', self.new_root)
+        write_file(log1, 'cloud-init-log')
+        log2 = self.tmp_path('cloud-init-output.log', self.new_root)
+        write_file(log2, 'cloud-init-output-log')
+        userdata = self.tmp_path('user-data.txt', self.new_root)
+        write_file(userdata, 'user-data')
+        ensure_dir(self.run_dir)
+        write_file(self.tmp_path('results.json', self.run_dir), 'results')
+        output_tarfile = self.tmp_path('logs.tgz')
+
+        date = datetime.utcnow().date().strftime('%Y-%m-%d')
+        date_logdir = 'cloud-init-logs-{0}'.format(date)
+
+        expected_subp = {
+            ('dpkg-query', '--show', "-f=${Version}\n", 'cloud-init'):
+                '0.7fake',
+            ('dmesg',): 'dmesg-out\n',
+            ('journalctl', '-o', 'short-precise'): 'journal-out\n',
+            ('tar', 'czvf', output_tarfile, date_logdir): ''
+        }
+
+        def fake_subp(cmd):
+            cmd_tuple = tuple(cmd)
+            if cmd_tuple not in expected_subp:
+                raise AssertionError(
+                    'Unexpected command provided to subp: {0}'.format(cmd))
+            if cmd == ['tar', 'czvf', output_tarfile, date_logdir]:
+                subp(cmd)  # Pass through tar cmd so we can check output
+            return expected_subp[cmd_tuple], ''
+
+        wrap_and_call(
+            'cloudinit.cmd.devel.logs',
+            {'subp': {'side_effect': fake_subp},
+             'CLOUDINIT_LOGS': {'new': [log1, log2]},
+             'CLOUDINIT_RUN_DIR': {'new': self.run_dir},
+             'USER_DATA_FILE': {'new': userdata}},
+            logs.collect_logs, output_tarfile, include_userdata=True)
+        # unpack the tarfile and check file contents
+        subp(['tar', 'zxvf', output_tarfile, '-C', self.new_root])
+        out_logdir = self.tmp_path(date_logdir, self.new_root)
+        self.assertEqual(
+            'user-data',
+            load_file(os.path.join(out_logdir, 'user-data.txt')))
diff --git a/cloudinit/cmd/main.py b/cloudinit/cmd/main.py
index 68563e0c..6fb9d9e7 100644
--- a/cloudinit/cmd/main.py
+++ b/cloudinit/cmd/main.py
@@ -764,16 +764,25 @@ def main(sysv_args=None):
     parser_devel = subparsers.add_parser(
         'devel', help='Run development tools')
+    parser_collect_logs = subparsers.add_parser(
+        'collect-logs', help='Collect and tar all cloud-init debug info')
+
     if sysv_args:
         # Only load subparsers if subcommand is specified to avoid load cost
         if sysv_args[0] == 'analyze':
             from cloudinit.analyze.__main__ import get_parser as analyze_parser
             # Construct analyze subcommand parser
             analyze_parser(parser_analyze)
-        if sysv_args[0] == 'devel':
+        elif sysv_args[0] == 'devel':
             from cloudinit.cmd.devel.parser import get_parser as devel_parser
             # Construct devel subcommand parser
             devel_parser(parser_devel)
+        elif sysv_args[0] == 'collect-logs':
+            from cloudinit.cmd.devel.logs import (
+                get_parser as logs_parser, handle_collect_logs_args)
+            logs_parser(parser_collect_logs)
+            parser_collect_logs.set_defaults(
+                action=('collect-logs', handle_collect_logs_args))
     args = parser.parse_args(args=sysv_args)
diff --git a/packages/debian/rules.in b/packages/debian/rules.in
index b87a5e84..4aa907e3 100755
--- a/packages/debian/rules.in
+++ b/packages/debian/rules.in
@@ -10,6 +10,7 @@ PYVER ?= python${pyver}
 override_dh_install:
 	dh_install
 	install -d debian/cloud-init/etc/rsyslog.d
+	install -d debian/cloud-init/usr/share/apport/package-hooks
 	cp tools/21-cloudinit.conf debian/cloud-init/etc/rsyslog.d/21-cloudinit.conf
 	install -D ./tools/Z99-cloud-locale-test.sh debian/cloud-init/etc/profile.d/Z99-cloud-locale-test.sh
 	install -D ./tools/Z99-cloudinit-warnings.sh debian/cloud-init/etc/profile.d/Z99-cloudinit-warnings.sh
diff --git a/tests/unittests/test_cli.py b/tests/unittests/test_cli.py
index 495bdc9f..258a9f08 100644
--- a/tests/unittests/test_cli.py
+++ b/tests/unittests/test_cli.py
@@ -72,18 +72,22 @@ class TestCLI(test_helpers.FilesystemMockingTestCase):
     def test_conditional_subcommands_from_entry_point_sys_argv(self):
         """Subcommands from entry-point are properly parsed from sys.argv."""
+        stdout = six.StringIO()
+        self.patchStdoutAndStderr(stdout=stdout)
+
         expected_errors = [
-            'usage: cloud-init analyze', 'usage: cloud-init devel']
-        conditional_subcommands = ['analyze', 'devel']
+            'usage: cloud-init analyze', 'usage: cloud-init collect-logs',
+            'usage: cloud-init devel']
+        conditional_subcommands = ['analyze', 'collect-logs', 'devel']
         # The cloud-init entrypoint calls main without passing sys_argv
         for subcommand in conditional_subcommands:
-            with mock.patch('sys.argv', ['cloud-init', subcommand]):
+            with mock.patch('sys.argv', ['cloud-init', subcommand, '-h']):
                 try:
                     cli.main()
                 except SystemExit as e:
-                    self.assertEqual(2, e.code)  # exit 2 on proper usage docs
+                    self.assertEqual(0, e.code)  # exit 0 on proper -h usage
         for error_message in expected_errors:
-            self.assertIn(error_message, self.stderr.getvalue())
+            self.assertIn(error_message, stdout.getvalue())
     def test_analyze_subcommand_parser(self):
         """The subcommand cloud-init analyze calls the correct subparser."""
@@ -94,6 +98,14 @@ class TestCLI(test_helpers.FilesystemMockingTestCase):
         for subcommand in expected_subcommands:
             self.assertIn(subcommand, error)
+    def test_collect_logs_subcommand_parser(self):
+        """The subcommand cloud-init collect-logs calls the subparser."""
+        # Provide -h param to collect-logs to avoid having to mock behavior.
+        stdout = six.StringIO()
+        self.patchStdoutAndStderr(stdout=stdout)
+        self._call_main(['cloud-init', 'collect-logs', '-h'])
+        self.assertIn('usage: cloud-init collect-log', stdout.getvalue())
+
     def test_devel_subcommand_parser(self):
         """The subcommand cloud-init devel calls the correct subparser."""
         self._call_main(['cloud-init', 'devel'])