author     Tausif Rahman <tausif.rahman@mongodb.com>          2022-11-28 17:15:48 +0000
committer  Evergreen Agent <no-reply@evergreen.mongodb.com>   2022-11-29 18:58:50 +0000
commit     e7b8e61a29c8152fe916b8ce7339f7767ac538e8 (patch)
tree       6ac38363118a6c0f76059dcf3ee06e96cf450943
parent     8045c4a7772ab588679cdbf604a2875c27d97251 (diff)
download   mongo-e7b8e61a29c8152fe916b8ce7339f7767ac538e8.tar.gz
SERVER-71506 Refactor tooling metrics
(cherry picked from commit b06927da238e4565c3088377a8d4fed39d4d8f71)
-rw-r--r--  SConstruct                                                            21
-rw-r--r--  buildscripts/metrics/metrics_datatypes.py                            147
-rw-r--r--  buildscripts/metrics/resmoke_tooling_metrics.py                       19
-rw-r--r--  buildscripts/metrics/scons_tooling_metrics.py                         71
-rw-r--r--  buildscripts/metrics/tooling_exit_hook.py                             36
-rw-r--r--  buildscripts/metrics/tooling_metrics_utils.py                         63
-rw-r--r--  buildscripts/resmokelib/cli.py                                        13
-rw-r--r--  buildscripts/tests/tooling_metrics/test_metrics_datatypes.py          93
-rw-r--r--  buildscripts/tests/tooling_metrics/test_resmoke_tooling_metrics.py    60
-rw-r--r--  buildscripts/tests/tooling_metrics/test_scons_tooling_metrics.py      74
-rw-r--r--  buildscripts/tests/tooling_metrics/test_tooling_metrics_utils.py      72
-rwxr-xr-x  evergreen/publish_metrics.py                                          93
12 files changed, 348 insertions, 414 deletions
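
The refactor replaces the per-tool collection modules with a single registration utility. For orientation, the end-to-end flow now looks roughly like the sketch below, assembled from the buildscripts/resmokelib/cli.py and tooling_metrics_utils.py hunks that follow; it is illustrative only and not part of the patch itself.

from datetime import datetime

from buildscripts.metrics.metrics_datatypes import ResmokeToolingMetrics
from buildscripts.metrics.tooling_exit_hook import initialize_exit_hook
from buildscripts.metrics.tooling_metrics_utils import register_metrics_collection_atexit

# Register collection up front; _save_metrics only runs at interpreter exit,
# and only if _should_collect_metrics() was true at registration time.
register_metrics_collection_atexit(
    ResmokeToolingMetrics.generate_metrics,
    {
        "utc_starttime": datetime.utcnow(),
        "exit_hook": initialize_exit_hook(),
    },
)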
diff --git a/SConstruct b/SConstruct
index 83d42997c8e..456cac408ce 100644
--- a/SConstruct
+++ b/SConstruct
@@ -22,6 +22,10 @@ from pkg_resources import parse_version
import SCons
import SCons.Script
+from buildscripts.metrics.metrics_datatypes import SConsToolingMetrics
+from buildscripts.metrics.tooling_exit_hook import initialize_exit_hook
+from buildscripts.metrics.tooling_metrics_utils import register_metrics_collection_atexit
+from site_scons.mongo import build_profiles
# This must be first, even before EnsureSConsVersion, if
# we are to avoid bulk loading all tools in the DefaultEnvironment.
@@ -52,8 +56,6 @@ SCons.Node.FS.File.release_target_info = release_target_info_noop
from buildscripts import utils
from buildscripts import moduleconfig
-from buildscripts.metrics.scons_tooling_metrics import setup_scons_metrics_collection_atexit
-
import psutil
scons_invocation = '{} {}'.format(sys.executable, ' '.join(sys.argv))
@@ -1551,10 +1553,17 @@ env = Environment(variables=env_vars, **envDict)
del envDict
env.AddMethod(lambda env, name, **kwargs: add_option(name, **kwargs), 'AddOption')
-# Setup atexit method to store tooling metrics
-# The placement of this is intentional. We should only register this function atexit after
-# env, env_vars and the parser have been properly initialized.
-setup_scons_metrics_collection_atexit(utc_starttime, env_vars, env, _parser, sys.argv)
+# The placement of this is intentional. Here we setup an atexit method to store tooling metrics.
+# We should only register this function after env, env_vars and the parser have been properly initialized.
+register_metrics_collection_atexit(
+ SConsToolingMetrics.generate_metrics, {
+ "utc_starttime": datetime.utcnow(),
+ "env_vars": env_vars,
+ "env": env,
+ "parser": _parser,
+ "args": sys.argv,
+ "exit_hook": initialize_exit_hook(),
+ })
if get_option('build-metrics'):
env['BUILD_METRICS_ARTIFACTS_DIR'] = '$BUILD_ROOT/$VARIANT_DIR'
diff --git a/buildscripts/metrics/metrics_datatypes.py b/buildscripts/metrics/metrics_datatypes.py
index 146afd81d38..898da153bd7 100644
--- a/buildscripts/metrics/metrics_datatypes.py
+++ b/buildscripts/metrics/metrics_datatypes.py
@@ -5,12 +5,13 @@ import multiprocessing
import os
import socket
import sys
-import traceback
from typing import Any, Dict, List, Optional
import distro
import git
from pydantic import BaseModel
+from buildscripts.metrics.tooling_exit_hook import _ExitHook
+
# pylint: disable=bare-except
SCONS_ENV_FILE = "scons_env.env"
@@ -20,6 +21,12 @@ SCONS_SECTION_HEADER = "SCONS_ENV"
class BaseMetrics(BaseModel):
"""Base class for an metrics object."""
+ @classmethod
+ @abstractmethod
+ def generate_metrics(cls, **kwargs):
+ """Generate metrics."""
+ raise NotImplementedError
+
@abstractmethod
def is_malformed(self) -> bool:
"""Confirm whether this instance has all expected fields."""
@@ -35,14 +42,14 @@ class BuildInfo(BaseMetrics):
artifact_dir: Optional[str]
@classmethod
- def get_scons_build_info(
+ def generate_metrics(
cls,
utc_starttime: datetime,
env_vars: "SCons.Variables.Variables",
env: "SCons.Script.SConscript.SConsEnvironment",
parser: "SCons.Script.SConsOptions.SConsOptionParser",
args: List[str],
- ):
+ ): # pylint: disable=arguments-differ
"""Get SCons build info to the best of our ability."""
artifact_dir = cls._get_scons_artifact_dir(env)
return cls(
@@ -59,19 +66,22 @@ class BuildInfo(BaseMetrics):
) -> Optional[Dict[str, Any]]:
"""Get the environment variables options that can be set by users."""
+ artifact_dir = BuildInfo._get_scons_artifact_dir(env)
+ artifact_dir = artifact_dir if artifact_dir else '.'
+ scons_env_filepath = f'{artifact_dir}/{SCONS_ENV_FILE}'
try:
# Use SCons built-in method to save environment variables to a file
- env_vars.Save(SCONS_ENV_FILE, env)
+ env_vars.Save(scons_env_filepath, env)
# Add a section header to the file so we can easily parse with ConfigParser
- with open(SCONS_ENV_FILE, 'r') as original:
+ with open(scons_env_filepath, 'r') as original:
data = original.read()
- with open(SCONS_ENV_FILE, 'w') as modified:
+ with open(scons_env_filepath, 'w') as modified:
modified.write(f"[{SCONS_SECTION_HEADER}]\n" + data)
# Parse file using config parser
config = configparser.ConfigParser()
- config.read(SCONS_ENV_FILE)
+ config.read(scons_env_filepath)
str_dict = dict(config[SCONS_SECTION_HEADER])
return {key: eval(val) for key, val in str_dict.items()} # pylint: disable=eval-used
except:
@@ -118,52 +128,27 @@ class BuildInfo(BaseMetrics):
return None in [self.artifact_dir, self.env, self.options, self.build_artifacts]
-class ExitInfo(BaseMetrics):
- """Class to store tooling exit information."""
-
- exit_code: Optional[int]
- exception: Optional[str]
- stacktrace: Optional[str]
-
- @classmethod
- def get_resmoke_exit_info(cls):
- """Get the current exit info."""
- exc = sys.exc_info()[1]
- return cls(
- exit_code=0 if not exc else exc.code if exc.__class__ == SystemExit else 1,
- exception=exc.__class__.__name__ if exc else None,
- stacktrace=traceback.format_exc() if exc else None,
- )
-
- @classmethod
- def get_scons_exit_info(cls, exit_code):
- """Get the current exit info using the given exit code."""
- return cls(
- exit_code=exit_code if isinstance(exit_code, int) else None,
- exception=None,
- stacktrace=None,
- )
-
- def is_malformed(self):
- """Return True if this object is missing an exit code."""
- return self.exit_code is None
-
-
class HostInfo(BaseMetrics):
"""Class to store host information."""
+ ip_address: Optional[str]
host_os: str
num_cores: int
memory: Optional[float]
@classmethod
- def get_host_info(cls):
+ def generate_metrics(cls): # pylint: disable=arguments-differ
"""Get the host info to the best of our ability."""
try:
+ ip_address = socket.gethostbyname(socket.gethostname())
+ except:
+ ip_address = None
+ try:
memory = cls._get_memory()
except:
memory = None
return cls(
+ ip_address=ip_address,
host_os=distro.name(pretty=True),
num_cores=multiprocessing.cpu_count(),
memory=memory,
@@ -174,9 +159,9 @@ class HostInfo(BaseMetrics):
"""Get total memory of the host system."""
return os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024.**3)
- def is_malformed(self):
+ def is_malformed(self) -> bool:
"""Confirm whether this instance has all expected fields."""
- return self.memory is None
+ return None in [self.memory, self.ip_address]
class GitInfo(BaseMetrics):
@@ -188,7 +173,7 @@ class GitInfo(BaseMetrics):
repo_name: Optional[str]
@classmethod
- def get_git_info(cls, filepath: str):
+ def generate_metrics(cls, filepath: str): # pylint: disable=arguments-differ
"""Get the git info for a repo to the best of our ability."""
try:
commit_hash = git.Repo(filepath).head.commit.hexsha
@@ -212,7 +197,7 @@ class GitInfo(BaseMetrics):
repo_name=repo_name,
)
- def is_malformed(self):
+ def is_malformed(self) -> bool:
"""Confirm whether this instance has all expected fields."""
return None in [self.commit_hash, self.branch_name, self.repo_name]
@@ -225,7 +210,7 @@ def _get_modules_git_info():
module_git_info = []
try:
module_git_info = [
- GitInfo.get_git_info(os.path.join(MODULES_FILEPATH, module))
+ GitInfo.generate_metrics(os.path.join(MODULES_FILEPATH, module))
for module in os.listdir(MODULES_FILEPATH)
if os.path.isdir(os.path.join(MODULES_FILEPATH, module))
]
@@ -234,73 +219,79 @@ def _get_modules_git_info():
return module_git_info
-class ToolingMetrics(BaseMetrics):
- """Class to store tooling metrics."""
+class ResmokeToolingMetrics(BaseMetrics):
+ """Class to store resmoke tooling metrics."""
source: str
utc_starttime: datetime
utc_endtime: datetime
host_info: HostInfo
git_info: GitInfo
- exit_info: ExitInfo
- build_info: Optional[BuildInfo]
+ exit_code: Optional[int]
command: List[str]
module_info: List[GitInfo]
- ip_address: Optional[str]
@classmethod
- def get_resmoke_metrics(
+ def generate_metrics(
cls,
utc_starttime: datetime,
- ):
+ exit_hook: _ExitHook,
+ ): # pylint: disable=arguments-differ
"""Get resmoke metrics to the best of our ability."""
- try:
- ip_address = socket.gethostbyname(socket.gethostname())
- except:
- ip_address = None
return cls(
source='resmoke',
utc_starttime=utc_starttime,
utc_endtime=datetime.utcnow(),
- host_info=HostInfo.get_host_info(),
- git_info=GitInfo.get_git_info('.'),
- exit_info=ExitInfo.get_resmoke_exit_info(),
- build_info=None,
- module_info=_get_modules_git_info(),
+ host_info=HostInfo.generate_metrics(),
+ git_info=GitInfo.generate_metrics('.'),
+ exit_code=exit_hook.exit_code if isinstance(exit_hook.exit_code, int) else None,
command=sys.argv,
- ip_address=ip_address,
+ module_info=_get_modules_git_info(),
)
+ def is_malformed(self) -> bool:
+ """Confirm whether this instance has all expected fields."""
+ sub_metrics = self.module_info + [self.git_info] + [self.host_info]
+ return self.exit_code is None or any(metrics.is_malformed() for metrics in sub_metrics)
+
+
+class SConsToolingMetrics(BaseMetrics):
+ """Class to store scons tooling metrics."""
+
+ source: str
+ utc_starttime: datetime
+ utc_endtime: datetime
+ host_info: HostInfo
+ git_info: GitInfo
+ exit_code: Optional[int]
+ build_info: BuildInfo
+ command: List[str]
+ module_info: List[GitInfo]
+
@classmethod
- def get_scons_metrics(
+ def generate_metrics(
cls,
utc_starttime: datetime,
env_vars: "SCons.Variables.Variables",
env: "SCons.Script.SConscript.SConsEnvironment",
parser: "SCons.Script.SConsOptions.SConsOptionParser",
args: List[str],
- exit_code: int,
- ):
+ exit_hook: _ExitHook,
+ ): # pylint: disable=arguments-differ
"""Get scons metrics to the best of our ability."""
- try:
- ip_address = socket.gethostbyname(socket.gethostname())
- except:
- ip_address = None
return cls(
source='scons',
utc_starttime=utc_starttime,
utc_endtime=datetime.utcnow(),
- host_info=HostInfo.get_host_info(),
- git_info=GitInfo.get_git_info('.'),
- exit_info=ExitInfo.get_scons_exit_info(exit_code),
- build_info=BuildInfo.get_scons_build_info(utc_starttime, env_vars, env, parser, args),
- module_info=_get_modules_git_info(),
+ host_info=HostInfo.generate_metrics(),
+ git_info=GitInfo.generate_metrics('.'),
+ build_info=BuildInfo.generate_metrics(utc_starttime, env_vars, env, parser, args),
+ exit_code=exit_hook.exit_code if isinstance(exit_hook.exit_code, int) else None,
command=sys.argv,
- ip_address=ip_address,
+ module_info=_get_modules_git_info(),
)
- def is_malformed(self):
+ def is_malformed(self) -> bool:
"""Confirm whether this instance has all expected fields."""
- sub_metrics = [self.build_info] if self.source == 'scons' else []
- sub_metrics += self.module_info + [self.git_info] + [self.host_info] + [self.exit_info]
- return self.ip_address is None or any(metrics.is_malformed() for metrics in sub_metrics)
+ sub_metrics = self.module_info + [self.git_info] + [self.host_info] + [self.build_info]
+ return self.exit_code is None or any(metrics.is_malformed() for metrics in sub_metrics)
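
Every datatype above now implements the same BaseMetrics contract: a generate_metrics classmethod that builds the object defensively and an is_malformed check that evergreen/publish_metrics.py uses for auditing. A hypothetical new datatype would follow the same shape; ExampleInfo below is illustrative only and not part of the patch.

from typing import Optional

from buildscripts.metrics.metrics_datatypes import BaseMetrics


class ExampleInfo(BaseMetrics):
    """Illustrative metrics datatype following the refactored contract."""

    value: Optional[int]

    @classmethod
    def generate_metrics(cls, raw: str):  # pylint: disable=arguments-differ
        """Build the metrics object, degrading to None on failure."""
        try:
            value = int(raw)
        except Exception:  # pylint: disable=broad-except
            value = None
        return cls(value=value)

    def is_malformed(self) -> bool:
        """Confirm whether this instance has all expected fields."""
        return self.value is None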
diff --git a/buildscripts/metrics/resmoke_tooling_metrics.py b/buildscripts/metrics/resmoke_tooling_metrics.py
deleted file mode 100644
index 71c7b51bb49..00000000000
--- a/buildscripts/metrics/resmoke_tooling_metrics.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from datetime import datetime
-import logging
-
-from buildscripts.metrics.metrics_datatypes import ToolingMetrics
-from buildscripts.metrics.tooling_metrics_utils import save_tooling_metrics, should_collect_metrics
-
-logger = logging.getLogger('resmoke_tooling_metrics')
-
-
-def save_resmoke_tooling_metrics(utc_starttime: datetime):
- try:
- if not should_collect_metrics():
- return
- tooling_metrics = ToolingMetrics.get_resmoke_metrics(utc_starttime)
- save_tooling_metrics(tooling_metrics)
- except Exception as exc: # pylint: disable=broad-except
- logger.warning(
- "%s\nResmoke Metrics Collection Failed -- this is a non-issue.\nIf this message persists, feel free to reach out to #server-development-platform",
- exc)
diff --git a/buildscripts/metrics/scons_tooling_metrics.py b/buildscripts/metrics/scons_tooling_metrics.py
deleted file mode 100644
index cdece084631..00000000000
--- a/buildscripts/metrics/scons_tooling_metrics.py
+++ /dev/null
@@ -1,71 +0,0 @@
-import atexit
-import datetime
-import logging
-import sys
-from typing import List
-
-from buildscripts.metrics.metrics_datatypes import ToolingMetrics
-from buildscripts.metrics.tooling_metrics_utils import save_tooling_metrics, should_collect_metrics
-
-logger = logging.getLogger('scons_tooling_metrics')
-
-
-class SConsExitHook(object):
- """Plumb all sys.exit through this object so that we can access the exit code in atexit."""
-
- def __init__(self):
- self.exit_code = None
- self._orig_exit = sys.exit
-
- def __del__(self):
- sys.exit = self._orig_exit
-
- def initialize(self):
- sys.exit = self.exit
-
- def exit(self, code=0):
- self.exit_code = code
- self._orig_exit(code)
-
-
-# This method should only be used when registered on atexit
-def _save_scons_tooling_metrics(
- utc_starttime: datetime,
- env_vars: "SCons.Variables.Variables",
- env: "SCons.Script.SConscript.SConsEnvironment",
- parser: "SCons.Script.SConsOptions.SConsOptionParser",
- args: List[str],
- exit_hook: SConsExitHook,
-):
- """Save SCons tooling metrics to atlas cluster."""
- try:
- if not should_collect_metrics():
- return
- tooling_metrics = ToolingMetrics.get_scons_metrics(utc_starttime, env_vars, env, parser,
- args, exit_hook.exit_code)
- save_tooling_metrics(tooling_metrics)
- except Exception as exc: # pylint: disable=broad-except
- logger.warning(
- "%sSCons Metrics Collection Failed -- this is a non-issue.\nIf this message persists, feel free to reach out to #server-development-platform",
- exc)
-
-
-def setup_scons_metrics_collection_atexit(
- utc_starttime: datetime,
- env_vars: "SCons.Variables.Variables",
- env: "SCons.Script.SConscript.SConsEnvironment",
- parser: "SCons.Script.SConsOptions.SConsOptionParser",
- args: List[str],
-) -> None:
- """Register an atexit method for scons metrics collection."""
- scons_exit_hook = SConsExitHook()
- scons_exit_hook.initialize()
- atexit.register(
- _save_scons_tooling_metrics,
- utc_starttime,
- env_vars,
- env,
- parser,
- args,
- scons_exit_hook,
- )
diff --git a/buildscripts/metrics/tooling_exit_hook.py b/buildscripts/metrics/tooling_exit_hook.py
new file mode 100644
index 00000000000..cdf2844519f
--- /dev/null
+++ b/buildscripts/metrics/tooling_exit_hook.py
@@ -0,0 +1,36 @@
+import sys
+
+# pylint: disable=invalid-name
+# pylint: disable=redefined-outer-name
+
+
+# DO NOT INITIALIZE DIRECTLY -- This is intended to be a singleton.
+class _ExitHook(object):
+ """Plumb all sys.exit through this object so that we can access the exit code in atexit."""
+
+ def __init__(self):
+ self.exit_code = 0
+ self._orig_exit = sys.exit
+ sys.exit = self.exit
+
+ def __del__(self):
+ sys.exit = self._orig_exit
+
+ def exit(self, code=0):
+ self.exit_code = code
+ self._orig_exit(code)
+
+
+SINGLETON_TOOLING_METRICS_EXIT_HOOK = None
+
+
+# Always use this method when initializing _ExitHook -- This guarantees you are using the singleton
+# initialize the exit hook as early as possible to ensure we capture the error.
+def initialize_exit_hook() -> None:
+ """Initialize the exit hook."""
+ try:
+ if not SINGLETON_TOOLING_METRICS_EXIT_HOOK:
+ SINGLETON_TOOLING_METRICS_EXIT_HOOK = _ExitHook()
+ except UnboundLocalError as _:
+ SINGLETON_TOOLING_METRICS_EXIT_HOOK = _ExitHook()
+ return SINGLETON_TOOLING_METRICS_EXIT_HOOK
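
The hook exists because atexit callbacks fire after sys.exit() has been called, at which point the exit code is otherwise gone; the wrapper records it so generate_metrics can read it later. A rough illustration of the mechanics, assumed usage rather than code from the patch:

import atexit

from buildscripts.metrics.tooling_exit_hook import initialize_exit_hook

exit_hook = initialize_exit_hook()  # installs the sys.exit wrapper once


def _report_exit_code():
    # Runs during interpreter shutdown; exit_hook.exit_code now holds whatever
    # was passed to sys.exit() (or the default of 0).
    print(f"tool exited with code {exit_hook.exit_code}")


atexit.register(_report_exit_code)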
diff --git a/buildscripts/metrics/tooling_metrics_utils.py b/buildscripts/metrics/tooling_metrics_utils.py
index 29e4e475bc9..f668e457909 100644
--- a/buildscripts/metrics/tooling_metrics_utils.py
+++ b/buildscripts/metrics/tooling_metrics_utils.py
@@ -1,19 +1,17 @@
+import atexit
import logging
import os
-from typing import Optional
-from git import Repo
+from typing import Any, Callable, Dict
import pymongo
-from buildscripts.metrics.metrics_datatypes import ToolingMetrics
-
-logger = logging.getLogger('tooling_metrics_utils')
+logger = logging.getLogger('tooling_metrics')
INTERNAL_TOOLING_METRICS_HOSTNAME = "mongodb+srv://dev-metrics-pl-0.kewhj.mongodb.net"
INTERNAL_TOOLING_METRICS_USERNAME = "internal_tooling_user"
INTERNAL_TOOLING_METRICS_PASSWORD = "internal_tooling_user"
-def _get_internal_tooling_metrics_client():
+def _get_internal_tooling_metrics_client() -> pymongo.MongoClient:
"""Retrieve client for internal MongoDB tooling metrics cluster."""
return pymongo.MongoClient(
host=INTERNAL_TOOLING_METRICS_HOSTNAME,
@@ -27,25 +25,16 @@ def _get_internal_tooling_metrics_client():
)
-EXPECTED_TOOLCHAIN_LOCATION = "/opt/mongodbtoolchain"
-
-
-def _toolchain_exists() -> bool:
- """Check if the internal MongoDB toolchain exists."""
- return os.path.exists(EXPECTED_TOOLCHAIN_LOCATION)
-
-
-def _git_user_exists() -> Optional[str]:
- """Check if a git user email exists."""
- try:
- return Repo('.').config_reader().get_value("user", "email", None)
- except Exception: # pylint: disable=broad-except
- return None
+MONGOD_INTENRAL_DISTRO_FILEPATH = '/etc/mongodb-distro-name'
def _is_virtual_workstation() -> bool:
"""Detect whether this is a MongoDB internal virtual workstation."""
- return _toolchain_exists() and _git_user_exists()
+ try:
+ with open(MONGOD_INTENRAL_DISTRO_FILEPATH, 'r') as file:
+ return file.read().strip() == 'ubuntu1804-workstation'
+ except Exception as _: # pylint: disable=broad-except
+ return False
TOOLING_METRICS_OPT_OUT = "TOOLING_METRICS_OPT_OUT"
@@ -56,22 +45,32 @@ def _has_metrics_opt_out() -> bool:
return os.environ.get(TOOLING_METRICS_OPT_OUT, None) == '1'
-def should_collect_metrics() -> bool:
+def _should_collect_metrics() -> bool:
"""Determine whether to collect tooling metrics."""
return _is_virtual_workstation() and not _has_metrics_opt_out()
-def _save_metrics(metrics: ToolingMetrics) -> None:
- """Save tooling metrics data."""
- client = _get_internal_tooling_metrics_client()
- client.metrics.tooling_metrics.insert_one(metrics.dict())
-
-
-def save_tooling_metrics(tooling_metrics: ToolingMetrics) -> None:
- """Persist tooling metrics data to MongoDB Internal Atlas Cluster."""
+# DO NOT USE DIRECTLY -- This is only to be used when metrics collection is registered atexit
+def _save_metrics(
+ generate_metrics_function: Callable,
+ generate_metrics_args: Dict[str, Any],
+) -> None:
+ """Save metrics to the atlas cluster."""
try:
- _save_metrics(tooling_metrics)
+ client = _get_internal_tooling_metrics_client()
+ metrics = generate_metrics_function(**generate_metrics_args)
+ client.metrics.tooling_metrics.insert_one(metrics.dict())
except Exception as exc: # pylint: disable=broad-except
logger.warning(
- "\n%s\n\nUnexpected: Tooling metrics collection is not available -- this is a non-issue.\nIf this message persists, feel free to reach out to #server-development-platform",
+ "%s\n\nInternal Metrics Collection Failed -- this is a non-issue.\nIf this message persists, feel free to reach out to #server-dev-platform",
exc)
+
+
+# This is the only util that should be used externally
+def register_metrics_collection_atexit(
+ generate_metrics_function: Callable,
+ generate_metrics_args: Dict[str, Any],
+) -> None:
+ """Register metrics collection on atexit."""
+ if _should_collect_metrics():
+ atexit.register(_save_metrics, generate_metrics_function, generate_metrics_args)
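
Collection is gated entirely at registration time: the tool must be running on an internal virtual workstation (detected via /etc/mongodb-distro-name) and the user must not have opted out via TOOLING_METRICS_OPT_OUT. A minimal sketch of the opt-out path, mirroring the unit tests further down and assuming it runs inside the mongo repo; it is not part of the patch.

import os

import buildscripts.metrics.tooling_metrics_utils as tooling_metrics_utils

# Setting TOOLING_METRICS_OPT_OUT=1 before the tool starts makes
# register_metrics_collection_atexit() a no-op.
os.environ["TOOLING_METRICS_OPT_OUT"] = "1"
assert not tooling_metrics_utils._should_collect_metrics()  # pylint: disable=protected-access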
diff --git a/buildscripts/resmokelib/cli.py b/buildscripts/resmokelib/cli.py
index b84f07e0cf0..deaf57a3eb5 100644
--- a/buildscripts/resmokelib/cli.py
+++ b/buildscripts/resmokelib/cli.py
@@ -4,7 +4,9 @@ from datetime import datetime
import time
import os
import psutil
-from buildscripts.metrics.resmoke_tooling_metrics import save_resmoke_tooling_metrics
+from buildscripts.metrics.metrics_datatypes import ResmokeToolingMetrics
+from buildscripts.metrics.tooling_exit_hook import initialize_exit_hook
+from buildscripts.metrics.tooling_metrics_utils import register_metrics_collection_atexit
from buildscripts.resmokelib import parser
@@ -25,7 +27,8 @@ def main(argv):
"For example: resmoke.py run -h\n"
"Note: bisect and setup-multiversion subcommands have been moved to db-contrib-tool (https://github.com/10gen/db-contrib-tool#readme).\n"
)
- try:
- subcommand.execute()
- finally:
- save_resmoke_tooling_metrics(datetime.utcfromtimestamp(__start_time))
+ register_metrics_collection_atexit(ResmokeToolingMetrics.generate_metrics, {
+ "utc_starttime": datetime.utcfromtimestamp(__start_time),
+ "exit_hook": initialize_exit_hook()
+ })
+ subcommand.execute()
diff --git a/buildscripts/tests/tooling_metrics/test_metrics_datatypes.py b/buildscripts/tests/tooling_metrics/test_metrics_datatypes.py
index b584b77e647..3e181eb4971 100644
--- a/buildscripts/tests/tooling_metrics/test_metrics_datatypes.py
+++ b/buildscripts/tests/tooling_metrics/test_metrics_datatypes.py
@@ -1,5 +1,7 @@
"""Unit tests for metrics_datatypes.py."""
from datetime import datetime
+import os
+import sys
import unittest
from unittest.mock import patch
@@ -9,6 +11,12 @@ import buildscripts.metrics.metrics_datatypes as under_test
# pylint: disable=unused-argument
+# Metrics collection is not supported for Windows
+if os.name == "nt":
+ sys.exit()
+
+MOCK_EXIT_HOOK = MagicMock(exit_code=0)
+
@patch("buildscripts.metrics.metrics_datatypes.BuildInfo._get_scons_artifact_dir",
return_value='/test')
@@ -18,94 +26,89 @@ class TestBuildInfo(unittest.TestCase):
@patch("buildscripts.metrics.metrics_datatypes.BuildInfo._get_scons_options_dict",
return_value={'opt': 'opt'})
def test_build_info_valid(self, mock_env, mock_options, mock_artifact_dir):
- build_info = under_test.BuildInfo.get_scons_build_info(datetime.utcnow(), MagicMock(),
- MagicMock(), MagicMock(),
- MagicMock())
+ build_info = under_test.BuildInfo.generate_metrics(datetime.utcnow(), MagicMock(),
+ MagicMock(), MagicMock(), MagicMock())
assert not build_info.is_malformed()
def test_build_info_malformed(self, mock_artifact_dir):
- build_info = under_test.BuildInfo.get_scons_build_info(datetime.utcnow(), MagicMock(),
- MagicMock(), MagicMock(),
- MagicMock())
+ build_info = under_test.BuildInfo.generate_metrics(datetime.utcnow(), MagicMock(),
+ MagicMock(), MagicMock(), MagicMock())
assert build_info.is_malformed()
-class TestExitInfo(unittest.TestCase):
- @patch("sys.exc_info", return_value=(None, None, None))
- def test_resmoke_no_exc_info(self, mock_exc_info):
- exit_info = under_test.ExitInfo.get_resmoke_exit_info()
- assert not exit_info.is_malformed()
-
- @patch("sys.exc_info", return_value=(None, ValueError(), None))
- def test_resmoke_with_exc_info(self, mock_exc_info):
- exit_info = under_test.ExitInfo.get_resmoke_exit_info()
- assert not exit_info.is_malformed()
-
- def test_scons_exit_info_valid(self):
- exit_info = under_test.ExitInfo.get_scons_exit_info(0)
- assert not exit_info.is_malformed()
-
- def test_scons_exit_info_malformed(self):
- exit_info = under_test.ExitInfo.get_scons_exit_info('string')
- assert exit_info.is_malformed()
-
-
class TestHostInfo(unittest.TestCase):
@patch("buildscripts.metrics.metrics_datatypes.HostInfo._get_memory", side_effect=Exception())
def test_host_info_with_exc(self, mock_get_memory):
- host_info = under_test.HostInfo.get_host_info()
+ host_info = under_test.HostInfo.generate_metrics()
assert host_info.is_malformed()
# Mock this so that it passes when running the 'buildscripts_test' suite on Windows
@patch("buildscripts.metrics.metrics_datatypes.HostInfo._get_memory", return_value=30)
def test_host_info_no_exc(self, mock_get_memory):
- host_info = under_test.HostInfo.get_host_info()
+ host_info = under_test.HostInfo.generate_metrics()
assert not host_info.is_malformed()
class TestGitInfo(unittest.TestCase):
@patch("git.Repo", side_effect=Exception())
def test_git_info_with_exc(self, mock_repo):
- git_info = under_test.GitInfo.get_git_info('.')
+ git_info = under_test.GitInfo.generate_metrics('.')
assert git_info.is_malformed()
def test_git_info_no_exc(self):
- git_info = under_test.GitInfo.get_git_info('.')
+ git_info = under_test.GitInfo.generate_metrics('.')
assert not git_info.is_malformed()
@patch("git.refs.symbolic.SymbolicReference.is_detached", True)
def test_git_info_detached_head(self):
- git_info = under_test.GitInfo.get_git_info('.')
+ git_info = under_test.GitInfo.generate_metrics('.')
assert not git_info.is_malformed()
-# Mock this so that it passes when running the 'buildscripts_test' suite on Windows
-@patch("buildscripts.metrics.metrics_datatypes.HostInfo._get_memory", return_value=30)
-class TestToolingMetrics(unittest.TestCase):
+class TestResmokeToolingMetrics(unittest.TestCase):
@patch("socket.gethostname", side_effect=Exception())
- def test_resmoke_tooling_metrics_with_exc(self, mock_gethostname, mock_get_memory):
- tooling_metrics = under_test.ToolingMetrics.get_resmoke_metrics(datetime.utcnow())
+ def test_resmoke_tooling_metrics_valid(self, mock_gethostname):
+ tooling_metrics = under_test.ResmokeToolingMetrics.generate_metrics(
+ datetime.utcnow(),
+ MOCK_EXIT_HOOK,
+ )
assert tooling_metrics.is_malformed()
- def test_resmoke_tooling_metrics_no_exc(self, mock_get_memory):
- tooling_metrics = under_test.ToolingMetrics.get_resmoke_metrics(datetime.utcnow())
+ def test_resmoke_tooling_metrics_malformed(self):
+ tooling_metrics = under_test.ResmokeToolingMetrics.generate_metrics(
+ datetime.utcnow(),
+ MOCK_EXIT_HOOK,
+ )
assert not tooling_metrics.is_malformed()
+
+class TestSConsToolingMetrics(unittest.TestCase):
@patch("buildscripts.metrics.metrics_datatypes.BuildInfo._get_scons_artifact_dir",
return_value='/test')
@patch("buildscripts.metrics.metrics_datatypes.BuildInfo._get_scons_env_vars_dict",
return_value={'env': 'env'})
@patch("buildscripts.metrics.metrics_datatypes.BuildInfo._get_scons_options_dict",
return_value={'opt': 'opt'})
- def test_scons_tooling_metrics_valid(self, mock_options, mock_env, mock_artifact_dir,
- mock_get_memory):
+ def test_scons_tooling_metrics_valid(self, mock_options, mock_env, mock_artifact_dir):
parser = MagicMock()
parser.parse_args = MagicMock(return_value={"opt1": "val1"})
- tooling_metrics = under_test.ToolingMetrics.get_scons_metrics(
- datetime.utcnow(), {'env': 'env'}, {'opts': 'opts'}, parser, ['test1', 'test2'], 0)
+ tooling_metrics = under_test.SConsToolingMetrics.generate_metrics(
+ datetime.utcnow(),
+ {'env': 'env'},
+ {'opts': 'opts'},
+ parser,
+ ['test1', 'test2'],
+ MOCK_EXIT_HOOK,
+ )
assert not tooling_metrics.is_malformed()
- def test_scons_tooling_metrics_malformed(self, mock_get_memory):
- tooling_metrics = under_test.ToolingMetrics.get_scons_metrics(
- datetime.utcnow(), {'env': 'env'}, {'opts': 'opts'}, None, [], 0)
+ def test_scons_tooling_metrics_malformed(self):
+ tooling_metrics = under_test.SConsToolingMetrics.generate_metrics(
+ datetime.utcnow(),
+ {'env': 'env'},
+ {'opts': 'opts'},
+ None,
+ [],
+ MOCK_EXIT_HOOK,
+ )
assert tooling_metrics.is_malformed()
diff --git a/buildscripts/tests/tooling_metrics/test_resmoke_tooling_metrics.py b/buildscripts/tests/tooling_metrics/test_resmoke_tooling_metrics.py
index cc3b909381f..c3d7468e90a 100644
--- a/buildscripts/tests/tooling_metrics/test_resmoke_tooling_metrics.py
+++ b/buildscripts/tests/tooling_metrics/test_resmoke_tooling_metrics.py
@@ -3,11 +3,8 @@ import os
import sys
import unittest
from unittest.mock import patch
-import mongomock
-import pymongo
-import buildscripts.metrics.resmoke_tooling_metrics as under_test
-from buildscripts.resmoke import entrypoint as resmoke_entrypoint
+import buildscripts.resmoke as under_test
TEST_INTERNAL_TOOLING_METRICS_HOSTNAME = 'mongodb://testing:27017'
CURRENT_DATE_TIME = datetime(2022, 10, 4)
@@ -19,37 +16,32 @@ if os.name == "nt":
sys.exit()
-@patch("buildscripts.metrics.tooling_metrics_utils.INTERNAL_TOOLING_METRICS_HOSTNAME",
- TEST_INTERNAL_TOOLING_METRICS_HOSTNAME)
@patch("buildscripts.resmokelib.logging.flush._FLUSH_THREAD", None)
-class TestResmokeMetricsCollection(unittest.TestCase):
- @mongomock.patch(servers=((TEST_INTERNAL_TOOLING_METRICS_HOSTNAME), ))
- @patch("buildscripts.metrics.resmoke_tooling_metrics.should_collect_metrics", return_value=True)
+@patch("atexit.register")
+class TestResmokeAtExitMetricsCollection(unittest.TestCase):
+ @patch("sys.argv", ['buildscripts/resmoke.py', 'list-suites'])
+ @patch("buildscripts.metrics.tooling_metrics_utils._should_collect_metrics", return_value=True)
+ def test_resmoke_at_exit_metrics_collection(self, mock_should_collect_metrics,
+ mock_atexit_register):
+ under_test.entrypoint()
+ atexit_functions = [call[0][0].__name__ for call in mock_atexit_register.call_args_list]
+ assert "_save_metrics" in atexit_functions
+
+ @patch("sys.argv", ['buildscripts/resmoke.py', 'list-suites'])
+ @patch("buildscripts.metrics.tooling_metrics_utils._should_collect_metrics", return_value=False)
+ def test_no_resmoke_at_exit_metrics_collection(self, mock_should_collect_metrics,
+ mock_atexit_register):
+ under_test.entrypoint()
+ atexit_functions = [call[0][0].__name__ for call in mock_atexit_register.call_args_list]
+ assert "_save_metrics" not in atexit_functions
+
@patch("sys.argv", ['buildscripts/resmoke.py', 'run', '--suite', 'buildscripts_test'])
+ @patch("buildscripts.metrics.tooling_metrics_utils._should_collect_metrics", return_value=True)
@patch("buildscripts.resmokelib.testing.executor.TestSuiteExecutor._run_tests",
side_effect=Exception())
- def test_resmoke_metrics_collection_exc(self, mock_executor_run, mock_should_collect_metrics):
- client = pymongo.MongoClient(host=TEST_INTERNAL_TOOLING_METRICS_HOSTNAME)
- assert not client.metrics.tooling_metrics.find_one()
- with self.assertRaises(SystemExit):
- resmoke_entrypoint()
- assert client.metrics.tooling_metrics.find_one()
-
- @mongomock.patch(servers=((TEST_INTERNAL_TOOLING_METRICS_HOSTNAME), ))
- @patch("buildscripts.metrics.resmoke_tooling_metrics.should_collect_metrics", return_value=True)
- @patch("sys.argv", ['buildscripts/resmoke.py', 'list-suites'])
- def test_resmoke_metrics_collection(self, mock_should_collect_metrics):
- client = pymongo.MongoClient(host=TEST_INTERNAL_TOOLING_METRICS_HOSTNAME)
- assert not client.metrics.tooling_metrics.find_one()
- resmoke_entrypoint()
- assert client.metrics.tooling_metrics.find_one()
-
- @mongomock.patch(servers=((TEST_INTERNAL_TOOLING_METRICS_HOSTNAME), ))
- @patch("buildscripts.metrics.resmoke_tooling_metrics.should_collect_metrics",
- return_value=False)
- @patch("sys.argv", ['buildscripts/resmoke.py', 'list-suites'])
- def test_no_resmoke_metrics_collection(self, mock_should_collect_metrics):
- client = pymongo.MongoClient(host=TEST_INTERNAL_TOOLING_METRICS_HOSTNAME)
- assert not client.metrics.tooling_metrics.find_one()
- resmoke_entrypoint()
- assert not client.metrics.tooling_metrics.find_one()
+ def test_resmoke_at_exit_metrics_collection_exc(
+ self, mock_exc_method, mock_should_collect_metrics, mock_atexit_register):
+ with self.assertRaises(SystemExit) as _:
+ under_test.entrypoint()
+ atexit_functions = [call[0][0].__name__ for call in mock_atexit_register.call_args_list]
+ assert "_save_metrics" in atexit_functions
diff --git a/buildscripts/tests/tooling_metrics/test_scons_tooling_metrics.py b/buildscripts/tests/tooling_metrics/test_scons_tooling_metrics.py
index c50c89f85ba..ab23c951f2e 100644
--- a/buildscripts/tests/tooling_metrics/test_scons_tooling_metrics.py
+++ b/buildscripts/tests/tooling_metrics/test_scons_tooling_metrics.py
@@ -1,15 +1,8 @@
-from datetime import datetime
import os
import sys
import unittest
-from unittest.mock import MagicMock, patch
-import mongomock
-import pymongo
-import buildscripts.metrics.scons_tooling_metrics as under_test
-from buildscripts.scons import entrypoint as scons_entrypoint
-
-TEST_INTERNAL_TOOLING_METRICS_HOSTNAME = 'mongodb://testing:27017'
-CURRENT_DATE_TIME = datetime(2022, 10, 4)
+from unittest.mock import patch
+import buildscripts.scons as under_test
# pylint: disable=unused-argument
# pylint: disable=protected-access
@@ -23,52 +16,29 @@ if os.name == "nt":
'buildscripts/scons.py', "CC=/opt/mongodbtoolchain/v3/bin/gcc",
"CXX=/opt/mongodbtoolchain/v3/bin/g++", "NINJA_PREFIX=test_success", "--ninja"
])
-@patch("buildscripts.metrics.scons_tooling_metrics.should_collect_metrics", return_value=True)
@patch("atexit.register")
class TestSconsAtExitMetricsCollection(unittest.TestCase):
- def test_scons_at_exit_metrics_collection(self, mock_atexit_register,
- mock_should_collect_metrics):
- with self.assertRaises(SystemExit) as context:
- scons_entrypoint()
- assert context.exception.code == 0
+ @patch("buildscripts.metrics.tooling_metrics_utils._should_collect_metrics", return_value=True)
+ def test_scons_at_exit_metrics_collection(self, mock_should_collect_metrics,
+ mock_atexit_register):
+ with self.assertRaises(SystemExit) as _:
+ under_test.entrypoint()
atexit_functions = [call[0][0].__name__ for call in mock_atexit_register.call_args_list]
- assert "_save_scons_tooling_metrics" in atexit_functions
+ assert "_save_metrics" in atexit_functions
- @patch("buildscripts.moduleconfig.get_module_sconscripts", side_effect=Exception())
- def test_scons_at_exit_metrics_collection_exc(self, mock_method, mock_atexit_register,
- mock_should_collect_metrics):
- with self.assertRaises(SystemExit) as context:
- scons_entrypoint()
- assert context.exception.code == 2
+ @patch("buildscripts.metrics.tooling_metrics_utils._should_collect_metrics", return_value=False)
+ def test_no_scons_at_exit_metrics_collection(self, mock_should_collect_metrics,
+ mock_atexit_register):
+ with self.assertRaises(SystemExit) as _:
+ under_test.entrypoint()
atexit_functions = [call[0][0].__name__ for call in mock_atexit_register.call_args_list]
- assert "_save_scons_tooling_metrics" in atexit_functions
-
-
-@patch("buildscripts.metrics.tooling_metrics_utils.INTERNAL_TOOLING_METRICS_HOSTNAME",
- TEST_INTERNAL_TOOLING_METRICS_HOSTNAME)
-class TestSconsMetricsCollection(unittest.TestCase):
- @mongomock.patch(servers=((TEST_INTERNAL_TOOLING_METRICS_HOSTNAME), ))
- @patch("buildscripts.metrics.scons_tooling_metrics.should_collect_metrics", return_value=True)
- def test_scons_metrics_collection_success(self, mock_should_collect_metrics):
- client = pymongo.MongoClient(host=TEST_INTERNAL_TOOLING_METRICS_HOSTNAME)
- assert not client.metrics.tooling_metrics.find_one()
- under_test._save_scons_tooling_metrics(CURRENT_DATE_TIME, None, None, None, None,
- MagicMock(exit_code=0))
- assert client.metrics.tooling_metrics.find_one()
+ assert "_save_metrics" not in atexit_functions
- @patch("buildscripts.metrics.scons_tooling_metrics.should_collect_metrics", return_value=True)
- @mongomock.patch(servers=((TEST_INTERNAL_TOOLING_METRICS_HOSTNAME), ))
- def test_scons_metrics_collection_fail(self, mock_should_collect_metrics):
- client = pymongo.MongoClient(host=TEST_INTERNAL_TOOLING_METRICS_HOSTNAME)
- assert not client.metrics.tooling_metrics.find_one()
- under_test._save_scons_tooling_metrics(None, None, None, None, None, None)
- assert not client.metrics.tooling_metrics.find_one()
-
- @patch("buildscripts.metrics.scons_tooling_metrics.should_collect_metrics", return_value=False)
- @mongomock.patch(servers=((TEST_INTERNAL_TOOLING_METRICS_HOSTNAME), ))
- def test_no_scons_metrics_collection(self, mock_should_collect_metrics):
- client = pymongo.MongoClient(host=TEST_INTERNAL_TOOLING_METRICS_HOSTNAME)
- assert not client.metrics.tooling_metrics.find_one()
- under_test._save_scons_tooling_metrics(CURRENT_DATE_TIME, None, None, None, None,
- MagicMock(exit_code=0))
- assert not client.metrics.tooling_metrics.find_one()
+ @patch("buildscripts.metrics.tooling_metrics_utils._should_collect_metrics", return_value=True)
+ @patch("buildscripts.moduleconfig.get_module_sconscripts", side_effect=Exception())
+ def test_scons_at_exit_metrics_collection_exc(
+ self, mock_exc_method, mock_should_collect_metrics, mock_atexit_register):
+ with self.assertRaises(SystemExit) as _:
+ under_test.entrypoint()
+ atexit_functions = [call[0][0].__name__ for call in mock_atexit_register.call_args_list]
+ assert "_save_metrics" in atexit_functions
diff --git a/buildscripts/tests/tooling_metrics/test_tooling_metrics_utils.py b/buildscripts/tests/tooling_metrics/test_tooling_metrics_utils.py
index b540a529d78..c705059194b 100644
--- a/buildscripts/tests/tooling_metrics/test_tooling_metrics_utils.py
+++ b/buildscripts/tests/tooling_metrics/test_tooling_metrics_utils.py
@@ -1,67 +1,75 @@
"""Unit tests for tooling_metrics.py."""
-import asyncio
from datetime import datetime
import os
import sys
import unittest
-from unittest.mock import patch
+from unittest.mock import mock_open, patch
+from mock import MagicMock
import mongomock
import pymongo
-from buildscripts.metrics.metrics_datatypes import ToolingMetrics
+from buildscripts.metrics.metrics_datatypes import ResmokeToolingMetrics, SConsToolingMetrics
import buildscripts.metrics.tooling_metrics_utils as under_test
# pylint: disable=unused-argument
# pylint: disable=protected-access
TEST_INTERNAL_TOOLING_METRICS_HOSTNAME = 'mongodb://testing:27017'
-CURRENT_DATE_TIME = datetime(2022, 10, 4)
+RESMOKE_METRICS_ARGS = {
+ "utc_starttime": datetime(2022, 10, 4),
+ "exit_hook": MagicMock(exit_code=0),
+}
# Metrics collection is not supported for Windows
if os.name == "nt":
sys.exit()
+@patch("atexit.register")
+class TestRegisterMetricsCollectionAtExit(unittest.TestCase):
+ @patch("buildscripts.metrics.tooling_metrics_utils._should_collect_metrics", return_value=True)
+ def test_register_metrics_collection(self, mock_should_collect_metrics, mock_atexit):
+ under_test.register_metrics_collection_atexit(ResmokeToolingMetrics.generate_metrics,
+ RESMOKE_METRICS_ARGS)
+ atexit_functions = [call[0][0].__name__ for call in mock_atexit.call_args_list]
+ assert "_save_metrics" in atexit_functions
+
+ @patch("buildscripts.metrics.tooling_metrics_utils._should_collect_metrics", return_value=False)
+ def test_no_register_metrics_collection(self, mock_should_collect_metrics, mock_atexit):
+ under_test.register_metrics_collection_atexit(ResmokeToolingMetrics.generate_metrics,
+ RESMOKE_METRICS_ARGS)
+ atexit_functions = [call[0][0].__name__ for call in mock_atexit.call_args_list]
+ assert "_save_metrics" not in atexit_functions
+
+
@patch("buildscripts.metrics.tooling_metrics_utils.INTERNAL_TOOLING_METRICS_HOSTNAME",
TEST_INTERNAL_TOOLING_METRICS_HOSTNAME)
class TestSaveToolingMetrics(unittest.TestCase):
@mongomock.patch(servers=((TEST_INTERNAL_TOOLING_METRICS_HOSTNAME), ))
- def test_on_virtual_workstation(self):
- under_test.save_tooling_metrics(ToolingMetrics.get_resmoke_metrics(CURRENT_DATE_TIME))
+ def test_save_resmoke_metrics(self):
+ under_test._save_metrics(ResmokeToolingMetrics.generate_metrics, RESMOKE_METRICS_ARGS)
client = pymongo.MongoClient(host=TEST_INTERNAL_TOOLING_METRICS_HOSTNAME)
assert client.metrics.tooling_metrics.find_one()
@mongomock.patch(servers=((TEST_INTERNAL_TOOLING_METRICS_HOSTNAME), ))
- @patch("buildscripts.metrics.tooling_metrics_utils._save_metrics",
- side_effect=pymongo.errors.WriteError(error="Error Information"))
- def test_exception_caught(self, mock_save_metrics):
- with self.assertLogs('tooling_metrics_utils') as cm:
- under_test.save_tooling_metrics(ToolingMetrics.get_resmoke_metrics(CURRENT_DATE_TIME))
+ @patch("buildscripts.metrics.tooling_metrics_utils._get_internal_tooling_metrics_client",
+ side_effect=pymongo.errors.ServerSelectionTimeoutError(message="Error Information"))
+ def test_save_metrics_with_exc(self, mock_save_metrics):
+ with self.assertLogs('tooling_metrics') as cm:
+ under_test._save_metrics(ResmokeToolingMetrics.generate_metrics, RESMOKE_METRICS_ARGS)
assert "Error Information" in cm.output[0]
- assert "Unexpected: Tooling metrics collection is not available" in cm.output[0]
+ assert "Internal Metrics Collection Failed" in cm.output[0]
client = pymongo.MongoClient(host=TEST_INTERNAL_TOOLING_METRICS_HOSTNAME)
assert not client.metrics.tooling_metrics.find_one()
class TestIsVirtualWorkstation(unittest.TestCase):
- @patch("buildscripts.metrics.tooling_metrics_utils._toolchain_exists", return_value=False)
- @patch("buildscripts.metrics.tooling_metrics_utils._git_user_exists", return_value=True)
- def test_no_toolchain_has_email(self, mock_git_user_exists, mock_toolchain_exists):
- assert not under_test._is_virtual_workstation()
-
- @patch("buildscripts.metrics.tooling_metrics_utils._toolchain_exists", return_value=True)
- @patch("buildscripts.metrics.tooling_metrics_utils._git_user_exists", return_value=True)
- def test_has_toolchain_has_email(self, mock_git_user_exists, mock_toolchain_exists):
- assert under_test._is_virtual_workstation()
-
- @patch("buildscripts.metrics.tooling_metrics_utils._toolchain_exists", return_value=True)
- @patch("buildscripts.metrics.tooling_metrics_utils._git_user_exists", return_value=False)
- def test_has_toolchain_no_email(self, mock_git_user_exists, mock_toolchain_exists):
- assert not under_test._is_virtual_workstation()
+ @patch("builtins.open", mock_open(read_data="ubuntu1804-workstation"))
+ def test_is_virtual_workstation(self):
+ assert under_test._is_virtual_workstation() is True
- @patch("buildscripts.metrics.tooling_metrics_utils._toolchain_exists", return_value=False)
- @patch("buildscripts.metrics.tooling_metrics_utils._git_user_exists", return_value=False)
- def test_no_toolchain_no_email(self, mock_git_user_exists, mock_toolchain_exists):
- assert not under_test._is_virtual_workstation()
+ @patch("builtins.open", mock_open(read_data="test"))
+ def test_is_not_virtual_workstation(self):
+ assert under_test._is_virtual_workstation() is False
class TestHasMetricsOptOut(unittest.TestCase):
@@ -78,9 +86,9 @@ class TestShouldCollectMetrics(unittest.TestCase):
@patch("buildscripts.metrics.tooling_metrics_utils._is_virtual_workstation", return_value=True)
@patch("buildscripts.metrics.tooling_metrics_utils._has_metrics_opt_out", return_value=False)
def test_should_collect_metrics(self, mock_opt_out, mock_is_virtual_env):
- assert under_test.should_collect_metrics()
+ assert under_test._should_collect_metrics()
@patch("buildscripts.metrics.tooling_metrics_utils._is_virtual_workstation", return_value=True)
@patch("buildscripts.metrics.tooling_metrics_utils._has_metrics_opt_out", return_value=True)
def test_no_collect_metrics_opt_out(self, mock_opt_out, mock_is_virtual_env):
- assert not under_test.should_collect_metrics()
+ assert not under_test._should_collect_metrics()
diff --git a/evergreen/publish_metrics.py b/evergreen/publish_metrics.py
index c5db53b0220..5790bca42e8 100755
--- a/evergreen/publish_metrics.py
+++ b/evergreen/publish_metrics.py
@@ -8,7 +8,7 @@ from pydantic import ValidationError
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-from buildscripts.metrics.metrics_datatypes import ToolingMetrics
+from buildscripts.metrics.metrics_datatypes import ResmokeToolingMetrics, SConsToolingMetrics
from buildscripts.metrics.tooling_metrics_utils import _get_internal_tooling_metrics_client
from evergreen.api import RetryingEvergreenApi
@@ -20,43 +20,56 @@ except Exception as exc:
print("Could not connect to Atlas cluster")
raise exc
-try:
- # Get metrics for the last week
- one_week_ago_datetime = datetime.datetime.utcnow() - datetime.timedelta(days=7)
- last_week_metrics = client.metrics.tooling_metrics.find(
- {"utc_starttime": {"$gt": one_week_ago_datetime}})
-
- malformed_metrics = []
- invalid_metrics = []
- total_docs = 0
-
- # Find any malformed/invalid documents in the cluster
- for doc in last_week_metrics:
- total_docs += 1
- try:
- metrics = ToolingMetrics(**doc)
- if metrics.is_malformed():
- malformed_metrics.append(doc['_id'])
- except ValidationError:
- invalid_metrics.append(doc['_id'])
-
- metrics_detailed = ("METRICS DETAILED:\n"
- f"malformed_metrics_last_week: {malformed_metrics}\n"
- f"invalid_metrics_last_week: {invalid_metrics}\n"
- f"total_docs_last_week: {total_docs}")
- metrics_overview = (
- "METRICS OVERVIEW:\n"
- f"malformed_metrics_last_week: {len(malformed_metrics)} ({len(malformed_metrics)/total_docs*100:.2f}%)\n"
- f"invalid_metrics_last_week: {len(invalid_metrics)} ({len(invalid_metrics)/total_docs*100:.2f}%)\n"
- f"total_docs_last_week: {total_docs}")
-
- print(metrics_overview)
- print(metrics_detailed)
-
- # Publish metrics to SDP Slack Channel
- evg_api = RetryingEvergreenApi.get_api(config_file="./.evergreen.yml")
- evg_api.send_slack_message(target="#server-sdp-bfs", msg=metrics_overview)
-except Exception as exc:
- print("Unexpected failure while getting metrics")
- raise exc
+def get_metrics_data(source, MetricsClass, lookback=7):
+ try:
+ # Get SCons metrics for the lookback period
+ lookback_datetime = datetime.datetime.utcnow() - datetime.timedelta(days=lookback)
+ last_week_metrics = client.metrics.tooling_metrics.find({
+ "source": source,
+ "utc_starttime": {"$gt": lookback_datetime},
+ })
+
+ malformed_metrics = []
+ invalid_metrics = []
+ total_docs = 0
+
+ # Find any malformed/invalid documents in the cluster
+ for doc in last_week_metrics:
+ total_docs += 1
+ try:
+ metrics = MetricsClass(**doc)
+ if metrics.is_malformed():
+ malformed_metrics.append(doc['_id'])
+ except ValidationError:
+ invalid_metrics.append(doc['_id'])
+
+ metrics_detailed = (f"METRICS DETAILED ({source}):\n"
+ f"malformed_metrics_last_week: {malformed_metrics}\n"
+ f"invalid_metrics_last_week: {invalid_metrics}\n"
+ f"total_docs_last_week: {total_docs}\n")
+ metrics_overview = (
+ f"METRICS OVERVIEW ({source}):\n"
+ f"malformed_metrics_last_week: {len(malformed_metrics)} ({len(malformed_metrics)/total_docs*100:.2f}%)\n"
+ f"invalid_metrics_last_week: {len(invalid_metrics)} ({len(invalid_metrics)/total_docs*100:.2f}%)\n"
+ f"total_docs_last_week: {total_docs}\n")
+
+ print(metrics_overview)
+ print(metrics_detailed)
+
+ return metrics_overview
+
+ except Exception as exc:
+ print("Unexpected failure while getting metrics")
+ raise exc
+
+
+scons_metrics_overview = get_metrics_data("scons", SConsToolingMetrics)
+resmoke_metrics_overview = get_metrics_data("resmoke", ResmokeToolingMetrics)
+
+# Publish metrics to SDP Slack Channel
+evg_api = RetryingEvergreenApi.get_api(config_file="./.evergreen.yml")
+evg_api.send_slack_message(
+ target="#server-sdp-bfs",
+ msg=scons_metrics_overview + resmoke_metrics_overview,
+)