Diffstat (limited to 'src/third_party/wiredtiger/test')
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/Makefile.am | 1
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/test_harness/connection_manager.cxx | 7
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/test_harness/connection_manager.h | 2
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/test_harness/core/component.h | 2
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/test_harness/core/configuration.cxx | 17
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/test_harness/core/configuration.h | 4
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/test_harness/runtime_monitor.cxx | 6
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/test_harness/runtime_monitor.h | 12
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/test_harness/test.cxx | 8
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/test_harness/test.h | 19
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/test_harness/timestamp_manager.h | 3
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/test_harness/util/scoped_connection.cxx | 46
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/test_harness/util/scoped_connection.h | 56
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/test_harness/util/scoped_types.h | 6
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/test_harness/workload/database_model.h | 2
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/test_harness/workload/database_operation.cxx | 2
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/test_harness/workload/database_operation.h | 2
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/test_harness/workload/thread_context.cxx | 6
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/test_harness/workload/workload_tracking.h | 3
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/test_harness/workload_generator.h | 4
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/tests/csuite_style_example_test.cxx | 10
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/tests/example_test.cxx | 2
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/tests/hs_cleanup.cxx | 9
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/tests/search_near_01.cxx | 6
-rw-r--r--  src/third_party/wiredtiger/test/cppsuite/tests/search_near_02.cxx | 13
-rw-r--r--  src/third_party/wiredtiger/test/csuite/wt7989_compact_checkpoint/main.c | 17
-rwxr-xr-x  src/third_party/wiredtiger/test/evergreen.yml | 370
-rw-r--r--  src/third_party/wiredtiger/test/suite/test_cursor17.py | 41
-rw-r--r--  src/third_party/wiredtiger/test/suite/test_hs28.py | 86
-rwxr-xr-x  src/third_party/wiredtiger/test/suite/test_rollback_to_stable01.py | 12
-rwxr-xr-x  src/third_party/wiredtiger/test/suite/test_rollback_to_stable10.py | 23
-rwxr-xr-x  src/third_party/wiredtiger/test/suite/test_rollback_to_stable14.py | 46
-rw-r--r--  src/third_party/wiredtiger/test/suite/test_rollback_to_stable21.py | 5
-rwxr-xr-x  src/third_party/wiredtiger/test/suite/test_rollback_to_stable28.py | 149
-rwxr-xr-x  src/third_party/wiredtiger/test/suite/wttest.py | 7
35 files changed, 831 insertions, 173 deletions
diff --git a/src/third_party/wiredtiger/test/cppsuite/Makefile.am b/src/third_party/wiredtiger/test/cppsuite/Makefile.am
index 5e896ce2e04..8221f882014 100644
--- a/src/third_party/wiredtiger/test/cppsuite/Makefile.am
+++ b/src/third_party/wiredtiger/test/cppsuite/Makefile.am
@@ -15,6 +15,7 @@ test_harness = test_harness/core/component.cxx \
test_harness/core/configuration.cxx \
test_harness/core/throttle.cxx \
test_harness/util/logger.cxx \
+ test_harness/util/scoped_connection.cxx \
test_harness/util/scoped_types.cxx \
test_harness/workload/database_model.cxx \
test_harness/workload/database_operation.cxx \
diff --git a/src/third_party/wiredtiger/test/cppsuite/test_harness/connection_manager.cxx b/src/third_party/wiredtiger/test/cppsuite/test_harness/connection_manager.cxx
index 8a8b75b7b8f..292cfbbfeab 100644
--- a/src/third_party/wiredtiger/test/cppsuite/test_harness/connection_manager.cxx
+++ b/src/third_party/wiredtiger/test/cppsuite/test_harness/connection_manager.cxx
@@ -29,6 +29,7 @@
#include "connection_manager.h"
#include "util/api_const.h"
#include "util/logger.h"
+#include "util/scoped_connection.h"
namespace test_harness {
connection_manager &
@@ -73,9 +74,8 @@ connection_manager::create_session()
testutil_die(EINVAL, "Connection is NULL");
}
- _conn_mutex.lock();
+ std::lock_guard<std::mutex> lg(_conn_mutex);
scoped_session session(_conn);
- _conn_mutex.unlock();
return (session);
}
@@ -92,9 +92,8 @@ connection_manager::get_connection()
void
connection_manager::set_timestamp(const std::string &config)
{
- _conn_mutex.lock();
+ std::lock_guard<std::mutex> lg(_conn_mutex);
testutil_check(_conn->set_timestamp(_conn, config.c_str()));
- _conn_mutex.unlock();
}
connection_manager::connection_manager() {}
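
The two hunks above replace manual lock()/unlock() pairs with std::lock_guard, so the mutex is released on every exit path, including when constructing the scoped_session or calling set_timestamp fails. A minimal standard-C++ sketch of the same RAII pattern, using placeholder types rather than the harness classes:

    #include <mutex>
    #include <stdexcept>

    struct resource {
        int id;
    };

    class manager {
    public:
        resource
        make_resource()
        {
            /* The guard releases _mutex when it goes out of scope, even if the body throws. */
            std::lock_guard<std::mutex> lg(_mutex);
            if (_broken)
                throw std::runtime_error("resource creation failed");
            return (resource{42});
        }

    private:
        std::mutex _mutex;
        bool _broken = false;
    };
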
diff --git a/src/third_party/wiredtiger/test/cppsuite/test_harness/connection_manager.h b/src/third_party/wiredtiger/test/cppsuite/test_harness/connection_manager.h
index a5d44903717..2fef81d2af8 100644
--- a/src/third_party/wiredtiger/test/cppsuite/test_harness/connection_manager.h
+++ b/src/third_party/wiredtiger/test/cppsuite/test_harness/connection_manager.h
@@ -61,7 +61,7 @@ class connection_manager {
connection_manager &operator=(connection_manager const &) = delete;
void close();
- void create(const std::string &config, const std::string &home = DEFAULT_DIR);
+ void create(const std::string &config, const std::string &home);
scoped_session create_session();
WT_CONNECTION *get_connection();
diff --git a/src/third_party/wiredtiger/test/cppsuite/test_harness/core/component.h b/src/third_party/wiredtiger/test/cppsuite/test_harness/core/component.h
index 398ca11e442..65a2215cdac 100644
--- a/src/third_party/wiredtiger/test/cppsuite/test_harness/core/component.h
+++ b/src/third_party/wiredtiger/test/cppsuite/test_harness/core/component.h
@@ -39,7 +39,7 @@ namespace test_harness {
*/
class component {
public:
- component(const std::string &name, configuration *config);
+ explicit component(const std::string &name, configuration *config);
virtual ~component();
/* Delete the copy constructor and the assignment operator. */
diff --git a/src/third_party/wiredtiger/test/cppsuite/test_harness/core/configuration.cxx b/src/third_party/wiredtiger/test/cppsuite/test_harness/core/configuration.cxx
index b6c53397955..3a785f395ad 100644
--- a/src/third_party/wiredtiger/test/cppsuite/test_harness/core/configuration.cxx
+++ b/src/third_party/wiredtiger/test/cppsuite/test_harness/core/configuration.cxx
@@ -258,6 +258,14 @@ configuration::split_config(const std::string &config)
in_subconfig = !parens.empty();
}
if (cut_config[i] == '=' && !in_subconfig) {
+ if (len == 0) {
+ testutil_die(EINVAL, "error parsing config: detected empty key");
+ }
+ if (expect_value) {
+ testutil_die(EINVAL,
+ "error parsing config: syntax error parsing value for key ['%s']: '%s'",
+ key.c_str(), cut_config.substr(start, len).c_str());
+ }
expect_value = true;
key = cut_config.substr(start, len);
start += len + 1;
@@ -265,6 +273,15 @@ configuration::split_config(const std::string &config)
continue;
}
if (cut_config[i] == ',' && !in_subconfig) {
+ if (len == 0) {
+ testutil_die(
+ EINVAL, "error parsing config: detected empty value for key:'%s'", key.c_str());
+ }
+ if (!expect_value) {
+ testutil_die(EINVAL,
+ "error parsing config: syntax error parsing key value pair: '%s'",
+ cut_config.substr(start, len).c_str());
+ }
expect_value = false;
if (start + len >= cut_config.size())
break;
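
The new checks make split_config fail loudly on malformed top-level pairs: an empty key before '=', a stray '=' while a value is already expected, and an empty or missing value before ','. A hedged, self-contained sketch of the same validation on a simplified grammar with no nested subconfigs (not the harness parser itself):

    #include <stdexcept>
    #include <string>
    #include <utility>
    #include <vector>

    /* Parse "k1=v1,k2=v2", rejecting empty keys, empty values and stray tokens. */
    std::vector<std::pair<std::string, std::string>>
    split_simple_config(const std::string &config)
    {
        std::vector<std::pair<std::string, std::string>> out;
        size_t start = 0;
        while (start < config.size()) {
            size_t end = config.find(',', start);
            if (end == std::string::npos)
                end = config.size();
            const std::string token = config.substr(start, end - start);
            const size_t eq = token.find('=');
            if (eq == std::string::npos)
                throw std::invalid_argument("syntax error parsing key value pair: '" + token + "'");
            if (eq == 0)
                throw std::invalid_argument("detected empty key");
            if (eq + 1 == token.size())
                throw std::invalid_argument("detected empty value for key: '" + token.substr(0, eq) + "'");
            if (token.find('=', eq + 1) != std::string::npos)
                throw std::invalid_argument("syntax error parsing value for key: '" + token.substr(0, eq) + "'");
            out.emplace_back(token.substr(0, eq), token.substr(eq + 1));
            start = end + 1;
        }
        return (out);
    }
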
diff --git a/src/third_party/wiredtiger/test/cppsuite/test_harness/core/configuration.h b/src/third_party/wiredtiger/test/cppsuite/test_harness/core/configuration.h
index f34465904ad..4c7aeb0ac3f 100644
--- a/src/third_party/wiredtiger/test/cppsuite/test_harness/core/configuration.h
+++ b/src/third_party/wiredtiger/test/cppsuite/test_harness/core/configuration.h
@@ -60,8 +60,8 @@ split_string(const std::string &str, const char delim)
class configuration {
public:
- configuration(const std::string &test_config_name, const std::string &config);
- configuration(const WT_CONFIG_ITEM &nested);
+ explicit configuration(const std::string &test_config_name, const std::string &config);
+ explicit configuration(const WT_CONFIG_ITEM &nested);
~configuration();
diff --git a/src/third_party/wiredtiger/test/cppsuite/test_harness/runtime_monitor.cxx b/src/third_party/wiredtiger/test/cppsuite/test_harness/runtime_monitor.cxx
index 0e454a4f4f0..d123ae9f7c8 100644
--- a/src/third_party/wiredtiger/test/cppsuite/test_harness/runtime_monitor.cxx
+++ b/src/third_party/wiredtiger/test/cppsuite/test_harness/runtime_monitor.cxx
@@ -80,7 +80,7 @@ runtime_statistic::enabled() const
/* cache_limit_statistic class implementation */
cache_limit_statistic::cache_limit_statistic(configuration *config) : runtime_statistic(config)
{
- limit = config->get_int(LIMIT);
+ _limit = config->get_int(LIMIT);
}
void
@@ -98,9 +98,9 @@ cache_limit_statistic::check(scoped_cursor &cursor)
* point conversion errors.
*/
use_percent = ((cache_bytes_image + cache_bytes_other + 0.0) / cache_bytes_max) * 100;
- if (use_percent > limit) {
+ if (use_percent > _limit) {
const std::string error_string =
- "runtime_monitor: Cache usage exceeded during test! Limit: " + std::to_string(limit) +
+ "runtime_monitor: Cache usage exceeded during test! Limit: " + std::to_string(_limit) +
" usage: " + std::to_string(use_percent);
testutil_die(-1, error_string.c_str());
} else
diff --git a/src/third_party/wiredtiger/test/cppsuite/test_harness/runtime_monitor.h b/src/third_party/wiredtiger/test/cppsuite/test_harness/runtime_monitor.h
index 0f63585290d..4ee0d1055ba 100644
--- a/src/third_party/wiredtiger/test/cppsuite/test_harness/runtime_monitor.h
+++ b/src/third_party/wiredtiger/test/cppsuite/test_harness/runtime_monitor.h
@@ -48,13 +48,11 @@ namespace test_harness {
class runtime_statistic {
public:
explicit runtime_statistic(configuration *config);
+ virtual ~runtime_statistic() = default;
/* Check that the given statistic is within bounds. */
virtual void check(scoped_cursor &cursor) = 0;
- /* Suppress warning about destructor being non-virtual. */
- virtual ~runtime_statistic() {}
-
bool enabled() const;
protected:
@@ -64,16 +62,17 @@ class runtime_statistic {
class cache_limit_statistic : public runtime_statistic {
public:
explicit cache_limit_statistic(configuration *config);
+ virtual ~cache_limit_statistic() = default;
void check(scoped_cursor &cursor) override final;
private:
- int64_t limit;
+ int64_t _limit;
};
class db_size_statistic : public runtime_statistic {
public:
- db_size_statistic(configuration *config, database &database);
+ explicit db_size_statistic(configuration *config, database &database);
virtual ~db_size_statistic() = default;
/* Don't need the stat cursor for this. */
@@ -90,7 +89,6 @@ class db_size_statistic : public runtime_statistic {
class postrun_statistic_check {
public:
explicit postrun_statistic_check(configuration *config);
- virtual ~postrun_statistic_check() = default;
void check(scoped_cursor &cursor) const;
@@ -119,7 +117,7 @@ class runtime_monitor : public component {
static void get_stat(scoped_cursor &, int, int64_t *);
public:
- runtime_monitor(configuration *config, database &database);
+ explicit runtime_monitor(configuration *config, database &database);
~runtime_monitor();
/* Delete the copy constructor and the assignment operator. */
diff --git a/src/third_party/wiredtiger/test/cppsuite/test_harness/test.cxx b/src/third_party/wiredtiger/test/cppsuite/test_harness/test.cxx
index 4ce0de2e381..8cdea3170df 100644
--- a/src/third_party/wiredtiger/test/cppsuite/test_harness/test.cxx
+++ b/src/third_party/wiredtiger/test/cppsuite/test_harness/test.cxx
@@ -119,8 +119,12 @@ test::run()
/* Add the user supplied wiredtiger open config. */
db_create_config += _args.wt_open_config;
- /* Set up the test environment. */
- connection_manager::instance().create(db_create_config);
+ /*
+ * Set up the test environment. A smart pointer is used here so that the connection can
+ * automatically be closed by the scoped_connection's destructor when the test finishes and the
+ * pointer goes out of scope.
+ */
+ _scoped_conn = std::make_shared<scoped_connection>(db_create_config);
/* Initiate the load stage of each component. */
for (const auto &it : _components)
diff --git a/src/third_party/wiredtiger/test/cppsuite/test_harness/test.h b/src/third_party/wiredtiger/test/cppsuite/test_harness/test.h
index 8c5e2d17434..ba899339bb0 100644
--- a/src/third_party/wiredtiger/test/cppsuite/test_harness/test.h
+++ b/src/third_party/wiredtiger/test/cppsuite/test_harness/test.h
@@ -39,6 +39,7 @@ extern "C" {
#include "checkpoint_manager.h"
#include "connection_manager.h"
#include "runtime_monitor.h"
+#include "util/scoped_connection.h"
#include "workload/database_operation.h"
#include "workload_generator.h"
@@ -59,8 +60,8 @@ class test_args {
*/
class test : public database_operation {
public:
- test(const test_args &args);
- ~test();
+ explicit test(const test_args &args);
+ virtual ~test();
/* Delete the copy constructor and the assignment operator. */
test(const test &) = delete;
@@ -92,18 +93,8 @@ class test : public database_operation {
timestamp_manager *_timestamp_manager = nullptr;
workload_generator *_workload_generator = nullptr;
workload_tracking *_workload_tracking = nullptr;
- /*
- * FIX-ME-Test-Framework: We can't put this code in the destructor of `test` since it will run
- * before the destructors of each of our members (meaning that sessions will get closed after
- * the connection gets closed). To work around this, we've added a member with a destructor that
- * closes the connection.
- */
- struct connection_closer {
- ~connection_closer()
- {
- connection_manager::instance().close();
- }
- } _connection_closer;
+
+ std::shared_ptr<scoped_connection> _scoped_conn;
database _database;
};
} // namespace test_harness
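
The removed FIX-ME workaround and its replacement both rely on the same language rule: a destructor body runs before the destructors of the object's members, and members are then destroyed in reverse declaration order. A small, illustrative sketch that makes the ordering visible (unrelated to the harness types):

    #include <iostream>

    /* Prints a message on destruction so the teardown order is visible. */
    struct tracer {
        explicit tracer(const char *name) : _name(name) {}
        ~tracer() { std::cout << _name << " destroyed\n"; }
        const char *_name;
    };

    struct harness {
        ~harness() { std::cout << "destructor body runs first\n"; }
        tracer _first{"first member"};
        tracer _second{"second member"};
    };

    int
    main()
    {
        harness h;
        return (0);
    }
    /*
     * Output on scope exit:
     *   destructor body runs first
     *   second member destroyed
     *   first member destroyed
     */

Closing the connection in the ~test() body would therefore happen before any member-held sessions are destroyed, which is why both the removed connection_closer and the new _scoped_conn member perform the close from a member destructor instead.
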
diff --git a/src/third_party/wiredtiger/test/cppsuite/test_harness/timestamp_manager.h b/src/third_party/wiredtiger/test/cppsuite/test_harness/timestamp_manager.h
index e510614d077..c10d10df5c0 100644
--- a/src/third_party/wiredtiger/test/cppsuite/test_harness/timestamp_manager.h
+++ b/src/third_party/wiredtiger/test/cppsuite/test_harness/timestamp_manager.h
@@ -50,7 +50,8 @@ class timestamp_manager : public component {
static const std::string decimal_to_hex(uint64_t value);
public:
- timestamp_manager(configuration *config);
+ explicit timestamp_manager(configuration *config);
+ virtual ~timestamp_manager() = default;
void load() override final;
void do_work() override final;
diff --git a/src/third_party/wiredtiger/test/cppsuite/test_harness/util/scoped_connection.cxx b/src/third_party/wiredtiger/test/cppsuite/test_harness/util/scoped_connection.cxx
new file mode 100644
index 00000000000..39a8fede916
--- /dev/null
+++ b/src/third_party/wiredtiger/test/cppsuite/test_harness/util/scoped_connection.cxx
@@ -0,0 +1,46 @@
+/*-
+ * Public Domain 2014-present MongoDB, Inc.
+ * Public Domain 2008-2014 WiredTiger, Inc.
+ *
+ * This is free and unencumbered software released into the public domain.
+ *
+ * Anyone is free to copy, modify, publish, use, compile, sell, or
+ * distribute this software, either in source code form or as a compiled
+ * binary, for any purpose, commercial or non-commercial, and by any
+ * means.
+ *
+ * In jurisdictions that recognize copyright laws, the author or authors
+ * of this software dedicate any and all copyright interest in the
+ * software to the public domain. We make this dedication for the benefit
+ * of the public at large and to the detriment of our heirs and
+ * successors. We intend this dedication to be an overt act of
+ * relinquishment in perpetuity of all present and future rights to this
+ * software under copyright law.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+ * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+#include <iostream>
+#include <utility>
+
+#include "../connection_manager.h"
+#include "scoped_connection.h"
+
+namespace test_harness {
+
+scoped_connection::scoped_connection(const std::string &db_conn_config, const std::string &home)
+{
+ connection_manager::instance().create(db_conn_config, home);
+}
+
+scoped_connection::~scoped_connection()
+{
+ connection_manager::instance().close();
+}
+
+} // namespace test_harness
diff --git a/src/third_party/wiredtiger/test/cppsuite/test_harness/util/scoped_connection.h b/src/third_party/wiredtiger/test/cppsuite/test_harness/util/scoped_connection.h
new file mode 100644
index 00000000000..6e999b6db54
--- /dev/null
+++ b/src/third_party/wiredtiger/test/cppsuite/test_harness/util/scoped_connection.h
@@ -0,0 +1,56 @@
+/*-
+ * Public Domain 2014-present MongoDB, Inc.
+ * Public Domain 2008-2014 WiredTiger, Inc.
+ *
+ * This is free and unencumbered software released into the public domain.
+ *
+ * Anyone is free to copy, modify, publish, use, compile, sell, or
+ * distribute this software, either in source code form or as a compiled
+ * binary, for any purpose, commercial or non-commercial, and by any
+ * means.
+ *
+ * In jurisdictions that recognize copyright laws, the author or authors
+ * of this software dedicate any and all copyright interest in the
+ * software to the public domain. We make this dedication for the benefit
+ * of the public at large and to the detriment of our heirs and
+ * successors. We intend this dedication to be an overt act of
+ * relinquishment in perpetuity of all present and future rights to this
+ * software under copyright law.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+ * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+#ifndef SCOPED_CONNECTION_H
+#define SCOPED_CONNECTION_H
+
+/* Following definitions are required in order to use printing format specifiers in C++. */
+#ifndef __STDC_LIMIT_MACROS
+#define __STDC_LIMIT_MACROS
+#endif
+#ifndef __STDC_FORMAT_MACROS
+#define __STDC_FORMAT_MACROS
+#endif
+
+extern "C" {
+#include "test_util.h"
+}
+
+#include "../connection_manager.h"
+
+namespace test_harness {
+
+class scoped_connection {
+ public:
+ explicit scoped_connection(
+ const std::string &db_conn_config, const std::string &home = DEFAULT_DIR);
+ ~scoped_connection();
+};
+
+} // namespace test_harness
+#endif
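
Usage-wise, scoped_connection ties the lifetime of the singleton's connection to a scope: the constructor calls connection_manager::instance().create() and the destructor calls close(). A short, hypothetical usage sketch built only from the calls shown in this patch (include paths follow the test sources above):

    #include "test_harness/connection_manager.h"
    #include "test_harness/util/scoped_connection.h"

    /* Hypothetical helper: the connection is closed automatically on scope exit. */
    static void
    run_small_workload()
    {
        test_harness::scoped_connection conn("create,cache_size=500MB");
        WT_CONNECTION *wt_conn = test_harness::connection_manager::instance().get_connection();
        (void)wt_conn; /* ... open sessions and run operations here ... */
    } /* ~scoped_connection() closes the connection via connection_manager::instance().close(). */
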
diff --git a/src/third_party/wiredtiger/test/cppsuite/test_harness/util/scoped_types.h b/src/third_party/wiredtiger/test/cppsuite/test_harness/util/scoped_types.h
index edb38e3e22c..71fc24f6c26 100644
--- a/src/third_party/wiredtiger/test/cppsuite/test_harness/util/scoped_types.h
+++ b/src/third_party/wiredtiger/test/cppsuite/test_harness/util/scoped_types.h
@@ -45,12 +45,12 @@ namespace test_harness {
class scoped_cursor {
public:
scoped_cursor() = default;
- scoped_cursor(WT_SESSION *session, const char *uri, const char *cfg);
+ explicit scoped_cursor(WT_SESSION *session, const char *uri, const char *cfg);
/* Moving is ok but copying is not. */
scoped_cursor(scoped_cursor &&other);
- virtual ~scoped_cursor();
+ ~scoped_cursor();
scoped_cursor &operator=(scoped_cursor &&other);
scoped_cursor(const scoped_cursor &) = delete;
@@ -72,7 +72,7 @@ class scoped_session {
scoped_session() = default;
explicit scoped_session(WT_CONNECTION *conn);
- virtual ~scoped_session();
+ ~scoped_session();
/* Moving is ok but copying is not. */
scoped_session(scoped_session &&other);
diff --git a/src/third_party/wiredtiger/test/cppsuite/test_harness/workload/database_model.h b/src/third_party/wiredtiger/test/cppsuite/test_harness/workload/database_model.h
index edbb5bd2675..c8d54524873 100644
--- a/src/third_party/wiredtiger/test/cppsuite/test_harness/workload/database_model.h
+++ b/src/third_party/wiredtiger/test/cppsuite/test_harness/workload/database_model.h
@@ -45,7 +45,7 @@ typedef std::string key_value_t;
/* A collection is made of mapped key value objects. */
class collection {
public:
- collection(const uint64_t id, const uint64_t key_count, const std::string &name);
+ explicit collection(const uint64_t id, const uint64_t key_count, const std::string &name);
/* Copies aren't allowed. */
collection(const collection &) = delete;
diff --git a/src/third_party/wiredtiger/test/cppsuite/test_harness/workload/database_operation.cxx b/src/third_party/wiredtiger/test/cppsuite/test_harness/workload/database_operation.cxx
index 2f85cab681e..402a1e5237e 100644
--- a/src/third_party/wiredtiger/test/cppsuite/test_harness/workload/database_operation.cxx
+++ b/src/third_party/wiredtiger/test/cppsuite/test_harness/workload/database_operation.cxx
@@ -287,7 +287,7 @@ database_operation::update_operation(thread_context *tc)
/* Commit the current transaction if we're able to. */
if (tc->transaction.can_commit())
- tc->transaction.commit();
+ WT_IGNORE_RET(tc->transaction.commit());
}
/* Make sure the last operation is rolled back now the work is finished. */
diff --git a/src/third_party/wiredtiger/test/cppsuite/test_harness/workload/database_operation.h b/src/third_party/wiredtiger/test/cppsuite/test_harness/workload/database_operation.h
index c8dd7370b0f..767ef50c5b7 100644
--- a/src/third_party/wiredtiger/test/cppsuite/test_harness/workload/database_operation.h
+++ b/src/third_party/wiredtiger/test/cppsuite/test_harness/workload/database_operation.h
@@ -57,6 +57,8 @@ class database_operation {
* Basic update operation that chooses a random key and updates it.
*/
virtual void update_operation(thread_context *tc);
+
+ virtual ~database_operation() = default;
};
} // namespace test_harness
#endif
diff --git a/src/third_party/wiredtiger/test/cppsuite/test_harness/workload/thread_context.cxx b/src/third_party/wiredtiger/test/cppsuite/test_harness/workload/thread_context.cxx
index f087d45cd06..8c8c4f10f88 100644
--- a/src/third_party/wiredtiger/test/cppsuite/test_harness/workload/thread_context.cxx
+++ b/src/third_party/wiredtiger/test/cppsuite/test_harness/workload/thread_context.cxx
@@ -91,11 +91,11 @@ transaction_context::commit(const std::string &config)
{
WT_DECL_RET;
testutil_assert(_in_txn && !_needs_rollback);
- if ((ret = _session->commit_transaction(_session, config.empty() ? nullptr : config.c_str())) !=
- 0) {
+ ret = _session->commit_transaction(_session, config.empty() ? nullptr : config.c_str());
+ testutil_assert(ret == 0 || ret == WT_ROLLBACK);
+ if (ret != 0)
logger::log_msg(LOG_WARN,
"Failed to commit transaction in commit, received error code: " + std::to_string(ret));
- }
_op_count = 0;
_in_txn = false;
return (ret == 0);
diff --git a/src/third_party/wiredtiger/test/cppsuite/test_harness/workload/workload_tracking.h b/src/third_party/wiredtiger/test/cppsuite/test_harness/workload/workload_tracking.h
index 704f26ac84b..b6c232376c7 100644
--- a/src/third_party/wiredtiger/test/cppsuite/test_harness/workload/workload_tracking.h
+++ b/src/third_party/wiredtiger/test/cppsuite/test_harness/workload/workload_tracking.h
@@ -58,9 +58,10 @@ enum class tracking_operation { CREATE_COLLECTION, DELETE_COLLECTION, DELETE_KEY
/* Class used to track operations performed on collections */
class workload_tracking : public component {
public:
- workload_tracking(configuration *_config, const std::string &operation_table_config,
+ explicit workload_tracking(configuration *_config, const std::string &operation_table_config,
const std::string &operation_table_name, const std::string &schema_table_config,
const std::string &schema_table_name, const bool use_compression, timestamp_manager &tsm);
+ virtual ~workload_tracking() = default;
const std::string &get_schema_table_name() const;
const std::string &get_operation_table_name() const;
diff --git a/src/third_party/wiredtiger/test/cppsuite/test_harness/workload_generator.h b/src/third_party/wiredtiger/test/cppsuite/test_harness/workload_generator.h
index e29410f6116..78950b18677 100644
--- a/src/third_party/wiredtiger/test/cppsuite/test_harness/workload_generator.h
+++ b/src/third_party/wiredtiger/test/cppsuite/test_harness/workload_generator.h
@@ -48,7 +48,7 @@ namespace test_harness {
*/
class operation_config {
public:
- operation_config(configuration *config, thread_type type);
+ explicit operation_config(configuration *config, thread_type type);
/* Returns a function pointer to the member function of the supplied database operation. */
std::function<void(test_harness::thread_context *)> get_func(database_operation *dbo);
@@ -64,7 +64,7 @@ class operation_config {
*/
class workload_generator : public component {
public:
- workload_generator(configuration *configuration, database_operation *db_operation,
+ explicit workload_generator(configuration *configuration, database_operation *db_operation,
timestamp_manager *timestamp_manager, workload_tracking *tracking, database &database);
~workload_generator();
diff --git a/src/third_party/wiredtiger/test/cppsuite/tests/csuite_style_example_test.cxx b/src/third_party/wiredtiger/test/cppsuite/tests/csuite_style_example_test.cxx
index d0059880446..bcacf115665 100644
--- a/src/third_party/wiredtiger/test/cppsuite/tests/csuite_style_example_test.cxx
+++ b/src/third_party/wiredtiger/test/cppsuite/tests/csuite_style_example_test.cxx
@@ -37,6 +37,7 @@
#include "test_harness/thread_manager.h"
#include "test_harness/util/api_const.h"
#include "test_harness/util/logger.h"
+#include "test_harness/util/scoped_connection.h"
#include "test_harness/workload/random_generator.h"
extern "C" {
@@ -94,7 +95,11 @@ main(int argc, char *argv[])
/* Create a connection, set the cache size and specify the home directory. */
const std::string conn_config = std::string(CONNECTION_CREATE) + ",cache_size=500MB";
const std::string home_dir = std::string(DEFAULT_DIR) + '_' + progname;
- connection_manager::instance().create(conn_config, home_dir);
+ /*
+ * A smart pointer is used here so that the connection can automatically be closed by the
+ * scoped_connection's destructor when the test finishes and the pointer goes out of scope.
+ */
+ std::unique_ptr<scoped_connection> scoped_conn(new scoped_connection(conn_config, home_dir));
WT_CONNECTION *conn = connection_manager::instance().get_connection();
/* Open different sessions. */
@@ -159,9 +164,6 @@ main(int argc, char *argv[])
for (auto c : cursors)
testutil_check(c->close(c));
- /* Close the connection. */
- connection_manager::instance().close();
-
/* Another message. */
logger::log_msg(LOG_INFO, "End of test.");
diff --git a/src/third_party/wiredtiger/test/cppsuite/tests/example_test.cxx b/src/third_party/wiredtiger/test/cppsuite/tests/example_test.cxx
index 4b49ad2b148..c5f344ac1df 100644
--- a/src/third_party/wiredtiger/test/cppsuite/tests/example_test.cxx
+++ b/src/third_party/wiredtiger/test/cppsuite/tests/example_test.cxx
@@ -37,7 +37,7 @@ class example_test : public test_harness::test {
example_test(const test_harness::test_args &args) : test(args) {}
void
- run()
+ run() override final
{
/* You can remove the call to the base class to fully customize your test. */
test::run();
diff --git a/src/third_party/wiredtiger/test/cppsuite/tests/hs_cleanup.cxx b/src/third_party/wiredtiger/test/cppsuite/tests/hs_cleanup.cxx
index 5a828f865f5..fc17f0c5efe 100644
--- a/src/third_party/wiredtiger/test/cppsuite/tests/hs_cleanup.cxx
+++ b/src/third_party/wiredtiger/test/cppsuite/tests/hs_cleanup.cxx
@@ -51,6 +51,8 @@ class hs_cleanup : public test {
LOG_INFO, type_string(tc->type) + " thread {" + std::to_string(tc->id) + "} commencing.");
const char *key_tmp;
+ const uint64_t MAX_ROLLBACKS = 100;
+ uint32_t rollback_retries = 0;
collection &coll = tc->db.get_collection(tc->id);
@@ -93,11 +95,16 @@ class hs_cleanup : public test {
*/
if (tc->update(cursor, coll.id, key_value_t(key_tmp))) {
if (tc->transaction.can_commit()) {
- tc->transaction.commit();
+ if (tc->transaction.commit())
+ rollback_retries = 0;
+ else
+ ++rollback_retries;
}
} else {
tc->transaction.rollback();
+ ++rollback_retries;
}
+ testutil_assert(rollback_retries < MAX_ROLLBACKS);
}
/* Ensure our last transaction is resolved. */
if (tc->transaction.active())
diff --git a/src/third_party/wiredtiger/test/cppsuite/tests/search_near_01.cxx b/src/third_party/wiredtiger/test/cppsuite/tests/search_near_01.cxx
index abacf3dc196..20bcebff4a0 100644
--- a/src/third_party/wiredtiger/test/cppsuite/tests/search_near_01.cxx
+++ b/src/third_party/wiredtiger/test/cppsuite/tests/search_near_01.cxx
@@ -55,7 +55,6 @@ class search_near_01 : public test_harness::test {
uint64_t collections_per_thread = tc->collection_count;
const uint64_t MAX_ROLLBACKS = 100;
uint32_t rollback_retries = 0;
- int cmpp;
/*
* Generate a table of data with prefix keys aaa -> zzz. We have 26 threads from ids
@@ -84,8 +83,8 @@ class search_near_01 : public test_harness::test {
--count;
} else {
/* Commit txn at commit timestamp 100. */
- tc->transaction.commit(
- "commit_timestamp=" + tc->tsm->decimal_to_hex(100));
+ testutil_assert(tc->transaction.commit(
+ "commit_timestamp=" + tc->tsm->decimal_to_hex(100)));
rollback_retries = 0;
}
}
@@ -254,7 +253,6 @@ class search_near_01 : public test_harness::test {
/* Reset our cursor to avoid pinning content. */
testutil_check(cursor->reset(cursor.get()));
}
- tc->transaction.commit();
/* Make sure the last transaction is rolled back now the work is finished. */
if (tc->transaction.active())
tc->transaction.rollback();
diff --git a/src/third_party/wiredtiger/test/cppsuite/tests/search_near_02.cxx b/src/third_party/wiredtiger/test/cppsuite/tests/search_near_02.cxx
index 1df75a83bc0..58c3047dfa9 100644
--- a/src/third_party/wiredtiger/test/cppsuite/tests/search_near_02.cxx
+++ b/src/third_party/wiredtiger/test/cppsuite/tests/search_near_02.cxx
@@ -95,7 +95,9 @@ class search_near_02 : public test_harness::test {
}
std::string key;
+ const uint64_t MAX_ROLLBACKS = 100;
uint64_t counter = 0;
+ uint32_t rollback_retries = 0;
while (tc->running()) {
@@ -109,12 +111,18 @@ class search_near_02 : public test_harness::test {
/* Insert a key value pair. */
if (tc->insert(cc.cursor, cc.coll.id, key)) {
- if (tc->transaction.can_commit())
+ if (tc->transaction.can_commit()) {
/* We are not checking the result of commit as it is not necessary. */
- tc->transaction.commit();
+ if (tc->transaction.commit())
+ rollback_retries = 0;
+ else
+ ++rollback_retries;
+ }
} else {
tc->transaction.rollback();
+ ++rollback_retries;
}
+ testutil_assert(rollback_retries < MAX_ROLLBACKS);
/* Sleep the duration defined by the configuration. */
tc->sleep();
@@ -268,7 +276,6 @@ class search_near_02 : public test_harness::test {
{
const char *k;
std::string k_str;
- int ret;
/*
* The prefix search near call cannot retrieve a key with a smaller value than the prefix we
diff --git a/src/third_party/wiredtiger/test/csuite/wt7989_compact_checkpoint/main.c b/src/third_party/wiredtiger/test/csuite/wt7989_compact_checkpoint/main.c
index a93d83b0985..8cd8c4b3e77 100644
--- a/src/third_party/wiredtiger/test/csuite/wt7989_compact_checkpoint/main.c
+++ b/src/third_party/wiredtiger/test/csuite/wt7989_compact_checkpoint/main.c
@@ -48,6 +48,7 @@ static const char conn_config[] = "create,cache_size=2GB,statistics=(all)";
static const char table_config[] =
"allocation_size=4KB,leaf_page_max=4KB,key_format=i,value_format=QQQS";
static char data_str[1024] = "";
+static pthread_t thread_compact;
/* Structures definition. */
struct thread_data {
@@ -105,7 +106,7 @@ run_test(bool stress_test, const char *home, const char *uri)
struct thread_data td;
WT_CONNECTION *conn;
WT_SESSION *session;
- pthread_t thread_checkpoint, thread_compact;
+ pthread_t thread_checkpoint;
uint64_t file_sz_after, file_sz_before;
testutil_make_work_dir(home);
@@ -147,8 +148,8 @@ run_test(bool stress_test, const char *home, const char *uri)
/* Create and initialize conditional variable. */
testutil_check(__wt_cond_alloc((WT_SESSION_IMPL *)session, "compact operation", &td.cond));
+ /* The checkpoint thread will spawn the compact thread when it's ready. */
testutil_check(pthread_create(&thread_checkpoint, NULL, thread_func_checkpoint, &td));
- testutil_check(pthread_create(&thread_compact, NULL, thread_func_compact, &td));
}
/* Wait for the threads to finish the work. */
@@ -188,12 +189,6 @@ thread_func_compact(void *arg)
testutil_check(td->conn->open_session(td->conn, NULL, NULL, &session));
if (td->cond != NULL) {
- /*
- * Make sure checkpoint thread is initialized and waiting for the signal. Sleep for one
- * second.
- */
- __wt_sleep(1, 0);
-
/* Wake up the checkpoint thread. */
printf("Sending the signal!\n");
__wt_cond_signal((WT_SESSION_IMPL *)session, td->cond);
@@ -237,6 +232,12 @@ thread_func_checkpoint(void *arg)
__wt_random_init_seed((WT_SESSION_IMPL *)session, &rnd);
if (td->cond != NULL) {
+ /*
+ * Spawn the compact thread here to make sure both threads are ready for the synced
+ * start.
+ */
+ testutil_check(pthread_create(&thread_compact, NULL, thread_func_compact, td));
+
printf("Waiting for the signal...\n");
/*
* Wait for the signal and time out after 20 seconds. wait_run_check is required because the
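
The reworked synchronization drops the one-second sleep and instead has the checkpoint thread spawn the compact thread only once it is ready to wait, so the wake-up signal cannot be lost. An illustrative standard-C++ analogue of that handshake using std::condition_variable with a predicate (the test itself uses the internal __wt_cond_* API, not this):

    #include <chrono>
    #include <condition_variable>
    #include <mutex>
    #include <thread>

    static std::mutex mtx;
    static std::condition_variable cond;
    static bool signalled = false;

    static void
    compact_thread()
    {
        /* Do the work, then signal the waiter. */
        {
            std::lock_guard<std::mutex> lg(mtx);
            signalled = true;
        }
        cond.notify_one();
    }

    int
    main()
    {
        std::unique_lock<std::mutex> lk(mtx);
        /* Spawn the worker only after the waiter holds the lock, so the notify cannot be missed. */
        std::thread worker(compact_thread);
        /* The predicate also guards against spurious wakeups and an early notify. */
        cond.wait_for(lk, std::chrono::seconds(20), [] { return signalled; });
        lk.unlock();
        worker.join();
        return (0);
    }
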
diff --git a/src/third_party/wiredtiger/test/evergreen.yml b/src/third_party/wiredtiger/test/evergreen.yml
index a775a9aec7a..6b100d1d431 100755
--- a/src/third_party/wiredtiger/test/evergreen.yml
+++ b/src/third_party/wiredtiger/test/evergreen.yml
@@ -1,9 +1,31 @@
#
# This file defines the tasks and platforms for WiredTiger in the
-# MongoDB continuous integration system (see https://mci.mongodb.com).
+# MongoDB continuous integration system (https://evergreen.mongodb.com).
#
+#######################################
+# Project Settings #
+#######################################
+
+stepback: true
+pre:
+ - func: "cleanup"
+post:
+ - func: "upload artifact"
+ vars:
+ postfix: -${execution}
+ - func: "save wt hang analyzer core/debugger files"
+ - func: "dump stderr/stdout"
+ - func: "cleanup"
+timeout:
+ - func: "run wt hang analyzer"
+
+#######################################
+# Functions #
+#######################################
+
functions:
+
"get project" :
command: git.get_project
params:
@@ -100,8 +122,15 @@ functions:
working_dir: "wiredtiger"
shell: bash
script: |
+ # Fetch the gperftools library.
+ if [[ "${posix_configure_flags|}" =~ (tcmalloc|TCMALLOC) ]]; then
+ git clone git@github.com:wiredtiger/automation-scripts.git
+ . automation-scripts/evergreen/find_gperftools.sh ${s3_access_key} ${s3_secret_key} ${build_variant} ${is_cmake_build|false}
+ fi
+
set -o errexit
set -o verbose
+
# Check if the build variant has specified a build type, always default to
# Autoconf/Libtool if $is_cmake_build is not declared.
if [ ${is_cmake_build|false} = true ]; then
@@ -291,11 +320,18 @@ functions:
done
- command: shell.exec
params:
- working_dir: "wiredtiger.github.com"
shell: bash
silent: true
script: |
set -o errexit
+
+ # We could have exited the previous command for the same reason.
+ if [[ "${branch_name}" != "develop" ]]; then
+ echo "We only run the documentation update task on the WiredTiger (develop) Evergreen project."
+ exit 0
+ fi
+
+ cd wiredtiger.github.com
git push https://"${doc-update-github-token}"@github.com/wiredtiger/wiredtiger.github.com
"make check directory":
@@ -583,15 +619,18 @@ functions:
display_name: "Test results (JSON)"
remote_file: wiredtiger/${build_variant}/${revision}/perf-test-${perf-test-name}-${build_id}-${execution}/test-results.json
+#######################################
+# Variables #
+#######################################
+
+variables:
+
#########################################################################################
-# VARIABLES
-#
# The following stress tests are configured to run for six hours via the "-t 360"
# argument to format.sh: format-stress-test, format-stress-sanitizer-test, and
# race-condition-stress-sanitizer-test. The smoke and recovery tests run in a loop,
# with the number of runs adjusted to provide approximately six hours of testing.
#########################################################################################
-variables:
- &format-stress-test
exec_timeout_secs: 25200
@@ -665,19 +704,12 @@ variables:
vars:
times: 25
-pre:
- - func: "cleanup"
-post:
- - func: "upload artifact"
- vars:
- postfix: -${execution}
- - func: "save wt hang analyzer core/debugger files"
- - func: "dump stderr/stdout"
- - func: "cleanup"
-timeout:
- - func: "run wt hang analyzer"
+#######################################
+# Tasks #
+#######################################
tasks:
+
# Base compile task on posix flavours
- name: compile
tags: ["pull_request"]
@@ -2154,6 +2186,7 @@ tasks:
- name: doc-update
patchable: false
+ stepback: false
commands:
- func: "get project"
- func: "compile wiredtiger docs"
@@ -2488,7 +2521,7 @@ tasks:
script: |
set -o errexit
set -o verbose
- ../../../tools/run_parallel.sh 'nice ../../../test/checkpoint/recovery-test.sh "${data_validation_stress_test_args}" WT_TEST.$t' 120
+ ${test_env_vars|} ../../../tools/run_parallel.sh 'nice ../../../test/checkpoint/recovery-test.sh "${data_validation_stress_test_args}" WT_TEST.$t' 120
- name: data-validation-stress-test-checkpoint-fp-hs-insert-s1
depends_on:
@@ -2502,7 +2535,7 @@ tasks:
script: |
set -o errexit
set -o verbose
- ../../../tools/run_parallel.sh 'nice ../../../test/checkpoint/recovery-test.sh "${data_validation_stress_test_args} -s 1" WT_TEST.$t' 120
+ ${test_env_vars|} ../../../tools/run_parallel.sh 'nice ../../../test/checkpoint/recovery-test.sh "${data_validation_stress_test_args} -s 1" WT_TEST.$t' 120
- name: data-validation-stress-test-checkpoint-fp-hs-insert-s2
depends_on:
@@ -2516,7 +2549,7 @@ tasks:
script: |
set -o errexit
set -o verbose
- ../../../tools/run_parallel.sh 'nice ../../../test/checkpoint/recovery-test.sh "${data_validation_stress_test_args} -s 2" WT_TEST.$t' 120
+ ${test_env_vars|} ../../../tools/run_parallel.sh 'nice ../../../test/checkpoint/recovery-test.sh "${data_validation_stress_test_args} -s 2" WT_TEST.$t' 120
- name: data-validation-stress-test-checkpoint-fp-hs-insert-s3
depends_on:
@@ -2530,7 +2563,7 @@ tasks:
script: |
set -o errexit
set -o verbose
- ../../../tools/run_parallel.sh 'nice ../../../test/checkpoint/recovery-test.sh "${data_validation_stress_test_args} -s 3" WT_TEST.$t' 120
+ ${test_env_vars|} ../../../tools/run_parallel.sh 'nice ../../../test/checkpoint/recovery-test.sh "${data_validation_stress_test_args} -s 3" WT_TEST.$t' 120
- name: data-validation-stress-test-checkpoint-fp-hs-insert-s4
depends_on:
@@ -2544,7 +2577,7 @@ tasks:
script: |
set -o errexit
set -o verbose
- ../../../tools/run_parallel.sh 'nice ../../../test/checkpoint/recovery-test.sh "${data_validation_stress_test_args} -s 4" WT_TEST.$t' 120
+ ${test_env_vars|} ../../../tools/run_parallel.sh 'nice ../../../test/checkpoint/recovery-test.sh "${data_validation_stress_test_args} -s 4" WT_TEST.$t' 120
- name: data-validation-stress-test-checkpoint-fp-hs-insert-s5
depends_on:
@@ -2558,7 +2591,7 @@ tasks:
script: |
set -o errexit
set -o verbose
- ../../../tools/run_parallel.sh 'nice ../../../test/checkpoint/recovery-test.sh "${data_validation_stress_test_args} -s 5" WT_TEST.$t' 120
+ ${test_env_vars|} ../../../tools/run_parallel.sh 'nice ../../../test/checkpoint/recovery-test.sh "${data_validation_stress_test_args} -s 5" WT_TEST.$t' 120
- name: data-validation-stress-test-checkpoint-fp-hs-insert-s6
depends_on:
@@ -2572,7 +2605,7 @@ tasks:
script: |
set -o errexit
set -o verbose
- ../../../tools/run_parallel.sh 'nice ../../../test/checkpoint/recovery-test.sh "${data_validation_stress_test_args} -s 6" WT_TEST.$t' 120
+ ${test_env_vars|} ../../../tools/run_parallel.sh 'nice ../../../test/checkpoint/recovery-test.sh "${data_validation_stress_test_args} -s 6" WT_TEST.$t' 120
- name: data-validation-stress-test-checkpoint-fp-hs-insert-s7
depends_on:
@@ -2586,7 +2619,7 @@ tasks:
script: |
set -o errexit
set -o verbose
- ../../../tools/run_parallel.sh 'nice ../../../test/checkpoint/recovery-test.sh "${data_validation_stress_test_args} -s 7" WT_TEST.$t' 120
+ ${test_env_vars|} ../../../tools/run_parallel.sh 'nice ../../../test/checkpoint/recovery-test.sh "${data_validation_stress_test_args} -s 7" WT_TEST.$t' 120
- name: format-failure-configs-test
depends_on:
@@ -2600,7 +2633,7 @@ tasks:
set -o errexit
set -o verbose
- ./run_format_configs.sh
+ ${test_env_vars|} ./run_format_configs.sh
- name: static-wt-build-test
commands:
@@ -2954,6 +2987,30 @@ tasks:
vars:
perf-test-name: medium-multi-lsm
+ - name: perf-test-parallel-pop-lsm
+ commands:
+ - func: "get project"
+ - func: "compile wiredtiger"
+ - func: "generic-perf-test"
+ vars:
+ perf-test-name: parallel-pop-lsm
+ maxruns: 1
+ - func: "generic-perf-test-push-results"
+ vars:
+ perf-test-name: parallel-pop-lsm
+
+ - name: perf-test-update-lsm
+ commands:
+ - func: "get project"
+ - func: "compile wiredtiger"
+ - func: "generic-perf-test"
+ vars:
+ perf-test-name: update-lsm
+ maxruns: 1
+ - func: "generic-perf-test-push-results"
+ vars:
+ perf-test-name: update-lsm
+
###############################
# Performance Tests for btree #
###############################
@@ -3006,6 +3063,9 @@ tasks:
vars:
perf-test-name: medium-btree-backup
+#######################################
+# Buildvariants #
+#######################################
buildvariants:
@@ -3014,9 +3074,23 @@ buildvariants:
run_on:
- ubuntu2004-test
expansions:
- test_env_vars: LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libeatmydata.so PATH=/opt/mongodbtoolchain/v3/bin:$PATH LD_LIBRARY_PATH=$(pwd)/.libs top_srcdir=$(pwd)/.. top_builddir=$(pwd)
+ test_env_vars:
+ LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libeatmydata.so
+ PATH=/opt/mongodbtoolchain/v3/bin:$PATH
+ top_dir=$(git rev-parse --show-toplevel)
+ top_builddir=$top_dir/build_posix
+ LD_LIBRARY_PATH=$top_builddir/.libs:$top_dir/TCMALLOC_LIB/lib
smp_command: -j $(echo "`grep -c ^processor /proc/cpuinfo` * 2" | bc)
- posix_configure_flags: --enable-silent-rules --enable-diagnostic --enable-python --enable-zlib --enable-snappy --enable-strict --enable-static --prefix=$(pwd)/LOCAL_INSTALL
+ posix_configure_flags:
+ --enable-silent-rules
+ --enable-diagnostic
+ --enable-python
+ --enable-zlib
+ --enable-snappy
+ --enable-strict
+ --enable-static
+ --enable-tcmalloc
+ --prefix=$(pwd)/LOCAL_INSTALL
python_binary: '/opt/mongodbtoolchain/v3/bin/python3'
make_command: PATH=/opt/mongodbtoolchain/v3/bin:$PATH make
data_validation_stress_test_args: -t r -m -W 3 -D -p -x -n 100000 -k 100000 -C cache_size=100MB
@@ -3053,8 +3127,21 @@ buildvariants:
run_on:
- ubuntu2004-test
expansions:
- test_env_vars: LD_LIBRARY_PATH=$(pwd) WT_BUILDDIR=$(pwd)
- posix_configure_flags: -DCMAKE_TOOLCHAIN_FILE=../cmake/toolchains/mongodbtoolchain_v3_gcc.cmake -DCMAKE_C_FLAGS="-ggdb" -DHAVE_DIAGNOSTIC=1 -DENABLE_PYTHON=1 -DENABLE_ZLIB=1 -DENABLE_SNAPPY=1 -DENABLE_STRICT=1 -DCMAKE_INSTALL_PREFIX=$(pwd)/LOCAL_INSTALL
+ test_env_vars:
+ WT_TOPDIR=$(git rev-parse --show-toplevel)
+ WT_BUILDDIR=$WT_TOPDIR/cmake_build
+ LD_LIBRARY_PATH=$WT_BUILDDIR:$WT_TOPDIR/TCMALLOC_LIB/lib
+ posix_configure_flags:
+ -DCMAKE_TOOLCHAIN_FILE=../cmake/toolchains/mongodbtoolchain_v3_gcc.cmake
+ -DCMAKE_C_FLAGS="-ggdb"
+ -DHAVE_DIAGNOSTIC=1
+ -DENABLE_PYTHON=1
+ -DENABLE_ZLIB=1
+ -DENABLE_SNAPPY=1
+ -DENABLE_STRICT=1
+ -DENABLE_TCMALLOC=1
+ -DCMAKE_PREFIX_PATH="$(pwd)/../TCMALLOC_LIB"
+ -DCMAKE_INSTALL_PREFIX=$(pwd)/LOCAL_INSTALL
python_binary: '/opt/mongodbtoolchain/v3/bin/python3'
smp_command: -j $(echo "`grep -c ^processor /proc/cpuinfo` * 2" | bc)
cmake_generator: Ninja
@@ -3076,14 +3163,23 @@ buildvariants:
PATH=/opt/mongodbtoolchain/v3/bin:$PATH
CFLAGS="-fsanitize=address -fno-omit-frame-pointer -ggdb"
CXXFLAGS="-fsanitize=address -fno-omit-frame-pointer -ggdb"
- posix_configure_flags: --enable-silent-rules --enable-strict --enable-diagnostic --disable-static --prefix=$(pwd)/LOCAL_INSTALL
+ posix_configure_flags:
+ --enable-silent-rules
+ --enable-strict
+ --enable-diagnostic
+ --disable-static
+ --prefix=$(pwd)/LOCAL_INSTALL
smp_command: -j $(grep -c ^processor /proc/cpuinfo)
make_command: PATH=/opt/mongodbtoolchain/v3/bin:$PATH make
test_env_vars:
ASAN_OPTIONS="detect_leaks=1:abort_on_error=1:disable_coredump=0"
ASAN_SYMBOLIZER_PATH=/opt/mongodbtoolchain/v3/bin/llvm-symbolizer
TESTUTIL_BYPASS_ASAN=1
- LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libeatmydata.so PATH=/opt/mongodbtoolchain/v3/bin:$PATH LD_LIBRARY_PATH=$(pwd)/.libs top_srcdir=$(pwd)/.. top_builddir=$(pwd)
+ LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libeatmydata.so
+ PATH=/opt/mongodbtoolchain/v3/bin:$PATH
+ top_dir=$(git rev-parse --show-toplevel)
+ top_builddir=$top_dir/build_posix
+ LD_LIBRARY_PATH=$top_builddir/.libs
tasks:
- name: ".pull_request !.windows_only !.pull_request_compilers !.python"
- name: examples-c-test
@@ -3099,15 +3195,21 @@ buildvariants:
PATH=/opt/mongodbtoolchain/v3/bin:$PATH
CFLAGS="-fsanitize=memory -fno-omit-frame-pointer -fno-optimize-sibling-calls -O1 -ggdb"
posix_configure_flags:
- --enable-silent-rules --enable-strict --enable-diagnostic
- --disable-static --prefix=$(pwd)/LOCAL_INSTALL
+ --enable-silent-rules
+ --enable-strict
+ --enable-diagnostic
+ --disable-static
+ --prefix=$(pwd)/LOCAL_INSTALL
smp_command: -j $(grep -c ^processor /proc/cpuinfo)
make_command: PATH=/opt/mongodbtoolchain/v3/bin:$PATH make
test_env_vars:
MSAN_OPTIONS="abort_on_error=1:disable_coredump=0:print_stacktrace=1"
MSAN_SYMBOLIZER_PATH=/opt/mongodbtoolchain/v3/bin/llvm-symbolizer
- LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libeatmydata.so LD_LIBRARY_PATH=$(pwd)/.libs
- PATH=/opt/mongodbtoolchain/v3/bin:$PATH top_srcdir=$(pwd)/.. top_builddir=$(pwd)
+ LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libeatmydata.so
+ PATH=/opt/mongodbtoolchain/v3/bin:$PATH
+ top_dir=$(git rev-parse --show-toplevel)
+ top_builddir=$top_dir/build_posix
+ LD_LIBRARY_PATH=$top_builddir/.libs
TESTUTIL_SLOW_MACHINE=1
tasks:
- name: clang-analyzer
@@ -3131,14 +3233,21 @@ buildvariants:
CFLAGS="-fsanitize=undefined -fno-omit-frame-pointer -fno-optimize-sibling-calls -O1 -ggdb"
CXXFLAGS="-fsanitize=undefined -fno-omit-frame-pointer -fno-optimize-sibling-calls -O1 -ggdb"
posix_configure_flags:
- --enable-silent-rules --enable-strict --enable-diagnostic
- --disable-static --prefix=$(pwd)/LOCAL_INSTALL
+ --enable-silent-rules
+ --enable-strict
+ --enable-diagnostic
+ --disable-static
+ --enable-tcmalloc
+ --prefix=$(pwd)/LOCAL_INSTALL
smp_command: -j $(grep -c ^processor /proc/cpuinfo)
make_command: PATH=/opt/mongodbtoolchain/v3/bin:$PATH make
test_env_vars:
UBSAN_OPTIONS="detect_leaks=1:disable_coredump=0:external_symbolizer_path=/opt/mongodbtoolchain/v3/bin/llvm-symbolizer:halt_on_error=1:print_stacktrace=1"
- LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libeatmydata.so LD_LIBRARY_PATH=$(pwd)/.libs
- PATH=/opt/mongodbtoolchain/v3/bin:$PATH top_srcdir=$(pwd)/.. top_builddir=$(pwd)
+ LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libeatmydata.so
+ PATH=/opt/mongodbtoolchain/v3/bin:$PATH
+ top_dir=$(git rev-parse --show-toplevel)
+ top_builddir=$top_dir/build_posix
+ LD_LIBRARY_PATH=$top_builddir/.libs:$top_dir/TCMALLOC_LIB/lib
tasks:
- name: clang-analyzer
- name: compile
@@ -3157,7 +3266,16 @@ buildvariants:
run_on:
- ubuntu2004-wt-build
expansions:
- posix_configure_flags: --enable-silent-rules --enable-diagnostic --enable-strict --enable-lz4 --enable-snappy --enable-zlib --enable-zstd --enable-python
+ posix_configure_flags:
+ --enable-silent-rules
+ --enable-diagnostic
+ --enable-strict
+ --enable-lz4
+ --enable-snappy
+ --enable-zlib
+ --enable-zstd
+ --enable-python
+ --enable-tcmalloc
python_binary: '/opt/mongodbtoolchain/v3/bin/python3'
smp_command: -j $(grep -c ^processor /proc/cpuinfo)
make_command: PATH=/opt/mongodbtoolchain/v3/bin:$PATH make
@@ -3172,11 +3290,21 @@ buildvariants:
smp_command: -j $(grep -c ^processor /proc/cpuinfo)
make_command: PATH=/opt/mongodbtoolchain/v3/bin:$PATH make
test_env_vars:
- LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libeatmydata.so LD_LIBRARY_PATH=$(pwd)/.libs
- PATH=/opt/mongodbtoolchain/v3/bin:$PATH top_srcdir=$(pwd)/.. top_builddir=$(pwd)
+ LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libeatmydata.so
+ PATH=/opt/mongodbtoolchain/v3/bin:$PATH
+ top_dir=$(git rev-parse --show-toplevel)
+ top_builddir=$top_dir/build_posix
+ LD_LIBRARY_PATH=$top_builddir/.libs:$top_dir/TCMALLOC_LIB/lib
posix_configure_flags:
- --enable-silent-rules --enable-diagnostic --enable-python --enable-zlib --enable-snappy
- --enable-strict --enable-static --prefix=$(pwd)/LOCAL_INSTALL
+ --enable-silent-rules
+ --enable-diagnostic
+ --enable-python
+ --enable-zlib
+ --enable-snappy
+ --enable-strict
+ --enable-static
+ --enable-tcmalloc
+ --prefix=$(pwd)/LOCAL_INSTALL
python_binary: '/opt/mongodbtoolchain/v3/bin/python3'
tasks:
- name: ".stress-test-1"
@@ -3210,6 +3338,8 @@ buildvariants:
- name: perf-test-medium-lsm
- name: perf-test-medium-lsm-compact
- name: perf-test-medium-multi-lsm
+ - name: perf-test-parallel-pop-lsm
+ - name: perf-test-update-lsm
- name: large-scale-tests
@@ -3220,8 +3350,14 @@ buildvariants:
expansions:
make_command: PATH=/opt/mongodbtoolchain/v3/bin:$PATH make
posix_configure_flags:
- --enable-silent-rules --enable-python --enable-zlib --enable-snappy
- --enable-strict --enable-static --prefix=$(pwd)/LOCAL_INSTALL
+ --enable-silent-rules
+ --enable-python
+ --enable-zlib
+ --enable-snappy
+ --enable-strict
+ --enable-static
+ --enable-tcmalloc
+ --prefix=$(pwd)/LOCAL_INSTALL
test_env_vars:
PATH=/opt/mongodbtoolchain/v3/bin:$PATH
upload_source_dir: mongo-tests/largescale/many-collection/dbpath/diagnostic.data
@@ -3237,11 +3373,21 @@ buildvariants:
run_on:
- ubuntu2004-test
expansions:
- test_env_vars: LD_LIBRARY_PATH=$(pwd)/../../.libs PATH=/opt/mongodbtoolchain/v3/bin:$PATH
+ test_env_vars:
+ top_dir=$(git rev-parse --show-toplevel)
+ top_builddir=$top_dir/build_posix
+ LD_LIBRARY_PATH=$top_builddir/.libs:$top_dir/TCMALLOC_LIB/lib
+ PATH=/opt/mongodbtoolchain/v3/bin:$PATH
make_command: PATH=/opt/mongodbtoolchain/v3/bin:$PATH make
posix_configure_flags:
- --enable-diagnostic --enable-python --enable-silent-rules --enable-snappy --enable-static
- --enable-strict --enable-zlib
+ --enable-diagnostic
+ --enable-python
+ --enable-silent-rules
+ --enable-snappy
+ --enable-strict
+ --enable-static
+ --enable-tcmalloc
+ --enable-zlib
tasks:
- name: compile
- name: cppsuite-hs-cleanup-stress
@@ -3272,7 +3418,12 @@ buildvariants:
run_on:
- ubuntu2004-test
expansions:
- test_env_vars: LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libeatmydata.so PATH=/opt/mongodbtoolchain/v3/bin:$PATH LD_LIBRARY_PATH=$(pwd)/.libs top_srcdir=$(pwd)/.. top_builddir=$(pwd)
+ test_env_vars:
+ LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libeatmydata.so
+ PATH=/opt/mongodbtoolchain/v3/bin:$PATH
+ top_dir=$(git rev-parse --show-toplevel)
+ top_builddir=$top_dir/build_posix
+ LD_LIBRARY_PATH=$top_builddir/.libs
smp_command: -j $(grep -c ^processor /proc/cpuinfo)
make_command: PATH=/opt/mongodbtoolchain/v3/bin:$PATH make
python_binary: '/opt/mongodbtoolchain/v3/bin/python3'
@@ -3287,7 +3438,12 @@ buildvariants:
run_on:
- rhel80-test
expansions:
- test_env_vars: LD_PRELOAD=/usr/local/lib/libeatmydata.so PATH=/opt/mongodbtoolchain/v3/bin:$PATH LD_LIBRARY_PATH=$(pwd)/.libs top_srcdir=$(pwd)/.. top_builddir=$(pwd)
+ test_env_vars:
+ LD_PRELOAD=/usr/local/lib/libeatmydata.so
+ PATH=/opt/mongodbtoolchain/v3/bin:$PATH
+ top_dir=$(git rev-parse --show-toplevel)
+ top_builddir=$top_dir/build_posix
+ LD_LIBRARY_PATH=$top_builddir/.libs
smp_command: -j $(grep -c ^processor /proc/cpuinfo)
make_command: PATH=/opt/mongodbtoolchain/v3/bin:$PATH make
python_binary: '/opt/mongodbtoolchain/v3/bin/python3'
@@ -3318,7 +3474,12 @@ buildvariants:
run_on:
- ubuntu2004-test
expansions:
- test_env_vars: LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libeatmydata.so PATH=/opt/mongodbtoolchain/v3/bin:$PATH LD_LIBRARY_PATH=$(pwd)/.libs top_srcdir=$(pwd)/.. top_builddir=$(pwd)
+ test_env_vars:
+ LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libeatmydata.so
+ PATH=/opt/mongodbtoolchain/v3/bin:$PATH
+ top_dir=$(git rev-parse --show-toplevel)
+ top_builddir=$top_dir/build_posix
+ LD_LIBRARY_PATH=$top_builddir/.libs
tasks:
- name: coverage-report
- name: cyclomatic-complexity
@@ -3360,7 +3521,7 @@ buildvariants:
expansions:
python_binary: 'python'
is_cmake_build: true
- test_env_vars: WT_BUILDDIR=$(pwd)
+ test_env_vars: WT_BUILDDIR=$(git rev-parse --show-toplevel)/cmake_build
windows_configure_flags: -vcvars_bat "'C:\Program Files (x86)\Microsoft Visual Studio\2017\Professional\VC\Auxiliary\Build\vcvars64.bat'"
tasks:
- name: compile
@@ -3371,13 +3532,25 @@ buildvariants:
display_name: OS X 10.14
run_on:
- macos-1014
+ batchtime: 120 # 2 hours
expansions:
configure_env_vars: PATH=/opt/mongodbtoolchain/v3/bin:$PATH ADD_CFLAGS="-ggdb -fPIC"
- posix_configure_flags: --enable-silent-rules --enable-diagnostic --enable-python --enable-zlib --enable-strict --enable-static --prefix=$(pwd)/LOCAL_INSTALL
+ posix_configure_flags:
+ --enable-silent-rules
+ --enable-diagnostic
+ --enable-python
+ --enable-zlib
+ --enable-strict
+ --enable-static
+ --prefix=$(pwd)/LOCAL_INSTALL
python_binary: '/opt/mongodbtoolchain/v3/bin/python3'
smp_command: -j $(sysctl -n hw.logicalcpu)
make_command: PATH=/opt/mongodbtoolchain/v3/bin:$PATH ARCHFLAGS=-Wno-error=unused-command-line-argument-hard-error-in-future make
- test_env_vars: PATH=/opt/mongodbtoolchain/v3/bin:$PATH DYLD_LIBRARY_PATH=$(pwd)/.libs top_srcdir=$(pwd)/.. top_builddir=$(pwd)
+ test_env_vars:
+ PATH=/opt/mongodbtoolchain/v3/bin:$PATH
+ top_dir=$(git rev-parse --show-toplevel)
+ top_builddir=$top_dir/build_posix
+ DYLD_LIBRARY_PATH=$top_builddir/.libs
tasks:
- name: compile
- name: make-check-test
@@ -3388,13 +3561,22 @@ buildvariants:
display_name: "* OS X 10.14 CMake"
run_on:
- macos-1014
+ batchtime: 120 # 2 hours
expansions:
- posix_configure_flags: -DCMAKE_C_FLAGS="-ggdb" -DHAVE_DIAGNOSTIC=1 -DENABLE_PYTHON=1 -DENABLE_ZLIB=1 -DENABLE_STRICT=1 -DCMAKE_INSTALL_PREFIX=$(pwd)/LOCAL_INSTALL
+ posix_configure_flags:
+ -DCMAKE_C_FLAGS="-ggdb"
+ -DHAVE_DIAGNOSTIC=1
+ -DENABLE_PYTHON=1
+ -DENABLE_ZLIB=1
+ -DENABLE_STRICT=1
+ -DCMAKE_INSTALL_PREFIX=$(pwd)/LOCAL_INSTALL
python_binary: 'python3'
smp_command: -j $(sysctl -n hw.logicalcpu)
cmake_generator: "Unix Makefiles"
make_command: make
- test_env_vars: DYLD_LIBRARY_PATH=$(pwd) WT_BUILDDIR=$(pwd)
+ test_env_vars:
+ WT_BUILDDIR=$(git rev-parse --show-toplevel)/cmake_build
+ DYLD_LIBRARY_PATH=$WT_BUILDDIR
is_cmake_build: true
tasks:
- name: compile
@@ -3409,7 +3591,11 @@ buildvariants:
expansions:
python_binary: '/opt/mongodbtoolchain/v3/bin/python3'
smp_command: -j $(grep -c ^processor /proc/cpuinfo)
- test_env_vars: PATH=/opt/mongodbtoolchain/v3/bin:$PATH LD_LIBRARY_PATH=$(pwd)/.libs top_srcdir=$(pwd)/.. top_builddir=$(pwd)
+ test_env_vars:
+ PATH=/opt/mongodbtoolchain/v3/bin:$PATH
+ top_dir=$(git rev-parse --show-toplevel)
+ top_builddir=$top_dir/build_posix
+ LD_LIBRARY_PATH=$top_builddir/.libs
tasks:
- name: compile
- name: generate-datafile-little-endian
@@ -3426,7 +3612,11 @@ buildvariants:
expansions:
python_binary: '/opt/mongodbtoolchain/v3/bin/python3'
smp_command: -j $(grep -c ^processor /proc/cpuinfo)
- test_env_vars: PATH=/opt/mongodbtoolchain/v3/bin:$PATH LD_LIBRARY_PATH=$(pwd)/.lib top_srcdir=$(pwd)/.. top_builddir=$(pwd)
+ test_env_vars:
+ PATH=/opt/mongodbtoolchain/v3/bin:$PATH
+ top_dir=$(git rev-parse --show-toplevel)
+ top_builddir=$top_dir/build_posix
+ LD_LIBRARY_PATH=$top_builddir/.lib
tasks:
- name: compile
- name: generate-datafile-big-endian
@@ -3437,16 +3627,25 @@ buildvariants:
display_name: "~ Ubuntu 18.04 PPC"
run_on:
- ubuntu1804-power8-test
+ batchtime: 120 # 2 hours
expansions:
format_test_setting: ulimit -c unlimited
smp_command: -j $(grep -c ^processor /proc/cpuinfo)
make_command: PATH=/opt/mongodbtoolchain/v3/bin:$PATH make
test_env_vars:
- PATH=/opt/mongodbtoolchain/v3/bin:$PATH LD_LIBRARY_PATH=$(pwd)/.libs
- top_srcdir=$(pwd)/.. top_builddir=$(pwd)
+ PATH=/opt/mongodbtoolchain/v3/bin:$PATH
+ top_dir=$(git rev-parse --show-toplevel)
+ top_builddir=$top_dir/build_posix
+ LD_LIBRARY_PATH=$top_builddir/.libs
posix_configure_flags:
- --enable-silent-rules --enable-diagnostic --enable-python --enable-zlib --enable-snappy
- --enable-strict --enable-static --prefix=$(pwd)/LOCAL_INSTALL
+ --enable-silent-rules
+ --enable-diagnostic
+ --enable-python
+ --enable-zlib
+ --enable-snappy
+ --enable-strict
+ --enable-static
+ --prefix=$(pwd)/LOCAL_INSTALL
python_binary: '/opt/mongodbtoolchain/v3/bin/python3'
tasks:
- name: compile
@@ -3461,8 +3660,18 @@ buildvariants:
- ubuntu1804-power8-test
batchtime: 10080 # 7 days
expansions:
- test_env_vars: LD_LIBRARY_PATH=$(pwd) WT_BUILDDIR=$(pwd)
- posix_configure_flags: -DCMAKE_TOOLCHAIN_FILE=../cmake/toolchains/mongodbtoolchain_v3_gcc.cmake -DCMAKE_C_FLAGS="-ggdb" -DHAVE_DIAGNOSTIC=1 -DENABLE_PYTHON=1 -DENABLE_ZLIB=1 -DENABLE_SNAPPY=1 -DENABLE_STRICT=1 -DCMAKE_INSTALL_PREFIX=$(pwd)/LOCAL_INSTALL
+ test_env_vars:
+ WT_BUILDDIR=$(git rev-parse --show-toplevel)/cmake_build
+ LD_LIBRARY_PATH=$WT_BUILDDIR
+ posix_configure_flags:
+ -DCMAKE_TOOLCHAIN_FILE=../cmake/toolchains/mongodbtoolchain_v3_gcc.cmake
+ -DCMAKE_C_FLAGS="-ggdb"
+ -DHAVE_DIAGNOSTIC=1
+ -DENABLE_PYTHON=1
+ -DENABLE_ZLIB=1
+ -DENABLE_SNAPPY=1
+ -DENABLE_STRICT=1
+ -DCMAKE_INSTALL_PREFIX=$(pwd)/LOCAL_INSTALL
python_binary: '/opt/mongodbtoolchain/v3/bin/python3'
smp_command: -j $(grep -c ^processor /proc/cpuinfo)
cmake_generator: Ninja
@@ -3477,15 +3686,24 @@ buildvariants:
display_name: "~ Ubuntu 18.04 zSeries"
run_on:
- ubuntu1804-zseries-test
+ batchtime: 120 # 2 hours
expansions:
smp_command: -j $(grep -c ^processor /proc/cpuinfo)
make_command: PATH=/opt/mongodbtoolchain/v3/bin:$PATH make
test_env_vars:
- PATH=/opt/mongodbtoolchain/v3/bin:$PATH LD_LIBRARY_PATH=$(pwd)/.libs:$(pwd)/lang/python
- top_srcdir=$(pwd)/.. top_builddir=$(pwd)
+ PATH=/opt/mongodbtoolchain/v3/bin:$PATH
+ top_dir=$(git rev-parse --show-toplevel)
+ top_builddir=$top_dir/build_posix
+ LD_LIBRARY_PATH=$top_builddir/.libs
posix_configure_flags:
- --enable-silent-rules --enable-diagnostic --enable-python --enable-zlib --enable-snappy
- --enable-strict --enable-static --prefix=$(pwd)/LOCAL_INSTALL
+ --enable-silent-rules
+ --enable-diagnostic
+ --enable-python
+ --enable-zlib
+ --enable-snappy
+ --enable-strict
+ --enable-static
+ --prefix=$(pwd)/LOCAL_INSTALL
python_binary: '/opt/mongodbtoolchain/v3/bin/python3'
tasks:
- name: compile
@@ -3500,8 +3718,18 @@ buildvariants:
- ubuntu1804-zseries-test
batchtime: 10080 # 7 days
expansions:
- test_env_vars: LD_LIBRARY_PATH=$(pwd) WT_BUILDDIR=$(pwd)
- posix_configure_flags: -DCMAKE_TOOLCHAIN_FILE=../cmake/toolchains/mongodbtoolchain_v3_gcc.cmake -DCMAKE_C_FLAGS="-ggdb" -DHAVE_DIAGNOSTIC=1 -DENABLE_PYTHON=1 -DENABLE_ZLIB=1 -DENABLE_SNAPPY=1 -DENABLE_STRICT=1 -DCMAKE_INSTALL_PREFIX=$(pwd)/LOCAL_INSTALL
+ test_env_vars:
+ WT_BUILDDIR=$(git rev-parse --show-toplevel)/cmake_build
+ LD_LIBRARY_PATH=$WT_BUILDDIR
+ posix_configure_flags:
+ -DCMAKE_TOOLCHAIN_FILE=../cmake/toolchains/mongodbtoolchain_v3_gcc.cmake
+ -DCMAKE_C_FLAGS="-ggdb"
+ -DHAVE_DIAGNOSTIC=1
+ -DENABLE_PYTHON=1
+ -DENABLE_ZLIB=1
+ -DENABLE_SNAPPY=1
+ -DENABLE_STRICT=1
+ -DCMAKE_INSTALL_PREFIX=$(pwd)/LOCAL_INSTALL
python_binary: '/opt/mongodbtoolchain/v3/bin/python3'
smp_command: -j $(grep -c ^processor /proc/cpuinfo)
cmake_generator: Ninja
diff --git a/src/third_party/wiredtiger/test/suite/test_cursor17.py b/src/third_party/wiredtiger/test/suite/test_cursor17.py
index 322b82a65f8..becb6fcd5d3 100644
--- a/src/third_party/wiredtiger/test/suite/test_cursor17.py
+++ b/src/third_party/wiredtiger/test/suite/test_cursor17.py
@@ -118,32 +118,47 @@ class test_cursor17(wttest.WiredTigerTestCase):
session2 = self.setUpSessionOpen(self.conn)
cursor2 = session2.open_cursor(self.type + self.tablename, None)
session2.begin_transaction()
- cursor2[101] = self.ds.value(101)
+ cursor2[200] = self.ds.value(200)
cursor = self.session.open_cursor(self.type + self.tablename, None)
# Verify the largest key.
self.session.begin_transaction()
self.assertEqual(cursor.largest_key(), 0)
- self.assertEqual(cursor.get_key(), 101)
+ self.assertEqual(cursor.get_key(), 200)
self.session.rollback_transaction()
session2.rollback_transaction()
+
+ def test_invisible_timestamp(self):
+ self.populate(100)
- def test_read_timestamp(self):
+ cursor = self.session.open_cursor(self.type + self.tablename, None)
+ self.session.begin_transaction()
+ cursor[200] = self.ds.value(200)
+ self.session.commit_transaction("commit_timestamp=" + self.timestamp_str(10))
+
+ # Verify the largest key.
+ self.session.begin_transaction("read_timestamp=" + self.timestamp_str(5))
+ self.assertEqual(cursor.largest_key(), 0)
+ self.assertEqual(cursor.get_key(), 200)
+ self.session.rollback_transaction()
+
+ def test_prepared_update(self):
self.populate(100)
+ session2 = self.setUpSessionOpen(self.conn)
+ cursor2 = session2.open_cursor(self.type + self.tablename, None)
+ session2.begin_transaction()
+ cursor2[200] = self.ds.value(200)
+ session2.prepare_transaction("prepare_timestamp=" + self.timestamp_str(10))
+
cursor = self.session.open_cursor(self.type + self.tablename, None)
- self.session.begin_transaction('read_timestamp=' + self.timestamp_str(5))
- # Expect the largest key to throw.
- with self.expectedStderrPattern("largest key cannot be called with a read timestamp"):
- try:
- cursor.largest_key()
- except wiredtiger.WiredTigerError as e:
- gotException = True
- self.pr('got expected exception: ' + str(e))
- self.assertTrue(str(e).find('nvalid argument') >= 0)
- self.assertTrue(gotException, msg = 'expected exception')
+
+ # Verify the largest key.
+ self.session.begin_transaction("read_timestamp=" + self.timestamp_str(20))
+ self.assertEqual(cursor.largest_key(), 0)
+ self.assertEqual(cursor.get_key(), 200)
self.session.rollback_transaction()
def test_not_positioned(self):
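For reference, a minimal standalone sketch of the cursor.largest_key() pattern the rewritten test_cursor17 cases above exercise. The home directory and table name below are illustrative, and it assumes a WiredTiger build whose Python API exposes largest_key():

    import os
    from wiredtiger import wiredtiger_open

    os.makedirs("WT_HOME_LARGEST", exist_ok=True)
    conn = wiredtiger_open("WT_HOME_LARGEST", "create")
    session = conn.open_session()
    session.create("table:largest_demo", "key_format=i,value_format=S")

    cursor = session.open_cursor("table:largest_demo")
    for k in range(1, 101):
        cursor[k] = "value" + str(k)

    # largest_key() positions the cursor on the largest key the table has
    # seen, regardless of visibility, which is why the tests above expect
    # key 200 even when the inserting transaction is uncommitted, invisible
    # at the read timestamp, or merely prepared.
    session.begin_transaction()
    if cursor.largest_key() == 0:
        print("largest key:", cursor.get_key())
    session.rollback_transaction()

    conn.close()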
diff --git a/src/third_party/wiredtiger/test/suite/test_hs28.py b/src/third_party/wiredtiger/test/suite/test_hs28.py
new file mode 100644
index 00000000000..3768262e86e
--- /dev/null
+++ b/src/third_party/wiredtiger/test/suite/test_hs28.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+#
+# Public Domain 2014-present MongoDB, Inc.
+# Public Domain 2008-2014 WiredTiger, Inc.
+#
+# This is free and unencumbered software released into the public domain.
+#
+# Anyone is free to copy, modify, publish, use, compile, sell, or
+# distribute this software, either in source code form or as a compiled
+# binary, for any purpose, commercial or non-commercial, and by any
+# means.
+#
+# In jurisdictions that recognize copyright laws, the author or authors
+# of this software dedicate any and all copyright interest in the
+# software to the public domain. We make this dedication for the benefit
+# of the public at large and to the detriment of our heirs and
+# successors. We intend this dedication to be an overt act of
+# relinquishment in perpetuity of all present and future rights to this
+# software under copyright law.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+
+import wiredtiger, wttest
+from wtscenario import make_scenarios
+
+# test_hs28.py
+# Test that we insert a full update instead of a reverse modify to the
+# history store if a modify follows a squashed on page value.
+
+class test_hs28(wttest.WiredTigerTestCase):
+ conn_config = ''
+ session_config = 'isolation=snapshot'
+
+ key_format_values = [
+ ('column', dict(key_format='r')),
+ ('integer_row', dict(key_format='i')),
+ ]
+
+ scenarios = make_scenarios(key_format_values)
+
+ def conn_config(self):
+ config = 'cache_size=50MB,statistics=(all),statistics_log=(json,on_close,wait=1)'
+ return config
+
+ def test_insert_hs_full_update(self):
+ uri = 'table:test_hs28'
+ self.session.create(uri, 'key_format={},value_format=S'.format(self.key_format))
+
+ value1 = "a"
+ value2 = "b"
+
+ cursor = self.session.open_cursor(uri)
+ # Insert a full value
+ self.session.begin_transaction()
+ cursor[1] = value1
+ self.session.commit_transaction('commit_timestamp=' + self.timestamp_str(2))
+
+ # Do a modify update
+ self.session.begin_transaction()
+ cursor.set_key(1)
+ mods = [wiredtiger.Modify('A', 0, 1)]
+ self.assertEqual(cursor.modify(mods), 0)
+ self.session.commit_transaction('commit_timestamp=' + self.timestamp_str(5))
+
+ # Commit a transaction with multiple updates on the same key
+ self.session.begin_transaction()
+ cursor[1] = value1
+ cursor[1] = value2
+ self.session.commit_transaction('commit_timestamp=' + self.timestamp_str(10))
+
+ # Move the updates to the history store
+ self.session.checkpoint()
+
+ stat_cursor = self.session.open_cursor('statistics:', None, None)
+ hs_full_update = stat_cursor[wiredtiger.stat.conn.cache_hs_insert_full_update][2]
+ hs_reverse_modify = stat_cursor[wiredtiger.stat.conn.cache_hs_insert_reverse_modify][2]
+ stat_cursor.close()
+
+ self.assertEqual(hs_full_update, 2)
+ self.assertEqual(hs_reverse_modify, 0)
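As an aside, the Modify('A', 0, 1) above describes a byte-range replacement: write one byte of 'A' at offset 0. A tiny sketch of that transformation in plain Python; apply_modify is illustrative only and not a WiredTiger API, the real cursor.modify() applies the equivalent change inside the engine and normally lets the history store record a reverse modify rather than a full value:

    def apply_modify(value, data, offset, size):
        # Replace `size` characters starting at `offset` with `data`.
        return value[:offset] + data + value[offset + size:]

    assert apply_modify("a", "A", 0, 1) == "A"
    assert apply_modify("abcdef", "XY", 2, 3) == "abXYf"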
diff --git a/src/third_party/wiredtiger/test/suite/test_rollback_to_stable01.py b/src/third_party/wiredtiger/test/suite/test_rollback_to_stable01.py
index e85de366970..5d1fe8029cd 100755
--- a/src/third_party/wiredtiger/test/suite/test_rollback_to_stable01.py
+++ b/src/third_party/wiredtiger/test/suite/test_rollback_to_stable01.py
@@ -154,6 +154,18 @@ class test_rollback_to_stable_base(wttest.WiredTigerTestCase):
self.assertEqual(count, nrows)
cursor.close()
+ def evict_cursor(self, uri, nrows, check_value):
+        # Configure debug behavior on a cursor so that the page it is positioned on is evicted when the cursor is reset.
+ evict_cursor = self.session.open_cursor(uri, None, "debug=(release_evict)")
+ self.session.begin_transaction("ignore_prepare=true")
+ for i in range (1, nrows + 1):
+ evict_cursor.set_key(i)
+ self.assertEqual(evict_cursor[i], check_value)
+ if i % 10 == 0:
+ evict_cursor.reset()
+ evict_cursor.close()
+ self.session.rollback_transaction()
+
# Test that rollback to stable clears the remove operation.
class test_rollback_to_stable01(test_rollback_to_stable_base):
session_config = 'isolation=snapshot'
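The evict_cursor helper added above relies on the debug=(release_evict) cursor option: resetting such a cursor evicts the page it was positioned on, which these tests use to push older versions of values out to disk and into the history store. A minimal standalone sketch, with the home directory and table name chosen purely for illustration:

    import os
    from wiredtiger import wiredtiger_open

    os.makedirs("WT_HOME_EVICT", exist_ok=True)
    conn = wiredtiger_open("WT_HOME_EVICT", "create,cache_size=25MB")
    session = conn.open_session()
    session.create("table:evict_demo", "key_format=i,value_format=S")

    cursor = session.open_cursor("table:evict_demo")
    for k in range(1, 1001):
        cursor[k] = "v" * 100
    cursor.close()

    # With debug=(release_evict), reset() evicts the page the cursor was
    # positioned on instead of simply releasing its position.
    evict_cursor = session.open_cursor("table:evict_demo", None, "debug=(release_evict)")
    session.begin_transaction()
    for k in range(1, 1001):
        evict_cursor.set_key(k)
        evict_cursor.search()
        if k % 10 == 0:
            evict_cursor.reset()
    session.rollback_transaction()
    evict_cursor.close()
    conn.close()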
diff --git a/src/third_party/wiredtiger/test/suite/test_rollback_to_stable10.py b/src/third_party/wiredtiger/test/suite/test_rollback_to_stable10.py
index 63989155146..cf4e43282da 100755
--- a/src/third_party/wiredtiger/test/suite/test_rollback_to_stable10.py
+++ b/src/third_party/wiredtiger/test/suite/test_rollback_to_stable10.py
@@ -26,13 +26,13 @@
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
-import fnmatch, os, shutil, threading, time
+import threading, time
from helper import copy_wiredtiger_home, simulate_crash_restart
from test_rollback_to_stable01 import test_rollback_to_stable_base
from wiredtiger import stat
from wtdataset import SimpleDataSet
from wtscenario import make_scenarios
-from wtthread import checkpoint_thread, op_thread
+from wtthread import checkpoint_thread
# test_rollback_to_stable10.py
# Test the rollback to stable operation performs sweeping history store.
@@ -52,7 +52,7 @@ class test_rollback_to_stable10(test_rollback_to_stable_base):
scenarios = make_scenarios(key_format_values, prepare_values)
def conn_config(self):
- config = 'cache_size=6MB,statistics=(all),statistics_log=(json,on_close,wait=1),log=(enabled=true),timing_stress_for_test=[history_store_checkpoint_delay]'
+ config = 'cache_size=25MB,statistics=(all),statistics_log=(json,on_close,wait=1),log=(enabled=true),timing_stress_for_test=[history_store_checkpoint_delay]'
return config
def test_rollback_to_stable(self):
@@ -117,6 +117,8 @@ class test_rollback_to_stable10(test_rollback_to_stable_base):
try:
self.pr("start checkpoint")
ckpt.start()
+            # Sleep for some time so that the checkpoint starts.
+ time.sleep(2)
# Perform several updates in parallel with checkpoint.
# Rollbacks may occur when checkpoint is running, so retry as needed.
@@ -125,10 +127,14 @@ class test_rollback_to_stable10(test_rollback_to_stable_base):
lambda: self.large_updates(uri_1, value_e, ds_1, nrows, self.prepare, 70))
self.retry_rollback('update ds2, e', None,
lambda: self.large_updates(uri_2, value_e, ds_2, nrows, self.prepare, 70))
+ self.evict_cursor(uri_1, nrows, value_e)
+ self.evict_cursor(uri_2, nrows, value_e)
self.retry_rollback('update ds1, f', None,
lambda: self.large_updates(uri_1, value_f, ds_1, nrows, self.prepare, 80))
self.retry_rollback('update ds2, f', None,
lambda: self.large_updates(uri_2, value_f, ds_2, nrows, self.prepare, 80))
+ self.evict_cursor(uri_1, nrows, value_f)
+ self.evict_cursor(uri_2, nrows, value_f)
finally:
done.set()
ckpt.join()
@@ -173,10 +179,6 @@ class test_rollback_to_stable10(test_rollback_to_stable_base):
def test_rollback_to_stable_prepare(self):
nrows = 1000
- # FIXME-WT-7250 This test fails because of cache stuck on Windows.
- if os.name == "nt":
- self.skipTest('rollback_to_stable10 prepare test skipped on Windows')
-
# Create a table without logging.
self.pr("create/populate tables")
uri_1 = "table:rollback_to_stable10_1"
@@ -199,7 +201,6 @@ class test_rollback_to_stable10(test_rollback_to_stable_base):
value_c = "ccccc" * 100
value_d = "ddddd" * 100
value_e = "eeeee" * 100
- value_f = "fffff" * 100
# Perform several updates.
self.pr("large updates")
@@ -216,13 +217,11 @@ class test_rollback_to_stable10(test_rollback_to_stable_base):
# Verify data is visible and correct.
self.check(value_d, uri_1, nrows, 20)
self.check(value_c, uri_1, nrows, 30)
- self.session.breakpoint()
self.check(value_b, uri_1, nrows, 40)
self.check(value_a, uri_1, nrows, 50)
self.check(value_d, uri_2, nrows, 20)
self.check(value_c, uri_2, nrows, 30)
- self.session.breakpoint()
self.check(value_b, uri_2, nrows, 40)
self.check(value_a, uri_2, nrows, 50)
@@ -250,6 +249,8 @@ class test_rollback_to_stable10(test_rollback_to_stable_base):
try:
self.pr("start checkpoint")
ckpt.start()
+            # Sleep for some time so that the checkpoint starts.
+ time.sleep(2)
# Perform several updates in parallel with checkpoint.
session_p1 = self.conn.open_session()
@@ -259,6 +260,7 @@ class test_rollback_to_stable10(test_rollback_to_stable_base):
lambda: prepare_range_updates(
session_p1, cursor_p1, ds_1, value_e, nrows,
'prepare_timestamp=' + self.timestamp_str(69)))
+ self.evict_cursor(uri_1, nrows, value_a)
# Perform several updates in parallel with checkpoint.
session_p2 = self.conn.open_session()
@@ -268,6 +270,7 @@ class test_rollback_to_stable10(test_rollback_to_stable_base):
lambda: prepare_range_updates(
session_p2, cursor_p2, ds_2, value_e, nrows,
'prepare_timestamp=' + self.timestamp_str(69)))
+ self.evict_cursor(uri_2, nrows, value_a)
finally:
done.set()
ckpt.join()
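The updates and evictions above run while a checkpoint_thread from the test harness checkpoints in the background. Below is a generic sketch of that pattern, not the wtthread helper itself; it uses a dedicated session for the checkpointing thread because WiredTiger sessions are not thread-safe, and all names are illustrative:

    import os, threading
    from wiredtiger import wiredtiger_open

    def checkpointer(session, done, interval=1.0):
        # Keep checkpointing until the main thread sets `done`.
        while not done.wait(interval):
            session.checkpoint()

    os.makedirs("WT_HOME_CKPT", exist_ok=True)
    conn = wiredtiger_open("WT_HOME_CKPT", "create")
    ckpt_session = conn.open_session()    # one session per thread
    work_session = conn.open_session()
    work_session.create("table:ckpt_demo", "key_format=i,value_format=S")

    done = threading.Event()
    thread = threading.Thread(target=checkpointer, args=(ckpt_session, done))
    thread.start()
    try:
        cursor = work_session.open_cursor("table:ckpt_demo")
        for k in range(1, 5001):
            cursor[k] = "v" * 100
    finally:
        done.set()
        thread.join()
    conn.close()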
diff --git a/src/third_party/wiredtiger/test/suite/test_rollback_to_stable14.py b/src/third_party/wiredtiger/test/suite/test_rollback_to_stable14.py
index 25b625e1cfa..601302b0762 100755
--- a/src/third_party/wiredtiger/test/suite/test_rollback_to_stable14.py
+++ b/src/third_party/wiredtiger/test/suite/test_rollback_to_stable14.py
@@ -26,13 +26,13 @@
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
-import fnmatch, os, shutil, threading, time
+import threading, time
from helper import simulate_crash_restart
from test_rollback_to_stable01 import test_rollback_to_stable_base
from wiredtiger import stat
from wtdataset import SimpleDataSet
from wtscenario import make_scenarios
-from wtthread import checkpoint_thread, op_thread
+from wtthread import checkpoint_thread
def mod_val(value, char, location, nbytes=1):
return value[0:location] + char + value[location+nbytes:]
@@ -58,11 +58,11 @@ class test_rollback_to_stable14(test_rollback_to_stable_base):
scenarios = make_scenarios(key_format_values, prepare_values)
def conn_config(self):
- config = 'cache_size=8MB,statistics=(all),statistics_log=(json,on_close,wait=1),log=(enabled=true),timing_stress_for_test=[history_store_checkpoint_delay]'
+ config = 'cache_size=25MB,statistics=(all),statistics_log=(json,on_close,wait=1),log=(enabled=true),timing_stress_for_test=[history_store_checkpoint_delay]'
return config
def test_rollback_to_stable(self):
- nrows = 1500
+ nrows = 100
# Create a table without logging.
self.pr("create/populate table")
@@ -81,6 +81,10 @@ class test_rollback_to_stable14(test_rollback_to_stable_base):
value_modR = mod_val(value_modQ, 'R', 1)
value_modS = mod_val(value_modR, 'S', 2)
value_modT = mod_val(value_modS, 'T', 3)
+ value_modW = mod_val(value_modT, 'W', 4)
+ value_modX = mod_val(value_modW, 'X', 5)
+ value_modY = mod_val(value_modX, 'Y', 6)
+ value_modZ = mod_val(value_modY, 'Z', 7)
# Perform a combination of modifies and updates.
self.pr("large updates and modifies")
@@ -109,18 +113,24 @@ class test_rollback_to_stable14(test_rollback_to_stable_base):
try:
self.pr("start checkpoint")
ckpt.start()
+            # Sleep for some time so that the checkpoint starts.
+ time.sleep(2)
# Perform several modifies in parallel with checkpoint.
# Rollbacks may occur when checkpoint is running, so retry as needed.
self.pr("modifies")
self.retry_rollback('modify ds1, W', None,
lambda: self.large_modifies(uri, 'W', ds, 4, 1, nrows, self.prepare, 70))
+ self.evict_cursor(uri, nrows, value_modW)
self.retry_rollback('modify ds1, X', None,
lambda: self.large_modifies(uri, 'X', ds, 5, 1, nrows, self.prepare, 80))
+ self.evict_cursor(uri, nrows, value_modX)
self.retry_rollback('modify ds1, Y', None,
lambda: self.large_modifies(uri, 'Y', ds, 6, 1, nrows, self.prepare, 90))
+ self.evict_cursor(uri, nrows, value_modY)
self.retry_rollback('modify ds1, Z', None,
lambda: self.large_modifies(uri, 'Z', ds, 7, 1, nrows, self.prepare, 100))
+ self.evict_cursor(uri, nrows, value_modZ)
finally:
done.set()
ckpt.join()
@@ -163,7 +173,7 @@ class test_rollback_to_stable14(test_rollback_to_stable_base):
self.ignoreStdoutPatternIfExists("oldest pinned transaction ID rolled back for eviction")
def test_rollback_to_stable_same_ts(self):
- nrows = 1500
+ nrows = 100
# Create a table without logging.
self.pr("create/populate table")
@@ -182,6 +192,10 @@ class test_rollback_to_stable14(test_rollback_to_stable_base):
value_modR = mod_val(value_modQ, 'R', 1)
value_modS = mod_val(value_modR, 'S', 2)
value_modT = mod_val(value_modS, 'T', 3)
+ value_modW = mod_val(value_modT, 'W', 4)
+ value_modX = mod_val(value_modW, 'X', 5)
+ value_modY = mod_val(value_modX, 'Y', 6)
+ value_modZ = mod_val(value_modY, 'Z', 7)
# Perform a combination of modifies and updates.
self.pr("large updates and modifies")
@@ -210,18 +224,24 @@ class test_rollback_to_stable14(test_rollback_to_stable_base):
try:
self.pr("start checkpoint")
ckpt.start()
+            # Sleep for some time so that the checkpoint starts.
+ time.sleep(2)
# Perform several modifies in parallel with checkpoint.
# Rollbacks may occur when checkpoint is running, so retry as needed.
self.pr("modifies")
self.retry_rollback('modify ds1, W', None,
lambda: self.large_modifies(uri, 'W', ds, 4, 1, nrows, self.prepare, 70))
+ self.evict_cursor(uri, nrows, value_modW)
self.retry_rollback('modify ds1, X', None,
lambda: self.large_modifies(uri, 'X', ds, 5, 1, nrows, self.prepare, 80))
+ self.evict_cursor(uri, nrows, value_modX)
self.retry_rollback('modify ds1, Y', None,
lambda: self.large_modifies(uri, 'Y', ds, 6, 1, nrows, self.prepare, 90))
+ self.evict_cursor(uri, nrows, value_modY)
self.retry_rollback('modify ds1, Z', None,
lambda: self.large_modifies(uri, 'Z', ds, 7, 1, nrows, self.prepare, 100))
+ self.evict_cursor(uri, nrows, value_modZ)
finally:
done.set()
ckpt.join()
@@ -262,7 +282,7 @@ class test_rollback_to_stable14(test_rollback_to_stable_base):
self.ignoreStdoutPatternIfExists("oldest pinned transaction ID rolled back for eviction")
def test_rollback_to_stable_same_ts_append(self):
- nrows = 1500
+ nrows = 100
# Create a table without logging.
self.pr("create/populate table")
@@ -281,6 +301,10 @@ class test_rollback_to_stable14(test_rollback_to_stable_base):
value_modR = append_val(value_modQ, 'R')
value_modS = append_val(value_modR, 'S')
value_modT = append_val(value_modS, 'T')
+ value_modW = append_val(value_modT, 'W')
+ value_modX = append_val(value_modW, 'X')
+ value_modY = append_val(value_modX, 'Y')
+ value_modZ = append_val(value_modY, 'Z')
# Perform a combination of modifies and updates.
self.pr("large updates and modifies")
@@ -309,6 +333,8 @@ class test_rollback_to_stable14(test_rollback_to_stable_base):
try:
self.pr("start checkpoint")
ckpt.start()
+            # Sleep for some time so that the checkpoint starts.
+ time.sleep(2)
# Perform several modifies in parallel with checkpoint.
# Rollbacks may occur when checkpoint is running, so retry as needed.
@@ -316,11 +342,13 @@ class test_rollback_to_stable14(test_rollback_to_stable_base):
self.retry_rollback('modify ds1, W', None,
lambda: self.large_modifies(uri, 'W', ds, len(value_modT), 1, nrows, self.prepare, 70))
self.retry_rollback('modify ds1, X', None,
- lambda: self.large_modifies(uri, 'X', ds, len(value_modT) + 1, 1, nrows, self.prepare, 80))
+ lambda: self.large_modifies(uri, 'X', ds, len(value_modW), 1, nrows, self.prepare, 80))
+ self.evict_cursor(uri, nrows, value_modX)
self.retry_rollback('modify ds1, Y', None,
- lambda: self.large_modifies(uri, 'Y', ds, len(value_modT) + 2, 1, nrows, self.prepare, 90))
+ lambda: self.large_modifies(uri, 'Y', ds, len(value_modX), 1, nrows, self.prepare, 90))
self.retry_rollback('modify ds1, Z', None,
- lambda: self.large_modifies(uri, 'Z', ds, len(value_modT) + 3, 1, nrows, self.prepare, 100))
+ lambda: self.large_modifies(uri, 'Z', ds, len(value_modY), 1, nrows, self.prepare, 100))
+ self.evict_cursor(uri, nrows, value_modZ)
finally:
done.set()
ckpt.join()
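The offset changes in the hunk above are equivalences rather than behaviour changes: each append_val call adds exactly one character, so len(value_modW) == len(value_modT) + 1 and so on, and the new form simply names the value each modify actually follows. A quick sketch, assuming append_val is the one-character append its usage in the test implies:

    def append_val(value, char):
        return value + char

    value_modT = "T" * 10                        # stand-in for the test's value
    value_modW = append_val(value_modT, 'W')
    value_modX = append_val(value_modW, 'X')
    value_modY = append_val(value_modX, 'Y')

    assert len(value_modW) == len(value_modT) + 1
    assert len(value_modX) == len(value_modT) + 2
    assert len(value_modY) == len(value_modT) + 3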
diff --git a/src/third_party/wiredtiger/test/suite/test_rollback_to_stable21.py b/src/third_party/wiredtiger/test/suite/test_rollback_to_stable21.py
index 3a348d00dc0..5d1f01f43e5 100644
--- a/src/third_party/wiredtiger/test/suite/test_rollback_to_stable21.py
+++ b/src/third_party/wiredtiger/test/suite/test_rollback_to_stable21.py
@@ -245,5 +245,6 @@ class test_rollback_to_stable21(test_rollback_to_stable_base):
hs_restored_tombstone = stat_cursor[stat.conn.txn_rts_hs_restore_tombstones][2]
stat_cursor.close()
- self.assertGreater(hs_removed, 0)
- self.assertGreater(hs_restored_tombstone, 0)
+        # The update and delete operations are not inserted into the history store as they are not visible.
+ self.assertEquals(hs_removed, 0)
+ self.assertEquals(hs_restored_tombstone, 0)
diff --git a/src/third_party/wiredtiger/test/suite/test_rollback_to_stable28.py b/src/third_party/wiredtiger/test/suite/test_rollback_to_stable28.py
new file mode 100755
index 00000000000..50a8a8fb2fb
--- /dev/null
+++ b/src/third_party/wiredtiger/test/suite/test_rollback_to_stable28.py
@@ -0,0 +1,149 @@
+#!/usr/bin/env python
+#
+# Public Domain 2014-present MongoDB, Inc.
+# Public Domain 2008-2014 WiredTiger, Inc.
+#
+# This is free and unencumbered software released into the public domain.
+#
+# Anyone is free to copy, modify, publish, use, compile, sell, or
+# distribute this software, either in source code form or as a compiled
+# binary, for any purpose, commercial or non-commercial, and by any
+# means.
+#
+# In jurisdictions that recognize copyright laws, the author or authors
+# of this software dedicate any and all copyright interest in the
+# software to the public domain. We make this dedication for the benefit
+# of the public at large and to the detriment of our heirs and
+# successors. We intend this dedication to be an overt act of
+# relinquishment in perpetuity of all present and future rights to this
+# software under copyright law.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+
+import os, shutil, re
+from wiredtiger import stat
+import wttest
+from wtdataset import SimpleDataSet
+from helper import simulate_crash_restart
+from test_rollback_to_stable01 import test_rollback_to_stable_base
+
+# test_rollback_to_stable28.py
+# Test the debug mode setting for update_restore_evict during recovery.
+# Force update restore eviction, whenever we evict a page. We want to
+# perform this in recovery to ensure that all the in-memory images have
+# the proper write generation number and we don't end up reading stale
+# transaction ID's stored on the page.
+class test_rollback_to_stable28(test_rollback_to_stable_base):
+ conn_config = 'log=(enabled=true),statistics=(all)'
+    # Recovery connection config: The debug mode is only effective under high cache pressure, as WiredTiger can potentially decide
+    # to do an update restore evict on a page when the cache pressure requirements are not met.
+ # This means setting eviction target low and cache size high.
+ conn_recon = ',eviction_updates_trigger=10,eviction_dirty_trigger=5,cache_size=10MB,' + \
+ 'debug_mode=(update_restore_evict=true),log=(recover=on)'
+
+ def parse_write_gen(self, uri):
+ meta_cursor = self.session.open_cursor('metadata:')
+ config = meta_cursor[uri]
+ meta_cursor.close()
+ # The search string will look like: 'run_write_gen=<num>'.
+ # Just reverse the string and take the digits from the back until we hit '='.
+ write_gen = re.search('write_gen=\d+', config)
+ run_write_gen = re.search('run_write_gen=\d+', config)
+ self.assertTrue(write_gen is not None)
+ write_gen_str = str()
+ run_write_gen_str = str()
+ for c in reversed(write_gen.group(0)):
+ if not c.isdigit():
+ self.assertEqual(c, '=')
+ break
+ write_gen_str = c + write_gen_str
+ for c in reversed(run_write_gen.group(0)):
+ if not c.isdigit():
+ self.assertEqual(c, '=')
+ break
+ run_write_gen_str = c + run_write_gen_str
+
+ return int(write_gen_str), int(run_write_gen_str)
+
+ def test_update_restore_evict_recovery(self):
+ uri = 'table:test_debug_mode10'
+ nrows = 10000
+
+ # Create our table.
+ ds = SimpleDataSet(self, uri, 0, key_format='i', value_format='S',config='log=(enabled=false)')
+ ds.populate()
+
+ value_a = 'a' * 500
+ value_b = 'b' * 500
+ value_c = 'c' * 500
+ value_d = 'd' * 500
+
+ # Perform several updates.
+ self.large_updates(uri, value_a, ds, nrows, False, 20)
+ self.large_updates(uri, value_b, ds, nrows, False, 30)
+ self.large_updates(uri, value_c, ds, nrows, False, 40)
+ # Verify data is visible and correct.
+ self.check(value_a, uri, nrows, 20)
+ self.check(value_b, uri, nrows, 30)
+ self.check(value_c, uri, nrows, 40)
+
+ # Pin the stable timestamp to 40. We will be validating the state of the data post-stable timestamp
+ # after we perform a recovery.
+ self.conn.set_timestamp('stable_timestamp=' + self.timestamp_str(40))
+
+ # Perform additional updates post-stable timestamp.
+ self.large_updates(uri, value_d, ds, nrows, False, 50)
+ self.large_updates(uri, value_a, ds, nrows, False, 60)
+ self.large_updates(uri, value_b, ds, nrows, False, 70)
+
+ # Verify additional updated data is visible and correct.
+ self.check(value_d, uri, nrows, 50)
+ self.check(value_a, uri, nrows, 60)
+ self.check(value_b, uri, nrows, 70)
+
+ # Checkpoint to ensure the data is flushed to disk.
+ self.session.checkpoint()
+
+        # Extract the most recent checkpoint's write gen & run write gen. As we are still on a new DB connection,
+ # the run write gen should be 1 at this point, equal to the connection-wide base write gen.
+ # Since we checkpointed after a series of large writes/updates, the write gen of the pages should
+ # definitely be greater than 1.
+ checkpoint_write_gen, checkpoint_run_write_gen = self.parse_write_gen("file:test_debug_mode10.wt")
+ self.assertEqual(checkpoint_run_write_gen, 1)
+ self.assertGreater(checkpoint_write_gen, checkpoint_run_write_gen)
+
+ # Simulate a crash/restart, opening our new DB in recovery. As we open in recovery we want to additionally
+ # use the 'update_restore_evict' debug option to trigger update restore eviction.
+ self.conn_config = self.conn_config + self.conn_recon
+ simulate_crash_restart(self, ".", "RESTART")
+
+ # As we've created a new DB connection post-shutdown, the connection-wide
+ # base write gen should eventually initialise from the previous checkpoint's base 'write_gen' during the recovery process
+ # ('write_gen'+1). This should be reflected in the initialisation of the 'run_write_gen' field of the newest
+ # checkpoint post-recovery. As the recovery/rts process updates our pages, we'd also expect the latest checkpoint's
+ # 'write_gen' to again be greater than its 'run_write_gen'.
+ recovery_write_gen, recovery_run_write_gen = self.parse_write_gen("file:test_debug_mode10.wt")
+ self.assertGreater(recovery_run_write_gen, checkpoint_write_gen)
+ self.assertGreater(recovery_write_gen, recovery_run_write_gen)
+
+ # Read the statistics of pages that have been update restored (to check the mechanism was used).
+ stat_cursor = self.session.open_cursor('statistics:')
+ pages_update_restored = stat_cursor[stat.conn.cache_write_restore][2]
+ stat_cursor.close()
+ self.assertGreater(pages_update_restored, 0)
+
+ # Check that after recovery, we see the correct data with respect to our previous stable timestamp (40).
+ self.check(value_c, uri, nrows, 40)
+ self.check(value_c, uri, nrows, 50)
+ self.check(value_c, uri, nrows, 60)
+ self.check(value_c, uri, nrows, 70)
+ self.check(value_b, uri, nrows, 30)
+ self.check(value_a, uri, nrows, 20)
+ # Passing 0 results in opening a transaction with no read timestamp.
+ self.check(value_c, uri, nrows, 0)
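parse_write_gen() above extracts the numbers by walking each match backwards until it hits '='. An equivalent, more direct extraction with regex capture groups is sketched below; the config string only illustrates the shape of the metadata and this is not part of the change:

    import re

    config = "...,run_write_gen=1,...,write_gen=7,..."   # illustrative metadata shape

    run_write_gen = int(re.search(r'run_write_gen=(\d+)', config).group(1))
    # The negative lookbehind keeps this pattern from matching inside the
    # run_write_gen field.
    write_gen = int(re.search(r'(?<!run_)write_gen=(\d+)', config).group(1))

    assert (write_gen, run_write_gen) == (7, 1)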
diff --git a/src/third_party/wiredtiger/test/suite/wttest.py b/src/third_party/wiredtiger/test/suite/wttest.py
index 01c4f315f9c..07e5b0b6610 100755
--- a/src/third_party/wiredtiger/test/suite/wttest.py
+++ b/src/third_party/wiredtiger/test/suite/wttest.py
@@ -780,7 +780,12 @@ class WiredTigerTestCase(unittest.TestCase):
'test_file.test_file.test_funcname(scen1.scen2.scen3)'.
So transform '(', but remove final ')'.
"""
- return self.shortid().translate(str.maketrans('($[]/ ','______', ')'))
+ name = self.shortid().translate(str.maketrans('($[]/ ','______', ')'))
+
+        # On OS X, we can get name conflicts if names differ by case. Upper
+        # case letters are uncommon in our Python class and method names, so
+ # we lowercase them and prefix with '@', e.g. "AbC" -> "@ab@c".
+ return re.sub(r'[A-Z]', lambda x: '@' + x.group(0).lower(), name)
def className(self):
return self.__class__.__name__
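A standalone sketch of the case-folding transform added to the test-name sanitizer above, confirming the example from the comment ('AbC' -> '@ab@c'); fold_case is just an illustrative wrapper around the same re.sub call:

    import re

    def fold_case(name):
        return re.sub(r'[A-Z]', lambda m: '@' + m.group(0).lower(), name)

    assert fold_case("AbC") == "@ab@c"
    # Names that are already lower-case pass through unchanged.
    assert fold_case("test_cursor17.test_cursor17.test_largest_key") == \
        "test_cursor17.test_cursor17.test_largest_key"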