From 4ae174dd53adaea999715ffbe19c435d685bc412 Mon Sep 17 00:00:00 2001
From: Jason Carey
Date: Wed, 7 Feb 2018 15:21:47 -0500
Subject: SERVER-33158 Shrink LogicalSession refresh batches

The batches created by the LogicalSessionCache can exceed the 16MB BSON
size limit for writes on the wire. This causes the refresh step to
fail, preventing logical sessions from ever being synced to the global
collection.

This happens because we don't explicitly size our batches in bytes: we
rely on the write_cmd item-count limit rather than a byte limit.
Previously the write_cmd batch limit was 1,000 items, which allowed for
16KB per record. The new limit is 100,000 items, which leaves only a
160-byte budget per record, and we can exceed that with very large
usernames (since we sync the lsid plus the user@db name).

By forcing a new 10KB limit on username sizes used with logical
sessions, we can ensure that a lower 1,000-record batch limit will
always be safe.
---
 ...sh_logical_session_cache_with_long_usernames.js | 43 ++++++++++++++++++++++
 1 file changed, 43 insertions(+)
 create mode 100644 jstests/auth/refresh_logical_session_cache_with_long_usernames.js

diff --git a/jstests/auth/refresh_logical_session_cache_with_long_usernames.js b/jstests/auth/refresh_logical_session_cache_with_long_usernames.js
new file mode 100644
index 00000000000..d9bbed5324f
--- /dev/null
+++ b/jstests/auth/refresh_logical_session_cache_with_long_usernames.js
@@ -0,0 +1,43 @@
+// Verifies that we've fixed SERVER-33158 by creating large user lsid refresh records (via large
+// usernames)
+
+(function() {
+    'use strict';
+
+    const mongod = MongoRunner.runMongod({auth: ""});
+
+    const refresh = {refreshLogicalSessionCacheNow: 1};
+    const startSession = {startSession: 1};
+
+    const admin = mongod.getDB('admin');
+    const db = mongod.getDB("test");
+    const config = mongod.getDB("config");
+
+    admin.createUser({user: 'admin', pwd: 'pass', roles: jsTest.adminUserRoles});
+    assert(admin.auth('admin', 'pass'));
+
+    const longUserName = "x".repeat(1000);
+
+    // Create a user with a long name, so that the refresh records have a chance to blow out the
+    // 16MB limit, if all the sessions are flushed in one batch
+    db.createUser({user: longUserName, pwd: 'pass', roles: jsTest.basicUserRoles});
+    admin.logout();
+
+    assert(db.auth(longUserName, 'pass'));
+
+    // 20k * 1k = 20mb which is greater than 16mb
+    const numSessions = 20000;
+    for (var i = 0; i < numSessions; i++) {
+        assert.commandWorked(admin.runCommand(startSession), "unable to start session");
+    }
+
+    assert.commandWorked(admin.runCommand(refresh), "failed to refresh");
+
+    // Make sure we actually flushed the sessions
+    assert.eq(numSessions,
+              config.system.sessions.aggregate([{'$listSessions': {}}, {'$count': "count"}])
+                  .next()
+                  .count);
+
+    MongoRunner.stopMongod(mongod);
+})();
--
cgit v1.2.1
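
The batching change described in the commit message comes down to capping each
refresh write by an explicit byte budget as well as by an item count. The sketch
below, in shell-style JavaScript, is illustrative only and is not the server's
actual C++ implementation: the function name splitIntoByteLimitedBatches, the
sizeOf helper, and the 10MB / 1,000-item limits are assumptions chosen for the
example, not values taken from this patch.

// Illustrative sketch (assumed names and limits): split refresh records into
// batches bounded by a byte budget as well as an item count, so that a single
// write never approaches the 16MB BSON wire limit.
function splitIntoByteLimitedBatches(records, sizeOf, maxBatchBytes, maxBatchItems) {
    const batches = [];
    let current = [];
    let currentBytes = 0;

    for (const record of records) {
        const recordBytes = sizeOf(record);
        const wouldOverflow = current.length >= maxBatchItems ||
            currentBytes + recordBytes > maxBatchBytes;
        if (current.length > 0 && wouldOverflow) {
            batches.push(current);
            current = [];
            currentBytes = 0;
        }
        current.push(record);
        currentBytes += recordBytes;
    }
    if (current.length > 0) {
        batches.push(current);
    }
    return batches;
}

// Roughly the scenario from the test: 20,000 records of ~1KB each (~20MB total).
// With a 10MB byte budget and a 1,000-item cap, they end up in 20 batches of
// 1,000 records instead of one oversized write.
const fakeRecords = [];
for (let i = 0; i < 20000; i++) {
    fakeRecords.push({lsid: i, user: "x".repeat(1000)});
}
const approxBytes = (doc) => JSON.stringify(doc).length;  // crude stand-in for a BSON size
const batches = splitIntoByteLimitedBatches(fakeRecords, approxBytes, 10 * 1024 * 1024, 1000);

In the real server the per-record size would come from the BSON encoder rather
than JSON.stringify; the point of the sketch is only that batching by an
explicit byte budget, not just an item count, keeps each refresh write
comfortably under the 16MB wire limit.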