summaryrefslogtreecommitdiff
path: root/items.c
diff options
context:
space:
mode:
authordormando <dormando@rydia.net>2017-06-04 15:58:37 -0700
committerdormando <dormando@rydia.net>2017-06-04 15:58:37 -0700
commite01bc5cca551845ea139aceefc484f303901cdc2 (patch)
treeca52d951081354220ebfd6856587a146bfab63ab /items.c
parent1a8373c9b2eb3fc23cdd160c0a57728dd1c25497 (diff)
downloadmemcached-e01bc5cca551845ea139aceefc484f303901cdc2.tar.gz
lru_crawler avoid-infinite-runs1.4.37
Under enough set pressure, some slab classes may never complete scanning, as there is always something new at the top. This is a quick workaround for the internal scanner: always use a limit seeded at the size of the largest class; smaller classes will simply finish sooner. A better fix is still needed for the user-based commands — a change to the API would allow per-crawler tocrawl values.
Diffstat (limited to 'items.c')
-rw-r--r--items.c11
1 file changed, 10 insertions(+), 1 deletion(-)
diff --git a/items.c b/items.c
index f3db9e8..b21bf46 100644
--- a/items.c
+++ b/items.c
@@ -1370,6 +1370,7 @@ static void lru_maintainer_crawler_check(struct crawler_expired_data *cdata, log
uint8_t todo[POWER_LARGEST];
memset(todo, 0, sizeof(uint8_t) * POWER_LARGEST);
bool do_run = false;
+ unsigned int tocrawl_limit = 0;
// TODO: If not segmented LRU, skip non-cold
for (i = POWER_SMALLEST; i < POWER_LARGEST; i++) {
@@ -1438,13 +1439,21 @@ static void lru_maintainer_crawler_check(struct crawler_expired_data *cdata, log
pthread_mutex_unlock(&cdata->lock);
}
if (current_time > next_crawls[i]) {
+ pthread_mutex_lock(&lru_locks[i]);
+ if (sizes[i] > tocrawl_limit) {
+ tocrawl_limit = sizes[i];
+ }
+ pthread_mutex_unlock(&lru_locks[i]);
todo[i] = 1;
do_run = true;
next_crawls[i] = current_time + 5; // minimum retry wait.
}
}
if (do_run) {
- lru_crawler_start(todo, settings.lru_crawler_tocrawl, CRAWLER_AUTOEXPIRE, cdata, NULL, 0);
+ if (settings.lru_crawler_tocrawl && settings.lru_crawler_tocrawl < tocrawl_limit) {
+ tocrawl_limit = settings.lru_crawler_tocrawl;
+ }
+ lru_crawler_start(todo, tocrawl_limit, CRAWLER_AUTOEXPIRE, cdata, NULL, 0);
}
}