path: root/deps/v8/src/zone/accounting-allocator.cc
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/zone/accounting-allocator.h"

#include <memory>

#include "src/base/bounded-page-allocator.h"
#include "src/base/logging.h"
#include "src/base/macros.h"
#include "src/utils/allocation.h"
#include "src/zone/zone-compression.h"
#include "src/zone/zone-segment.h"

namespace v8 {
namespace internal {

// These definitions are here in order to please the linker, which in debug mode
// sometimes requires static constants to be defined in .cc files.
const size_t ZoneCompression::kReservationSize;
const size_t ZoneCompression::kReservationAlignment;
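
// A minimal illustration (an assumption, not from this file) of why: in
// C++14, passing a static constexpr member by const reference, e.g.
//   std::min(ZoneCompression::kReservationSize, some_size);
// odr-uses the constant, so non-optimized builds need an out-of-line
// definition like the ones above. C++17 makes such members implicitly inline.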

namespace {

static constexpr size_t kZonePageSize = 256 * KB;

VirtualMemory ReserveAddressSpace(v8::PageAllocator* platform_allocator) {
  DCHECK(IsAligned(ZoneCompression::kReservationSize,
                   platform_allocator->AllocatePageSize()));

  void* hint = reinterpret_cast<void*>(RoundDown(
      reinterpret_cast<uintptr_t>(platform_allocator->GetRandomMmapAddr()),
      ZoneCompression::kReservationAlignment));

  VirtualMemory memory(platform_allocator, ZoneCompression::kReservationSize,
                       hint, ZoneCompression::kReservationAlignment);
  if (memory.IsReserved()) {
    CHECK(IsAligned(memory.address(), ZoneCompression::kReservationAlignment));
    return memory;
  }

  FATAL(
      "Fatal process out of memory: Failed to reserve memory for compressed "
      "zones");
  UNREACHABLE();
}
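
// A worked example (hypothetical numbers, assuming a 4 GB reservation
// alignment) of the RoundDown above: because the alignment is a power of two,
// rounding down simply masks the low bits of the random hint:
//   hint                    = 0x00007f3a'9c2e1000
//   RoundDown(hint, 4 * GB) = 0x00007f3a'00000000
// so the reservation, when the hint is honored, starts on an alignment
// boundary as the CHECK above expects.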

std::unique_ptr<v8::base::BoundedPageAllocator> CreateBoundedAllocator(
    v8::PageAllocator* platform_allocator, Address reservation_start) {
  CHECK(reservation_start);
  CHECK(IsAligned(reservation_start, ZoneCompression::kReservationAlignment));

  auto allocator = std::make_unique<v8::base::BoundedPageAllocator>(
      platform_allocator, reservation_start, ZoneCompression::kReservationSize,
      kZonePageSize);

  // Exclude the first page from allocation to ensure that accesses through a
  // decompressed null pointer will seg-fault.
  allocator->AllocatePagesAt(reservation_start, kZonePageSize,
                             v8::PageAllocator::kNoAccess);
  return allocator;
}
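
// Sketch (an assumption about the decompression scheme, not code from this
// file) of why the inaccessible first page traps decompressed null pointers:
// a compressed zone pointer is a 32-bit offset from the reservation base, so
// offset 0 decompresses to reservation_start itself, which lands in the
// kNoAccess page reserved above:
//   Address Decompress(Address base, uint32_t offset) {  // hypothetical
//     return base + offset;  // offset == 0 -> first, inaccessible page
//   }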

}  // namespace

AccountingAllocator::AccountingAllocator() {
  if (COMPRESS_ZONES_BOOL) {
    v8::PageAllocator* platform_page_allocator = GetPlatformPageAllocator();
    VirtualMemory memory = ReserveAddressSpace(platform_page_allocator);
    reserved_area_ = std::make_unique<VirtualMemory>(std::move(memory));
    bounded_page_allocator_ = CreateBoundedAllocator(platform_page_allocator,
                                                     reserved_area_->address());
  }
}

AccountingAllocator::~AccountingAllocator() = default;

Segment* AccountingAllocator::AllocateSegment(size_t bytes,
                                              bool supports_compression) {
  void* memory;
  if (COMPRESS_ZONES_BOOL && supports_compression) {
    bytes = RoundUp(bytes, kZonePageSize);
    memory = AllocatePages(bounded_page_allocator_.get(), nullptr, bytes,
                           kZonePageSize, PageAllocator::kReadWrite);
  } else {
    memory = AllocWithRetry(bytes);
  }
  if (memory == nullptr) return nullptr;

  size_t current =
      current_memory_usage_.fetch_add(bytes, std::memory_order_relaxed) + bytes;
  size_t max = max_memory_usage_.load(std::memory_order_relaxed);
  while (current > max && !max_memory_usage_.compare_exchange_weak(
                              max, current, std::memory_order_relaxed)) {
    // {max} was updated by {compare_exchange_weak}; retry.
  }
  DCHECK_LE(sizeof(Segment), bytes);
  return new (memory) Segment(bytes);
}
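
// The while loop above is the standard lock-free "atomic max" idiom; a
// self-contained sketch (for illustration only, not part of this file):
//   void UpdateMax(std::atomic<size_t>& max, size_t candidate) {
//     size_t observed = max.load(std::memory_order_relaxed);
//     while (candidate > observed &&
//            !max.compare_exchange_weak(observed, candidate,
//                                       std::memory_order_relaxed)) {
//       // On failure, compare_exchange_weak reloads {observed}, so the loop
//       // exits as soon as a competing thread published a value >= candidate.
//     }
//   }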

void AccountingAllocator::ReturnSegment(Segment* segment,
                                        bool supports_compression) {
  segment->ZapContents();
  size_t segment_size = segment->total_size();
  current_memory_usage_.fetch_sub(segment_size, std::memory_order_relaxed);
  segment->ZapHeader();
  if (COMPRESS_ZONES_BOOL && supports_compression) {
    CHECK(FreePages(bounded_page_allocator_.get(), segment, segment_size));
  } else {
    free(segment);
  }
}
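
// Note the symmetry with AllocateSegment above: the Segment header was
// constructed with placement new on raw memory and no destructor is run here;
// the raw block simply goes back to whichever allocator produced it (the
// bounded page allocator for compressed zones, malloc/free otherwise).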

}  // namespace internal
}  // namespace v8