// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/concurrent-allocator.h"

#include "src/execution/isolate.h"
#include "src/handles/persistent-handles.h"
#include "src/heap/concurrent-allocator-inl.h"
#include "src/heap/local-heap.h"
#include "src/heap/marking.h"

namespace v8 {
namespace internal {

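// Background task that stress-tests concurrent allocation: it repeatedly
// allocates small and large old-space objects through a LocalHeap, overwrites
// them with filler objects, and periodically enters a safepoint before
// rescheduling itself.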
void StressConcurrentAllocatorTask::RunInternal() {
  Heap* heap = isolate_->heap();
  LocalHeap local_heap(heap);
  ConcurrentAllocator* allocator = local_heap.old_space_allocator();

  const int kNumIterations = 2000;
  const int kObjectSize = 10 * kTaggedSize;
  const int kLargeObjectSize = 8 * KB;

  for (int i = 0; i < kNumIterations; i++) {
    Address address = allocator->AllocateOrFail(
        kObjectSize, AllocationAlignment::kWordAligned,
        AllocationOrigin::kRuntime);
    heap->CreateFillerObjectAtBackground(
        address, kObjectSize, ClearFreedMemoryMode::kDontClearFreedMemory);
    address = allocator->AllocateOrFail(kLargeObjectSize,
                                        AllocationAlignment::kWordAligned,
                                        AllocationOrigin::kRuntime);
    heap->CreateFillerObjectAtBackground(
        address, kLargeObjectSize, ClearFreedMemoryMode::kDontClearFreedMemory);
    if (i % 10 == 0) {
      local_heap.Safepoint();
    }
  }

  Schedule(isolate_);
}

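// Schedules a new stress allocation task on a worker thread after a short
// delay. Requires both local heaps and concurrent allocation to be enabled.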
// static
void StressConcurrentAllocatorTask::Schedule(Isolate* isolate) {
  CHECK(FLAG_local_heaps && FLAG_concurrent_allocation);
  auto task = std::make_unique<StressConcurrentAllocatorTask>(isolate);
  const double kDelayInSeconds = 0.1;
  V8::GetCurrentPlatform()->CallDelayedOnWorkerThread(std::move(task),
                                                      kDelayInSeconds);
}

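// Requests a GC from the main thread and retries the allocation afterwards.
// The local heap is parked while waiting for the collection so that it does
// not block the safepoint. After three failed attempts the process is killed
// with an out-of-memory error.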
Address ConcurrentAllocator::PerformCollectionAndAllocateAgain(
    int object_size, AllocationAlignment alignment, AllocationOrigin origin) {
  Heap* heap = local_heap_->heap();
  local_heap_->allocation_failed_ = true;

  for (int i = 0; i < 3; i++) {
    {
      ParkedScope scope(local_heap_);
      heap->RequestAndWaitForCollection();
    }

    AllocationResult result = Allocate(object_size, alignment, origin);
    if (!result.IsRetry()) {
      local_heap_->allocation_failed_ = false;
      return result.ToObjectChecked().address();
    }
  }

  heap->FatalProcessOutOfMemory("ConcurrentAllocator: allocation failed");
}

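// Gives up the current LAB and fills its unused portion with a filler so the
// page remains iterable.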
void ConcurrentAllocator::FreeLinearAllocationArea() {
  lab_.CloseAndMakeIterable();
}

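// Fills the unused portion of the LAB with a filler object but keeps the LAB
// usable for further allocations.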
void ConcurrentAllocator::MakeLinearAllocationAreaIterable() {
  lab_.MakeIterable();
}

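// Marks the unused portion [top, limit) of the LAB as a black area so that
// objects allocated from it during incremental marking are treated as live
// (black allocation).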
void ConcurrentAllocator::MarkLinearAllocationAreaBlack() {
  Address top = lab_.top();
  Address limit = lab_.limit();

  if (top != kNullAddress && top != limit) {
    Page::FromAllocationAreaAddress(top)->CreateBlackAreaBackground(top, limit);
  }
}

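// Reverts MarkLinearAllocationAreaBlack by destroying the black area covering
// the unused portion of the LAB.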
void ConcurrentAllocator::UnmarkLinearAllocationArea() {
  Address top = lab_.top();
  Address limit = lab_.limit();

  if (top != kNullAddress && top != limit) {
    Page::FromAllocationAreaAddress(top)->DestroyBlackAreaBackground(top,
                                                                     limit);
  }
}

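// Slow path: allocates the object directly in the old space when the request
// cannot be served from the LAB. If black allocation is active, the new object
// is immediately marked black.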
AllocationResult ConcurrentAllocator::AllocateOutsideLab(
    int object_size, AllocationAlignment alignment, AllocationOrigin origin) {
  auto result = space_->SlowGetLinearAllocationAreaBackground(
      local_heap_, object_size, object_size, alignment, origin);

  if (result) {
    HeapObject object = HeapObject::FromAddress(result->first);

    if (local_heap_->heap()->incremental_marking()->black_allocation()) {
      local_heap_->heap()->incremental_marking()->MarkBlackBackground(
          object, object_size);
    }

    return AllocationResult(object);
  } else {
    return AllocationResult::Retry(OLD_SPACE);
  }
}

}  // namespace internal
}  // namespace v8