// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_CPPGC_HEAP_PAGE_H_
#define V8_HEAP_CPPGC_HEAP_PAGE_H_
#include "src/base/iterator.h"
#include "src/base/macros.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/object-start-bitmap.h"
namespace cppgc {
namespace internal {
class BaseSpace;
class NormalPageSpace;
class LargePageSpace;
class HeapBase;
class PageBackend;
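
// BasePage is the common metadata header for normal and large pages. It
// records the owning heap and space and the page type, and provides payload
// bounds as well as object-header lookup for inner addresses.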
class V8_EXPORT_PRIVATE BasePage {
public:
static inline BasePage* FromPayload(void*);
static inline const BasePage* FromPayload(const void*);
static BasePage* FromInnerAddress(const HeapBase*, void*);
static const BasePage* FromInnerAddress(const HeapBase*, const void*);
static void Destroy(BasePage*);
BasePage(const BasePage&) = delete;
BasePage& operator=(const BasePage&) = delete;
HeapBase* heap() { return heap_; }
const HeapBase* heap() const { return heap_; }
BaseSpace* space() { return space_; }
const BaseSpace* space() const { return space_; }
void set_space(BaseSpace* space) { space_ = space; }
bool is_large() const { return type_ == PageType::kLarge; }
Address PayloadStart();
ConstAddress PayloadStart() const;
Address PayloadEnd();
ConstAddress PayloadEnd() const;
  // |address| must refer to a real object.
template <AccessMode = AccessMode::kNonAtomic>
HeapObjectHeader& ObjectHeaderFromInnerAddress(void* address) const;
template <AccessMode = AccessMode::kNonAtomic>
const HeapObjectHeader& ObjectHeaderFromInnerAddress(
const void* address) const;
  // |address| is guaranteed to point into the page, but not necessarily into
  // the payload of a live object. Returns nullptr when |address| points into
  // a free-list entry, and the valid header otherwise.
HeapObjectHeader* TryObjectHeaderFromInnerAddress(void* address) const;
const HeapObjectHeader* TryObjectHeaderFromInnerAddress(
const void* address) const;
// SynchronizedLoad and SynchronizedStore are used to sync pages after they
// are allocated. std::atomic_thread_fence is sufficient in practice but is
// not recognized by tsan. Atomic load and store of the |type_| field are
// added for tsan builds.
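  // A typical pairing (sketch): the allocating thread calls
  // SynchronizedStore() once the page is set up; a concurrent marking thread
  // calls SynchronizedLoad() before reading page metadata (see
  // ObjectHeaderFromInnerAddress below).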
void SynchronizedLoad() const {
#if defined(THREAD_SANITIZER)
v8::base::AsAtomicPtr(&type_)->load(std::memory_order_acquire);
#endif
}
void SynchronizedStore() {
std::atomic_thread_fence(std::memory_order_seq_cst);
#if defined(THREAD_SANITIZER)
v8::base::AsAtomicPtr(&type_)->store(type_, std::memory_order_release);
#endif
}
protected:
enum class PageType : uint8_t { kNormal, kLarge };
BasePage(HeapBase*, BaseSpace*, PageType);
private:
HeapBase* heap_;
BaseSpace* space_;
PageType type_;
};
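
// NormalPage is a regular-sized page that holds multiple objects and
// supports iterating over their headers.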
class V8_EXPORT_PRIVATE NormalPage final : public BasePage {
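  // Forward iterator over the object headers in a page's payload. The
  // current linear allocation buffer (LAB), described by |lab_start| and
  // |lab_size|, contains no initialized objects and is skipped.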
template <typename T>
class IteratorImpl : v8::base::iterator<std::forward_iterator_tag, T> {
public:
explicit IteratorImpl(T* p, ConstAddress lab_start = nullptr,
size_t lab_size = 0)
: p_(p), lab_start_(lab_start), lab_size_(lab_size) {
DCHECK(p);
DCHECK_EQ(0, (lab_size & (sizeof(T) - 1)));
if (reinterpret_cast<ConstAddress>(p_) == lab_start_) {
p_ += (lab_size_ / sizeof(T));
}
}
T& operator*() { return *p_; }
const T& operator*() const { return *p_; }
bool operator==(IteratorImpl other) const { return p_ == other.p_; }
bool operator!=(IteratorImpl other) const { return !(*this == other); }
IteratorImpl& operator++() {
const size_t size = p_->GetSize();
DCHECK_EQ(0, (size & (sizeof(T) - 1)));
p_ += (size / sizeof(T));
if (reinterpret_cast<ConstAddress>(p_) == lab_start_) {
p_ += (lab_size_ / sizeof(T));
}
return *this;
}
IteratorImpl operator++(int) {
IteratorImpl temp(*this);
++(*this);
return temp;
}
T* base() const { return p_; }
private:
T* p_;
ConstAddress lab_start_;
size_t lab_size_;
};
public:
using iterator = IteratorImpl<HeapObjectHeader>;
using const_iterator = IteratorImpl<const HeapObjectHeader>;
// Allocates a new page in the detached state.
static NormalPage* Create(PageBackend*, NormalPageSpace*);
  // Destroys and frees the page. The page must be detached from the
  // corresponding space (i.e. it must already have been swept when this is
  // called).
static void Destroy(NormalPage*);
static NormalPage* From(BasePage* page) {
DCHECK(!page->is_large());
return static_cast<NormalPage*>(page);
}
static const NormalPage* From(const BasePage* page) {
return From(const_cast<BasePage*>(page));
}
iterator begin();
const_iterator begin() const;
iterator end() {
return iterator(reinterpret_cast<HeapObjectHeader*>(PayloadEnd()));
}
const_iterator end() const {
return const_iterator(
reinterpret_cast<const HeapObjectHeader*>(PayloadEnd()));
}
Address PayloadStart();
ConstAddress PayloadStart() const;
Address PayloadEnd();
ConstAddress PayloadEnd() const;
static size_t PayloadSize();
bool PayloadContains(ConstAddress address) const {
return (PayloadStart() <= address) && (address < PayloadEnd());
}
PlatformAwareObjectStartBitmap& object_start_bitmap() {
return object_start_bitmap_;
}
const PlatformAwareObjectStartBitmap& object_start_bitmap() const {
return object_start_bitmap_;
}
private:
NormalPage(HeapBase* heap, BaseSpace* space);
~NormalPage();
PlatformAwareObjectStartBitmap object_start_bitmap_;
};
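
// LargePage hosts exactly one (large) object and is sized accordingly.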
class V8_EXPORT_PRIVATE LargePage final : public BasePage {
public:
// Allocates a new page in the detached state.
static LargePage* Create(PageBackend*, LargePageSpace*, size_t);
  // Destroys and frees the page. The page must be detached from the
  // corresponding space (i.e. it must already have been swept when this is
  // called).
static void Destroy(LargePage*);
static LargePage* From(BasePage* page) {
DCHECK(page->is_large());
return static_cast<LargePage*>(page);
}
static const LargePage* From(const BasePage* page) {
return From(const_cast<BasePage*>(page));
}
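  // Returns the header of the single object allocated on this page.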
HeapObjectHeader* ObjectHeader();
const HeapObjectHeader* ObjectHeader() const;
Address PayloadStart();
ConstAddress PayloadStart() const;
Address PayloadEnd();
ConstAddress PayloadEnd() const;
size_t PayloadSize() const { return payload_size_; }
bool PayloadContains(ConstAddress address) const {
return (PayloadStart() <= address) && (address < PayloadEnd());
}
private:
LargePage(HeapBase* heap, BaseSpace* space, size_t);
~LargePage();
size_t payload_size_;
};
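
// Computes the page from a payload pointer. This relies on page reservations
// being kPageSize-aligned: masking with kPageBaseMask yields the base of the
// reservation, and the BasePage metadata starts right after the leading
// guard page, hence the kGuardPageSize offset.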
// static
BasePage* BasePage::FromPayload(void* payload) {
return reinterpret_cast<BasePage*>(
(reinterpret_cast<uintptr_t>(payload) & kPageBaseMask) + kGuardPageSize);
}
// static
const BasePage* BasePage::FromPayload(const void* payload) {
return reinterpret_cast<const BasePage*>(
(reinterpret_cast<uintptr_t>(const_cast<void*>(payload)) &
kPageBaseMask) +
kGuardPageSize);
}
template <AccessMode mode = AccessMode::kNonAtomic>
const HeapObjectHeader* ObjectHeaderFromInnerAddressImpl(const BasePage* page,
const void* address) {
if (page->is_large()) {
return LargePage::From(page)->ObjectHeader();
}
const PlatformAwareObjectStartBitmap& bitmap =
NormalPage::From(page)->object_start_bitmap();
const HeapObjectHeader* header =
bitmap.FindHeader<mode>(static_cast<ConstAddress>(address));
DCHECK_LT(address, reinterpret_cast<ConstAddress>(header) +
header->GetSize<AccessMode::kAtomic>());
return header;
}
template <AccessMode mode>
HeapObjectHeader& BasePage::ObjectHeaderFromInnerAddress(void* address) const {
return const_cast<HeapObjectHeader&>(
ObjectHeaderFromInnerAddress<mode>(const_cast<const void*>(address)));
}
template <AccessMode mode>
const HeapObjectHeader& BasePage::ObjectHeaderFromInnerAddress(
const void* address) const {
  // This method might be called for |address| found via a Trace method of
  // another object. If |address| is on a newly allocated page, there will be
  // no synchronization between the page allocation and a concurrent marking
  // thread, resulting in a race with page initialization (specifically with
  // writing the page |type_| field). This can occur when tracing a Member
  // holding a reference to a mixin type.
SynchronizedLoad();
const HeapObjectHeader* header =
ObjectHeaderFromInnerAddressImpl<mode>(this, address);
DCHECK_NE(kFreeListGCInfoIndex, header->GetGCInfoIndex());
return *header;
}
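
// A minimal usage sketch (illustrative only; |object| is assumed to point
// into a cppgc-managed payload):
//   BasePage* page = BasePage::FromPayload(object);
//   HeapObjectHeader& header = page->ObjectHeaderFromInnerAddress(object);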
} // namespace internal
} // namespace cppgc
#endif // V8_HEAP_CPPGC_HEAP_PAGE_H_