path: root/chromium/v8/src/heap/read-only-spaces.h
author     Allan Sandfeld Jensen <allan.jensen@qt.io>    2020-10-12 14:27:29 +0200
committer  Allan Sandfeld Jensen <allan.jensen@qt.io>    2020-10-13 09:35:20 +0000
commit     c30a6232df03e1efbd9f3b226777b07e087a1122 (patch)
tree       e992f45784689f373bcc38d1b79a239ebe17ee23 /chromium/v8/src/heap/read-only-spaces.h
parent     7b5b123ac58f58ffde0f4f6e488bcd09aa4decd3 (diff)
download   qtwebengine-chromium-85-based.tar.gz
BASELINE: Update Chromium to 85.0.4183.140 (85-based)
Change-Id: Iaa42f4680837c57725b1344f108c0196741f6057
Reviewed-by: Allan Sandfeld Jensen <allan.jensen@qt.io>
Diffstat (limited to 'chromium/v8/src/heap/read-only-spaces.h')
-rw-r--r--  chromium/v8/src/heap/read-only-spaces.h  90
1 file changed, 72 insertions, 18 deletions
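
The substance of the change below is that ReadOnlySpace no longer derives from PagedSpace: it now inherits BaseSpace directly, keeps its own std::vector<ReadOnlyPage*>, bump-pointer fields (top_/limit_) and AllocationStats, and exposes AllocateRaw, ShrinkPages and Seal itself, while ReadOnlyPage likewise switches its base class from Page to BasicMemoryChunk. The following is a minimal, hypothetical sketch of how that reworked interface is driven; the heap pointer, the 32-byte request and the kWordAligned alignment are illustrative assumptions not taken from this patch, and inside V8 these calls are made by heap setup and snapshot code rather than by embedders.

// Hypothetical usage sketch (V8-internal API); names not present in the diff
// below (heap, the request size, kWordAligned) are assumptions.
ReadOnlySpace ro_space(heap);  // V8_EXPORT_PRIVATE explicit ReadOnlySpace(Heap*)

// Allocation now goes through the space's own linear allocation area instead
// of the PagedSpace free-list machinery it previously inherited.
AllocationResult result = ro_space.AllocateRaw(32, kWordAligned);
HeapObject object;
if (result.To(&object)) {
  // ... initialize the object while writable() is still true ...
}

// Trim the final page to its high water mark, then mark every page read-only.
ro_space.ShrinkPages();
ro_space.Seal(ReadOnlySpace::SealMode::kDoNotDetachFromHeap);
// (When the read-only heap is shared between isolates, the sealed pages are
//  handed over with DetachPagesAndAddToArtifacts() instead.)
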
diff --git a/chromium/v8/src/heap/read-only-spaces.h b/chromium/v8/src/heap/read-only-spaces.h
index dd82182b7f6..ae2e6859440 100644
--- a/chromium/v8/src/heap/read-only-spaces.h
+++ b/chromium/v8/src/heap/read-only-spaces.h
@@ -10,21 +10,27 @@
#include "include/v8-platform.h"
#include "src/base/macros.h"
+#include "src/common/globals.h"
+#include "src/heap/allocation-stats.h"
+#include "src/heap/base-space.h"
+#include "src/heap/basic-memory-chunk.h"
#include "src/heap/list.h"
#include "src/heap/memory-chunk.h"
-#include "src/heap/spaces.h"
namespace v8 {
namespace internal {
+class MemoryAllocator;
class ReadOnlyHeap;
-class ReadOnlyPage : public Page {
+class ReadOnlyPage : public BasicMemoryChunk {
public:
// Clears any pointers in the header that point out of the page that would
// otherwise make the header non-relocatable.
void MakeHeaderRelocatable();
+ size_t ShrinkToHighWaterMark();
+
private:
friend class ReadOnlySpace;
};
@@ -45,8 +51,8 @@ class ReadOnlyArtifacts {
return shared_read_only_space_.get();
}
- heap::List<MemoryChunk>& pages() { return pages_; }
- void TransferPages(heap::List<MemoryChunk>&& pages) {
+ std::vector<ReadOnlyPage*>& pages() { return pages_; }
+ void TransferPages(std::vector<ReadOnlyPage*>&& pages) {
pages_ = std::move(pages);
}
@@ -56,7 +62,7 @@ class ReadOnlyArtifacts {
ReadOnlyHeap* read_only_heap() { return read_only_heap_.get(); }
private:
- heap::List<MemoryChunk> pages_;
+ std::vector<ReadOnlyPage*> pages_;
AllocationStats stats_;
std::unique_ptr<SharedReadOnlySpace> shared_read_only_space_;
std::unique_ptr<ReadOnlyHeap> read_only_heap_;
@@ -64,22 +70,28 @@ class ReadOnlyArtifacts {
// -----------------------------------------------------------------------------
// Read Only space for all Immortal Immovable and Immutable objects
-class ReadOnlySpace : public PagedSpace {
+class ReadOnlySpace : public BaseSpace {
public:
- explicit ReadOnlySpace(Heap* heap);
+ V8_EXPORT_PRIVATE explicit ReadOnlySpace(Heap* heap);
// Detach the pages and add them to artifacts for use in creating a
// SharedReadOnlySpace.
void DetachPagesAndAddToArtifacts(
std::shared_ptr<ReadOnlyArtifacts> artifacts);
- ~ReadOnlySpace() override { Unseal(); }
+ V8_EXPORT_PRIVATE ~ReadOnlySpace() override;
+
+ bool IsDetached() const { return heap_ == nullptr; }
bool writable() const { return !is_marked_read_only_; }
bool Contains(Address a) = delete;
bool Contains(Object o) = delete;
+ V8_EXPORT_PRIVATE
+ AllocationResult AllocateRaw(size_t size_in_bytes,
+ AllocationAlignment alignment);
+
V8_EXPORT_PRIVATE void ClearStringPaddingIfNeeded();
enum class SealMode { kDetachFromHeapAndForget, kDoNotDetachFromHeap };
@@ -87,13 +99,35 @@ class ReadOnlySpace : public PagedSpace {
// Seal the space by marking it read-only, optionally detaching it
// from the heap and forgetting it for memory bookkeeping purposes (e.g.
// prevent the space's memory from registering as leaked).
- void Seal(SealMode ro_mode);
+ V8_EXPORT_PRIVATE void Seal(SealMode ro_mode);
// During boot the free_space_map is created, and afterwards we may need
- // to write it into the free list nodes that were already created.
- void RepairFreeListsAfterDeserialization();
+ // to write it into the free space nodes that were already created.
+ void RepairFreeSpacesAfterDeserialization();
+
+ size_t Size() override { return accounting_stats_.Size(); }
+ V8_EXPORT_PRIVATE size_t CommittedPhysicalMemory() override;
+
+ const std::vector<ReadOnlyPage*>& pages() const { return pages_; }
+ Address top() const { return top_; }
+ Address limit() const { return limit_; }
+ size_t Capacity() const { return capacity_; }
+
+ bool ContainsSlow(Address addr);
+ V8_EXPORT_PRIVATE void ShrinkPages();
+#ifdef VERIFY_HEAP
+ void Verify(Isolate* isolate);
+#ifdef DEBUG
+ void VerifyCounters(Heap* heap);
+#endif // DEBUG
+#endif // VERIFY_HEAP
+
+ // Return size of allocatable area on a page in this space.
+ int AreaSize() { return static_cast<int>(area_size_); }
+
+ ReadOnlyPage* InitializePage(BasicMemoryChunk* chunk);
- size_t Available() override { return 0; }
+ Address FirstPageAddress() const { return pages_.front()->address(); }
protected:
void SetPermissionsForPages(MemoryAllocator* memory_allocator,
@@ -101,16 +135,36 @@ class ReadOnlySpace : public PagedSpace {
bool is_marked_read_only_ = false;
+ // Accounting information for this space.
+ AllocationStats accounting_stats_;
+
+ std::vector<ReadOnlyPage*> pages_;
+
+ Address top_;
+ Address limit_;
+
private:
- // Unseal the space after is has been sealed, by making it writable.
- // TODO(v8:7464): Only possible if the space hasn't been detached.
+ // Unseal the space after it has been sealed, by making it writable.
void Unseal();
- //
- // String padding must be cleared just before serialization and therefore the
- // string padding in the space will already have been cleared if the space was
- // deserialized.
+ void DetachFromHeap() { heap_ = nullptr; }
+
+ AllocationResult AllocateRawUnaligned(int size_in_bytes);
+ AllocationResult AllocateRawAligned(int size_in_bytes,
+ AllocationAlignment alignment);
+
+ HeapObject TryAllocateLinearlyAligned(int size_in_bytes,
+ AllocationAlignment alignment);
+ void EnsureSpaceForAllocation(int size_in_bytes);
+ void FreeLinearAllocationArea();
+
+ // String padding must be cleared just before serialization and therefore
+ // the string padding in the space will already have been cleared if the
+ // space was deserialized.
bool is_string_padding_cleared_;
+
+ size_t capacity_;
+ const size_t area_size_;
};
class SharedReadOnlySpace : public ReadOnlySpace {