Upgrade V8 binaries to version 9.7.106.18 #264

Merged
merged 2 commits on Jan 12, 2022
105 changes: 88 additions & 17 deletions deps/include/cppgc/allocation.h
@@ -18,6 +18,23 @@
#include "cppgc/type-traits.h"
#include "v8config.h" // NOLINT(build/include_directory)

#if defined(__has_attribute)
#if __has_attribute(assume_aligned)
#define CPPGC_DEFAULT_ALIGNED \
__attribute__((assume_aligned(api_constants::kDefaultAlignment)))
#define CPPGC_DOUBLE_WORD_ALIGNED \
__attribute__((assume_aligned(2 * api_constants::kDefaultAlignment)))
#endif // __has_attribute(assume_aligned)
#endif // defined(__has_attribute)

#if !defined(CPPGC_DEFAULT_ALIGNED)
#define CPPGC_DEFAULT_ALIGNED
#endif

#if !defined(CPPGC_DOUBLE_WORD_ALIGNED)
#define CPPGC_DOUBLE_WORD_ALIGNED
#endif
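
The `assume_aligned` attribute tells the compiler that a function's returned pointer has at least the stated alignment, so callers can skip alignment fix-ups. A minimal, self-contained sketch of the same detect-and-fall-back pattern (the `MY_ASSUME_ALIGNED_8` and `AllocateBlock` names are illustrative, not part of this patch):

```cpp
#include <cstddef>

// Detect the attribute the same way the header above does; expand to nothing
// on compilers that lack it.
#if defined(__has_attribute)
#if __has_attribute(assume_aligned)
#define MY_ASSUME_ALIGNED_8 __attribute__((assume_aligned(8)))
#endif
#endif
#if !defined(MY_ASSUME_ALIGNED_8)
#define MY_ASSUME_ALIGNED_8
#endif

// The attribute promises the optimizer that the result is 8-byte aligned,
// e.g. safe for placement-new of any pointer-aligned type without a runtime
// alignment check.
void* MY_ASSUME_ALIGNED_8 AllocateBlock(std::size_t size);
```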

namespace cppgc {

/**
@@ -27,6 +44,9 @@ class AllocationHandle;

namespace internal {

// Similar to C++17 std::align_val_t.
enum class AlignVal : size_t {};

class V8_EXPORT MakeGarbageCollectedTraitInternal {
protected:
static inline void MarkObjectAsFullyConstructed(const void* payload) {
@@ -45,32 +65,72 @@ class V8_EXPORT MakeGarbageCollectedTraitInternal {
atomic_mutable_bitfield->store(value, std::memory_order_release);
}

template <typename U, typename CustomSpace>
struct SpacePolicy {
static void* Allocate(AllocationHandle& handle, size_t size) {
// Custom space.
// Dispatch based on compile-time information.
//
// Default implementation is for a custom space with >`kDefaultAlignment` byte
// alignment.
template <typename GCInfoType, typename CustomSpace, size_t alignment>
struct AllocationDispatcher final {
static void* Invoke(AllocationHandle& handle, size_t size) {
static_assert(std::is_base_of<CustomSpaceBase, CustomSpace>::value,
"Custom space must inherit from CustomSpaceBase.");
static_assert(
!CustomSpace::kSupportsCompaction,
"Custom spaces that support compaction do not support allocating "
"objects with non-default (i.e. word-sized) alignment.");
return MakeGarbageCollectedTraitInternal::Allocate(
handle, size, internal::GCInfoTrait<U>::Index(),
CustomSpace::kSpaceIndex);
handle, size, static_cast<AlignVal>(alignment),
internal::GCInfoTrait<GCInfoType>::Index(), CustomSpace::kSpaceIndex);
}
};

// Fast path for regular allocations for the default space with
// `kDefaultAlignment` byte alignment.
template <typename GCInfoType>
struct AllocationDispatcher<GCInfoType, void,
api_constants::kDefaultAlignment>
final {
static void* Invoke(AllocationHandle& handle, size_t size) {
return MakeGarbageCollectedTraitInternal::Allocate(
handle, size, internal::GCInfoTrait<GCInfoType>::Index());
}
};

template <typename U>
struct SpacePolicy<U, void> {
static void* Allocate(AllocationHandle& handle, size_t size) {
// Default space.
// Default space with >`kDefaultAlignment` byte alignment.
template <typename GCInfoType, size_t alignment>
struct AllocationDispatcher<GCInfoType, void, alignment> final {
static void* Invoke(AllocationHandle& handle, size_t size) {
return MakeGarbageCollectedTraitInternal::Allocate(
handle, size, internal::GCInfoTrait<U>::Index());
handle, size, static_cast<AlignVal>(alignment),
internal::GCInfoTrait<GCInfoType>::Index());
}
};

// Custom space with `kDefaultAlignment` byte alignment.
template <typename GCInfoType, typename CustomSpace>
struct AllocationDispatcher<GCInfoType, CustomSpace,
api_constants::kDefaultAlignment>
final {
static void* Invoke(AllocationHandle& handle, size_t size) {
static_assert(std::is_base_of<CustomSpaceBase, CustomSpace>::value,
"Custom space must inherit from CustomSpaceBase.");
return MakeGarbageCollectedTraitInternal::Allocate(
handle, size, internal::GCInfoTrait<GCInfoType>::Index(),
CustomSpace::kSpaceIndex);
}
};

private:
static void* Allocate(cppgc::AllocationHandle& handle, size_t size,
GCInfoIndex index);
static void* Allocate(cppgc::AllocationHandle& handle, size_t size,
GCInfoIndex index, CustomSpaceIndex space_index);
static void* CPPGC_DEFAULT_ALIGNED Allocate(cppgc::AllocationHandle&, size_t,
GCInfoIndex);
static void* CPPGC_DOUBLE_WORD_ALIGNED Allocate(cppgc::AllocationHandle&,
size_t, AlignVal,
GCInfoIndex);
static void* CPPGC_DEFAULT_ALIGNED Allocate(cppgc::AllocationHandle&, size_t,
GCInfoIndex, CustomSpaceIndex);
static void* CPPGC_DOUBLE_WORD_ALIGNED Allocate(cppgc::AllocationHandle&,
size_t, AlignVal, GCInfoIndex,
CustomSpaceIndex);

friend class HeapObjectHeader;
};
@@ -109,10 +169,18 @@ class MakeGarbageCollectedTraitBase
std::is_base_of<typename T::ParentMostGarbageCollectedType, T>::value,
"U of GarbageCollected<U> must be a base of T. Check "
"GarbageCollected<T> base class inheritance.");
return SpacePolicy<
static constexpr size_t kWantedAlignment =
alignof(T) < internal::api_constants::kDefaultAlignment
? internal::api_constants::kDefaultAlignment
: alignof(T);
static_assert(
kWantedAlignment <= internal::api_constants::kMaxSupportedAlignment,
"Requested alignment larger than alignof(std::max_align_t) bytes. "
"Please file a bug to possibly get this restriction lifted.");
return AllocationDispatcher<
typename internal::GCInfoFolding<
T, typename T::ParentMostGarbageCollectedType>::ResultType,
typename SpaceTrait<T>::Space>::Allocate(handle, size);
typename SpaceTrait<T>::Space, kWantedAlignment>::Invoke(handle, size);
}
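
To make the new dispatch concrete: `kWantedAlignment` clamps `alignof(T)` up to `kDefaultAlignment`, and the extra `size_t alignment` template parameter then selects either the pre-existing fast-path `Allocate()` or one of the new `AlignVal` overloads. A hedged sketch of how the clamp behaves, assuming a 64-bit target where `kDefaultAlignment == 8` (the `Small` and `SimdNode` types are hypothetical):

```cpp
#include <cstddef>

// Stand-in for internal::api_constants::kDefaultAlignment on a 64-bit target.
constexpr std::size_t kDefaultAlignment = sizeof(void*);

// Same clamp as in MakeGarbageCollectedTraitBase::Allocate above.
template <typename T>
constexpr std::size_t WantedAlignment() {
  return alignof(T) < kDefaultAlignment ? kDefaultAlignment : alignof(T);
}

struct Small {  // alignof == 1, clamped up to the default.
  char tag;
};
struct alignas(16) SimdNode {  // alignof == 16, needs double-word alignment.
  double lanes[2];
};

// Small takes the fast path (no AlignVal argument is passed); SimdNode
// selects the new double-word-aligned Allocate overload.
static_assert(WantedAlignment<Small>() == 8, "assumes sizeof(void*) == 8");
static_assert(WantedAlignment<SimdNode>() == 16, "double-word alignment");
```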

/**
@@ -236,4 +304,7 @@ V8_INLINE T* MakeGarbageCollected(AllocationHandle& handle,

} // namespace cppgc

#undef CPPGC_DEFAULT_ALIGNED
#undef CPPGC_DOUBLE_WORD_ALIGNED

#endif // INCLUDE_CPPGC_ALLOCATION_H_
5 changes: 5 additions & 0 deletions deps/include/cppgc/internal/api-constants.h
@@ -39,6 +39,11 @@ constexpr size_t kCagedHeapReservationSize = static_cast<size_t>(4) * kGB;
constexpr size_t kCagedHeapReservationAlignment = kCagedHeapReservationSize;
#endif

static constexpr size_t kDefaultAlignment = sizeof(void*);

// Maximum supported alignment for a type as in `alignof(T)`.
static constexpr size_t kMaxSupportedAlignment = 2 * kDefaultAlignment;

} // namespace api_constants

} // namespace internal
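On an LP64 target these constants come out to 8 and 16 bytes, which is why `allocation.h` above only distinguishes default and double-word alignment and why its static_assert message refers to `alignof(std::max_align_t)`. A small illustrative check under that 64-bit assumption (not part of the patch):

```cpp
#include <cstddef>

// Mirrors the new api_constants; all values below assume sizeof(void*) == 8.
constexpr std::size_t kDefaultAlignment = sizeof(void*);
constexpr std::size_t kMaxSupportedAlignment = 2 * kDefaultAlignment;

static_assert(kDefaultAlignment == 8, "example assumes a 64-bit target");
static_assert(kMaxSupportedAlignment == 16,
              "alignas(16) is the largest alignof(T) cppgc will honor");
```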
19 changes: 9 additions & 10 deletions deps/include/cppgc/internal/persistent-node.h
@@ -20,6 +20,7 @@ class Visitor;
namespace internal {

class CrossThreadPersistentRegion;
class FatalOutOfMemoryHandler;

// PersistentNode represents a variant of two states:
// 1) traceable node with a back pointer to the Persistent object;
@@ -79,7 +80,7 @@ class V8_EXPORT PersistentRegionBase {
using PersistentNodeSlots = std::array<PersistentNode, 256u>;

public:
PersistentRegionBase() = default;
explicit PersistentRegionBase(const FatalOutOfMemoryHandler& oom_handler);
// Clears Persistent fields to avoid stale pointers after heap teardown.
~PersistentRegionBase();

@@ -89,6 +90,7 @@
PersistentNode* AllocateNode(void* owner, TraceCallback trace) {
if (!free_list_head_) {
EnsureNodeSlots();
CPPGC_DCHECK(free_list_head_);
}
PersistentNode* node = free_list_head_;
free_list_head_ = free_list_head_->FreeListNext();
@@ -122,6 +124,7 @@
std::vector<std::unique_ptr<PersistentNodeSlots>> nodes_;
PersistentNode* free_list_head_ = nullptr;
size_t nodes_in_use_ = 0;
const FatalOutOfMemoryHandler& oom_handler_;

friend class CrossThreadPersistentRegion;
};
@@ -130,29 +133,25 @@
// freeing happens only on the thread that created the region.
class V8_EXPORT PersistentRegion final : public PersistentRegionBase {
public:
PersistentRegion();
explicit PersistentRegion(const FatalOutOfMemoryHandler&);
// Clears Persistent fields to avoid stale pointers after heap teardown.
~PersistentRegion() = default;

PersistentRegion(const PersistentRegion&) = delete;
PersistentRegion& operator=(const PersistentRegion&) = delete;

V8_INLINE PersistentNode* AllocateNode(void* owner, TraceCallback trace) {
#if V8_ENABLE_CHECKS
CheckIsCreationThread();
#endif // V8_ENABLE_CHECKS
CPPGC_DCHECK(IsCreationThread());
return PersistentRegionBase::AllocateNode(owner, trace);
}

V8_INLINE void FreeNode(PersistentNode* node) {
#if V8_ENABLE_CHECKS
CheckIsCreationThread();
#endif // V8_ENABLE_CHECKS
CPPGC_DCHECK(IsCreationThread());
PersistentRegionBase::FreeNode(node);
}

private:
void CheckIsCreationThread();
bool IsCreationThread();

int creation_thread_id_;
};
@@ -172,7 +171,7 @@ class V8_EXPORT PersistentRegionLock final {
class V8_EXPORT CrossThreadPersistentRegion final
: protected PersistentRegionBase {
public:
CrossThreadPersistentRegion() = default;
explicit CrossThreadPersistentRegion(const FatalOutOfMemoryHandler&);
// Clears Persistent fields to avoid stale pointers after heap teardown.
~CrossThreadPersistentRegion();

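The persistent-node changes inject a `FatalOutOfMemoryHandler` by reference instead of default-constructing regions, and the new `CPPGC_DCHECK(free_list_head_)` documents that `EnsureNodeSlots()` must either produce free slots or die through that handler. A hedged sketch of the injection pattern only, since `FatalOutOfMemoryHandler`'s interface and the real `EnsureNodeSlots()` body are not part of this diff (the `operator()` and the class/method names below are assumptions):

```cpp
#include <cstddef>

// Assumed interface: a callable that reports a fatal OOM and never returns.
class FatalOutOfMemoryHandler {
 public:
  [[noreturn]] void operator()(const char* reason) const;
};

class ExamplePersistentRegion {
 public:
  // The region does not own the handler; it keeps the injected reference,
  // matching the new `const FatalOutOfMemoryHandler& oom_handler_` member.
  explicit ExamplePersistentRegion(const FatalOutOfMemoryHandler& oom_handler)
      : oom_handler_(oom_handler) {}

  void EnsureNodeSlots() {
    if (!TryGrowNodeSlots()) {
      // Fail loudly with a description instead of dereferencing a null
      // free-list head later on.
      oom_handler_("Oilpan: could not allocate persistent node slots.");
    }
  }

 private:
  bool TryGrowNodeSlots();  // Returns false if the slot allocation fails.

  const FatalOutOfMemoryHandler& oom_handler_;
};
```

The `CheckIsCreationThread()` to `CPPGC_DCHECK(IsCreationThread())` change points the same way: the thread check now goes through `CPPGC_DCHECK`, so it compiles down to a no-op in builds where that macro is disabled.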
8 changes: 4 additions & 4 deletions deps/include/cppgc/internal/pointer-policies.h
@@ -92,19 +92,19 @@ class DisabledCheckingPolicy {
void CheckPointer(const void*) {}
};

#if V8_ENABLE_CHECKS
#ifdef DEBUG
// Off-heap members are not connected to the object graph and thus cannot
// resurrect dead objects.
using DefaultMemberCheckingPolicy =
SameThreadEnabledCheckingPolicy<false /* kCheckOffHeapAssignments*/>;
using DefaultPersistentCheckingPolicy =
SameThreadEnabledCheckingPolicy<true /* kCheckOffHeapAssignments*/>;
#else
#else // !DEBUG
using DefaultMemberCheckingPolicy = DisabledCheckingPolicy;
using DefaultPersistentCheckingPolicy = DisabledCheckingPolicy;
#endif
#endif // !DEBUG
// For CT(W)P neither marking information (for value), nor objectstart bitmap
// (for slot) are guaranteed to be present because there's no synchonization
// (for slot) are guaranteed to be present because there's no synchronization
// between heaps after marking.
using DefaultCrossThreadPersistentCheckingPolicy = DisabledCheckingPolicy;

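The policy aliases above now key off `DEBUG` rather than `V8_ENABLE_CHECKS`, but the mechanism is unchanged: the checking policy is mixed in at compile time, so release builds pay nothing for it. A hedged, self-contained sketch of that pattern (class names are illustrative, not the real cppgc types):

```cpp
#include <cassert>
#include <cstdint>

class DisabledCheckingPolicy {
 protected:
  void CheckPointer(const void*) {}  // No-op; the call disappears entirely.
};

class EnabledCheckingPolicy {
 protected:
  void CheckPointer(const void* ptr) {
    // Stand-in for the real heap/ownership verification.
    assert(reinterpret_cast<std::uintptr_t>(ptr) % alignof(void*) == 0);
  }
};

#ifdef DEBUG
using DefaultCheckingPolicy = EnabledCheckingPolicy;
#else
using DefaultCheckingPolicy = DisabledCheckingPolicy;
#endif

// A Member-like handle inherits its policy, so CheckPointer is a direct
// (typically inlined) call with no virtual dispatch.
template <typename T>
class ExampleMember : private DefaultCheckingPolicy {
 public:
  ExampleMember& operator=(T* raw) {
    this->CheckPointer(raw);
    raw_ = raw;
    return *this;
  }

 private:
  T* raw_ = nullptr;
};
```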
2 changes: 1 addition & 1 deletion deps/include/libplatform/libplatform.h
@@ -95,7 +95,7 @@ V8_PLATFORM_EXPORT void RunIdleTasks(v8::Platform* platform,
* The |platform| has to be created using |NewDefaultPlatform|.
*
*/
V8_DEPRECATE_SOON("Access the DefaultPlatform directly")
V8_DEPRECATED("Access the DefaultPlatform directly")
V8_PLATFORM_EXPORT void SetTracingController(
v8::Platform* platform,
v8::platform::tracing::TracingController* tracing_controller);
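`SetTracingController()` moves from `V8_DEPRECATE_SOON` to `V8_DEPRECATED`, so embedders now get a compiler warning. One common migration, sketched below under the assumption of the standard `NewDefaultPlatform()` entry point (which accepts the tracing controller as its final argument), is to hand the controller to the platform at construction time instead of mutating the platform afterwards:

```cpp
#include <memory>
#include <utility>

#include "libplatform/libplatform.h"
#include "libplatform/v8-tracing.h"

std::unique_ptr<v8::Platform> CreatePlatformWithTracing() {
  auto tracing_controller =
      std::make_unique<v8::platform::tracing::TracingController>();
  // Configure the controller (trace buffer, enabled categories, ...) here.
  return v8::platform::NewDefaultPlatform(
      /*thread_pool_size=*/0, v8::platform::IdleTaskSupport::kDisabled,
      v8::platform::InProcessStackDumping::kDisabled,
      std::move(tracing_controller));
}
```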
2 changes: 1 addition & 1 deletion deps/include/v8-context.h
@@ -318,7 +318,7 @@ class V8_EXPORT Context : public Data {
* stack may be allocated separately from the native stack. See also
* |TryCatch::JSStackComparableAddressPrivate| for details.
*/
V8_DEPRECATE_SOON(
V8_DEPRECATED(
"This is private V8 information that should not be exposed in the API.")
uintptr_t JSStackComparableAddress() const {
return JSStackComparableAddressPrivate();
17 changes: 13 additions & 4 deletions deps/include/v8-cppgc.h
@@ -195,9 +195,11 @@ class V8_EXPORT JSHeapConsistency final {
* \returns whether a write barrier is needed and which barrier to invoke.
*/
template <typename HeapHandleCallback>
V8_DEPRECATE_SOON("Write barriers automatically emitted by TracedReference.")
static V8_INLINE WriteBarrierType
GetWriteBarrierType(const TracedReferenceBase& ref,
WriteBarrierParams& params, HeapHandleCallback callback) {
GetWriteBarrierType(const TracedReferenceBase& ref,
WriteBarrierParams& params,
HeapHandleCallback callback) {
if (ref.IsEmpty()) return WriteBarrierType::kNone;

if (V8_LIKELY(!cppgc::internal::WriteBarrier::
@@ -251,6 +253,7 @@
* \param params The parameters retrieved from `GetWriteBarrierType()`.
* \param ref The reference being written to.
*/
V8_DEPRECATE_SOON("Write barriers automatically emitted by TracedReference.")
static V8_INLINE void DijkstraMarkingBarrier(const WriteBarrierParams& params,
cppgc::HeapHandle& heap_handle,
const TracedReferenceBase& ref) {
@@ -280,6 +283,7 @@
* \param params The parameters retrieved from `GetWriteBarrierType()`.
* \param ref The reference being written to.
*/
V8_DEPRECATE_SOON("Write barriers automatically emitted by TracedReference.")
static V8_INLINE void GenerationalBarrier(const WriteBarrierParams& params,
const TracedReferenceBase& ref) {}

@@ -318,8 +322,13 @@ namespace cppgc {

template <typename T>
struct TraceTrait<v8::TracedReference<T>> {
static void Trace(Visitor* visitor, const v8::TracedReference<T>* self) {
static_cast<v8::JSVisitor*>(visitor)->Trace(*self);
static cppgc::TraceDescriptor GetTraceDescriptor(const void* self) {
return {nullptr, Trace};
}

static void Trace(Visitor* visitor, const void* self) {
static_cast<v8::JSVisitor*>(visitor)->Trace(
*static_cast<const v8::TracedReference<T>*>(self));
}
};

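The `TraceTrait<v8::TracedReference<T>>` specialization is now type-erased (`const void*`) and exposes `GetTraceDescriptor()`, matching the shape `cppgc::Visitor` expects for inlined fields. In practice, a cppgc-managed object can trace a `TracedReference` member with a plain `cppgc::Visitor`; a hedged sketch of that embedder-side pattern (the `WrappedNode` class is illustrative, and the include list assumes this drop's split `v8-*.h` headers):

```cpp
#include "cppgc/allocation.h"
#include "cppgc/garbage-collected.h"
#include "cppgc/visitor.h"
#include "v8-cppgc.h"
#include "v8-object.h"
#include "v8-traced-handle.h"

class WrappedNode final : public cppgc::GarbageCollected<WrappedNode> {
 public:
  void Trace(cppgc::Visitor* visitor) const {
    // Resolves through the TraceTrait specialization above, which casts the
    // visitor to v8::JSVisitor and forwards the reference.
    visitor->Trace(js_object_);
  }

 private:
  v8::TracedReference<v8::Object> js_object_;
};
```

Such an object would be created with `cppgc::MakeGarbageCollected<WrappedNode>(heap.GetAllocationHandle())`, which ties back to the allocation.h changes earlier in this diff.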
2 changes: 1 addition & 1 deletion deps/include/v8-embedder-heap.h
@@ -127,7 +127,7 @@ class V8_EXPORT EmbedderHeapTracer {
/**
* Called by the embedder to notify V8 of an empty execution stack.
*/
V8_DEPRECATE_SOON(
V8_DEPRECATED(
"This call only optimized internal caches which V8 is able to figure out "
"on its own now.")
void NotifyEmptyEmbedderStack();
2 changes: 1 addition & 1 deletion deps/include/v8-exception.h
@@ -169,7 +169,7 @@ class V8_EXPORT TryCatch {
*/
void SetCaptureMessage(bool value);

V8_DEPRECATE_SOON(
V8_DEPRECATED(
"This is private information that should not be exposed by the API")
static void* JSStackComparableAddress(TryCatch* handler) {
if (handler == nullptr) return nullptr;