Skip to content

Instantly share code, notes, and snippets.

@addaleax
Last active October 26, 2019 13:38
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save addaleax/15afba086636bad8af3b846074464b89 to your computer and use it in GitHub Desktop.
Save addaleax/15afba086636bad8af3b846074464b89 to your computer and use it in GitHub Desktop.
diff --git a/deps/v8/include/libplatform/v8-tracing.h b/deps/v8/include/libplatform/v8-tracing.h
index e7cd8bfcdb66..df145e95bf72 100644
--- a/deps/v8/include/libplatform/v8-tracing.h
+++ b/deps/v8/include/libplatform/v8-tracing.h
@@ -244,6 +244,8 @@ class V8_PLATFORM_EXPORT TracingController
TracingController();
~TracingController() override;
+
+ // Takes ownership of |trace_buffer|.
void Initialize(TraceBuffer* trace_buffer);
#ifdef V8_USE_PERFETTO
// Must be called before StartTracing() if V8_USE_PERFETTO is true. Provides
diff --git a/deps/v8/include/v8-internal.h b/deps/v8/include/v8-internal.h
index fe2ce67e0df0..8ce88fb3efcb 100644
--- a/deps/v8/include/v8-internal.h
+++ b/deps/v8/include/v8-internal.h
@@ -63,8 +63,8 @@ struct SmiTagging<4> {
V8_INLINE static int SmiToInt(const internal::Address value) {
int shift_bits = kSmiTagSize + kSmiShiftSize;
- // Shift down (requires >> to be sign extending).
- return static_cast<int>(static_cast<intptr_t>(value)) >> shift_bits;
+ // Truncate and shift down (requires >> to be sign extending).
+ return static_cast<int32_t>(static_cast<uint32_t>(value)) >> shift_bits;
}
V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
// Is value in range [kSmiMinValue, kSmiMaxValue].
diff --git a/deps/v8/include/v8-profiler.h b/deps/v8/include/v8-profiler.h
index 645920d9c1b3..cc44b78ecd95 100644
--- a/deps/v8/include/v8-profiler.h
+++ b/deps/v8/include/v8-profiler.h
@@ -18,14 +18,18 @@ namespace v8 {
class HeapGraphNode;
struct HeapStatsUpdate;
-typedef uint32_t SnapshotObjectId;
-
+using NativeObject = void*;
+using SnapshotObjectId = uint32_t;
struct CpuProfileDeoptFrame {
int script_id;
size_t position;
};
+namespace internal {
+class CpuProfile;
+} // namespace internal
+
} // namespace v8
#ifdef V8_OS_WIN
@@ -48,75 +52,6 @@ template class V8_EXPORT std::vector<v8::CpuProfileDeoptInfo>;
namespace v8 {
-// TickSample captures the information collected for each sample.
-struct V8_EXPORT TickSample {
- // Internal profiling (with --prof + tools/$OS-tick-processor) wants to
- // include the runtime function we're calling. Externally exposed tick
- // samples don't care.
- enum RecordCEntryFrame { kIncludeCEntryFrame, kSkipCEntryFrame };
-
- TickSample()
- : state(OTHER),
- pc(nullptr),
- external_callback_entry(nullptr),
- frames_count(0),
- has_external_callback(false),
- update_stats(true) {}
-
- /**
- * Initialize a tick sample from the isolate.
- * \param isolate The isolate.
- * \param state Execution state.
- * \param record_c_entry_frame Include or skip the runtime function.
- * \param update_stats Whether update the sample to the aggregated stats.
- * \param use_simulator_reg_state When set to true and V8 is running under a
- * simulator, the method will use the simulator
- * register state rather than the one provided
- * with |state| argument. Otherwise the method
- * will use provided register |state| as is.
- */
- void Init(Isolate* isolate, const v8::RegisterState& state,
- RecordCEntryFrame record_c_entry_frame, bool update_stats,
- bool use_simulator_reg_state = true);
- /**
- * Get a call stack sample from the isolate.
- * \param isolate The isolate.
- * \param state Register state.
- * \param record_c_entry_frame Include or skip the runtime function.
- * \param frames Caller allocated buffer to store stack frames.
- * \param frames_limit Maximum number of frames to capture. The buffer must
- * be large enough to hold the number of frames.
- * \param sample_info The sample info is filled up by the function
- * provides number of actual captured stack frames and
- * the current VM state.
- * \param use_simulator_reg_state When set to true and V8 is running under a
- * simulator, the method will use the simulator
- * register state rather than the one provided
- * with |state| argument. Otherwise the method
- * will use provided register |state| as is.
- * \note GetStackSample is thread and signal safe and should only be called
- * when the JS thread is paused or interrupted.
- * Otherwise the behavior is undefined.
- */
- static bool GetStackSample(Isolate* isolate, v8::RegisterState* state,
- RecordCEntryFrame record_c_entry_frame,
- void** frames, size_t frames_limit,
- v8::SampleInfo* sample_info,
- bool use_simulator_reg_state = true);
- StateTag state; // The state of the VM.
- void* pc; // Instruction pointer.
- union {
- void* tos; // Top stack value (*sp).
- void* external_callback_entry;
- };
- static const unsigned kMaxFramesCountLog2 = 8;
- static const unsigned kMaxFramesCount = (1 << kMaxFramesCountLog2) - 1;
- void* stack[kMaxFramesCount]; // Call stack.
- unsigned frames_count : kMaxFramesCountLog2; // Number of captured frames.
- bool has_external_callback : 1;
- bool update_stats : 1; // Whether the sample should update aggregated stats.
-};
-
/**
* CpuProfileNode represents a node in a call graph.
*/
@@ -307,6 +242,15 @@ enum CpuProfilingNamingMode {
kDebugNaming,
};
+enum CpuProfilingLoggingMode {
+ // Enables logging when a profile is active, and disables logging when all
+ // profiles are detached.
+ kLazyLogging,
+ // Enables logging for the lifetime of the CpuProfiler. Calls to
+ // StartRecording are faster, at the expense of runtime overhead.
+ kEagerLogging,
+};
+
/**
* Optional profiling attributes.
*/
@@ -340,6 +284,11 @@ class V8_EXPORT CpuProfilingOptions {
int sampling_interval_us() const { return sampling_interval_us_; }
private:
+ friend class internal::CpuProfile;
+
+ bool has_filter_context() const;
+ void* raw_filter_context() const;
+
CpuProfilingMode mode_;
unsigned max_samples_;
int sampling_interval_us_;
@@ -359,6 +308,9 @@ class V8_EXPORT CpuProfiler {
static CpuProfiler* New(Isolate* isolate);
static CpuProfiler* New(Isolate* isolate,
CpuProfilingNamingMode mode);
+ static CpuProfiler* New(Isolate* isolate,
+ CpuProfilingNamingMode namingMode,
+ CpuProfilingLoggingMode loggingMode);
/**
* Synchronously collect current stack sample in all profilers attached to
@@ -801,6 +753,12 @@ class V8_EXPORT EmbedderGraph {
*/
virtual const char* NamePrefix() { return nullptr; }
+ /**
+ * Returns the NativeObject that can be used for querying the
+ * |HeapSnapshot|. Not implemented in Node 12.
+ */
+ NativeObject GetNativeObject() { return nullptr; }
+
Node(const Node&) = delete;
Node& operator=(const Node&) = delete;
};
@@ -863,6 +821,13 @@ class V8_EXPORT HeapProfiler {
*/
SnapshotObjectId GetObjectId(Local<Value> value);
+ /**
+ * Returns SnapshotObjectId for a native object referenced by |value| if it
+ * has been seen by the heap profiler, kUnknownObjectId otherwise.
+ * Not implemented in Node 12.
+ */
+ SnapshotObjectId GetObjectId(NativeObject value);
+
/**
* Returns heap object with given SnapshotObjectId if the object is alive,
* otherwise empty handle is returned.
diff --git a/deps/v8/include/v8-version.h b/deps/v8/include/v8-version.h
index a87655058a05..9999affb265f 100644
--- a/deps/v8/include/v8-version.h
+++ b/deps/v8/include/v8-version.h
@@ -9,9 +9,9 @@
// NOTE these macros are used by some of the tool scripts and the build
// system so their names cannot be changed without changing the scripts.
#define V8_MAJOR_VERSION 7
-#define V8_MINOR_VERSION 7
-#define V8_BUILD_NUMBER 299
-#define V8_PATCH_LEVEL 13
+#define V8_MINOR_VERSION 8
+#define V8_BUILD_NUMBER 279
+#define V8_PATCH_LEVEL 19
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
diff --git a/deps/v8/include/v8.h b/deps/v8/include/v8.h
index f3fbdc696294..f4ea851d41b0 100644
--- a/deps/v8/include/v8.h
+++ b/deps/v8/include/v8.h
@@ -19,6 +19,7 @@
#include <stdint.h>
#include <stdio.h>
#include <memory>
+#include <type_traits>
#include <utility>
#include <vector>
@@ -129,6 +130,7 @@ class PropertyCallbackArguments;
class FunctionCallbackArguments;
class GlobalHandles;
class ScopedExternalStringLock;
+class ThreadLocalTop;
namespace wasm {
class NativeModule;
@@ -823,24 +825,43 @@ template <class T>
using UniquePersistent = Global<T>;
/**
- * A traced handle with move semantics, similar to std::unique_ptr. The handle
- * is to be used together with |v8::EmbedderHeapTracer| and specifies edges from
- * the embedder into V8's heap.
+ * Trait specifying behavior of |TracedGlobal<T>|.
+ */
+template <typename T>
+struct TracedGlobalTrait {
+ /**
+ * Specifies whether |TracedGlobal<T>| should clear its handle on destruction.
+ *
+ * V8 will *not* clear the embedder-side memory of the handle. The embedder is
+ * expected to report all |TracedGlobal<T>| handles through
+ * |EmbedderHeapTracer| upon garbage collection.
+ *
+ * See |EmbedderHeapTracer::IsRootForNonTracingGC| for handling with
+ * non-tracing GCs in V8.
+ */
+ static constexpr bool kRequiresExplicitDestruction = true;
+};
+
+/**
+ * A traced handle with copy and move semantics. The handle is to be used
+ * together with |v8::EmbedderHeapTracer| and specifies edges from the embedder
+ * into V8's heap.
*
* The exact semantics are:
* - Tracing garbage collections use |v8::EmbedderHeapTracer|.
* - Non-tracing garbage collections refer to
* |v8::EmbedderHeapTracer::IsRootForNonTracingGC()| whether the handle should
* be treated as root or not.
+ *
+ * For destruction semantics see |TracedGlobalTrait<T>|.
*/
template <typename T>
-class V8_EXPORT TracedGlobal {
+class TracedGlobal {
public:
/**
* An empty TracedGlobal without storage cell.
*/
TracedGlobal() = default;
- ~TracedGlobal() { Reset(); }
/**
* Construct a TracedGlobal from a Local.
@@ -857,7 +878,41 @@ class V8_EXPORT TracedGlobal {
/**
* Move constructor initializing TracedGlobal from an existing one.
*/
- V8_INLINE TracedGlobal(TracedGlobal&& other);
+ V8_INLINE TracedGlobal(TracedGlobal&& other) {
+ // Forward to operator=.
+ *this = std::move(other);
+ }
+
+ /**
+ * Move constructor initializing TracedGlobal from an existing one.
+ */
+ template <typename S>
+ V8_INLINE TracedGlobal(TracedGlobal<S>&& other) {
+ // Forward to operator=.
+ *this = std::move(other);
+ }
+
+ /**
+ * Copy constructor initializing TracedGlobal from an existing one.
+ */
+ V8_INLINE TracedGlobal(const TracedGlobal& other) {
+ // Forward to operator=;
+ *this = other;
+ }
+
+ /**
+ * Copy constructor initializing TracedGlobal from an existing one.
+ */
+ template <typename S>
+ V8_INLINE TracedGlobal(const TracedGlobal<S>& other) {
+ // Forward to operator=;
+ *this = other;
+ }
+
+ /**
+ * Move assignment operator initializing TracedGlobal from an existing one.
+ */
+ V8_INLINE TracedGlobal& operator=(TracedGlobal&& rhs);
/**
* Move assignment operator initializing TracedGlobal from an existing one.
@@ -866,10 +921,21 @@ class V8_EXPORT TracedGlobal {
V8_INLINE TracedGlobal& operator=(TracedGlobal<S>&& rhs);
/**
- * TracedGlobal only supports move semantics and forbids copying.
+ * Copy assignment operator initializing TracedGlobal from an existing one.
+ *
+ * Note: Prohibited when |other| has a finalization callback set through
+ * |SetFinalizationCallback|.
+ */
+ V8_INLINE TracedGlobal& operator=(const TracedGlobal& rhs);
+
+ /**
+ * Copy assignment operator initializing TracedGlobal from an existing one.
+ *
+ * Note: Prohibited when |other| has a finalization callback set through
+ * |SetFinalizationCallback|.
*/
- TracedGlobal(const TracedGlobal&) = delete;
- void operator=(const TracedGlobal&) = delete;
+ template <class S>
+ V8_INLINE TracedGlobal& operator=(const TracedGlobal<S>& rhs);
/**
* Returns true if this TracedGlobal is empty, i.e., has not been assigned an
@@ -903,8 +969,8 @@ class V8_EXPORT TracedGlobal {
template <class S>
V8_INLINE bool operator==(const TracedGlobal<S>& that) const {
- internal::Address* a = reinterpret_cast<internal::Address*>(this->val_);
- internal::Address* b = reinterpret_cast<internal::Address*>(that.val_);
+ internal::Address* a = reinterpret_cast<internal::Address*>(**this);
+ internal::Address* b = reinterpret_cast<internal::Address*>(*that);
if (a == nullptr) return b == nullptr;
if (b == nullptr) return false;
return *a == *b;
@@ -912,8 +978,8 @@ class V8_EXPORT TracedGlobal {
template <class S>
V8_INLINE bool operator==(const Local<S>& that) const {
- internal::Address* a = reinterpret_cast<internal::Address*>(this->val_);
- internal::Address* b = reinterpret_cast<internal::Address*>(that.val_);
+ internal::Address* a = reinterpret_cast<internal::Address*>(**this);
+ internal::Address* b = reinterpret_cast<internal::Address*>(*that);
if (a == nullptr) return b == nullptr;
if (b == nullptr) return false;
return *a == *b;
@@ -954,11 +1020,32 @@ class V8_EXPORT TracedGlobal {
void* parameter, WeakCallbackInfo<void>::Callback callback);
private:
- V8_INLINE static T* New(Isolate* isolate, T* that, T** slot);
+ // Wrapping type used when clearing on destruction is required.
+ struct WrappedForDestruction {
+ T* value;
+
+ explicit WrappedForDestruction(T* val) : value(val) {}
+ ~WrappedForDestruction();
+ operator T*() const { return value; }
+ T* operator*() const { return value; }
+ T* operator->() const { return value; }
+ WrappedForDestruction& operator=(const WrappedForDestruction& other) {
+ value = other.value;
+ return *this;
+ }
+ WrappedForDestruction& operator=(T* val) {
+ value = val;
+ return *this;
+ }
+ };
+
+ V8_INLINE static T* New(Isolate* isolate, T* that, void* slot);
T* operator*() const { return this->val_; }
- T* val_ = nullptr;
+ typename std::conditional<
+ TracedGlobalTrait<TracedGlobal<T>>::kRequiresExplicitDestruction,
+ WrappedForDestruction, T*>::type val_{nullptr};
friend class EmbedderHeapTracer;
template <typename F>
@@ -3782,6 +3869,15 @@ class V8_EXPORT Object : public Value {
*/
bool IsConstructor();
+ /**
+ * True if this object can carry information relevant to the embedder in its
+ * embedder fields, false otherwise. This is generally true for objects
+ * constructed through function templates but also holds for other types where
+ * V8 automatically adds internal fields at compile time, such as e.g.
+ * v8::ArrayBuffer.
+ */
+ bool IsApiWrapper();
+
/**
* Call an Object as a function if a callback is set by the
* ObjectTemplate::SetCallAsFunctionHandler method.
@@ -4850,8 +4946,8 @@ class V8_EXPORT ArrayBuffer : public Object {
bool IsDetachable() const;
// TODO(913887): fix the use of 'neuter' in the API.
- V8_DEPRECATE_SOON("Use IsDetachable() instead.",
- inline bool IsNeuterable() const) {
+ V8_DEPRECATED("Use IsDetachable() instead.",
+ inline bool IsNeuterable() const) {
return IsDetachable();
}
@@ -4864,7 +4960,7 @@ class V8_EXPORT ArrayBuffer : public Object {
void Detach();
// TODO(913887): fix the use of 'neuter' in the API.
- V8_DEPRECATE_SOON("Use Detach() instead.", inline void Neuter()) { Detach(); }
+ V8_DEPRECATED("Use Detach() instead.", inline void Neuter()) { Detach(); }
/**
* Make this ArrayBuffer external. The pointer to underlying memory block
@@ -5502,6 +5598,32 @@ class V8_EXPORT RegExp : public Object {
static void CheckCast(Value* obj);
};
+/**
+ * An instance of the built-in FinalizationGroup constructor.
+ *
+ * This API is experimental and may change significantly.
+ */
+class V8_EXPORT FinalizationGroup : public Object {
+ public:
+ /**
+ * Runs the cleanup callback of the given FinalizationGroup.
+ *
+ * V8 will inform the embedder that there are finalizer callbacks to be
+ * called through HostCleanupFinalizationGroupCallback.
+ *
+ * HostCleanupFinalizationGroupCallback should schedule a task to
+ * call FinalizationGroup::Cleanup() at some point in the
+ * future. It's the embedder's responsibility to make this call at a
+ * time which does not interrupt synchronous ECMAScript code
+ * execution.
+ *
+ * If the result is Nothing<bool> then an exception has
+ * occurred. Otherwise the result is |true| if the cleanup callback
+ * was called successfully. The result is never |false|.
+ */
+ static V8_WARN_UNUSED_RESULT Maybe<bool> Cleanup(
+ Local<FinalizationGroup> finalization_group);
+};
/**
* A JavaScript value that wraps a C++ void*. This type of value is mainly used
@@ -6743,10 +6865,34 @@ typedef void* (*CreateHistogramCallback)(const char* name,
typedef void (*AddHistogramSampleCallback)(void* histogram, int sample);
+// --- Crashkeys Callback ---
+enum class CrashKeyId {
+ kIsolateAddress,
+ kReadonlySpaceFirstPageAddress,
+ kMapSpaceFirstPageAddress,
+ kCodeSpaceFirstPageAddress,
+};
+
+typedef void (*AddCrashKeyCallback)(CrashKeyId id, const std::string& value);
+
// --- Enter/Leave Script Callback ---
typedef void (*BeforeCallEnteredCallback)(Isolate*);
typedef void (*CallCompletedCallback)(Isolate*);
+/**
+ * HostCleanupFinalizationGroupCallback is called when we require the
+ * embedder to enqueue a task that would call
+ * FinalizationGroup::Cleanup().
+ *
+ * The FinalizationGroup is the one for which the embedder needs to
+ * call FinalizationGroup::Cleanup() on.
+ *
+ * The context provided is the one in which the FinalizationGroup was
+ * created in.
+ */
+typedef void (*HostCleanupFinalizationGroupCallback)(
+ Local<Context> context, Local<FinalizationGroup> fg);
+
/**
* HostImportModuleDynamicallyCallback is called when we require the
* embedder to load a module. This is used as part of the dynamic
@@ -7006,6 +7152,10 @@ typedef void (*WasmStreamingCallback)(const FunctionCallbackInfo<Value>&);
// --- Callback for checking if WebAssembly threads are enabled ---
typedef bool (*WasmThreadsEnabledCallback)(Local<Context> context);
+// --- Callback for loading source map file for WASM profiling support
+typedef Local<String> (*WasmLoadSourceMapCallback)(Isolate* isolate,
+ const char* name);
+
// --- Garbage Collection Callbacks ---
/**
@@ -7382,7 +7532,7 @@ class V8_EXPORT EmbedderHeapTracer {
/**
* Called at the beginning of a GC cycle.
*/
- V8_DEPRECATE_SOON("Use version with flags.", virtual void TracePrologue()) {}
+ V8_DEPRECATED("Use version with flags.", virtual void TracePrologue()) {}
virtual void TracePrologue(TraceFlags flags);
/**
@@ -7433,14 +7583,37 @@ class V8_EXPORT EmbedderHeapTracer {
/**
* Returns true if the TracedGlobal handle should be considered as root for
* the currently running non-tracing garbage collection and false otherwise.
+ * The default implementation will keep all TracedGlobal references as roots.
*
- * Default implementation will keep all TracedGlobal references as roots.
+ * If this returns false, then V8 may decide that the object referred to by
+ * such a handle is reclaimed. In that case:
+ * - No action is required if handles are used with destructors.
+ * - When run without destructors (by specializing
+ * |TracedGlobalTrait::kRequiresExplicitDestruction|) V8 calls
+ * |ResetHandleInNonTracingGC|.
+ *
+ * Note that the |handle| is different from the |TracedGlobal<T>| handle that
+ * the embedder holds for retaining the object. The embedder may use
+ * |TracedGlobal<T>::WrapperClassId()| to distinguish cases where it wants
+ * handles to be treated as roots from not being treated as roots.
*/
virtual bool IsRootForNonTracingGC(
const v8::TracedGlobal<v8::Value>& handle) {
return true;
}
+ /**
+ * Used in combination with |IsRootForNonTracingGC|. Called by V8 when an
+ * object that is backed by a handle is reclaimed by a non-tracing garbage
+ * collection. It is up to the embedder to reset the original handle.
+ *
+ * Note that the |handle| is different from the |TracedGlobal<T>| handle that
+ * the embedder holds for retaining the object. It is up to the embedder to
+ * find the original |TracedGlobal<T>| handle via the object or class id.
+ */
+ virtual void ResetHandleInNonTracingGC(
+ const v8::TracedGlobal<v8::Value>& handle) {}
+
/*
* Called by the embedder to immediately perform a full garbage collection.
*
@@ -7660,7 +7833,6 @@ class V8_EXPORT Isolate {
class V8_EXPORT SuppressMicrotaskExecutionScope {
public:
explicit SuppressMicrotaskExecutionScope(Isolate* isolate);
- explicit SuppressMicrotaskExecutionScope(MicrotaskQueue* microtask_queue);
~SuppressMicrotaskExecutionScope();
// Prevent copying of Scope objects.
@@ -7671,7 +7843,15 @@ class V8_EXPORT Isolate {
private:
internal::Isolate* const isolate_;
- internal::MicrotaskQueue* const microtask_queue_;
+ internal::Address previous_stack_height_;
+ static_assert(sizeof(internal::Address) ==
+ sizeof(internal::MicrotaskQueue*) &&
+ alignof(internal::Address) ==
+ alignof(internal::MicrotaskQueue*),
+ "The previous_stack_height_ field can replace the "
+ "microtask_queue_ field ABI-wise");
+
+ friend class internal::ThreadLocalTop;
};
/**
@@ -7785,9 +7965,10 @@ class V8_EXPORT Isolate {
kStringNormalize = 75,
kCallSiteAPIGetFunctionSloppyCall = 76,
kCallSiteAPIGetThisSloppyCall = 77,
+ kRegExpMatchAllWithNonGlobalRegExp = 78,
// If you add new values here, you'll also need to update Chromium's:
- // web_feature.mojom, UseCounterCallback.cpp, and enums.xml. V8 changes to
+ // web_feature.mojom, use_counter_callback.cc, and enums.xml. V8 changes to
// this list need to be landed first, then changes on the Chromium side.
kUseCounterFeatureCount // This enum value must be last.
};
@@ -7845,6 +8026,18 @@ class V8_EXPORT Isolate {
*/
static Isolate* GetCurrent();
+ /**
+ * Clears the set of objects held strongly by the heap. This set of
+ * objects are originally built when a WeakRef is created or
+ * successfully dereferenced.
+ *
+ * The embedder is expected to call this when a synchronous sequence
+ * of ECMAScript execution completes. It's the embedder's
+ * responsibility to make this call at a time which does not
+ * interrupt synchronous ECMAScript code execution.
+ */
+ void ClearKeptObjects();
+
/**
* Custom callback used by embedders to help V8 determine if it should abort
* when it throws and no internal handler is predicted to catch the
@@ -7858,6 +8051,14 @@ class V8_EXPORT Isolate {
void SetAbortOnUncaughtExceptionCallback(
AbortOnUncaughtExceptionCallback callback);
+ /**
+ * This specifies the callback to be called when finalization groups
+ * are ready to be cleaned up and require FinalizationGroup::Cleanup()
+ * to be called in a future task.
+ */
+ void SetHostCleanupFinalizationGroupCallback(
+ HostCleanupFinalizationGroupCallback callback);
+
/**
* This specifies the callback called by the upcoming dynamic
* import() language feature to load modules.
@@ -8411,6 +8612,13 @@ class V8_EXPORT Isolate {
void SetCreateHistogramFunction(CreateHistogramCallback);
void SetAddHistogramSampleFunction(AddHistogramSampleCallback);
+ /**
+ * Enables the host application to provide a mechanism for recording a
+ * predefined set of data as crash keys to be used in postmortem debugging in
+ * case of a crash.
+ */
+ void SetAddCrashKeyCallback(AddCrashKeyCallback);
+
/**
* Optional notification that the embedder is idle.
* V8 uses the notification to perform garbage collection.
@@ -8610,6 +8818,8 @@ class V8_EXPORT Isolate {
void SetWasmThreadsEnabledCallback(WasmThreadsEnabledCallback callback);
+ void SetWasmLoadSourceMapCallback(WasmLoadSourceMapCallback callback);
+
/**
* Check if V8 is dead and therefore unusable. This is the case after
* fatal errors such as out-of-memory situations.
@@ -8970,11 +9180,14 @@ class V8_EXPORT V8 {
internal::Address* handle);
static internal::Address* GlobalizeTracedReference(internal::Isolate* isolate,
internal::Address* handle,
- internal::Address* slot);
+ internal::Address* slot,
+ bool has_destructor);
static void MoveGlobalReference(internal::Address** from,
internal::Address** to);
static void MoveTracedGlobalReference(internal::Address** from,
internal::Address** to);
+ static void CopyTracedGlobalReference(const internal::Address* const* from,
+ internal::Address** to);
static internal::Address* CopyGlobalReference(internal::Address* from);
static void DisposeGlobal(internal::Address* global_handle);
static void DisposeTracedGlobal(internal::Address* global_handle);
@@ -10093,18 +10306,26 @@ Global<T>& Global<T>::operator=(Global<S>&& rhs) {
}
template <class T>
-T* TracedGlobal<T>::New(Isolate* isolate, T* that, T** slot) {
+TracedGlobal<T>::WrappedForDestruction::~WrappedForDestruction() {
+ if (value == nullptr) return;
+ V8::DisposeTracedGlobal(reinterpret_cast<internal::Address*>(value));
+ value = nullptr;
+}
+
+template <class T>
+T* TracedGlobal<T>::New(Isolate* isolate, T* that, void* slot) {
if (that == nullptr) return nullptr;
internal::Address* p = reinterpret_cast<internal::Address*>(that);
return reinterpret_cast<T*>(V8::GlobalizeTracedReference(
reinterpret_cast<internal::Isolate*>(isolate), p,
- reinterpret_cast<internal::Address*>(slot)));
+ reinterpret_cast<internal::Address*>(slot),
+ TracedGlobalTrait<TracedGlobal<T>>::kRequiresExplicitDestruction));
}
template <class T>
void TracedGlobal<T>::Reset() {
if (IsEmpty()) return;
- V8::DisposeTracedGlobal(reinterpret_cast<internal::Address*>(val_));
+ V8::DisposeTracedGlobal(reinterpret_cast<internal::Address*>(**this));
val_ = nullptr;
}
@@ -10118,19 +10339,23 @@ void TracedGlobal<T>::Reset(Isolate* isolate, const Local<S>& other) {
}
template <class T>
-TracedGlobal<T>::TracedGlobal(TracedGlobal&& other) : val_(other.val_) {
- if (other.val_ != nullptr) {
- V8::MoveTracedGlobalReference(
- reinterpret_cast<internal::Address**>(&other.val_),
- reinterpret_cast<internal::Address**>(&this->val_));
- other.val_ = nullptr;
- }
+template <class S>
+TracedGlobal<T>& TracedGlobal<T>::operator=(TracedGlobal<S>&& rhs) {
+ TYPE_CHECK(T, S);
+ *this = std::move(rhs.template As<T>());
+ return *this;
}
template <class T>
template <class S>
-TracedGlobal<T>& TracedGlobal<T>::operator=(TracedGlobal<S>&& rhs) {
+TracedGlobal<T>& TracedGlobal<T>::operator=(const TracedGlobal<S>& rhs) {
TYPE_CHECK(T, S);
+ *this = rhs.template As<T>();
+ return *this;
+}
+
+template <class T>
+TracedGlobal<T>& TracedGlobal<T>::operator=(TracedGlobal&& rhs) {
if (this != &rhs) {
this->Reset();
if (rhs.val_ != nullptr) {
@@ -10144,11 +10369,24 @@ TracedGlobal<T>& TracedGlobal<T>::operator=(TracedGlobal<S>&& rhs) {
return *this;
}
+template <class T>
+TracedGlobal<T>& TracedGlobal<T>::operator=(const TracedGlobal& rhs) {
+ if (this != &rhs) {
+ this->Reset();
+ if (rhs.val_ != nullptr) {
+ V8::CopyTracedGlobalReference(
+ reinterpret_cast<const internal::Address* const*>(&rhs.val_),
+ reinterpret_cast<internal::Address**>(&this->val_));
+ }
+ }
+ return *this;
+}
+
template <class T>
void TracedGlobal<T>::SetWrapperClassId(uint16_t class_id) {
typedef internal::Internals I;
if (IsEmpty()) return;
- internal::Address* obj = reinterpret_cast<internal::Address*>(this->val_);
+ internal::Address* obj = reinterpret_cast<internal::Address*>(**this);
uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + I::kNodeClassIdOffset;
*reinterpret_cast<uint16_t*>(addr) = class_id;
}
@@ -10157,7 +10395,7 @@ template <class T>
uint16_t TracedGlobal<T>::WrapperClassId() const {
typedef internal::Internals I;
if (IsEmpty()) return 0;
- internal::Address* obj = reinterpret_cast<internal::Address*>(this->val_);
+ internal::Address* obj = reinterpret_cast<internal::Address*>(**this);
uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + I::kNodeClassIdOffset;
return *reinterpret_cast<uint16_t*>(addr);
}
@@ -10166,7 +10404,7 @@ template <class T>
void TracedGlobal<T>::SetFinalizationCallback(
void* parameter, typename WeakCallbackInfo<void>::Callback callback) {
V8::SetFinalizationCallbackTraced(
- reinterpret_cast<internal::Address*>(this->val_), parameter, callback);
+ reinterpret_cast<internal::Address*>(**this), parameter, callback);
}
template <typename T>
@@ -11111,9 +11349,12 @@ int64_t Isolate::AdjustAmountOfExternalAllocatedMemory(
}
if (change_in_bytes < 0) {
- const int64_t lower_limit = *external_memory_limit + change_in_bytes;
- if (lower_limit > I::kExternalAllocationSoftLimit)
+ const int64_t lower_limit =
+ static_cast<int64_t>(static_cast<uint64_t>(*external_memory_limit) +
+ static_cast<uint64_t>(change_in_bytes));
+ if (lower_limit > I::kExternalAllocationSoftLimit) {
*external_memory_limit = lower_limit;
+ }
} else if (change_in_bytes > 0 && amount > *external_memory_limit) {
ReportExternalAllocationLimitReached();
}
diff --git a/deps/v8/include/v8config.h b/deps/v8/include/v8config.h
index 7bd2938225bc..7670c0e449c7 100644
--- a/deps/v8/include/v8config.h
+++ b/deps/v8/include/v8config.h
@@ -186,6 +186,8 @@
// V8_HAS_BUILTIN_SADD_OVERFLOW - __builtin_sadd_overflow() supported
// V8_HAS_BUILTIN_SSUB_OVERFLOW - __builtin_ssub_overflow() supported
// V8_HAS_BUILTIN_UADD_OVERFLOW - __builtin_uadd_overflow() supported
+// V8_HAS_COMPUTED_GOTO - computed goto/labels as values
+// supported
// V8_HAS_DECLSPEC_DEPRECATED - __declspec(deprecated) supported
// V8_HAS_DECLSPEC_NOINLINE - __declspec(noinline) supported
// V8_HAS_DECLSPEC_SELECTANY - __declspec(selectany) supported
@@ -214,6 +216,7 @@
# define V8_HAS_ATTRIBUTE_WARN_UNUSED_RESULT \
(__has_attribute(warn_unused_result))
+# define V8_HAS_BUILTIN_ASSUME_ALIGNED (__has_builtin(__builtin_assume_aligned))
# define V8_HAS_BUILTIN_BSWAP16 (__has_builtin(__builtin_bswap16))
# define V8_HAS_BUILTIN_BSWAP32 (__has_builtin(__builtin_bswap32))
# define V8_HAS_BUILTIN_BSWAP64 (__has_builtin(__builtin_bswap64))
@@ -226,6 +229,10 @@
# define V8_HAS_BUILTIN_SSUB_OVERFLOW (__has_builtin(__builtin_ssub_overflow))
# define V8_HAS_BUILTIN_UADD_OVERFLOW (__has_builtin(__builtin_uadd_overflow))
+// Clang has no __has_feature for computed gotos.
+// GCC doc: https://gcc.gnu.org/onlinedocs/gcc/Labels-as-Values.html
+# define V8_HAS_COMPUTED_GOTO 1
+
# if __cplusplus >= 201402L
# define V8_CAN_HAVE_DCHECK_IN_CONSTEXPR 1
# endif
@@ -256,12 +263,16 @@
# define V8_HAS_ATTRIBUTE_WARN_UNUSED_RESULT \
(!V8_CC_INTEL && V8_GNUC_PREREQ(4, 1, 0))
+# define V8_HAS_BUILTIN_ASSUME_ALIGNED (V8_GNUC_PREREQ(4, 7, 0))
# define V8_HAS_BUILTIN_CLZ (V8_GNUC_PREREQ(3, 4, 0))
# define V8_HAS_BUILTIN_CTZ (V8_GNUC_PREREQ(3, 4, 0))
# define V8_HAS_BUILTIN_EXPECT (V8_GNUC_PREREQ(2, 96, 0))
# define V8_HAS_BUILTIN_FRAME_ADDRESS (V8_GNUC_PREREQ(2, 96, 0))
# define V8_HAS_BUILTIN_POPCOUNT (V8_GNUC_PREREQ(3, 4, 0))
+// GCC doc: https://gcc.gnu.org/onlinedocs/gcc/Labels-as-Values.html
+#define V8_HAS_COMPUTED_GOTO (V8_GNUC_PREREQ(2, 0, 0))
+
#endif
#if defined(_MSC_VER)
@@ -291,6 +302,12 @@
# define V8_INLINE inline
#endif
+#if V8_HAS_BUILTIN_ASSUME_ALIGNED
+# define V8_ASSUME_ALIGNED(ptr, alignment) \
+ __builtin_assume_aligned((ptr), (alignment))
+#else
+# define V8_ASSUME_ALIGNED(ptr) (ptr)
+#endif
// A macro used to tell the compiler to never inline a particular function.
// Don't bother for debug builds.
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment