// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Note 1: Any file that includes this one should include object-macros-undef.h
// at the bottom.

// Note 2: This file is deliberately missing the include guards (the undeffing
// approach wouldn't work otherwise).
//
// PRESUBMIT_INTENTIONALLY_MISSING_INCLUDE_GUARD

// The accessors with RELAXED_, ACQUIRE_, and RELEASE_ prefixes should be used
// for fields that can be written to and read from multiple threads at the same
// time. See comments in src/base/atomicops.h for the memory ordering
// semantics.

#include "src/base/memory.h"

// Since this changes visibility, it should always be last in a class
// definition.
#define OBJECT_CONSTRUCTORS(Type, ...)             \
 public:                                           \
  constexpr Type() : __VA_ARGS__() {}              \
                                                   \
 protected:                                        \
  template <typename TFieldType, int kFieldOffset> \
  friend class TaggedField;                        \
                                                   \
  explicit inline Type(Address ptr)

#define OBJECT_CONSTRUCTORS_IMPL(Type, Super) \
  inline Type::Type(Address ptr) : Super(ptr) { SLOW_DCHECK(Is##Type()); }
// In these cases, we don't have our own instance type to check, so check the
// supertype instead. This happens for types denoting a NativeContext-dependent
// set of maps.
#define OBJECT_CONSTRUCTORS_IMPL_CHECK_SUPER(Type, Super) \
  inline Type::Type(Address ptr) : Super(ptr) { SLOW_DCHECK(Is##Super()); }

#define NEVER_READ_ONLY_SPACE   \
  inline Heap* GetHeap() const; \
  inline Isolate* GetIsolate() const;

// TODO(leszeks): Add checks in the factory that we never allocate these
// objects in RO space.
#define NEVER_READ_ONLY_SPACE_IMPL(Type)                                   \
  Heap* Type::GetHeap() const { return GetHeapFromWritableObject(*this); } \
  Isolate* Type::GetIsolate() const {                                      \
    return GetIsolateFromWritableObject(*this);                            \
  }

#define DECL_PRIMITIVE_ACCESSORS(name, type) \
  inline type name() const;                  \
  inline void set_##name(type value);

#define DECL_SYNCHRONIZED_PRIMITIVE_ACCESSORS(name, type) \
  inline type synchronized_##name() const;                \
  inline void synchronized_set_##name(type value);

#define DECL_BOOLEAN_ACCESSORS(name) DECL_PRIMITIVE_ACCESSORS(name, bool)

#define DECL_INT_ACCESSORS(name) DECL_PRIMITIVE_ACCESSORS(name, int)

#define DECL_SYNCHRONIZED_INT_ACCESSORS(name) \
  DECL_SYNCHRONIZED_PRIMITIVE_ACCESSORS(name, int)

#define DECL_INT32_ACCESSORS(name) DECL_PRIMITIVE_ACCESSORS(name, int32_t)

#define DECL_UINT16_ACCESSORS(name) \
  inline uint16_t name() const;     \
  inline void set_##name(int value);

#define DECL_INT16_ACCESSORS(name) \
  inline int16_t name() const;     \
  inline void set_##name(int16_t value);

#define DECL_UINT8_ACCESSORS(name) \
  inline uint8_t name() const;     \
  inline void set_##name(int value);
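
// Illustrative example (not part of V8): a hypothetical class would combine
// the declaration macros above with OBJECT_CONSTRUCTORS, keeping the
// constructor macro last because it changes member visibility:
//
//   class JSExample : public JSObject {
//    public:
//     DECL_INT_ACCESSORS(counter)       // declares counter() / set_counter()
//     DECL_BOOLEAN_ACCESSORS(is_dirty)  // declares is_dirty() / set_is_dirty()
//
//     OBJECT_CONSTRUCTORS(JSExample, JSObject);
//   };
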
// TODO(ishell): eventually isolate-less getters should not be used anymore.
// In full pointer mode the C++ compiler should optimize away the unused
// isolate parameter.
#define DECL_GETTER(name, type) \
  inline type name() const;     \
  inline type name(IsolateRoot isolate) const;

#define DEF_GETTER(holder, name, type)                  \
  type holder::name() const {                           \
    IsolateRoot isolate = GetIsolateForPtrCompr(*this); \
    return holder::name(isolate);                       \
  }                                                     \
  type holder::name(IsolateRoot isolate) const

#define DECL_ACCESSORS(name, type)   \
  DECL_GETTER(name, type)            \
  inline void set_##name(type value, \
                         WriteBarrierMode mode = UPDATE_WRITE_BARRIER);

#define DECL_ACCESSORS_LOAD_TAG(name, type, tag_type) \
  inline type name(tag_type tag) const;               \
  inline type name(IsolateRoot isolate, tag_type) const;

#define DECL_ACCESSORS_STORE_TAG(name, type, tag_type) \
  inline void set_##name(type value, tag_type,         \
                         WriteBarrierMode mode = UPDATE_WRITE_BARRIER);

#define DECL_RELAXED_GETTER(name, type) \
  DECL_ACCESSORS_LOAD_TAG(name, type, RelaxedLoadTag)

#define DECL_RELAXED_SETTER(name, type) \
  DECL_ACCESSORS_STORE_TAG(name, type, RelaxedStoreTag)

#define DECL_RELAXED_ACCESSORS(name, type) \
  DECL_RELAXED_GETTER(name, type)          \
  DECL_RELAXED_SETTER(name, type)

#define DECL_ACQUIRE_GETTER(name, type) \
  DECL_ACCESSORS_LOAD_TAG(name, type, AcquireLoadTag)

#define DECL_RELEASE_SETTER(name, type) \
  DECL_ACCESSORS_STORE_TAG(name, type, ReleaseStoreTag)

#define DECL_RELEASE_ACQUIRE_ACCESSORS(name, type) \
  DECL_ACQUIRE_GETTER(name, type)                  \
  DECL_RELEASE_SETTER(name, type)

#define DECL_CAST(Type)                                 \
  V8_INLINE static Type cast(Object object);            \
  V8_INLINE static Type unchecked_cast(Object object) { \
    return bit_cast<Type>(object);                      \
  }

#define CAST_ACCESSOR(Type) \
  Type Type::cast(Object object) { return Type(object.ptr()); }

#define INT_ACCESSORS(holder, name, offset)                   \
  int holder::name() const { return ReadField<int>(offset); } \
  void holder::set_##name(int value) { WriteField<int>(offset, value); }

#define INT32_ACCESSORS(holder, name, offset)                         \
  int32_t holder::name() const { return ReadField<int32_t>(offset); } \
  void holder::set_##name(int32_t value) { WriteField<int32_t>(offset, value); }

#define RELAXED_INT32_ACCESSORS(holder, name, offset) \
  int32_t holder::name() const {                      \
    return RELAXED_READ_INT32_FIELD(*this, offset);   \
  }                                                   \
  void holder::set_##name(int32_t value) {            \
    RELAXED_WRITE_INT32_FIELD(*this, offset, value);  \
  }

#define UINT16_ACCESSORS(holder, name, offset)                          \
  uint16_t holder::name() const { return ReadField<uint16_t>(offset); } \
  void holder::set_##name(int value) {                                  \
    DCHECK_GE(value, 0);                                                \
    DCHECK_LE(value, static_cast<uint16_t>(-1));                        \
    WriteField<uint16_t>(offset, value);                                \
  }

#define UINT8_ACCESSORS(holder, name, offset)                         \
  uint8_t holder::name() const { return ReadField<uint8_t>(offset); } \
  void holder::set_##name(int value) {                                \
    DCHECK_GE(value, 0);                                              \
    DCHECK_LE(value, static_cast<uint8_t>(-1));                       \
    WriteField<uint8_t>(offset, value);                               \
  }

#define ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, \
                           set_condition)                             \
  DEF_GETTER(holder, name, type) {                                    \
    type value = TaggedField<type, offset>::load(isolate, *this);     \
    DCHECK(get_condition);                                            \
    return value;                                                     \
  }                                                                   \
  void holder::set_##name(type value, WriteBarrierMode mode) {        \
    DCHECK(set_condition);                                            \
    TaggedField<type, offset>::store(*this, value);                   \
    CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode);            \
  }
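
// Illustrative example (not part of V8): DEF_GETTER emits the isolate-less
// overload and leaves the body of the isolate-taking overload to the user,
// so a definition for a hypothetical field might read:
//
//   DEF_GETTER(JSExample, payload, Object) {
//     return TaggedField<Object, kPayloadOffset>::load(isolate, *this);
//   }
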
#define ACCESSORS_CHECKED(holder, name, type, offset, condition) \
  ACCESSORS_CHECKED2(holder, name, type, offset, condition, condition)

#define ACCESSORS(holder, name, type, offset) \
  ACCESSORS_CHECKED(holder, name, type, offset, true)

#define RELAXED_ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, \
                                   set_condition)                             \
  type holder::name(RelaxedLoadTag tag) const {                               \
    IsolateRoot isolate = GetIsolateForPtrCompr(*this);                       \
    return holder::name(isolate, tag);                                        \
  }                                                                           \
  type holder::name(IsolateRoot isolate, RelaxedLoadTag) const {              \
    type value = TaggedField<type, offset>::Relaxed_Load(isolate, *this);     \
    DCHECK(get_condition);                                                    \
    return value;                                                             \
  }                                                                           \
  void holder::set_##name(type value, RelaxedStoreTag,                        \
                          WriteBarrierMode mode) {                            \
    DCHECK(set_condition);                                                    \
    TaggedField<type, offset>::Relaxed_Store(*this, value);                   \
    CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode);                    \
  }

#define RELAXED_ACCESSORS_CHECKED(holder, name, type, offset, condition) \
  RELAXED_ACCESSORS_CHECKED2(holder, name, type, offset, condition, condition)

#define RELAXED_ACCESSORS(holder, name, type, offset) \
  RELAXED_ACCESSORS_CHECKED(holder, name, type, offset, true)

#define RELEASE_ACQUIRE_ACCESSORS_CHECKED2(holder, name, type, offset,    \
                                           get_condition, set_condition)  \
  type holder::name(AcquireLoadTag tag) const {                           \
    IsolateRoot isolate = GetIsolateForPtrCompr(*this);                   \
    return holder::name(isolate, tag);                                    \
  }                                                                       \
  type holder::name(IsolateRoot isolate, AcquireLoadTag) const {          \
    type value = TaggedField<type, offset>::Acquire_Load(isolate, *this); \
    DCHECK(get_condition);                                                \
    return value;                                                         \
  }                                                                       \
  void holder::set_##name(type value, ReleaseStoreTag,                    \
                          WriteBarrierMode mode) {                        \
    DCHECK(set_condition);                                                \
    TaggedField<type, offset>::Release_Store(*this, value);               \
    CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode);                \
  }

#define RELEASE_ACQUIRE_ACCESSORS_CHECKED(holder, name, type, offset,       \
                                          condition)                        \
  RELEASE_ACQUIRE_ACCESSORS_CHECKED2(holder, name, type, offset, condition, \
                                     condition)

#define RELEASE_ACQUIRE_ACCESSORS(holder, name, type, offset) \
  RELEASE_ACQUIRE_ACCESSORS_CHECKED(holder, name, type, offset, true)

#define WEAK_ACCESSORS_CHECKED2(holder, name, offset, get_condition,  \
                                set_condition)                        \
  DEF_GETTER(holder, name, MaybeObject) {                             \
    MaybeObject value =                                               \
        TaggedField<MaybeObject, offset>::load(isolate, *this);       \
    DCHECK(get_condition);                                            \
    return value;                                                     \
  }                                                                   \
  void holder::set_##name(MaybeObject value, WriteBarrierMode mode) { \
    DCHECK(set_condition);                                            \
    TaggedField<MaybeObject, offset>::store(*this, value);            \
    CONDITIONAL_WEAK_WRITE_BARRIER(*this, offset, value, mode);       \
  }

#define WEAK_ACCESSORS_CHECKED(holder, name, offset, condition) \
  WEAK_ACCESSORS_CHECKED2(holder, name, offset, condition, condition)

#define WEAK_ACCESSORS(holder, name, offset) \
  WEAK_ACCESSORS_CHECKED(holder, name, offset, true)

#define SYNCHRONIZED_WEAK_ACCESSORS_CHECKED2(holder, name, offset,         \
                                             get_condition, set_condition) \
  DEF_GETTER(holder, name, MaybeObject) {                                  \
    MaybeObject value =                                                    \
        TaggedField<MaybeObject, offset>::Acquire_Load(isolate, *this);    \
    DCHECK(get_condition);                                                 \
    return value;                                                          \
  }                                                                        \
  void holder::set_##name(MaybeObject value, WriteBarrierMode mode) {      \
    DCHECK(set_condition);                                                 \
    TaggedField<MaybeObject, offset>::Release_Store(*this, value);         \
    CONDITIONAL_WEAK_WRITE_BARRIER(*this, offset, value, mode);            \
  }
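
// Illustrative example (not part of V8): a field that one thread publishes
// and another thread reads would pair the declaration and definition macros,
// then be accessed with the matching tags:
//
//   // In the class body:
//   //   DECL_RELEASE_ACQUIRE_ACCESSORS(shared_data, Object)
//   // In the -inl.h file:
//   //   RELEASE_ACQUIRE_ACCESSORS(JSExample, shared_data, Object,
//   //                             kSharedDataOffset)
//
//   obj.set_shared_data(value, kReleaseStore);     // publishing thread
//   Object data = obj.shared_data(kAcquireLoad);   // reading thread
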
#define SYNCHRONIZED_WEAK_ACCESSORS_CHECKED(holder, name, offset, condition) \
  SYNCHRONIZED_WEAK_ACCESSORS_CHECKED2(holder, name, offset, condition,      \
                                       condition)

#define SYNCHRONIZED_WEAK_ACCESSORS(holder, name, offset) \
  SYNCHRONIZED_WEAK_ACCESSORS_CHECKED(holder, name, offset, true)

// Accessors that read a Smi field as an int and write an int as a Smi.
#define SMI_ACCESSORS_CHECKED(holder, name, offset, condition)   \
  int holder::name() const {                                     \
    DCHECK(condition);                                           \
    Smi value = TaggedField<Smi, offset>::load(*this);           \
    return value.value();                                        \
  }                                                              \
  void holder::set_##name(int value) {                           \
    DCHECK(condition);                                           \
    TaggedField<Smi, offset>::store(*this, Smi::FromInt(value)); \
  }

#define SMI_ACCESSORS(holder, name, offset) \
  SMI_ACCESSORS_CHECKED(holder, name, offset, true)

#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)                 \
  int holder::synchronized_##name() const {                              \
    Smi value = TaggedField<Smi, offset>::Acquire_Load(*this);           \
    return value.value();                                                \
  }                                                                      \
  void holder::synchronized_set_##name(int value) {                      \
    TaggedField<Smi, offset>::Release_Store(*this, Smi::FromInt(value)); \
  }

#define RELAXED_SMI_ACCESSORS(holder, name, offset)                      \
  int holder::relaxed_read_##name() const {                              \
    Smi value = TaggedField<Smi, offset>::Relaxed_Load(*this);           \
    return value.value();                                                \
  }                                                                      \
  void holder::relaxed_write_##name(int value) {                         \
    TaggedField<Smi, offset>::Relaxed_Store(*this, Smi::FromInt(value)); \
  }

#define BOOL_GETTER(holder, field, name, offset) \
  bool holder::name() const { return BooleanBit::get(field(), offset); }

#define BOOL_ACCESSORS(holder, field, name, offset)                      \
  bool holder::name() const { return BooleanBit::get(field(), offset); } \
  void holder::set_##name(bool value) {                                  \
    set_##field(BooleanBit::set(field(), offset, value));                \
  }

#define BIT_FIELD_ACCESSORS(holder, field, name, BitField)      \
  typename BitField::FieldType holder::name() const {           \
    return BitField::decode(field());                           \
  }                                                             \
  void holder::set_##name(typename BitField::FieldType value) { \
    set_##field(BitField::update(field(), value));              \
  }

#define INSTANCE_TYPE_CHECKER(type, forinstancetype)    \
  V8_INLINE bool Is##type(InstanceType instance_type) { \
    return instance_type == forinstancetype;            \
  }
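
// Illustrative example (not part of V8): BIT_FIELD_ACCESSORS composes with a
// base::BitField descriptor, so a two-bit sub-field of a hypothetical flags()
// word could be exposed as typed accessors:
//
//   using ModeBits = base::BitField<int, /*shift=*/0, /*size=*/2>;
//   BIT_FIELD_ACCESSORS(JSExample, flags, mode, ModeBits)
//   // defines int JSExample::mode() and void JSExample::set_mode(int).
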
#define TYPE_CHECKER(type, ...)                                         \
  DEF_GETTER(HeapObject, Is##type, bool) {                              \
    return InstanceTypeChecker::Is##type(map(isolate).instance_type()); \
  }

#define RELAXED_INT16_ACCESSORS(holder, name, offset) \
  int16_t holder::name() const {                      \
    return RELAXED_READ_INT16_FIELD(*this, offset);   \
  }                                                   \
  void holder::set_##name(int16_t value) {            \
    RELAXED_WRITE_INT16_FIELD(*this, offset, value);  \
  }

#define FIELD_ADDR(p, offset) ((p).ptr() + offset - kHeapObjectTag)

#define ACQUIRE_READ_FIELD(p, offset) \
  TaggedField<Object>::Acquire_Load(p, offset)

#define RELAXED_READ_FIELD(p, offset) \
  TaggedField<Object>::Relaxed_Load(p, offset)

#define RELAXED_READ_WEAK_FIELD(p, offset) \
  TaggedField<MaybeObject>::Relaxed_Load(p, offset)

#define WRITE_FIELD(p, offset, value) \
  TaggedField<Object>::store(p, offset, value)

#define RELEASE_WRITE_FIELD(p, offset, value) \
  TaggedField<Object>::Release_Store(p, offset, value)

#define RELAXED_WRITE_FIELD(p, offset, value) \
  TaggedField<Object>::Relaxed_Store(p, offset, value)

#define RELAXED_WRITE_WEAK_FIELD(p, offset, value) \
  TaggedField<MaybeObject>::Relaxed_Store(p, offset, value)

#ifdef V8_DISABLE_WRITE_BARRIERS
#define WRITE_BARRIER(object, offset, value)
#else
#define WRITE_BARRIER(object, offset, value)                         \
  do {                                                               \
    DCHECK_NOT_NULL(GetHeapFromWritableObject(object));              \
    WriteBarrier::Marking(object, (object).RawField(offset), value); \
    GenerationalBarrier(object, (object).RawField(offset), value);   \
  } while (false)
#endif

#ifdef V8_DISABLE_WRITE_BARRIERS
#define WEAK_WRITE_BARRIER(object, offset, value)
#else
#define WEAK_WRITE_BARRIER(object, offset, value)                             \
  do {                                                                        \
    DCHECK_NOT_NULL(GetHeapFromWritableObject(object));                       \
    WriteBarrier::Marking(object, (object).RawMaybeWeakField(offset), value); \
    GenerationalBarrier(object, (object).RawMaybeWeakField(offset), value);   \
  } while (false)
#endif

#ifdef V8_DISABLE_WRITE_BARRIERS
#define EPHEMERON_KEY_WRITE_BARRIER(object, offset, value)
#elif V8_ENABLE_UNCONDITIONAL_WRITE_BARRIERS
#define EPHEMERON_KEY_WRITE_BARRIER(object, offset, value) \
  WRITE_BARRIER(object, offset, value)
#else
#define EPHEMERON_KEY_WRITE_BARRIER(object, offset, value)                    \
  do {                                                                        \
    DCHECK_NOT_NULL(GetHeapFromWritableObject(object));                       \
    EphemeronHashTable table = EphemeronHashTable::cast(object);              \
    WriteBarrier::Marking(object, (object).RawField(offset), value);          \
    GenerationalEphemeronKeyBarrier(table, (object).RawField(offset), value); \
  } while (false)
#endif

#ifdef V8_DISABLE_WRITE_BARRIERS
#define CONDITIONAL_WRITE_BARRIER(object, offset, value, mode)
#elif V8_ENABLE_UNCONDITIONAL_WRITE_BARRIERS
#define CONDITIONAL_WRITE_BARRIER(object, offset, value, mode) \
  WRITE_BARRIER(object, offset, value)
#else
#define CONDITIONAL_WRITE_BARRIER(object, offset, value, mode)           \
  do {                                                                   \
    DCHECK_NOT_NULL(GetHeapFromWritableObject(object));                  \
    DCHECK_NE(mode, UPDATE_EPHEMERON_KEY_WRITE_BARRIER);                 \
    if (mode != SKIP_WRITE_BARRIER) {                                    \
      if (mode == UPDATE_WRITE_BARRIER) {                                \
        WriteBarrier::Marking(object, (object).RawField(offset), value); \
      }                                                                  \
      GenerationalBarrier(object, (object).RawField(offset), value);     \
    }                                                                    \
  } while (false)
#endif
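
// Illustrative example (not part of V8 itself): callers that know the stored
// value needs no barrier (e.g. a Smi, which is not a heap pointer) may pass
// SKIP_WRITE_BARRIER to a setter that takes a WriteBarrierMode, assuming a
// hypothetical set_payload defined via DECL_ACCESSORS/ACCESSORS:
//
//   obj.set_payload(Smi::FromInt(42), SKIP_WRITE_BARRIER);
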
#ifdef V8_DISABLE_WRITE_BARRIERS
#define CONDITIONAL_WEAK_WRITE_BARRIER(object, offset, value, mode)
#elif V8_ENABLE_UNCONDITIONAL_WRITE_BARRIERS
#define CONDITIONAL_WEAK_WRITE_BARRIER(object, offset, value, mode) \
  WRITE_BARRIER(object, offset, value)
#else
#define CONDITIONAL_WEAK_WRITE_BARRIER(object, offset, value, mode)           \
  do {                                                                        \
    DCHECK_NOT_NULL(GetHeapFromWritableObject(object));                       \
    DCHECK_NE(mode, UPDATE_EPHEMERON_KEY_WRITE_BARRIER);                      \
    if (mode != SKIP_WRITE_BARRIER) {                                         \
      if (mode == UPDATE_WRITE_BARRIER) {                                     \
        WriteBarrier::Marking(object, (object).RawMaybeWeakField(offset),     \
                              value);                                         \
      }                                                                       \
      GenerationalBarrier(object, (object).RawMaybeWeakField(offset), value); \
    }                                                                         \
  } while (false)
#endif

#ifdef V8_DISABLE_WRITE_BARRIERS
#define CONDITIONAL_EPHEMERON_KEY_WRITE_BARRIER(object, offset, value, mode)
#else
#define CONDITIONAL_EPHEMERON_KEY_WRITE_BARRIER(object, offset, value, mode) \
  do {                                                                       \
    DCHECK_NOT_NULL(GetHeapFromWritableObject(object));                      \
    DCHECK_NE(mode, UPDATE_EPHEMERON_KEY_WRITE_BARRIER);                     \
    EphemeronHashTable table = EphemeronHashTable::cast(object);             \
    if (mode != SKIP_WRITE_BARRIER) {                                        \
      if (mode == UPDATE_WRITE_BARRIER) {                                    \
        WriteBarrier::Marking(object, (object).RawField(offset), value);     \
      }                                                                      \
      GenerationalEphemeronKeyBarrier(table, (object).RawField(offset),      \
                                      value);                                \
    }                                                                        \
  } while (false)
#endif

#define ACQUIRE_READ_INT32_FIELD(p, offset) \
  static_cast<int32_t>(base::Acquire_Load(  \
      reinterpret_cast<const base::Atomic32*>(FIELD_ADDR(p, offset))))

#define RELAXED_WRITE_INT8_FIELD(p, offset, value)                             \
  base::Relaxed_Store(reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
                      static_cast<base::Atomic8>(value));
#define RELAXED_READ_INT8_FIELD(p, offset) \
  static_cast<int8_t>(base::Relaxed_Load(  \
      reinterpret_cast<const base::Atomic8*>(FIELD_ADDR(p, offset))))

#define RELAXED_READ_INT16_FIELD(p, offset) \
  static_cast<int16_t>(base::Relaxed_Load(  \
      reinterpret_cast<const base::Atomic16*>(FIELD_ADDR(p, offset))))

#define RELAXED_WRITE_INT16_FIELD(p, offset, value)             \
  base::Relaxed_Store(                                          \
      reinterpret_cast<base::Atomic16*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic16>(value));

#define RELAXED_READ_UINT32_FIELD(p, offset) \
  static_cast<uint32_t>(base::Relaxed_Load(  \
      reinterpret_cast<const base::Atomic32*>(FIELD_ADDR(p, offset))))

#define RELAXED_WRITE_UINT32_FIELD(p, offset, value)            \
  base::Relaxed_Store(                                          \
      reinterpret_cast<base::Atomic32*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic32>(value));

#define RELAXED_READ_INT32_FIELD(p, offset) \
  static_cast<int32_t>(base::Relaxed_Load(  \
      reinterpret_cast<const base::Atomic32*>(FIELD_ADDR(p, offset))))

#define RELEASE_WRITE_INT32_FIELD(p, offset, value)             \
  base::Release_Store(                                          \
      reinterpret_cast<base::Atomic32*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic32>(value))

#define RELAXED_WRITE_INT32_FIELD(p, offset, value)             \
  base::Relaxed_Store(                                          \
      reinterpret_cast<base::Atomic32*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic32>(value));

#define RELAXED_READ_BYTE_FIELD(p, offset) \
  static_cast<byte>(base::Relaxed_Load(    \
      reinterpret_cast<const base::Atomic8*>(FIELD_ADDR(p, offset))))

#define RELAXED_WRITE_BYTE_FIELD(p, offset, value)                             \
  base::Relaxed_Store(reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
                      static_cast<base::Atomic8>(value));
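
// Illustrative expansion (not part of V8 itself): FIELD_ADDR subtracts
// kHeapObjectTag because a tagged HeapObject pointer is the object's address
// plus one, so RELAXED_READ_INT32_FIELD(obj, kCountOffset), for a
// hypothetical kCountOffset, expands to:
//
//   static_cast<int32_t>(base::Relaxed_Load(
//       reinterpret_cast<const base::Atomic32*>(
//           obj.ptr() + kCountOffset - kHeapObjectTag)))
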
#ifdef OBJECT_PRINT
#define DECL_PRINTER(Name) void Name##Print(std::ostream& os);  // NOLINT
#else
#define DECL_PRINTER(Name)
#endif

#ifdef VERIFY_HEAP
#define DECL_VERIFIER(Name) void Name##Verify(Isolate* isolate);
#define EXPORT_DECL_VERIFIER(Name) \
  V8_EXPORT_PRIVATE void Name##Verify(Isolate* isolate);
#else
#define DECL_VERIFIER(Name)
#define EXPORT_DECL_VERIFIER(Name)
#endif

#define DEFINE_DEOPT_ELEMENT_ACCESSORS(name, type) \
  type DeoptimizationData::name() const {          \
    return type::cast(get(k##name##Index));        \
  }                                                \
  void DeoptimizationData::Set##name(type value) { set(k##name##Index, value); }

#define DEFINE_DEOPT_ENTRY_ACCESSORS(name, type)                \
  type DeoptimizationData::name(int i) const {                  \
    return type::cast(get(IndexForEntry(i) + k##name##Offset)); \
  }                                                             \
  void DeoptimizationData::Set##name(int i, type value) {       \
    set(IndexForEntry(i) + k##name##Offset, value);             \
  }

#define TQ_OBJECT_CONSTRUCTORS(Type)               \
 public:                                           \
  constexpr Type() = default;                      \
                                                   \
 protected:                                        \
  template <typename TFieldType, int kFieldOffset> \
  friend class TaggedField;                        \
                                                   \
  inline explicit Type(Address ptr);               \
  friend class TorqueGenerated##Type<Type, Super>;

#define TQ_OBJECT_CONSTRUCTORS_IMPL(Type) \
  inline Type::Type(Address ptr)          \
      : TorqueGenerated##Type<Type, Type::Super>(ptr) {}
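
// Illustrative example (not part of V8 itself): a class whose layout is
// generated by Torque pairs the two macros above, with the header relying on
// the TorqueGenerated##Type base class to provide Super:
//
//   class JSExample : public TorqueGeneratedJSExample<JSExample, JSObject> {
//    public:
//     // ... handwritten members ...
//     TQ_OBJECT_CONSTRUCTORS(JSExample)
//   };
//
//   // In the -inl.h file:
//   TQ_OBJECT_CONSTRUCTORS_IMPL(JSExample)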