/external/v8/src/crankshaft/arm64/ |
D | delayed-masm-arm64.cc
  |   99  if ((pending_ == kLoad) &&  in Load()
  |  124  pending_ = kLoad;  in Load()
  |  175  case kLoad:  in EmitPending()
|
D | delayed-masm-arm64.h | 135 kLoad, kStore, enumerator
|
/external/v8/src/runtime/ |
D | runtime-classes.cc
  |  240  enum class SuperMode { kLoad, kStore };  enumerator
  |  255  mode == SuperMode::kLoad ? MessageTemplate::kNonObjectPropertyLoad  in GetSuperHolder()
  |  272  GetSuperHolder(isolate, receiver, home_object, SuperMode::kLoad, name, 0),  in LoadFromSuper()
  |  287  GetSuperHolder(isolate, receiver, home_object, SuperMode::kLoad,  in LoadElementFromSuper()
|
/external/v8/src/compiler/ |
D | access-info.cc
  |   51  case AccessMode::kLoad:  in operator <<()
  |  279  if (access_mode == AccessMode::kLoad &&  in ComputePropertyAccessInfo()
  |  367  access_mode == AccessMode::kLoad  in ComputePropertyAccessInfo()
  |  381  if (access_mode == AccessMode::kLoad) {  in ComputePropertyAccessInfo()
|
D | js-native-context-specialization.cc
  |   193  receiver_map, factory()->has_instance_symbol(), AccessMode::kLoad,  in ReduceJSInstanceOf()
  |   368  if (access_mode == AccessMode::kLoad) {  in ReduceGlobalAccess()
  |   520  return ReduceGlobalAccess(node, nullptr, nullptr, name, AccessMode::kLoad);  in ReduceJSLoadGlobal()
  |   875  AccessMode::kLoad, p.language_mode());  in ReduceJSLoadNamed()
  |  1295  return ReduceKeyedAccess(node, index, value, nexus, AccessMode::kLoad,  in ReduceJSLoadProperty()
  |  1333  DCHECK_EQ(AccessMode::kLoad, access_mode);  in BuildPropertyAccess()
  |  1351  case AccessMode::kLoad: {  in BuildPropertyAccess()
  |  1430  if (access_mode == AccessMode::kLoad) {  in BuildPropertyAccess()
  |  1484  if (access_mode == AccessMode::kLoad) {  in BuildPropertyAccess()
  |  1873  case AccessMode::kLoad: {  in BuildElementAccess()
  |  [all …]
|
D | access-info.h | 30 enum class AccessMode { kLoad, kStore, kStoreInLiteral }; enumerator
|
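The AccessMode enumerator declared in access-info.h above is dispatched on with plain switch statements, as the `case AccessMode::kLoad:` snippets from access-info.cc and js-native-context-specialization.cc show. A minimal self-contained sketch of that dispatch pattern (the PrintMode helper and the main() driver are invented for illustration; only the enum itself is taken from the listing):

    #include <iostream>

    enum class AccessMode { kLoad, kStore, kStoreInLiteral };

    // Hypothetical helper mirroring the "case AccessMode::kLoad:" dispatch
    // pattern seen in the listing; not V8's actual operator<<.
    const char* PrintMode(AccessMode mode) {
      switch (mode) {
        case AccessMode::kLoad:
          return "load";
        case AccessMode::kStore:
          return "store";
        case AccessMode::kStoreInLiteral:
          return "store-in-literal";
      }
      return "unknown";
    }

    int main() {
      std::cout << PrintMode(AccessMode::kLoad) << "\n";  // prints "load"
      return 0;
    }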
D | machine-operator.cc
  |   38  DCHECK(IrOpcode::kLoad == op->opcode() ||  in LoadRepresentationOf()
  |  462  IrOpcode::kLoad, \
  |  490  Load##Type##Operator kLoad##Type; \
  |  702  return &cache_.kLoad##Type; \
|
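The `kLoad##Type` lines from machine-operator.cc above come from a token-pasting macro that stamps out one cached Load operator per machine type. A rough sketch of that `##` pattern, assuming an invented MACHINE_TYPE_LIST and simplified member types (the real file declares Load##Type##Operator classes rather than ints):

    #include <cstdio>

    // Invented X-macro list standing in for V8's machine-type list.
    #define MACHINE_TYPE_LIST(V) V(Int32) V(Float64)

    struct OperatorCache {
      // "kLoad" ## "Int32" becomes the member name kLoadInt32, mirroring
      // "Load##Type##Operator kLoad##Type;" in the listing above.
    #define DECLARE_LOAD(Type) int kLoad##Type = 0;
      MACHINE_TYPE_LIST(DECLARE_LOAD)
    #undef DECLARE_LOAD
    };

    int main() {
      OperatorCache cache_;
      // Accessing the pasted member, as "return &cache_.kLoad##Type;" does.
      std::printf("%p\n", static_cast<void*>(&cache_.kLoadInt32));
      return 0;
    }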
D | int64-lowering.cc
  |  142  case IrOpcode::kLoad:  in LowerNode()
  |  145  if (node->opcode() == IrOpcode::kLoad) {  in LowerNode()
  |  160  if (node->opcode() == IrOpcode::kLoad) {  in LowerNode()
|
D | machine-graph-verifier.cc
  |  113  case IrOpcode::kLoad:  in Run()
  |  434  case IrOpcode::kLoad:  in Run()
|
D | store-store-elimination.cc | 338 node->opcode() == IrOpcode::kLoad || in CannotObserveStoreField()
|
D | node.cc | 302 if (from->opcode() != IrOpcode::kLoad && in OwnedByAddressingOperand()
|
D | simd-scalar-lowering.cc | 278 case IrOpcode::kLoad: { in LowerNode()
|
D | memory-optimizer.cc | 94 case IrOpcode::kLoad: in VisitNode()
|
D | escape-analysis.cc
  |  1079  use->opcode() == IrOpcode::kLoad) &&  in IsEffectBranchPoint()
  |  1219  node->opcode() != IrOpcode::kLoad &&  in ForwardVirtualState()
|
/external/skia/src/gpu/ |
D | GrGpuCommandBuffer.h | 40 kLoad, enumerator
|
D | GrRenderTargetOpList.cpp | 89 GrGpuCommandBuffer::LoadOp::kLoad, in create_command_buffer()
|
/external/v8/src/ |
D | perf-jit.cc
  |   64  kLoad = 0,  enumerator
  |  230  code_load.event_ = PerfJitCodeLoad::kLoad;  in LogRecordedBuffer()
|
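In perf-jit.cc above, kLoad is the numeric event tag (= 0) assigned to each code-load record before it is emitted to the perf dump. A small sketch of that tag-then-write shape, with the record layout and the WriteRecord function invented for illustration (only the kLoad = 0 enumerator and the event_ assignment come from the listing):

    #include <cstdint>
    #include <cstdio>

    // Simplified stand-in for the PerfJitCodeLoad record; field names invented.
    struct PerfJitCodeLoad {
      enum EventKind : uint32_t { kLoad = 0 };  // mirrors "kLoad = 0," above
      uint32_t event_ = 0;
      uint64_t code_size_ = 0;
    };

    // Hypothetical writer; the real code appends the record to the perf jitdump.
    void WriteRecord(const PerfJitCodeLoad& record) {
      std::printf("event=%u size=%llu\n", record.event_,
                  static_cast<unsigned long long>(record.code_size_));
    }

    int main() {
      PerfJitCodeLoad code_load;
      code_load.event_ = PerfJitCodeLoad::kLoad;  // tag the record as a code-load event
      code_load.code_size_ = 128;
      WriteRecord(code_load);
      return 0;
    }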
/external/v8/src/compiler/x64/ |
D | instruction-selector-x64.cc
  |    51  if (input->opcode() != IrOpcode::kLoad ||  in CanBeMemoryOperand()
  |  1149  if (value->opcode() == IrOpcode::kLoad && CanCover(node, value)) {  in VisitChangeInt32ToInt64()
  |  1219  case IrOpcode::kLoad: {  in ZeroExtendsWord32ToWord64()
  |  1563  DCHECK(left->opcode() == IrOpcode::kLoad);  in VisitCompareWithMemoryOperand()
  |  1624  if (hint_node->opcode() == IrOpcode::kLoad) {  in MachineTypeForNarrow()
  |  1658  return node->opcode() == IrOpcode::kLoad ? LoadRepresentationOf(node->op())  in MachineTypeForNarrow()
|
/external/v8/src/compiler/ia32/ |
D | instruction-selector-ia32.cc
  |    32  if (input->opcode() != IrOpcode::kLoad ||  in CanBeMemoryOperand()
  |  1149  DCHECK(left->opcode() == IrOpcode::kLoad);  in VisitCompareWithMemoryOperand()
  |  1210  if (hint_node->opcode() == IrOpcode::kLoad) {  in MachineTypeForNarrow()
  |  1244  return node->opcode() == IrOpcode::kLoad ? LoadRepresentationOf(node->op())  in MachineTypeForNarrow()
|
/external/v8/src/compiler/x87/ |
D | instruction-selector-x87.cc
  |    32  if (input->opcode() != IrOpcode::kLoad ||  in CanBeMemoryOperand()
  |  1222  DCHECK(left->opcode() == IrOpcode::kLoad);  in VisitCompareWithMemoryOperand()
  |  1283  if (hint_node->opcode() == IrOpcode::kLoad) {  in MachineTypeForNarrow()
  |  1317  return node->opcode() == IrOpcode::kLoad ? LoadRepresentationOf(node->op())  in MachineTypeForNarrow()
|
/external/vixl/src/aarch64/ |
D | macro-assembler-aarch64.cc
  |  2235  LoadStoreCPURegListHelper(kLoad, registers, src);  in LoadCPURegList()
  |  2270  VIXL_ASSERT(op == kLoad);  in LoadStoreCPURegListHelper()
  |  2281  VIXL_ASSERT(op == kLoad);  in LoadStoreCPURegListHelper()
  |  2290  VIXL_ASSERT(op == kLoad);  in LoadStoreCPURegListHelper()
|
/external/skia/src/gpu/vk/ |
D | GrVkGpuCommandBuffer.cpp | 28 case GrGpuCommandBuffer::LoadOp::kLoad: in get_vk_load_store_ops()
|
/external/v8/src/compiler/s390/ |
D | instruction-selector-s390.cc
  |   136  if (input->opcode() != IrOpcode::kLoad ||  in CanBeMemoryOperand()
  |   374  case IrOpcode::kLoad: {  in ZeroExtendsWord32ToWord64()
  |  2152  case IrOpcode::kLoad: {  in VisitWordCompareZero()
|
/external/v8/src/compiler/arm64/ |
D | instruction-selector-arm64.cc
  |  1644  if (value->opcode() == IrOpcode::kLoad && CanCover(node, value)) {  in VisitChangeInt32ToInt64()
  |  1708  case IrOpcode::kLoad: {  in VisitChangeUint32ToUint64()
|
/external/v8/src/compiler/mips64/ |
D | instruction-selector-mips64.cc
  |  1305  if (value->opcode() == IrOpcode::kLoad && CanCover(node, value)) {  in VisitChangeInt32ToInt64()
  |  1345  case IrOpcode::kLoad: {  in VisitChangeUint32ToUint64()
|