Lines Matching refs:offset
135 int32_t offset, in CanSplitLoadStoreOffset() argument
138 int32_t other_bits = offset & ~allowed_offset_bits; in CanSplitLoadStoreOffset()
140 *add_to_base = offset & ~allowed_offset_bits; in CanSplitLoadStoreOffset()
141 *offset_for_load_store = offset & allowed_offset_bits; in CanSplitLoadStoreOffset()
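
The split above keeps the low bits of the offset in the memory operand and folds the rest into the base register; the separately computed other_bits presumably feed a check that the high part is itself encodable, which is why the helper can fail. A minimal standalone sketch of the bit manipulation (SplitOffset and the 0xfff mask are illustrative assumptions, not the ART helper):

    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    // allowed_offset_bits is the mask of offset bits the chosen load/store
    // encoding can hold (0xfff below stands in for a 12-bit immediate form).
    static void SplitOffset(uint32_t allowed_offset_bits,
                            int32_t offset,
                            int32_t* add_to_base,
                            int32_t* offset_for_load_store) {
      *add_to_base = offset & ~allowed_offset_bits;           // high bits: fold into base
      *offset_for_load_store = offset & allowed_offset_bits;  // low bits: keep in [base, #imm]
    }

    int main() {
      int32_t add = 0;
      int32_t imm = 0;
      SplitOffset(0xfffu, 0x12345, &add, &imm);
      std::printf("add_to_base=0x%x offset=0x%x\n",
                  static_cast<uint32_t>(add), static_cast<uint32_t>(imm));
      assert(add + imm == 0x12345);  // the two halves recombine to the original offset
      return 0;
    }
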
150 int32_t offset) { in AdjustLoadStoreOffset() argument
151 DCHECK_NE(offset & ~allowed_offset_bits, 0); in AdjustLoadStoreOffset()
153 if (CanSplitLoadStoreOffset(allowed_offset_bits, offset, &add_to_base, &offset_for_load)) { in AdjustLoadStoreOffset()
157 ___ Mov(temp, offset); in AdjustLoadStoreOffset()
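
When the split succeeds, AdjustLoadStoreOffset can add the high part to the base in a temporary register and keep the low part in the memory operand; otherwise it falls back to the Mov shown above, materializing the whole offset in the temp so the final access needs no immediate. A hedged sketch of that decision (the encodability test is passed in as a flag here, because in the real code it comes from CanSplitLoadStoreOffset):

    #include <cstdint>
    #include <cstdio>

    struct AdjustedAccess {
      int32_t add_to_temp;       // value combined with the base in the temp register
      int32_t remaining_offset;  // offset left in the final memory operand
    };

    // high_part_encodable stands in for CanSplitLoadStoreOffset()'s check that
    // the high bits fit an add-immediate; here it is simply supplied by the caller.
    static AdjustedAccess AdjustOffset(uint32_t allowed_offset_bits,
                                       int32_t offset,
                                       bool high_part_encodable) {
      if (high_part_encodable) {
        // temp = base + high bits; access [temp, #low bits]
        return {static_cast<int32_t>(offset & ~allowed_offset_bits),
                static_cast<int32_t>(offset & allowed_offset_bits)};
      }
      // temp = offset; temp += base; access [temp, #0]
      return {offset, 0};
    }

    int main() {
      AdjustedAccess a = AdjustOffset(0xfffu, 0x12345, /*high_part_encodable=*/ true);
      std::printf("add 0x%x to base, keep #0x%x in the operand\n",
                  static_cast<uint32_t>(a.add_to_temp),
                  static_cast<uint32_t>(a.remaining_offset));
      return 0;
    }
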
204 static bool CanHoldLoadOffsetThumb(LoadOperandType type, int offset) { in CanHoldLoadOffsetThumb() argument
211 return IsAbsoluteUint<12>(offset); in CanHoldLoadOffsetThumb()
214 return IsAbsoluteUint<10>(offset) && IsAligned<4>(offset); // VFP addressing mode. in CanHoldLoadOffsetThumb()
216 return IsAbsoluteUint<10>(offset) && IsAligned<4>(offset); in CanHoldLoadOffsetThumb()
224 static bool CanHoldStoreOffsetThumb(StoreOperandType type, int offset) { in CanHoldStoreOffsetThumb() argument
229 return IsAbsoluteUint<12>(offset); in CanHoldStoreOffsetThumb()
232 return IsAbsoluteUint<10>(offset) && IsAligned<4>(offset); // VFP addressing mode. in CanHoldStoreOffsetThumb()
234 return IsAbsoluteUint<10>(offset) && IsAligned<4>(offset); in CanHoldStoreOffsetThumb()
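
Both predicates boil down to immediate-range checks: a 12-bit unsigned magnitude for the ordinary integer accesses, and a 10-bit magnitude plus 4-byte alignment for the VFP forms (as the comments note) and, by the same constraint, the remaining doubleword cases. That second check matches the multiple-of-4, at-most-1020 reach of an 8-bit immediate scaled by 4. A self-contained sketch of those checks; the Sketch-suffixed names are illustrations, not the ART utilities:

    #include <cassert>
    #include <cstdint>

    // True if |offset| fits in kBits unsigned bits (magnitude < 2^kBits).
    template <int kBits>
    bool IsAbsoluteUintSketch(int32_t offset) {
      uint32_t magnitude = offset < 0 ? 0u - static_cast<uint32_t>(offset)
                                      : static_cast<uint32_t>(offset);
      return magnitude < (1u << kBits);
    }

    bool IsAligned4Sketch(int32_t offset) { return (offset & 3) == 0; }

    int main() {
      // 12-bit range used for the plain integer loads/stores.
      assert(IsAbsoluteUintSketch<12>(4095) && !IsAbsoluteUintSketch<12>(4096));
      // 10-bit range plus word alignment used for the VFP and doubleword forms:
      // i.e. a multiple of 4 no larger than 1020 in magnitude.
      assert(IsAbsoluteUintSketch<10>(1020) && IsAligned4Sketch(1020));
      assert(!IsAbsoluteUintSketch<10>(1024) && !IsAligned4Sketch(1022));
      return 0;
    }
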
247 int32_t offset) { in StoreToOffset() argument
251 if (!CanHoldStoreOffsetThumb(type, offset)) { in StoreToOffset()
267 offset += kRegisterSize; in StoreToOffset()
272 offset = AdjustLoadStoreOffset(GetAllowedStoreOffsetBits(type), tmp_reg, base, offset); in StoreToOffset()
275 DCHECK(CanHoldStoreOffsetThumb(type, offset)); in StoreToOffset()
278 ___ Strb(reg, MemOperand(base, offset)); in StoreToOffset()
281 ___ Strh(reg, MemOperand(base, offset)); in StoreToOffset()
284 ___ Str(reg, MemOperand(base, offset)); in StoreToOffset()
287 ___ Strd(reg, vixl32::Register(reg.GetCode() + 1), MemOperand(base, offset)); in StoreToOffset()
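
StoreToOffset first rewrites accesses whose offset does not fit, spilling a scratch register when necessary (which appears to be why the offset is bumped by kRegisterSize in one path), re-checks the offset, and then dispatches on the operand type. The sketch below mirrors only that final dispatch; the StoreWidth enum and the printf "emitter" are stand-ins for the real operand-type enum and the VIXL macro-assembler calls:

    #include <cstdint>
    #include <cstdio>

    // Store widths implied by the switch above.
    enum class StoreWidth { kByte, kHalfword, kWord, kWordPair };

    void EmitStore(StoreWidth type, int reg, int base, int32_t offset) {
      switch (type) {
        case StoreWidth::kByte:
          std::printf("strb r%d, [r%d, #%d]\n", reg, base, static_cast<int>(offset));
          break;
        case StoreWidth::kHalfword:
          std::printf("strh r%d, [r%d, #%d]\n", reg, base, static_cast<int>(offset));
          break;
        case StoreWidth::kWord:
          std::printf("str r%d, [r%d, #%d]\n", reg, base, static_cast<int>(offset));
          break;
        case StoreWidth::kWordPair:
          // STRD names the low register and implicitly uses its successor,
          // matching the reg.GetCode() + 1 in the listing.
          std::printf("strd r%d, r%d, [r%d, #%d]\n", reg, reg + 1, base,
                      static_cast<int>(offset));
          break;
      }
    }

    int main() {
      EmitStore(StoreWidth::kWord, 0, 13, 16);      // str r0, [r13, #16]
      EmitStore(StoreWidth::kWordPair, 2, 13, 8);   // strd r2, r3, [r13, #8]
      return 0;
    }
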
305 int32_t offset) { in LoadFromOffset() argument
306 if (!CanHoldLoadOffsetThumb(type, offset)) { in LoadFromOffset()
310 DCHECK_NE(offset & ~allowed_offset_bits, 0); in LoadFromOffset()
312 if (CanSplitLoadStoreOffset(allowed_offset_bits, offset, &add_to_base, &offset_for_load)) { in LoadFromOffset()
316 offset = offset_for_load; in LoadFromOffset()
320 LoadImmediate(temp, offset); in LoadFromOffset()
325 offset = 0; in LoadFromOffset()
329 DCHECK(CanHoldLoadOffsetThumb(type, offset)); in LoadFromOffset()
332 ___ Ldrsb(dest, MemOperand(base, offset)); in LoadFromOffset()
335 ___ Ldrb(dest, MemOperand(base, offset)); in LoadFromOffset()
338 ___ Ldrsh(dest, MemOperand(base, offset)); in LoadFromOffset()
341 ___ Ldrh(dest, MemOperand(base, offset)); in LoadFromOffset()
345 ___ Ldr(dest, MemOperand(base, offset)); in LoadFromOffset()
348 ___ Ldrd(dest, vixl32::Register(dest.GetCode() + 1), MemOperand(base, offset)); in LoadFromOffset()
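
LoadFromOffset follows the same fix-up-then-dispatch shape, and its switch separates sign-extending narrow loads (Ldrsb, Ldrsh) from zero-extending ones (Ldrb, Ldrh). A tiny standalone illustration of the difference those two families preserve:

    #include <cstdint>
    #include <cstdio>

    int main() {
      uint8_t byte_in_memory = 0xF0;  // the value sitting at [base, #offset]
      int32_t sign_extended = static_cast<int8_t>(byte_in_memory);  // what ldrsb yields: -16
      int32_t zero_extended = byte_in_memory;                       // what ldrb yields: 240
      std::printf("ldrsb -> %d, ldrb -> %d\n", sign_extended, zero_extended);
      return 0;
    }
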
358 int32_t offset) { in StoreSToOffset() argument
359 ___ Vstr(source, MemOperand(base, offset)); in StoreSToOffset()
364 int32_t offset) { in StoreDToOffset() argument
365 ___ Vstr(source, MemOperand(base, offset)); in StoreDToOffset()
370 int32_t offset) { in LoadSFromOffset() argument
371 ___ Vldr(reg, MemOperand(base, offset)); in LoadSFromOffset()
376 int32_t offset) { in LoadDFromOffset() argument
377 ___ Vldr(reg, MemOperand(base, offset)); in LoadDFromOffset()
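
The four VFP wrappers above emit a single Vstr/Vldr and therefore rely on the caller having already reduced the offset to something the instruction can encode: an 8-bit immediate scaled by 4, i.e. a multiple of 4 in [-1020, 1020], the same constraint the 10-bit-plus-alignment checks earlier express. A small sketch of that constraint; FitsVfpImmediateSketch is an illustrative name, not an ART helper:

    #include <cassert>
    #include <cstdint>

    // VLDR/VSTR encode the offset as an 8-bit immediate scaled by 4, so the
    // reachable offsets are multiples of 4 in [-1020, 1020].
    bool FitsVfpImmediateSketch(int32_t offset) {
      return (offset % 4 == 0) && offset >= -1020 && offset <= 1020;
    }

    int main() {
      assert(FitsVfpImmediateSketch(1020));
      assert(FitsVfpImmediateSketch(-1020));
      assert(!FitsVfpImmediateSketch(1024));  // out of range
      assert(!FitsVfpImmediateSketch(6));     // not word aligned
      return 0;
    }
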