Lines Matching refs:reg — cross-reference hits for the identifier "reg": each entry gives the source line number, the matching line (statements split across source lines appear as separate entries), and the enclosing function.
207 MOV(AL, 0, parts.count.reg, in scanline_core()
208 reg_imm(parts.count.reg, ROR, GGL_DITHER_ORDER_SHIFT)); in scanline_core()
209 ADD(AL, 0, parts.count.reg, parts.count.reg, in scanline_core()
211 MOV(AL, 0, parts.count.reg, in scanline_core()
212 reg_imm(parts.count.reg, ROR, 32 - GGL_DITHER_ORDER_SHIFT)); in scanline_core()
264 AND(AL, 0, parts.dither.reg, parts.count.reg, imm(mask)); in scanline_core()
265 ADDR_ADD(AL, 0, parts.dither.reg, ctxtReg, parts.dither.reg); in scanline_core()
266 LDRB(AL, parts.dither.reg, parts.dither.reg, in scanline_core()
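The three instructions at 207-212 are a rotate trick: the dither index lives in the low bits of parts.count, below the loop counter in the top halfword, and the MOV/ADD/MOV sequence rotates those bits to the top of the register, increments there (so the carry falls off the end, giving modular wrap-around for free), and rotates back. Lines 264-266 then index the context's ditherMatrix with the low bits. A minimal C++ sketch; the ADD's immediate operand and the AND's mask value fall on unmatched lines, so 1 << (32-shift) and an 8x8 matrix (shift == 3) are assumptions here:

    #include <cstdint>

    static inline uint32_t rotr32(uint32_t v, unsigned n) {
        return (v >> n) | (v << ((32 - n) & 31));
    }

    // Lines 207-212: step the dither index in the low `shift` bits,
    // modulo 1<<shift, without touching the loop count above it.
    uint32_t step_dither(uint32_t count, unsigned shift /* assumed 3 */) {
        count = rotr32(count, shift);            // MOV ..., ROR #shift
        count += 1u << (32 - shift);             // ADD: increment at the top
        return rotr32(count, 32 - shift);        // MOV ..., ROR #(32-shift)
    }

    // Lines 264-266: fetch the dither threshold for this pixel.
    uint8_t dither_value(const uint8_t* ditherMatrix, uint32_t count, uint32_t mask) {
        return ditherMatrix[count & mask];       // AND; ADDR_ADD; LDRB
    }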
291 if (pixel.reg == -1) { in scanline_core()
323 SUB(AL, S, parts.count.reg, parts.count.reg, imm(1<<16)); in scanline_core()
338 ADDR_ADD(AL, 0, parts.cbPtr.reg, parts.cbPtr.reg, imm(parts.cbPtr.size>>3)); in scanline_core()
340 SUB(AL, S, parts.count.reg, parts.count.reg, imm(1<<16)); in scanline_core()
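Lines 323 and 338-340 reveal the loop's shape: the pixel count sits in the top halfword of parts.count, so a single flag-setting SUB of 1<<16 both steps the counter and feeds the loop branch, and the color-buffer pointer advances by the pixel size in bytes (the size field is in bits, hence the >>3). A sketch of the equivalent control flow, with the branch-until-negative exit inferred from the S-suffixed SUB:

    #include <cstdint>

    // Scanline loop skeleton implied by lines 323/338/340.
    void scanline(uint32_t count /* (n-1)<<16 plus dither bits */,
                  uint8_t* cbPtr, unsigned pixelSizeBits) {
        do {
            // ... compute and store one pixel at cbPtr ...
            cbPtr += pixelSizeBits >> 3;     // ADDR_ADD ..., imm(size>>3)
            count -= 1u << 16;               // SUB(AL, S, ...): sets flags
        } while ((int32_t)count >= 0);       // loop while still non-negative
    }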
361 CONTEXT_LOAD(parts.count.reg, iterators.xr); in build_scanline_prolog()
365 SUB(AL, 0, parts.count.reg, parts.count.reg, Rx); in build_scanline_prolog()
366 SUB(AL, 0, parts.count.reg, parts.count.reg, imm(1)); in build_scanline_prolog()
379 ORR(AL, 0, parts.count.reg, tx, reg_imm(parts.count.reg, LSL, 16)); in build_scanline_prolog()
383 MOV(AL, 0, parts.count.reg, reg_imm(parts.count.reg, LSL, 16)); in build_scanline_prolog()
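Lines 361-383 build the packed counter that loop consumes: count = xr - x - 1 goes into the top halfword, and when dithering is enabled the low bits are seeded with x's dither offset (tx, per the ORR at 379) so the dither index needs no register of its own. Equivalent C++:

    #include <cstdint>

    // Counter packing in build_scanline_prolog (lines 361-383).
    uint32_t pack_count(uint32_t xr, uint32_t x,
                        bool dithering, uint32_t ditherMask) {
        uint32_t count = xr - x - 1;                  // SUB; SUB ..., imm(1)
        if (dithering)
            return (count << 16) | (x & ditherMask);  // ORR tx, count LSL #16
        return count << 16;                           // MOV count LSL #16
    }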
393 CONTEXT_ADDR_LOAD(parts.cbPtr.reg, state.buffers.color.data); in build_scanline_prolog()
419 int ydzdy = parts.z.reg; in build_scanline_prolog()
422 MLA(AL, 0, parts.z.reg, Rx, dzdx, ydzdy); in build_scanline_prolog()
431 ADD(AL, 0, Rs, Rs, reg_imm(parts.count.reg, LSR, 16)); in build_scanline_prolog()
446 CONTEXT_ADDR_LOAD(parts.covPtr.reg, state.buffers.coverage); in build_scanline_prolog()
447 ADDR_ADD(AL, 0, parts.covPtr.reg, parts.covPtr.reg, reg_imm(Rx, LSL, 1)); in build_scanline_prolog()
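Two more prolog details show through: the z iterator is seeded with a single MLA as z = x*dzdx + ydzdy (lines 419-422, ydzdy being z already stepped along y), and the coverage pointer is advanced by x entries of two bytes each (the LSL #1 at 447), which suggests 16-bit coverage values. Roughly:

    #include <cstdint>

    // z seed (lines 419-422) and coverage pointer (lines 446-447).
    int32_t z_init(int32_t x, int32_t dzdx, int32_t ydzdy) {
        return x * dzdx + ydzdy;       // MLA(AL, 0, z, Rx, dzdx, ydzdy)
    }
    const uint16_t* cov_init(const uint16_t* coverage, uint32_t x) {
        return coverage + x;           // ADDR_ADD ..., reg_imm(Rx, LSL, 1)
    }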
554 MOV(AL, 0, fragment.reg, reg_imm(incoming.reg, LSR, incoming.l)); in build_incoming_component()
570 mAlphaSource.setTo(fragment.reg, in build_incoming_component()
573 MOV(AL, 0, mAlphaSource.reg, in build_incoming_component()
574 reg_imm(mAlphaSource.reg, LSR, shift)); in build_incoming_component()
582 MOV(AL, 0, mAlphaSource.reg, in build_incoming_component()
583 reg_imm(fragment.reg, LSR, shift)); in build_incoming_component()
585 MOV(AL, 0, mAlphaSource.reg, fragment.reg); in build_incoming_component()
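In build_incoming_component, line 554 shifts the packed component so its low bit lands at bit 0, and lines 570-585 pick the alpha source for blending, narrowing it by `shift` bits when the precisions differ (or just copying the fragment when they don't). A sketch with a stand-in component type:

    #include <cstdint>

    // Field extraction and alpha-source selection (lines 554-585).
    // Component is a stand-in; the real component_t carries more state.
    struct Component { uint32_t value; int h, l; };   // occupies bits [h..l]

    uint32_t to_lsb(const Component& c) {
        return c.value >> c.l;            // MOV ..., LSR incoming.l (line 554)
    }
    uint32_t alpha_source(uint32_t fragment, int shift) {
        return shift ? fragment >> shift  // MOV ..., LSR shift (573/582)
                     : fragment;          // MOV alphaSource, fragment (585)
    }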
639 int c = parts.argb[i].reg; in build_smooth_shade()
640 int dx = parts.argb_dx[i].reg; in build_smooth_shade()
681 LDRH(AL, cf, parts.covPtr.reg, immed8_post(2)); in build_coverage_application()
684 SMULWB(AL, fragment.reg, incoming.reg, cf); in build_coverage_application()
686 MOV(AL, 0, fragment.reg, reg_imm(incoming.reg, LSL, 1)); in build_coverage_application()
687 SMULWB(AL, fragment.reg, fragment.reg, cf); in build_coverage_application()
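Coverage application reads one 16-bit factor per pixel with a post-incremented halfword load, then scales the fragment with SMULWB, which multiplies a 32-bit value by the bottom signed halfword of another and keeps the top 32 bits of the 48-bit product, i.e. (a * (int16_t)b) >> 16. The LSL #1 variant at 686-687 pre-doubles fragments that don't fill the register so the scale comes out right; the exact fixed-point convention of the coverage values is an assumption here. In C++:

    #include <cstdint>

    // Coverage modulation (lines 681-687), pre-shift variant.
    int32_t apply_coverage(int32_t incoming, const uint16_t*& covPtr) {
        int16_t cf = (int16_t)*covPtr++;             // LDRH ..., immed8_post(2)
        int64_t p  = ((int64_t)incoming << 1) * cf;  // MOV LSL #1; SMULWB:
        return (int32_t)(p >> 16);                   //   (a * bottom16(b)) >> 16
    }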
703 if (shift) CMP(AL, fragment.reg, reg_imm(ref, LSR, shift)); in build_alpha_test()
704 else CMP(AL, fragment.reg, ref); in build_alpha_test()
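The alpha test scales the reference value down to the fragment's precision before comparing (lines 703-704); the condition consuming the CMP's flags depends on the configured alpha function and isn't among the matches, so the greater-or-equal below is only an example:

    #include <cstdint>

    // Alpha-test compare (lines 703-704); predicate is illustrative.
    bool alpha_test(uint32_t fragment, uint32_t ref, int shift) {
        uint32_t r = shift ? ref >> shift : ref;  // reg_imm(ref, LSR, shift)
        return fragment >= r;                     // CMP + condition code
    }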
764 int z = parts.z.reg; in build_depth_test()
767 ADDR_SUB(AL, 0, zbase, zbase, reg_imm(parts.count.reg, LSR, 15)); in build_depth_test()
793 ADD(AL, 0, parts.z.reg, parts.z.reg, dzdx); in build_iterate_z()
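The depth test leans on the packed counter once more: z-buffer entries are 16-bit, so subtracting count >> 15 from the base pointer is (count >> 16) * 2 bytes in a single shifted operand (line 767); addressing relative to an end-of-span pointer is inferred, not shown. Each iteration then steps z by dzdx (line 793):

    #include <cstdint>

    // Depth slot addressing (line 767) and z stepping (line 793).
    uint16_t* z_slot(uint8_t* zbase, uint32_t count) {
        return (uint16_t*)(zbase - (count >> 15));  // ADDR_SUB ..., LSR #15
    }
    int32_t z_step(int32_t z, int32_t dzdx) {
        return z + dzdx;                            // ADD(AL, 0, z, z, dzdx)
    }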
824 pixel.reg = regs.obtain(); in build_logic_op()
830 case GGL_CLEAR: MOV(AL, 0, pixel.reg, imm(0)); break; in build_logic_op()
831 case GGL_AND: AND(AL, 0, pixel.reg, s.reg, d.reg); break; in build_logic_op()
832 case GGL_AND_REVERSE: BIC(AL, 0, pixel.reg, s.reg, d.reg); break; in build_logic_op()
834 case GGL_AND_INVERTED: BIC(AL, 0, pixel.reg, d.reg, s.reg); break; in build_logic_op()
835 case GGL_NOOP: MOV(AL, 0, pixel.reg, d.reg); break; in build_logic_op()
836 case GGL_XOR: EOR(AL, 0, pixel.reg, s.reg, d.reg); break; in build_logic_op()
837 case GGL_OR: ORR(AL, 0, pixel.reg, s.reg, d.reg); break; in build_logic_op()
838 case GGL_NOR: ORR(AL, 0, pixel.reg, s.reg, d.reg); in build_logic_op()
839 MVN(AL, 0, pixel.reg, pixel.reg); break; in build_logic_op()
840 case GGL_EQUIV: EOR(AL, 0, pixel.reg, s.reg, d.reg); in build_logic_op()
841 MVN(AL, 0, pixel.reg, pixel.reg); break; in build_logic_op()
842 case GGL_INVERT: MVN(AL, 0, pixel.reg, d.reg); break; in build_logic_op()
844 BIC(AL, 0, pixel.reg, d.reg, s.reg); in build_logic_op()
845 MVN(AL, 0, pixel.reg, pixel.reg); break; in build_logic_op()
846 case GGL_COPY_INVERTED: MVN(AL, 0, pixel.reg, s.reg); break; in build_logic_op()
848 BIC(AL, 0, pixel.reg, s.reg, d.reg); in build_logic_op()
849 MVN(AL, 0, pixel.reg, pixel.reg); break; in build_logic_op()
850 case GGL_NAND: AND(AL, 0, pixel.reg, s.reg, d.reg); in build_logic_op()
851 MVN(AL, 0, pixel.reg, pixel.reg); break; in build_logic_op()
852 case GGL_SET: MVN(AL, 0, pixel.reg, imm(0)); break; in build_logic_op()
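The switch at 824-852 lowers every GL logic op to at most two ARM data-processing instructions, using BIC (and-not) and MVN (not) for the inverted forms. Three case labels (source lines 833, 843, and 847) contain no "reg" and are missing above; by GL's logic-op ordering and the BIC/MVN patterns they are presumably GGL_COPY, GGL_OR_REVERSE, and GGL_OR_INVERTED. The whole table as C expressions (enum values illustrative; the real constants mirror GL's):

    #include <cstdint>

    enum LogicOp {   // names from the listing; values illustrative
        GGL_CLEAR, GGL_AND, GGL_AND_REVERSE, GGL_COPY, GGL_AND_INVERTED,
        GGL_NOOP, GGL_XOR, GGL_OR, GGL_NOR, GGL_EQUIV, GGL_INVERT,
        GGL_OR_REVERSE, GGL_COPY_INVERTED, GGL_OR_INVERTED, GGL_NAND, GGL_SET,
    };

    // Logic-op lowering (lines 824-852): s = source, d = destination.
    uint32_t logic_op(LogicOp op, uint32_t s, uint32_t d) {
        switch (op) {
        case GGL_CLEAR:         return 0;         // MOV  pixel, #0
        case GGL_AND:           return s & d;     // AND
        case GGL_AND_REVERSE:   return s & ~d;    // BIC  pixel, s, d
        case GGL_COPY:          return s;         // source passes through
        case GGL_AND_INVERTED:  return ~s & d;    // BIC  pixel, d, s
        case GGL_NOOP:          return d;         // MOV  pixel, d
        case GGL_XOR:           return s ^ d;     // EOR
        case GGL_OR:            return s | d;     // ORR
        case GGL_NOR:           return ~(s | d);  // ORR + MVN
        case GGL_EQUIV:         return ~(s ^ d);  // EOR + MVN
        case GGL_INVERT:        return ~d;        // MVN  pixel, d
        case GGL_OR_REVERSE:    return s | ~d;    // BIC d,s + MVN (844-845)
        case GGL_COPY_INVERTED: return ~s;        // MVN  pixel, s
        case GGL_OR_INVERTED:   return ~s | d;    // BIC s,d + MVN (848-849)
        case GGL_NAND:          return ~(s & d);  // AND + MVN
        case GGL_SET:           return ~0u;       // MVN  pixel, #0
        }
        return d;
    }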
945 pixel.reg = regs.obtain(); in build_masking()
968 build_and_immediate(pixel.reg, s.reg, mask, fb.size()); in build_masking()
974 build_and_immediate(fb.reg, fb.reg, ~mask, fb.size()); in build_masking()
977 if (s.reg == fb.reg) { in build_masking()
979 if (s.reg == pixel.reg) { in build_masking()
982 MOV(AL, 0, pixel.reg, fb.reg); in build_masking()
985 ORR(AL, 0, pixel.reg, s.reg, fb.reg); in build_masking()
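Masking keeps the write-masked bits of the source and the remaining bits of the framebuffer: lines 968 and 974 are the two AND halves (the second against ~mask) and 985 the merging ORR, while 977-982 only sort out register aliasing. The net computation:

    #include <cstdint>

    // Masking (lines 945-985).
    uint32_t apply_mask(uint32_t s, uint32_t fb, uint32_t mask) {
        return (s & mask) | (fb & ~mask);  // build_and_immediate x2, then ORR
    }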
996 ADDR_ADD(AL, 0, d.reg, b.reg, reg_imm(o.reg, LSL, 2)); in base_offset()
999 if (d.reg == b.reg) { in base_offset()
1000 ADDR_ADD(AL, 0, d.reg, b.reg, reg_imm(o.reg, LSL, 1)); in base_offset()
1001 ADDR_ADD(AL, 0, d.reg, d.reg, o.reg); in base_offset()
1003 ADDR_ADD(AL, 0, d.reg, o.reg, reg_imm(o.reg, LSL, 1)); in base_offset()
1004 ADDR_ADD(AL, 0, d.reg, d.reg, b.reg); in base_offset()
1008 ADDR_ADD(AL, 0, d.reg, b.reg, reg_imm(o.reg, LSL, 1)); in base_offset()
1011 ADDR_ADD(AL, 0, d.reg, b.reg, o.reg); in base_offset()
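base_offset turns base + offset*bytes-per-pixel into shifts and adds: LSL #2 for 4-byte pixels, LSL #1 for 2-byte, a plain add for 1-byte, and for 3-byte pixels two adds whose operand order (lines 999-1004) depends on whether the destination aliases the base register. Equivalent C++:

    #include <cstdint>
    #include <cstddef>

    // Pixel addressing (lines 996-1011).
    uint8_t* base_offset(uint8_t* b, ptrdiff_t o, int bytesPerPixel) {
        switch (bytesPerPixel) {
        case 4:  return b + (o << 2);      // ADDR_ADD ..., o LSL #2
        case 3:  return b + (o << 1) + o;  // two adds; ordered to avoid
                                           //   clobbering b when d == b
        case 2:  return b + (o << 1);      // ADDR_ADD ..., o LSL #1
        default: return b + o;             // ADDR_ADD
        }
    }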
1034 int RegisterAllocator::reserveReg(int reg) in reserveReg() argument
1036 return mRegs.reserve(reg); in reserveReg()
1044 void RegisterAllocator::recycleReg(int reg) in recycleReg() argument
1046 mRegs.recycle(reg); in recycleReg()
1095 int RegisterAllocator::RegisterFile::reserve(int reg) in reserve() argument
1097 reg += mRegisterOffset; in reserve()
1098 LOG_ALWAYS_FATAL_IF(isUsed(reg), in reserve()
1100 reg); in reserve()
1101 mRegs |= (1<<reg); in reserve()
1103 return reg; in reserve()
1113 int RegisterAllocator::RegisterFile::isUsed(int reg) const in isUsed()
1115 LOG_ALWAYS_FATAL_IF(reg>=16+(int)mRegisterOffset, "invalid register %d", reg); in isUsed()
1116 return mRegs & (1<<reg); in isUsed()
1126 int i, r, reg; in obtain() local
1142 reg = reserve(r); // Param in Arm range 0-15, returns range 2-17 on Mips. in obtain()
1143 return reg; in obtain()
1164 void RegisterAllocator::RegisterFile::recycle(int reg) in recycle() argument
1172 mRegs &= ~(1<<reg); in recycle()
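The allocator matches at 1034-1172 outline the whole RegisterFile design: a bitmask of in-use registers, with reserve() setting a bit (and aborting if it was already set), isUsed() testing it, recycle() clearing it, and mRegisterOffset rebasing register numbers for non-ARM backends (per the line-1142 comment, ARM parameters 0-15 come back as 2-17 on MIPS). A condensed, assert-based rendition:

    #include <cstdint>
    #include <cassert>

    // Condensed RegisterFile (lines 1095-1172).
    class RegisterFile {
        uint32_t mRegs = 0;          // bit n set => register n in use
        int mRegisterOffset = 0;     // 0 on ARM; rebases numbers elsewhere
    public:
        int reserve(int reg) {
            reg += mRegisterOffset;              // line 1097
            assert(!isUsed(reg));                // LOG_ALWAYS_FATAL_IF (1098)
            mRegs |= (1u << reg);                // line 1101
            return reg;                          // line 1103
        }
        bool isUsed(int reg) const {
            assert(reg < 16 + mRegisterOffset);  // "invalid register %d" (1115)
            return (mRegs & (1u << reg)) != 0;   // line 1116
        }
        void recycle(int reg) {
            mRegs &= ~(1u << reg);               // line 1172
        }
    };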