Lines Matching refs:reg
208 MOV(AL, 0, parts.count.reg, in scanline_core()
209 reg_imm(parts.count.reg, ROR, GGL_DITHER_ORDER_SHIFT)); in scanline_core()
210 ADD(AL, 0, parts.count.reg, parts.count.reg, in scanline_core()
212 MOV(AL, 0, parts.count.reg, in scanline_core()
213 reg_imm(parts.count.reg, ROR, 32 - GGL_DITHER_ORDER_SHIFT)); in scanline_core()
265 AND(AL, 0, parts.dither.reg, parts.count.reg, imm(mask)); in scanline_core()
266 ADD(AL, 0, parts.dither.reg, parts.dither.reg, ctxtReg); in scanline_core()
267 LDRB(AL, parts.dither.reg, parts.dither.reg, in scanline_core()
292 if (pixel.reg == -1) { in scanline_core()
324 SUB(AL, S, parts.count.reg, parts.count.reg, imm(1<<16)); in scanline_core()
339 ADD(AL, 0, parts.cbPtr.reg, parts.cbPtr.reg, imm(parts.cbPtr.size>>3)); in scanline_core()
341 SUB(AL, S, parts.count.reg, parts.count.reg, imm(1<<16)); in scanline_core()
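These hits come from the JIT scanline generator (GGLAssembler, going by the function names). The scanline_core() fragments show two idioms: the pixel count lives in the top halfword of parts.count.reg, so a single SUBS ... imm(1<<16) (324, 341) both decrements it and sets the flags the loop-back branch tests, while the low bits carry a dither index that the ROR/ADD/ROR triple (208-213) bumps in place; the ADD immediate sits on the elided line 211, so it is kept as a parameter below. A minimal C++ sketch of both tricks, with the dither order width as an assumption:

    #include <cstdint>

    constexpr unsigned kDitherOrderShift = 2;   // assumption: 4x4 dither matrix

    static inline uint32_t ror(uint32_t v, unsigned n) {
        n &= 31u;
        return n ? (v >> n) | (v << (32u - n)) : v;
    }

    // Rotate the dither bits to the top of the word so one ADD can bump
    // them there, wrapping naturally at bit 31, then rotate the layout back.
    uint32_t update_dither(uint32_t count, uint32_t bump /* elided imm, 211 */) {
        count = ror(count, kDitherOrderShift);        // MOV ..., ROR #shift
        count += bump;                                // ADD ..., imm(...)
        return ror(count, 32u - kDitherOrderShift);   // MOV ..., ROR #(32-shift)
    }

    // SUBS count, count, #(1<<16): decrement the packed counter and produce
    // the condition flags for the loop branch, in one instruction.
    bool step(uint32_t& count) {
        count -= 1u << 16;
        return (int32_t)count >= 0;                   // loop while non-negative
    }

The dither lookup itself (265-267) is then an AND with the matrix mask, an ADD of the context base, and an LDRB from the table.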
363 CONTEXT_LOAD(parts.count.reg, iterators.xr); in build_scanline_prolog()
367 SUB(AL, 0, parts.count.reg, parts.count.reg, Rx); in build_scanline_prolog()
368 SUB(AL, 0, parts.count.reg, parts.count.reg, imm(1)); in build_scanline_prolog()
381 ORR(AL, 0, parts.count.reg, tx, reg_imm(parts.count.reg, LSL, 16)); in build_scanline_prolog()
385 MOV(AL, 0, parts.count.reg, reg_imm(parts.count.reg, LSL, 16)); in build_scanline_prolog()
395 CONTEXT_LOAD(parts.cbPtr.reg, state.buffers.color.data); in build_scanline_prolog()
421 int ydzdy = parts.z.reg; in build_scanline_prolog()
424 MLA(AL, 0, parts.z.reg, Rx, dzdx, ydzdy); in build_scanline_prolog()
433 ADD(AL, 0, Rs, Rs, reg_imm(parts.count.reg, LSR, 16)); in build_scanline_prolog()
448 CONTEXT_LOAD(parts.covPtr.reg, state.buffers.coverage); in build_scanline_prolog()
449 ADD(AL, 0, parts.covPtr.reg, parts.covPtr.reg, reg_imm(Rx, LSL, 1)); in build_scanline_prolog()
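build_scanline_prolog() constructs that packed counter: count = xr - x - 1 goes into bits 31..16, and the dithering path ORs the x ordinate into the low half (381) while the plain path just shifts (385). A sketch, names illustrative:

    #include <cstdint>

    // Iterations-1 in the top halfword (ready for SUBS #(1<<16)); x rides
    // in the low halfword only when the dither code needs it.
    uint32_t pack_count(uint16_t x, uint16_t xr, bool keep_x) {
        uint32_t count = (uint32_t)(xr - x) - 1;   // SUB, SUB #1 (367-368)
        return keep_x ? ((count << 16) | x)        // ORR tx, count LSL #16
                      : (count << 16);             // MOV count LSL #16
    }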
556 MOV(AL, 0, fragment.reg, reg_imm(incoming.reg, LSR, incoming.l)); in build_incoming_component()
572 mAlphaSource.setTo(fragment.reg, in build_incoming_component()
575 MOV(AL, 0, mAlphaSource.reg, in build_incoming_component()
576 reg_imm(mAlphaSource.reg, LSR, shift)); in build_incoming_component()
584 MOV(AL, 0, mAlphaSource.reg, in build_incoming_component()
585 reg_imm(fragment.reg, LSR, shift)); in build_incoming_component()
587 MOV(AL, 0, mAlphaSource.reg, fragment.reg); in build_incoming_component()
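build_incoming_component() isolates a color component by shifting its low bit down (556), then derives the alpha-test source from it, narrowing with a further right shift when the component is wider than needed (572-587). A sketch, with the bitfield layout as an assumption:

    #include <cstdint>

    struct Field { int lo; };   // assumption: component occupies bits [lo..h)

    // MOV fragment, incoming LSR #l: the component's LSB moves to bit 0.
    uint32_t extract(uint32_t incoming, Field f) {
        return incoming >> f.lo;
    }

    // mAlphaSource is the fragment, right-shifted when it is too wide.
    uint32_t alpha_source(uint32_t fragment, int shift) {
        return shift ? (fragment >> shift) : fragment;
    }

The alpha test itself (705-706) then compares against the reference value, pre-shifted by the same amount when the widths differ.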
641 int c = parts.argb[i].reg; in build_smooth_shade()
642 int dx = parts.argb_dx[i].reg; in build_smooth_shade()
683 LDRH(AL, cf, parts.covPtr.reg, immed8_post(2)); in build_coverage_application()
686 SMULWB(AL, fragment.reg, incoming.reg, cf); in build_coverage_application()
688 MOV(AL, 0, fragment.reg, reg_imm(incoming.reg, LSL, 1)); in build_coverage_application()
689 SMULWB(AL, fragment.reg, fragment.reg, cf); in build_coverage_application()
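build_coverage_application() pulls a coverage factor with a post-incremented halfword load (683, immed8_post(2)) and scales the fragment with SMULWB, ARM's 32x16 signed multiply that keeps the top 32 bits of the 48-bit product; a fragment without headroom is pre-shifted left by one first (688-689). Sketch, treating coverage as a 1.15 fixed-point factor (an assumption):

    #include <cstdint>

    // SMULWB Rd, Rn, Rm: (Rn * (int16)Rm) >> 16, high 32 bits kept.
    static inline int32_t smulwb(int32_t a, int32_t b) {
        return (int32_t)(((int64_t)a * (int16_t)(b & 0xFFFF)) >> 16);
    }

    int32_t apply_coverage(int32_t fragment, int16_t cf, bool preshift) {
        if (preshift)
            fragment <<= 1;            // MOV fragment, incoming LSL #1 (688)
        return smulwb(fragment, cf);   // SMULWB (686/689)
    }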
705 if (shift) CMP(AL, fragment.reg, reg_imm(ref, LSR, shift)); in build_alpha_test()
706 else CMP(AL, fragment.reg, ref); in build_alpha_test()
766 int z = parts.z.reg; in build_depth_test()
769 SUB(AL, 0, zbase, zbase, reg_imm(parts.count.reg, LSR, 15)); in build_depth_test()
795 ADD(AL, 0, parts.z.reg, parts.z.reg, dzdx); in build_iterate_z()
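The depth fragments reuse the packed counter as a buffer index: with 16-bit z values, parts.count LSR #15 equals remaining * 2, a ready-made byte offset (769); how zbase is anchored around it falls outside these hits. build_iterate_z() then steps z by dzdx once per pixel (795). Sketch (the offset interpretation is an assumption):

    #include <cstdint>

    // (count >> 16) z-values remaining, times sizeof(uint16_t) each.
    uint32_t z_byte_offset(uint32_t packed_count) {
        return packed_count >> 15;   // reg_imm(parts.count.reg, LSR, 15)
    }

    // ADD z, z, dzdx: linear z interpolation along the scanline.
    void iterate_z(uint32_t& z, int32_t dzdx) {
        z += (uint32_t)dzdx;
    }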
826 pixel.reg = regs.obtain(); in build_logic_op()
832 case GGL_CLEAR: MOV(AL, 0, pixel.reg, imm(0)); break; in build_logic_op()
833 case GGL_AND: AND(AL, 0, pixel.reg, s.reg, d.reg); break; in build_logic_op()
834 case GGL_AND_REVERSE: BIC(AL, 0, pixel.reg, s.reg, d.reg); break; in build_logic_op()
836 case GGL_AND_INVERTED: BIC(AL, 0, pixel.reg, d.reg, s.reg); break; in build_logic_op()
837 case GGL_NOOP: MOV(AL, 0, pixel.reg, d.reg); break; in build_logic_op()
838 case GGL_XOR: EOR(AL, 0, pixel.reg, s.reg, d.reg); break; in build_logic_op()
839 case GGL_OR: ORR(AL, 0, pixel.reg, s.reg, d.reg); break; in build_logic_op()
840 case GGL_NOR: ORR(AL, 0, pixel.reg, s.reg, d.reg); in build_logic_op()
841 MVN(AL, 0, pixel.reg, pixel.reg); break; in build_logic_op()
842 case GGL_EQUIV: EOR(AL, 0, pixel.reg, s.reg, d.reg); in build_logic_op()
843 MVN(AL, 0, pixel.reg, pixel.reg); break; in build_logic_op()
844 case GGL_INVERT: MVN(AL, 0, pixel.reg, d.reg); break; in build_logic_op()
846 BIC(AL, 0, pixel.reg, d.reg, s.reg); in build_logic_op()
847 MVN(AL, 0, pixel.reg, pixel.reg); break; in build_logic_op()
848 case GGL_COPY_INVERTED: MVN(AL, 0, pixel.reg, s.reg); break; in build_logic_op()
850 BIC(AL, 0, pixel.reg, s.reg, d.reg); in build_logic_op()
851 MVN(AL, 0, pixel.reg, pixel.reg); break; in build_logic_op()
852 case GGL_NAND: AND(AL, 0, pixel.reg, s.reg, d.reg); in build_logic_op()
853 MVN(AL, 0, pixel.reg, pixel.reg); break; in build_logic_op()
854 case GGL_SET: MVN(AL, 0, pixel.reg, imm(0)); break; in build_logic_op()
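build_logic_op() lowers every GL logic op to one or two ALU instructions: BIC covers the AND-with-complement forms and a trailing MVN the inverted ones. Restated in C++ (the numeric codes follow the standard GL ordering, an assumption here; GGL_COPY is on the elided line 835):

    #include <cstdint>

    uint32_t logic_op(int op, uint32_t s, uint32_t d) {
        switch (op) {
        case 0:  return 0;          // GGL_CLEAR          MOV  pixel, #0
        case 1:  return s & d;      // GGL_AND            AND
        case 2:  return s & ~d;     // GGL_AND_REVERSE    BIC pixel, s, d
        case 3:  return s;          // GGL_COPY           (source unchanged)
        case 4:  return d & ~s;     // GGL_AND_INVERTED   BIC pixel, d, s
        case 5:  return d;          // GGL_NOOP           MOV pixel, d
        case 6:  return s ^ d;      // GGL_XOR            EOR
        case 7:  return s | d;      // GGL_OR             ORR
        case 8:  return ~(s | d);   // GGL_NOR            ORR; MVN
        case 9:  return ~(s ^ d);   // GGL_EQUIV          EOR; MVN
        case 10: return ~d;         // GGL_INVERT         MVN pixel, d
        case 11: return s | ~d;     // GGL_OR_REVERSE     BIC pixel, d, s; MVN
        case 12: return ~s;         // GGL_COPY_INVERTED  MVN pixel, s
        case 13: return ~s | d;     // GGL_OR_INVERTED    BIC pixel, s, d; MVN
        case 14: return ~(s & d);   // GGL_NAND           AND; MVN
        default: return ~0u;        // GGL_SET            MVN pixel, #0
        }
    }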
942 pixel.reg = regs.obtain(); in build_masking()
965 build_and_immediate(pixel.reg, s.reg, mask, fb.size()); in build_masking()
971 build_and_immediate(fb.reg, fb.reg, ~mask, fb.size()); in build_masking()
974 if (s.reg == fb.reg) { in build_masking()
976 if (s.reg == pixel.reg) { in build_masking()
979 MOV(AL, 0, pixel.reg, fb.reg); in build_masking()
982 ORR(AL, 0, pixel.reg, s.reg, fb.reg); in build_masking()
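build_masking() merges the write-masked source with the kept framebuffer bits, pixel = (s & mask) | (fb & ~mask), falling back to a plain MOV when the operands alias (974-979). Sketch:

    #include <cstdint>

    uint32_t apply_mask(uint32_t s, uint32_t fb, uint32_t mask) {
        uint32_t kept_s  = s  &  mask;   // build_and_immediate(pixel, s, mask)
        uint32_t kept_fb = fb & ~mask;   // build_and_immediate(fb, fb, ~mask)
        return kept_s | kept_fb;         // ORR pixel, s, fb (982)
    }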
993 ADD(AL, 0, d.reg, b.reg, reg_imm(o.reg, LSL, 2)); in base_offset()
996 if (d.reg == b.reg) { in base_offset()
997 ADD(AL, 0, d.reg, b.reg, reg_imm(o.reg, LSL, 1)); in base_offset()
998 ADD(AL, 0, d.reg, d.reg, o.reg); in base_offset()
1000 ADD(AL, 0, d.reg, o.reg, reg_imm(o.reg, LSL, 1)); in base_offset()
1001 ADD(AL, 0, d.reg, d.reg, b.reg); in base_offset()
1005 ADD(AL, 0, d.reg, b.reg, reg_imm(o.reg, LSL, 1)); in base_offset()
1008 ADD(AL, 0, d.reg, b.reg, o.reg); in base_offset()
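base_offset() multiplies the pixel offset by the format's byte size with shift-and-add only, since the ARM shifter operand is free; the two 3-byte variants (996-1001) differ only in operand order so the destination may alias either input. Sketch (the bytes-per-pixel framing is an assumption):

    #include <cstdint>

    uint8_t* base_offset(uint8_t* b, uint32_t o, int bytes_per_pixel) {
        switch (bytes_per_pixel) {
        case 4:  return b + (o << 2);       // ADD d, b, o LSL #2 (993)
        case 3:  return b + (o << 1) + o;   // ADD d, b, o LSL #1; ADD d, d, o
                                            // (or d = 3*o; d += b, per aliasing)
        case 2:  return b + (o << 1);       // ADD d, b, o LSL #1 (1005)
        default: return b + o;              // ADD d, b, o (1008)
        }
    }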
1031 int RegisterAllocator::reserveReg(int reg) in reserveReg() argument
1033 return mRegs.reserve(reg); in reserveReg()
1041 void RegisterAllocator::recycleReg(int reg) in recycleReg() argument
1043 mRegs.recycle(reg); in recycleReg()
1090 int RegisterAllocator::RegisterFile::reserve(int reg) in reserve() argument
1092 reg += mRegisterOffset; in reserve()
1093 LOG_ALWAYS_FATAL_IF(isUsed(reg), in reserve()
1095 reg); in reserve()
1096 mRegs |= (1<<reg); in reserve()
1098 return reg; in reserve()
1108 int RegisterAllocator::RegisterFile::isUsed(int reg) const in isUsed()
1110 LOG_ALWAYS_FATAL_IF(reg>=16+(int)mRegisterOffset, "invalid register %d", reg); in isUsed()
1111 return mRegs & (1<<reg); in isUsed()
1121 int i, r, reg; in obtain() local
1137 reg = reserve(r); // Param in Arm range 0-15, returns range 2-17 on Mips. in obtain()
1138 return reg; in obtain()
1159 void RegisterAllocator::RegisterFile::recycle(int reg) in recycle() argument
1167 mRegs &= ~(1<<reg); in recycle()
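The RegisterFile hits show the allocator's whole bookkeeping: a one-bit-per-register mask, reserve() trapping double allocation, recycle() clearing the bit, and mRegisterOffset rebasing the bank (per the line-1137 comment, ARM params 0-15 come back as 2-17 on MIPS). A compact sketch, with assert() standing in for LOG_ALWAYS_FATAL_IF:

    #include <cassert>
    #include <cstdint>

    class RegisterFile {
        uint32_t mRegs = 0;            // bit n set <=> register n in use
        uint32_t mRegisterOffset = 0;  // 0 on ARM; rebases the bank elsewhere
    public:
        bool isUsed(int reg) const {
            assert(reg < 16 + (int)mRegisterOffset && "invalid register");
            return (mRegs & (1u << reg)) != 0;      // (1110-1111)
        }
        int reserve(int reg) {
            reg += mRegisterOffset;                 // (1092)
            assert(!isUsed(reg) && "register already in use");
            mRegs |= (1u << reg);                   // (1096)
            return reg;
        }
        void recycle(int reg) {
            mRegs &= ~(1u << reg);                  // (1167)
        }
    };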