Lines Matching refs:instr

30 scalar_possible(struct ir2_instr *instr) in scalar_possible() argument
32 if (instr->alu.scalar_opc == SCALAR_NONE) in scalar_possible()
35 return src_ncomp(instr) == 1; in scalar_possible()
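
These matches appear to come from the ir2 instruction scheduler in Mesa's freedreno a2xx backend. Lines 30-35 give nearly all of scalar_possible(); the return type, storage class, and the early return on the SCALAR_NONE branch are not part of the match, so they are assumed in this reassembled sketch:

   /* Sketch reassembled from the matched lines above; the `static bool`
    * signature and the `return false` on the SCALAR_NONE branch are
    * assumptions, not visible in the listing. */
   static bool
   scalar_possible(struct ir2_instr *instr)
   {
      if (instr->alu.scalar_opc == SCALAR_NONE)
         return false;

      return src_ncomp(instr) == 1;
   }
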
59 alu_vector_prio(struct ir2_instr *instr) in alu_vector_prio() argument
61 if (instr->alu.vector_opc == VECTOR_NONE) in alu_vector_prio()
64 if (is_export(instr)) in alu_vector_prio()
68 if (instr->src_count == 3) in alu_vector_prio()
71 if (!scalar_possible(instr)) in alu_vector_prio()
74 return instr->src_count == 2 ? 2 : 3; in alu_vector_prio()
79 alu_scalar_prio(struct ir2_instr *instr) in alu_scalar_prio() argument
81 if (!scalar_possible(instr)) in alu_scalar_prio()
85 if (instr->src_count > 1) in alu_scalar_prio()
88 if (is_export(instr)) in alu_scalar_prio()
92 if (instr->alu.scalar_opc >= PRED_SETEs && in alu_scalar_prio()
93 instr->alu.scalar_opc <= PRED_SET_RESTOREs) in alu_scalar_prio()
97 return instr->alu.vector_opc == VECTOR_NONE ? 0 : 3; in alu_scalar_prio()
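
Lines 59-97 show the branch structure of the two priority helpers used to rank ALU co-issue candidates; the plain `return N;` lines in between do not mention instr and are missing from the match. A hedged reconstruction follows, assuming an unsigned rank; the constants marked "placeholder" and the smaller-rank-wins convention are assumptions:

   /* Branch structure taken from the matched lines; values marked
    * "placeholder" sit on unmatched lines and are assumptions. */
   static unsigned
   alu_vector_prio(struct ir2_instr *instr)
   {
      if (instr->alu.vector_opc == VECTOR_NONE)
         return ~0u;                  /* placeholder: never a vector pick */

      if (is_export(instr))
         return 4;                    /* placeholder */

      if (instr->src_count == 3)
         return 0;                    /* placeholder */

      if (!scalar_possible(instr))
         return 1;                    /* placeholder */

      return instr->src_count == 2 ? 2 : 3;                 /* line 74 */
   }

   static unsigned
   alu_scalar_prio(struct ir2_instr *instr)
   {
      if (!scalar_possible(instr))
         return ~0u;                  /* placeholder: never a scalar pick */

      /* multi-source scalar candidates are handled later through
       * scalarize_case1() (lines 348-360) */
      if (instr->src_count > 1)
         return ~0u;                  /* placeholder */

      if (is_export(instr))
         return 4;                    /* placeholder */

      if (instr->alu.scalar_opc >= PRED_SETEs &&
          instr->alu.scalar_opc <= PRED_SET_RESTOREs)
         return 5;                    /* placeholder */

      return instr->alu.vector_opc == VECTOR_NONE ? 0 : 3;  /* line 97 */
   }
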
115 if (s->instr && s->instr->block_idx != block_idx) in insert()
121 if ((s->instr && s->instr->idx == src1.num) || in insert()
131 if (s->instr_s || s->instr->src_count == 3) in insert()
134 if (s->instr->type != IR2_ALU || s->instr->alu.export >= 0) in insert()
157 scalarize_case1(struct ir2_context *ctx, struct ir2_instr *instr, bool order) in scalarize_case1() argument
159 struct ir2_src src0 = instr->src[order]; in scalarize_case1()
160 struct ir2_src src1 = instr->src[!order]; in scalarize_case1()
186 if (reg->comp[i].ref_count != !!(instr->alu.write_mask & 1 << i)) in scalarize_case1()
190 sched = insert(ctx, instr->block_idx, reg->idx, src1, &comp); in scalarize_case1()
194 ins = &ctx->instr[idx = ctx->instr_count++]; in scalarize_case1()
206 ins->pred = instr->pred; in scalarize_case1()
207 ins->block_idx = instr->block_idx; in scalarize_case1()
209 instr->src[0] = src0; in scalarize_case1()
210 instr->alu.src1_swizzle = comp; in scalarize_case1()
227 ir2_foreach_instr (instr, ctx) { in sched_next()
228 if (!instr->need_emit) in sched_next()
230 if (is_export(instr)) in sched_next()
231 export = MIN2(export, export_buf(instr->alu.export)); in sched_next()
234 ir2_foreach_instr (instr, ctx) { in sched_next()
235 if (!instr->need_emit) in sched_next()
239 if (is_export(instr) && export_buf(instr->alu.export) != export) in sched_next()
243 block_idx = instr->block_idx; in sched_next()
244 else if (block_idx != instr->block_idx || /* must be same block */ in sched_next()
245 instr->type == IR2_CF || /* CF/MEM must be alone */ in sched_next()
246 (is_export(instr) && export == SQ_MEMORY)) in sched_next()
255 ir2_foreach_src (src, instr) { in sched_next()
264 if (!p->is_ssa && p->reg == reg && p->idx < instr->idx) in sched_next()
269 is_ok &= !ctx->instr[src->num].need_emit; in sched_next()
273 if (!instr->is_ssa) { in sched_next()
275 if (!p->need_emit || p->idx >= instr->idx) in sched_next()
279 if (get_reg_src(ctx, src) == instr->reg) in sched_next()
285 if (avail_count && instr->pred != avail[0]->pred) in sched_next()
291 avail[avail_count++] = instr; in sched_next()
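
The first half of sched_next() (lines 227-291) builds the avail[] set: among instructions that still need_emit, it keeps those whose sources have already been emitted and whose predicate matches the first candidate's, after settling the export-buffer and block constraints. Below is a hypothetical, self-contained mock of that readiness scan, simplified to SSA sources only; every type and name in it is invented:

   #include <stdbool.h>
   #include <stdio.h>

   /* Toy model: each source is the index of the producing instruction,
    * mirroring the `!ctx->instr[src->num].need_emit` test at line 269. */
   struct toy_instr {
      int srcs[2];
      int src_count;
      int pred;
      bool need_emit;
   };

   static bool
   toy_ready(const struct toy_instr *list, const struct toy_instr *ins)
   {
      for (int i = 0; i < ins->src_count; i++)
         if (list[ins->srcs[i]].need_emit)   /* producer not emitted yet */
            return false;
      return true;
   }

   int
   main(void)
   {
      struct toy_instr list[4] = {
         { {0}, 0, 0, false },  /* 0: already emitted */
         { {0}, 1, 0, true  },  /* 1: reads 0 -> ready */
         { {1}, 1, 0, true  },  /* 2: reads 1, still pending -> blocked */
         { {0}, 1, 1, true  },  /* 3: ready, but different predicate */
      };
      const struct toy_instr *avail[4];
      int avail_count = 0;

      for (int i = 0; i < 4; i++) {
         const struct toy_instr *ins = &list[i];
         if (!ins->need_emit || !toy_ready(list, ins))
            continue;
         /* like line 285: keep only candidates sharing avail[0]'s predicate */
         if (avail_count && ins->pred != avail[0]->pred)
            continue;
         avail[avail_count++] = ins;
      }

      printf("available candidates: %d\n", avail_count);   /* prints 1 */
      return 0;
   }
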
300 ir2_foreach_avail (instr) { in sched_next()
301 if (instr->type == IR2_ALU) in sched_next()
304 ra_src_free(ctx, instr); in sched_next()
305 ra_reg(ctx, get_reg(instr), -1, false, 0); in sched_next()
307 instr->need_emit = false; in sched_next()
308 sched->instr = instr; in sched_next()
316 ir2_foreach_avail (instr) { in sched_next()
317 prio = alu_vector_prio(instr); in sched_next()
319 instr_v = instr; in sched_next()
326 ir2_foreach_avail (instr) { in sched_next()
327 bool compat = is_alu_compatible(instr_v, instr); in sched_next()
329 prio = alu_scalar_prio(instr); in sched_next()
334 instr_s = instr; in sched_next()
348 ir2_foreach_avail (instr) { in sched_next()
349 if (!is_alu_compatible(instr_v, instr) || !scalar_possible(instr)) in sched_next()
353 assert(instr->src_count == 2); in sched_next()
355 if (scalarize_case1(ctx, instr, 0)) { in sched_next()
356 instr_s = instr; in sched_next()
359 if (scalarize_case1(ctx, instr, 1)) { in sched_next()
360 instr_s = instr; in sched_next()
386 sched->instr = instr_v; in sched_next()
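
Lines 316-334 are the ALU co-issue selection: one pass over avail[] picks the best vector candidate by alu_vector_prio(), a second pass picks the best scalar candidate that is_alu_compatible() with it, falling back to scalarize_case1() (lines 348-360) when only 2-source candidates remain. Here is a standalone mock of that two-pass pattern, using invented types and a smaller-rank-wins convention, since ir2's actual constants and tie-breaking are not visible in this listing:

   #include <stdbool.h>
   #include <stddef.h>
   #include <stdio.h>

   struct mock_instr {
      const char *name;
      unsigned vector_prio;   /* stands in for alu_vector_prio(instr) */
      unsigned scalar_prio;   /* stands in for alu_scalar_prio(instr) */
      int group;              /* stands in for is_alu_compatible() */
   };

   static bool
   mock_compatible(const struct mock_instr *a, const struct mock_instr *b)
   {
      /* the real is_alu_compatible() checks constant/predicate state;
       * the mock only compares a group tag */
      return !a || a->group == b->group;
   }

   int
   main(void)
   {
      struct mock_instr avail[] = {
         { "mad", 0, ~0u, 1 },   /* 3-source op: vector slot only */
         { "rcp", ~0u, 0, 1 },   /* scalar-only op */
         { "add", 2, 3, 2 },     /* either slot, incompatible group */
      };
      const size_t n = sizeof(avail) / sizeof(avail[0]);
      struct mock_instr *instr_v = NULL, *instr_s = NULL;
      unsigned prio_v = ~0u, prio_s = ~0u, prio;

      /* pass 1: best vector candidate */
      for (size_t i = 0; i < n; i++) {
         prio = avail[i].vector_prio;
         if (prio < prio_v) {
            instr_v = &avail[i];
            prio_v = prio;
         }
      }

      /* pass 2: best scalar candidate compatible with the vector pick
       * (the mock simply skips the instruction already taken) */
      for (size_t i = 0; i < n; i++) {
         if (&avail[i] == instr_v || !mock_compatible(instr_v, &avail[i]))
            continue;
         prio = avail[i].scalar_prio;
         if (prio < prio_s) {
            instr_s = &avail[i];
            prio_s = prio;
         }
      }

      printf("vector slot: %s, scalar slot: %s\n",
             instr_v ? instr_v->name : "(none)",
             instr_s ? instr_s->name : "(none)");
      return 0;
   }
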
414 struct ir2_instr *instr = sched->instr, *tex_lod; in schedule_instrs() local
415 if (instr && instr->type == IR2_FETCH && instr->fetch.opc == TEX_FETCH && in schedule_instrs()
416 instr->src_count == 2) { in schedule_instrs()
418 tex_lod = &ctx->instr[ctx->instr_count++]; in schedule_instrs()
420 tex_lod->block_idx = instr->block_idx; in schedule_instrs()
421 tex_lod->pred = instr->pred; in schedule_instrs()
423 tex_lod->src[0] = instr->src[1]; in schedule_instrs()
427 sched->instr = tex_lod; in schedule_instrs()
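
Lines 414-427 appear to split a two-source TEX_FETCH so that its second source (the explicit LOD, going by the tex_lod name) gets its own instruction in this scheduling slot. A sketch stitched from the matched lines; the fields set on the unmatched lines (the new instruction's type, fetch opcode, source count and so on) are assumptions, marked as such:

   struct ir2_instr *instr = sched->instr, *tex_lod;

   if (instr && instr->type == IR2_FETCH && instr->fetch.opc == TEX_FETCH &&
       instr->src_count == 2) {
      /* allocate a new instruction to carry the LOD */
      tex_lod = &ctx->instr[ctx->instr_count++];
      tex_lod->type = IR2_FETCH;             /* assumed */
      /* the fetch opcode set on the unmatched lines is not visible here */
      tex_lod->block_idx = instr->block_idx;
      tex_lod->pred = instr->pred;
      /* the fetch's second source becomes the new instruction's only source */
      tex_lod->src[0] = instr->src[1];
      tex_lod->src_count = 1;                /* assumed */

      /* the new instruction takes this scheduling slot */
      sched->instr = tex_lod;
   }
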
432 ir2_foreach_instr (instr, ctx) in schedule_instrs()
433 free_block &= instr->block_idx != block_idx; in schedule_instrs()