/drivers/net/ethernet/huawei/hinic/

hinic_hw_csr.h:

    #define HINIC_CSR_DMA_ATTR_ADDR(idx) \
            (HINIC_DMA_ATTR_BASE + (idx) * HINIC_DMA_ATTR_STRIDE)
    …
    #define HINIC_CSR_PPF_ELECTION_ADDR(idx) \
            (HINIC_ELECTION_BASE + (idx) * HINIC_PPF_ELECTION_STRIDE)
    …
    #define HINIC_CSR_API_CMD_CHAIN_HEAD_HI_ADDR(idx) \
            (HINIC_CSR_API_CMD_BASE + 0x0 + (idx) * HINIC_CSR_API_CMD_STRIDE)

    #define HINIC_CSR_API_CMD_CHAIN_HEAD_LO_ADDR(idx) \
            (HINIC_CSR_API_CMD_BASE + 0x4 + (idx) * HINIC_CSR_API_CMD_STRIDE)

    #define HINIC_CSR_API_CMD_STATUS_HI_ADDR(idx) \
            (HINIC_CSR_API_CMD_BASE + 0x8 + (idx) * HINIC_CSR_API_CMD_STRIDE)
    [all …]
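These macros are the standard base-plus-stride scheme for addressing one register bank per hardware instance. A minimal, runnable sketch of the same idea; the base and stride values below are hypothetical, not the hinic ones:

    #include <stdio.h>

    /* Hypothetical layout: each instance owns a 0x20-byte register window. */
    #define EXAMPLE_BANK_BASE   0x2000u
    #define EXAMPLE_BANK_STRIDE 0x20u

    /* Address of the control register of instance `idx`. */
    #define EXAMPLE_CTRL_ADDR(idx) \
            (EXAMPLE_BANK_BASE + (idx) * EXAMPLE_BANK_STRIDE)

    int main(void)
    {
            for (unsigned idx = 0; idx < 4; idx++)
                    printf("instance %u -> ctrl @ 0x%04x\n",
                           idx, EXAMPLE_CTRL_ADDR(idx));
            return 0;
    }

Parenthesizing `(idx)` in the macro body, as the hinic header does, keeps expressions like `EXAMPLE_CTRL_ADDR(i + 1)` expanding correctly.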
/drivers/staging/ccree/
ssi_hash.c, in ssi_hash_digest():

    int idx = 0;
    …
    hw_desc_init(&desc[idx]);
    set_cipher_mode(&desc[idx], ctx->hw_mode);
    …
    set_din_type(&desc[idx], DMA_DLLI, state->digest_buff_dma_addr, …
    …
    set_din_sram(&desc[idx], larval_digest_addr, …
    …
    set_flow_mode(&desc[idx], S_DIN_to_HASH);
    set_setup_mode(&desc[idx], SETUP_LOAD_STATE0);
    idx++;
    …
    hw_desc_init(&desc[idx]);
    set_cipher_mode(&desc[idx], ctx->hw_mode);
    [all …]
ssi_aead.c, in hmac_setkey():

    int idx = 0;
    …
    hw_desc_init(&desc[idx]);
    set_cipher_mode(&desc[idx], hash_mode);
    set_din_sram(&desc[idx], …
    …
    set_flow_mode(&desc[idx], S_DIN_to_HASH);
    set_setup_mode(&desc[idx], SETUP_LOAD_STATE0);
    idx++;
    …
    hw_desc_init(&desc[idx]);
    set_cipher_mode(&desc[idx], hash_mode);
    set_din_const(&desc[idx], 0, HASH_LEN_SIZE);
    [all …]
ssi_ivgen.c, in ssi_ivgen_generate_pool():

    unsigned int idx = *iv_seq_len;
    …
    hw_desc_init(&iv_seq[idx]);
    set_din_sram(&iv_seq[idx], ivgen_ctx->ctr_key, AES_KEYSIZE_128);
    set_setup_mode(&iv_seq[idx], SETUP_LOAD_KEY0);
    set_cipher_config0(&iv_seq[idx], DESC_DIRECTION_ENCRYPT_ENCRYPT);
    set_flow_mode(&iv_seq[idx], S_DIN_to_AES);
    set_key_size_aes(&iv_seq[idx], CC_AES_128_BIT_KEY_SIZE);
    set_cipher_mode(&iv_seq[idx], DRV_CIPHER_CTR);
    idx++;
    …
    hw_desc_init(&iv_seq[idx]);
    [all …]
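The three ccree excerpts above all follow one idiom: a local `idx` walks an array of hardware descriptors, each slot is initialized with `hw_desc_init()` and configured through `set_*` calls, and `idx++` advances to the next slot. A self-contained sketch of that build pattern; the descriptor struct and setters below are stand-ins, not the driver's real ones:

    #include <stdio.h>

    /* Hypothetical stand-ins for the driver's HW descriptor and setters. */
    struct hw_desc { int mode, flow, setup; };

    static void hw_desc_init(struct hw_desc *d) { d->mode = d->flow = d->setup = 0; }
    static void set_mode(struct hw_desc *d, int m)  { d->mode = m; }
    static void set_flow(struct hw_desc *d, int f)  { d->flow = f; }
    static void set_setup(struct hw_desc *d, int s) { d->setup = s; }

    int main(void)
    {
            struct hw_desc desc[4];
            int idx = 0;

            /* First descriptor: load state into the engine. */
            hw_desc_init(&desc[idx]);
            set_mode(&desc[idx], 1);
            set_flow(&desc[idx], 2);
            set_setup(&desc[idx], 0);
            idx++;

            /* Second descriptor: process the data. */
            hw_desc_init(&desc[idx]);
            set_mode(&desc[idx], 1);
            set_flow(&desc[idx], 3);
            idx++;

            printf("queued %d descriptors\n", idx);
            return 0;
    }

The running index, rather than fixed slot numbers, lets conditional descriptors be inserted or skipped without renumbering the rest of the sequence.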
/drivers/net/can/sja1000/

sja1000_isa.c, in sja1000_isa_probe():

    int idx = pdev->id;
    …
             idx, port[idx], mem[idx], irq[idx]);
    …
    if (mem[idx]) {
            if (!request_mem_region(mem[idx], iosize, DRV_NAME)) {
    …
            base = ioremap_nocache(mem[idx], iosize);
    …
            if (indirect[idx] > 0 ||
                (indirect[idx] == -1 && indirect[0] > 0))
    …
            if (!request_region(port[idx], iosize, DRV_NAME)) {
    …
    dev->irq = irq[idx];
    …
    if (mem[idx]) {
    [all …]
/drivers/net/can/cc770/

cc770_isa.c, in cc770_isa_probe():

    int idx = pdev->id;
    …
             idx, port[idx], mem[idx], irq[idx]);
    if (mem[idx]) {
            if (!request_mem_region(mem[idx], iosize, KBUILD_MODNAME)) {
    …
            base = ioremap_nocache(mem[idx], iosize);
    …
            if (indirect[idx] > 0 ||
                (indirect[idx] == -1 && indirect[0] > 0))
    …
            if (!request_region(port[idx], iosize, KBUILD_MODNAME)) {
    …
    dev->irq = irq[idx];
    …
    if (mem[idx]) {
    [all …]
/drivers/net/ethernet/ti/

cpsw_ale.c, in cpsw_ale_get_field():

    int idx;
    …
    idx = start / 32;
    start -= idx * 32;
    idx = 2 - idx; /* flip */
    return (ale_entry[idx] >> start) & BITMASK(bits);

cpsw_ale.c, in cpsw_ale_set_field():

    int idx;
    …
    idx = start / 32;
    start -= idx * 32;
    idx = 2 - idx; /* flip */
    ale_entry[idx] &= ~(BITMASK(bits) << start);
    [all …]
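The cpsw_ale helpers pack an ALE table entry into three 32-bit words whose word order is reversed relative to the bit numbering, hence the `2 - idx` flip. A runnable version of the same get/set logic, assuming a three-word entry as in the excerpt:

    #include <stdint.h>
    #include <stdio.h>

    #define BITMASK(bits) ((1u << (bits)) - 1)

    /* Read `bits` bits starting at bit `start` of a 3-word, word-flipped entry. */
    static uint32_t get_field(const uint32_t entry[3], unsigned start, unsigned bits)
    {
            unsigned idx = start / 32;

            start -= idx * 32;
            idx = 2 - idx;          /* flip: entry[0] holds the top bits */
            return (entry[idx] >> start) & BITMASK(bits);
    }

    static void set_field(uint32_t entry[3], unsigned start, unsigned bits,
                          uint32_t value)
    {
            unsigned idx = start / 32;

            start -= idx * 32;
            idx = 2 - idx;          /* flip */
            entry[idx] &= ~(BITMASK(bits) << start);
            entry[idx] |= (value & BITMASK(bits)) << start;
    }

    int main(void)
    {
            uint32_t entry[3] = { 0, 0, 0 };

            set_field(entry, 40, 8, 0xab);  /* bits 40..47 land in entry[1] */
            printf("0x%02x\n", (unsigned)get_field(entry, 40, 8));
            return 0;
    }

Clearing the field with the inverted mask before OR-ing in the new value makes `set_field` safe to call on an entry that already has bits set there.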
/drivers/gpu/drm/radeon/
evergreen_cs.c, in evergreen_cs_track_validate_texture(…, unsigned idx):

    texdw[0] = radeon_get_ib_value(p, idx + 0);
    texdw[1] = radeon_get_ib_value(p, idx + 1);
    texdw[2] = radeon_get_ib_value(p, idx + 2);
    texdw[3] = radeon_get_ib_value(p, idx + 3);
    texdw[4] = radeon_get_ib_value(p, idx + 4);
    texdw[5] = radeon_get_ib_value(p, idx + 5);
    texdw[6] = radeon_get_ib_value(p, idx + 6);
    texdw[7] = radeon_get_ib_value(p, idx + 7);

evergreen_cs.c, in evergreen_packet0_check(…, unsigned idx, unsigned reg):
    [all …]
r600_cs.c, in r600_cs_common_vline_parse():

    r = radeon_cs_packet_parse(p, &wait_reg_mem, p->idx);
    …
    wait_reg_mem_info = radeon_get_ib_value(p, wait_reg_mem.idx + 1);
    …
    if ((radeon_get_ib_value(p, wait_reg_mem.idx + 2) << 2) != vline_status[0]) {
    …
    if (radeon_get_ib_value(p, wait_reg_mem.idx + 5) != RADEON_VLINE_STAT) {
    …
    r = radeon_cs_packet_parse(p, &p3reloc, p->idx + wait_reg_mem.count + 2);
    …
    h_idx = p->idx - 2;
    p->idx += wait_reg_mem.count + 2;
    p->idx += p3reloc.count + 2;

r600_cs.c, in r600_packet0_check(…, unsigned idx, unsigned reg):

    …
             idx, reg);
    [all …]
/drivers/clk/uniphier/

clk-uniphier-sys.c:

    #define UNIPHIER_LD4_SYS_CLK_NAND(idx) \
    …
            UNIPHIER_CLK_GATE("nand", (idx), "nand-200m", 0x2104, 2)

    #define UNIPHIER_PRO5_SYS_CLK_NAND(idx) \
    …
            UNIPHIER_CLK_GATE("nand", (idx), "nand-200m", 0x2104, 2)

    #define UNIPHIER_LD11_SYS_CLK_NAND(idx) \
    …
            UNIPHIER_CLK_GATE("nand", (idx), "nand-200m", 0x210c, 0)

    #define UNIPHIER_LD11_SYS_CLK_EMMC(idx) \
            UNIPHIER_CLK_GATE("emmc", (idx), NULL, 0x210c, 2)

    #define UNIPHIER_LD4_SYS_CLK_STDMAC(idx) \
            UNIPHIER_CLK_GATE("stdmac", (idx), NULL, 0x2104, 10)
    [all …]
/drivers/gpu/drm/nouveau/include/nvkm/core/

device.h:

    int (*bar    )(struct nvkm_device *, int idx, struct nvkm_bar **);
    int (*bios   )(struct nvkm_device *, int idx, struct nvkm_bios **);
    int (*bus    )(struct nvkm_device *, int idx, struct nvkm_bus **);
    int (*clk    )(struct nvkm_device *, int idx, struct nvkm_clk **);
    int (*devinit)(struct nvkm_device *, int idx, struct nvkm_devinit **);
    int (*fb     )(struct nvkm_device *, int idx, struct nvkm_fb **);
    int (*fuse   )(struct nvkm_device *, int idx, struct nvkm_fuse **);
    int (*gpio   )(struct nvkm_device *, int idx, struct nvkm_gpio **);
    int (*i2c    )(struct nvkm_device *, int idx, struct nvkm_i2c **);
    int (*ibus   )(struct nvkm_device *, int idx, struct nvkm_subdev **);
    [all …]
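Each nvkm chip definition fills a table like this with one constructor per subdevice, each taking the device, a subdevice index, and an out pointer. A compact sketch of that construct-by-table pattern, with hypothetical types in place of the nvkm ones:

    #include <stdio.h>
    #include <stdlib.h>

    struct device { const char *name; };
    struct subdev { int idx; };

    /* Hypothetical constructor signature mirroring the nvkm style:
     * int (*ctor)(struct device *, int idx, struct subdev **out); */
    typedef int (*subdev_ctor)(struct device *, int, struct subdev **);

    static int example_bar_new(struct device *dev, int idx, struct subdev **out)
    {
            (void)dev;
            *out = malloc(sizeof(**out));
            if (!*out)
                    return -1;
            (*out)->idx = idx;
            return 0;
    }

    struct chip_funcs {
            subdev_ctor bar;        /* one slot per subdevice type */
    };

    static const struct chip_funcs example_chip = { .bar = example_bar_new };

    int main(void)
    {
            struct device dev = { "example" };
            struct subdev *bar = NULL;

            /* NULL slots mean "this chip has no such subdevice". */
            if (example_chip.bar && example_chip.bar(&dev, 0, &bar) == 0)
                    printf("constructed subdev idx=%d\n", bar->idx);
            free(bar);
            return 0;
    }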
/drivers/net/ethernet/chelsio/cxgb/

fpga_defs.h:

    #define MAC_REG_ADDR(idx, reg) (MAC_REG_BASE + (idx) * 128 + (reg))

    #define MAC_REG_IDLO(idx)              MAC_REG_ADDR(idx, A_GMAC_MACID_LO)
    #define MAC_REG_IDHI(idx)              MAC_REG_ADDR(idx, A_GMAC_MACID_HI)
    #define MAC_REG_CSR(idx)               MAC_REG_ADDR(idx, A_GMAC_CSR)
    #define MAC_REG_IFS(idx)               MAC_REG_ADDR(idx, A_GMAC_IFS)
    #define MAC_REG_LARGEFRAMELENGTH(idx)  MAC_REG_ADDR(idx, A_GMAC_JUMBO_FRAME_LEN)
    #define MAC_REG_LINKDLY(idx)           MAC_REG_ADDR(idx, A_GMAC_LNK_DLY)
    #define MAC_REG_PAUSETIME(idx)         MAC_REG_ADDR(idx, A_GMAC_PAUSETIME)
    #define MAC_REG_CASTLO(idx)            MAC_REG_ADDR(idx, A_GMAC_MCAST_LO)
    #define MAC_REG_MCASTHI(idx)           MAC_REG_ADDR(idx, A_GMAC_MCAST_HI)
    [all …]
/drivers/net/wireless/mediatek/mt7601u/

main.c, in mt7601u_add_interface():

    unsigned int idx = 0;
    unsigned int wcid = GROUP_WCID(idx);
    …
    mvif->idx = idx;
    …
    mvif->group_wcid.idx = wcid;

main.c, in mt7601u_remove_interface():

    unsigned int wcid = mvif->group_wcid.idx;

main.c, in mt76_wcid_alloc():

    int i, idx = 0;
    …
    idx = ffs(~dev->wcid_mask[i]);
    if (!idx)
    …
    idx--;
    dev->wcid_mask[i] |= BIT(idx);
    [all …]
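mt76_wcid_alloc scans a bitmap stored as an array of 32-bit words, using ffs() on the inverted word to find the first free station (WCID) slot. A standalone version of that allocator; the word count is illustrative, and the GCC/Clang __builtin_ffs stands in for the kernel's ffs():

    #include <stdint.h>
    #include <stdio.h>

    #define N_WORDS 4       /* illustrative: 4 * 32 = 128 slots */

    /* Find and claim the first clear bit; returns the slot index, or -1 if full. */
    static int wcid_alloc(uint32_t mask[N_WORDS])
    {
            for (int i = 0; i < N_WORDS; i++) {
                    int idx = __builtin_ffs(~mask[i]);  /* 1-based, 0 if none */

                    if (!idx)
                            continue;       /* this word is fully allocated */
                    idx--;
                    mask[i] |= 1u << idx;   /* claim the slot */
                    return i * 32 + idx;
            }
            return -1;
    }

    int main(void)
    {
            uint32_t mask[N_WORDS] = { 0xffffffff, 0x0000000f, 0, 0 };

            printf("allocated slot %d\n", wcid_alloc(mask));  /* prints 36 */
            return 0;
    }

Inverting the word turns "first zero bit" into "first set bit", which is exactly what ffs computes; a fully allocated word inverts to zero and is skipped in one test.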
/drivers/input/misc/

ad714x.c:

    static void ad714x_button_state_machine(struct ad714x_chip *ad714x, int idx)
    {
            struct ad714x_button_plat *hw = &ad714x->hw->button[idx];
            struct ad714x_button_drv *sw = &ad714x->sw->button[idx];
            …
            dev_dbg(ad714x->dev, "button %d touched\n", idx);
            …
            dev_dbg(ad714x->dev, "button %d released\n", idx);
    …
    static void ad714x_slider_cal_sensor_val(struct ad714x_chip *ad714x, int idx)
    {
            struct ad714x_slider_plat *hw = &ad714x->hw->slider[idx];
    …
    static void ad714x_slider_cal_highest_stage(struct ad714x_chip *ad714x, int idx)
    {
            struct ad714x_slider_plat *hw = &ad714x->hw->slider[idx];
            struct ad714x_slider_drv *sw = &ad714x->sw->slider[idx];
    [all …]
/drivers/media/usb/pvrusb2/
pvrusb2-encoder.c, in pvr2_encoder_write_words():

    unsigned int idx, addr;
    …
    for (idx = 0; idx < chunkCnt; idx++) {
            addr = idx + offs;
            …
            PVR2_DECOMPOSE_LE(hdw->cmd_buffer, bAddr, data[idx]);

pvrusb2-encoder.c, in pvr2_encoder_read_words():

    unsigned int idx;
    …
    for (idx = 0; idx < chunkCnt; idx++) {
            data[idx] = PVR2_COMPOSE_LE(hdw->cmd_buffer, idx * 4);

pvrusb2-encoder.c, in pvr2_encoder_cmd():

    unsigned int idx;
    …
    for (idx = 0; idx < arg_cnt_send; idx++) {
            wrData[idx + 4] = argp[idx];
    [all …]
pvrusb2-ioread.c, in pvr2_ioread_init():

    unsigned int idx;
    …
    for (idx = 0; idx < BUFFER_COUNT; idx++) {
            cp->buffer_storage[idx] = kmalloc(BUFFER_SIZE, GFP_KERNEL);
            if (!(cp->buffer_storage[idx])) break;
    …
    if (idx < BUFFER_COUNT) {
    …
            for (idx = 0; idx < BUFFER_COUNT; idx++) {
                    if (!(cp->buffer_storage[idx])) continue;
                    kfree(cp->buffer_storage[idx]);

pvrusb2-ioread.c, in pvr2_ioread_done():

    unsigned int idx;
    …
    for (idx = 0; idx < BUFFER_COUNT; idx++) {
    [all …]
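pvr2_ioread_init shows the all-or-nothing allocation idiom: allocate every buffer, and if the loop exits early, free whatever was already allocated. The same pattern in plain C, with malloc/free standing in for kmalloc/kfree:

    #include <stdlib.h>
    #include <stdio.h>

    #define BUFFER_COUNT 4
    #define BUFFER_SIZE  (32 * 1024)

    static void *storage[BUFFER_COUNT];

    /* Allocate every buffer, or free the partial set and fail. */
    static int buffers_init(void)
    {
            unsigned int idx;

            for (idx = 0; idx < BUFFER_COUNT; idx++) {
                    storage[idx] = malloc(BUFFER_SIZE);
                    if (!storage[idx])
                            break;
            }
            if (idx < BUFFER_COUNT) {       /* partial failure: roll back */
                    for (idx = 0; idx < BUFFER_COUNT; idx++) {
                            free(storage[idx]);     /* free(NULL) is a no-op */
                            storage[idx] = NULL;
                    }
                    return -1;
            }
            return 0;
    }

    int main(void)
    {
            printf("init %s\n", buffers_init() == 0 ? "ok" : "failed");
            return 0;
    }

Testing `idx < BUFFER_COUNT` after the loop distinguishes "ran to completion" from "broke out early", so no separate error flag is needed.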
/drivers/gpu/drm/nouveau/nvkm/subdev/clk/
gk104.c, in read_clk(struct gk104_clk *clk, int idx):

    u32 sctl = nvkm_rd32(device, 0x137250 + (idx * 4));
    …
    if (idx < 7) {
            …
            if (ssel & (1 << idx)) {
                    sclk = read_pll(clk, 0x137000 + (idx * 0x20));
                    …
                    sclk = read_div(clk, idx, 0x137160, 0x1371d0);
    …
            u32 ssrc = nvkm_rd32(device, 0x137160 + (idx * 0x04));
            …
            sclk = read_div(clk, idx, 0x137160, 0x1371d0);
    …
    sclk = read_div(clk, idx, 0x137160, 0x1371d0);

gk104.c, in calc_div(struct gk104_clk *clk, int idx, u32 ref, u32 freq, u32 *ddiv):
    [all …]
gf100.c, in read_clk(struct gf100_clk *clk, int idx):

    u32 sctl = nvkm_rd32(device, 0x137250 + (idx * 4));
    …
    if (ssel & (1 << idx)) {
            if (idx < 7)
                    sclk = read_pll(clk, 0x137000 + (idx * 0x20));
    …
    sclk = read_div(clk, idx, 0x137160, 0x1371d0);

gf100.c, in calc_div(struct gf100_clk *clk, int idx, u32 ref, u32 freq, u32 *ddiv):

gf100.c, in calc_src(struct gf100_clk *clk, int idx, u32 freq, u32 *dsrc, u32 *ddiv):

    …
    sclk = read_vco(clk, 0x137160 + (idx * 4));
    if (idx < 7)
    [all …]
/drivers/media/dvb-core/

dvb_ringbuffer.c, in dvb_ringbuffer_pkt_read_user():

    ssize_t dvb_ringbuffer_pkt_read_user(struct dvb_ringbuffer *rbuf, size_t idx, …
    …
    pktlen = rbuf->data[idx] << 8;
    pktlen |= rbuf->data[(idx + 1) % rbuf->size];
    …
    idx = (idx + DVB_RINGBUFFER_PKTHDRSIZE + offset) % rbuf->size;
    …
    split = ((idx + len) > rbuf->size) ? rbuf->size - idx : 0;
    …
    if (copy_to_user(buf, rbuf->data + idx, split))
    …
    idx = 0;
    …
    if (copy_to_user(buf, rbuf->data + idx, todo))

dvb_ringbuffer.c, in dvb_ringbuffer_pkt_read():

    ssize_t dvb_ringbuffer_pkt_read(struct dvb_ringbuffer *rbuf, size_t idx, …
    …
    pktlen = rbuf->data[idx] << 8;
    [all …]
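dvb_ringbuffer_pkt_read copies a packet that may wrap around the end of the ring: when idx + len runs past size, the copy is split at the boundary and resumes at offset zero. A minimal sketch of that split copy, with memcpy in place of copy_to_user:

    #include <stdio.h>
    #include <string.h>

    /* Copy `len` bytes starting at ring offset `idx`, handling wraparound. */
    static void ring_read(const unsigned char *ring, size_t size,
                          size_t idx, unsigned char *buf, size_t len)
    {
            size_t split = (idx + len > size) ? size - idx : 0;

            if (split > 0) {
                    memcpy(buf, ring + idx, split); /* tail of the ring */
                    buf += split;
                    len -= split;
                    idx = 0;                        /* continue from the start */
            }
            memcpy(buf, ring + idx, len);
    }

    int main(void)
    {
            unsigned char ring[8] = "ABCDEFGH";
            unsigned char out[5] = { 0 };

            ring_read(ring, sizeof(ring), 6, out, 4);       /* wraps: G H A B */
            printf("%.4s\n", (char *)out);
            return 0;
    }

Computing `split` once up front keeps the common non-wrapping case down to a single copy.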
/drivers/perf/

qcom_l2_pmu.c:

    static inline u32 idx_to_reg_bit(u32 idx)
    {
            if (idx == l2_cycle_ctr_idx)
                    …
            return BIT(idx);
    }
    …
    static inline void cluster_pmu_counter_set_value(u32 idx, u64 value)
    {
            if (idx == l2_cycle_ctr_idx)
                    …
            set_l2_indirect_reg(reg_idx(IA_L2PMXEVCNTR, idx), value);
    }

    static inline u64 cluster_pmu_counter_get_value(u32 idx)
    {
            …
            if (idx == l2_cycle_ctr_idx)
                    …
            value = get_l2_indirect_reg(reg_idx(IA_L2PMXEVCNTR, idx));
    …
    static inline void cluster_pmu_counter_enable(u32 idx)
    [all …]
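idx_to_reg_bit maps a counter index to its bit in the enable/overflow registers, special-casing the dedicated cycle counter. A sketch of the idiom; the index and bit values are assumptions for illustration, not the L2 PMU's actual layout:

    #include <stdint.h>
    #include <stdio.h>

    #define BIT(n) (1u << (n))

    /* Assumed layout: 8 event counters at indices 0..7,
     * plus a dedicated cycle counter at index 8 owning bit 31. */
    #define CYCLE_CTR_BIT 31
    static const uint32_t cycle_ctr_idx = 8;

    static uint32_t idx_to_reg_bit(uint32_t idx)
    {
            if (idx == cycle_ctr_idx)
                    return BIT(CYCLE_CTR_BIT);      /* cycle counter's own bit */
            return BIT(idx);
    }

    int main(void)
    {
            printf("event ctr 2 -> 0x%08x\n", (unsigned)idx_to_reg_bit(2));
            printf("cycle ctr   -> 0x%08x\n",
                   (unsigned)idx_to_reg_bit(cycle_ctr_idx));
            return 0;
    }

Centralizing the special case in one helper lets the enable/disable and overflow paths treat all counters uniformly.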
/drivers/irqchip/

irq-bcm7120-l2.c, in bcm7120_l2_intc_irq_handle():

    unsigned int idx;
    …
    for (idx = 0; idx < b->n_words; idx++) {
            int base = idx * IRQS_PER_WORD;
            …
            pending = irq_reg_readl(gc, b->stat_offset[idx]) &
                      …
                      data->irq_map_mask[idx];

irq-bcm7120-l2.c, in bcm7120_l2_intc_init_one():

    unsigned int idx;
    …
    for (idx = 0; idx < data->n_words; idx++) {
            …
            l1_data->irq_map_mask[idx] |=
                    …
                    irq * data->n_words + idx);
            …
            l1_data->irq_map_mask[idx] = 0xffffffff;
    [all …]
/drivers/media/pci/pt3/

pt3_dma.c, in get_dma_base():

    static u32 get_dma_base(int idx)
    {
            …
            i = (idx == 1 || idx == 2) ? 3 - idx : idx;

pt3_dma.c, in next_unit():

    static u8 *next_unit(struct pt3_adapter *adap, int *idx, int *ofs)
    {
            …
            (*idx)++;
            if (*idx == adap->num_bufs)
                    *idx = 0;
            …
            return &adap->buffer[*idx].data[*ofs];

pt3_dma.c, in pt3_proc_dma():

    int idx, ofs;
    …
    idx = adap->buf_idx;
    …
    if (adap->buffer[idx].data[ofs] == PT3_BUF_CANARY)
    [all …]
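next_unit advances an (index, offset) cursor through a ring of DMA buffers, wrapping the buffer index back to zero after the last one. A toy version of that two-level wraparound (the real code steps the offset by a unit size in bytes rather than by one):

    #include <stdio.h>

    #define NUM_BUFS      3
    #define UNITS_PER_BUF 4

    /* Advance to the next unit, wrapping the offset within a buffer
     * and the buffer index within the ring. */
    static void next_unit(int *idx, int *ofs)
    {
            *ofs += 1;
            if (*ofs == UNITS_PER_BUF) {
                    *ofs = 0;
                    (*idx)++;
                    if (*idx == NUM_BUFS)
                            *idx = 0;
            }
    }

    int main(void)
    {
            int idx = 2, ofs = 3;   /* last unit of the last buffer */

            next_unit(&idx, &ofs);
            printf("idx=%d ofs=%d\n", idx, ofs);    /* wraps to idx=0 ofs=0 */
            return 0;
    }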
/drivers/net/wireless/realtek/rtlwifi/rtl8723ae/

fw.c, in _rtl8723e_fill_h2c_command():

    u8 idx;
    …
    for (idx = 0; idx < 4; idx++) {
            rtl_write_byte(rtlpriv, box_reg + idx,
                           boxcontent[idx]);
    …
    for (idx = 0; idx < 4; idx++) {
            rtl_write_byte(rtlpriv, box_reg + idx,
                           boxcontent[idx]);
    …
    for (idx = 0; idx < 4; idx++) {
            rtl_write_byte(rtlpriv, box_reg + idx,
                           boxcontent[idx]);
    [all …]
/drivers/staging/media/atomisp/pci/atomisp2/mmu/

isp_mmu.c:

    static unsigned int atomisp_get_pte(phys_addr_t pt, unsigned int idx)
    {
            …
            return *(pt_virt + idx);
    }

    static void atomisp_set_pte(phys_addr_t pt,
                                unsigned int idx, unsigned int pte)
    {
            …
            *(pt_virt + idx) = pte;
    }

isp_mmu.c, in mmu_l2_map():

    unsigned int idx;
    …
    do {
            idx = ISP_PTR_TO_L2_IDX(ptr);
            …
            pte = atomisp_get_pte(l2_pt, idx);
            …
                    l2_pt, idx, ptr, pte, phys);
            …
            atomisp_set_pte(l2_pt, idx, pte);
            …
    } while (ptr < end && idx < ISP_L2PT_PTES - 1);
    [all …]
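isp_mmu walks a two-level page table: one index selects the L2 table, a second selects the PTE within it, and atomisp_get_pte/atomisp_set_pte read and write single entries. A user-space sketch of the index split and a PTE update; the field widths here are assumed for illustration, not the ISP's actual layout:

    #include <stdint.h>
    #include <stdio.h>

    /* Assumed split: 10-bit L1 index, 10-bit L2 index, 12-bit page offset. */
    #define L2_IDX(ptr)  (((ptr) >> 12) & 0x3ff)
    #define L1_IDX(ptr)  (((ptr) >> 22) & 0x3ff)
    #define L2PT_PTES    1024

    static uint32_t l2_table[L2PT_PTES];

    static uint32_t get_pte(const uint32_t *pt, unsigned idx) { return pt[idx]; }
    static void set_pte(uint32_t *pt, unsigned idx, uint32_t pte) { pt[idx] = pte; }

    int main(void)
    {
            uint32_t ptr = 0x00403000;      /* some virtual address */
            unsigned idx = L2_IDX(ptr);

            /* Map the page: store the physical page number plus a valid bit. */
            set_pte(l2_table, idx, (0x12345u << 12) | 1);
            printf("l1=%u l2=%u pte=0x%08x\n",
                   (unsigned)L1_IDX(ptr), idx, (unsigned)get_pte(l2_table, idx));
            return 0;
    }

The `idx < ISP_L2PT_PTES - 1` bound in the excerpt's loop keeps a mapping from running off the end of one L2 table; crossing into the next table requires going back up through the L1 index.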
/drivers/gpu/drm/i915/selftests/

i915_syncmap.c, in __sync_print(…, unsigned int idx):

    len = scnprintf(buf, *sz, "%x-> ", idx);

i915_syncmap.c, in check_seqno():

    static int check_seqno(struct i915_syncmap *leaf, unsigned int idx, u32 seqno)
    {
            …
            if (__sync_seqno(leaf)[idx] != seqno) {
                    …
                           __func__, idx, __sync_seqno(leaf)[idx], seqno);

i915_syncmap.c, in igt_syncmap_join_below():

    unsigned int step, order, idx;
    …
    for (idx = 1; idx < KSYNCMAP; idx++) {
            if (i915_syncmap_is_later(&sync, context + idx, 0)) {
                    …
                           context + idx, order, step);

i915_syncmap.c, in igt_syncmap_neighbours():

    unsigned int idx;
    [all …]