
Searched refs:idx (Results 1 – 25 of 2348) sorted by relevance


/drivers/net/ethernet/huawei/hinic/
hinic_hw_csr.h
21 #define HINIC_CSR_DMA_ATTR_ADDR(idx) \ argument
22 (HINIC_DMA_ATTR_BASE + (idx) * HINIC_DMA_ATTR_STRIDE)
26 #define HINIC_CSR_PPF_ELECTION_ADDR(idx) \ argument
27 (HINIC_ELECTION_BASE + (idx) * HINIC_PPF_ELECTION_STRIDE)
34 #define HINIC_CSR_API_CMD_CHAIN_HEAD_HI_ADDR(idx) \ argument
35 (HINIC_CSR_API_CMD_BASE + 0x0 + (idx) * HINIC_CSR_API_CMD_STRIDE)
37 #define HINIC_CSR_API_CMD_CHAIN_HEAD_LO_ADDR(idx) \ argument
38 (HINIC_CSR_API_CMD_BASE + 0x4 + (idx) * HINIC_CSR_API_CMD_STRIDE)
40 #define HINIC_CSR_API_CMD_STATUS_HI_ADDR(idx) \ argument
41 (HINIC_CSR_API_CMD_BASE + 0x8 + (idx) * HINIC_CSR_API_CMD_STRIDE)
[all …]
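The HINIC CSR macros above all follow the usual base-plus-stride register addressing scheme. Below is a minimal standalone sketch of that pattern; the names and offsets (EXAMPLE_ATTR_BASE, EXAMPLE_ATTR_STRIDE) are hypothetical and are not the HINIC register map.

#include <stdint.h>

/* Hypothetical base offset and per-entry stride, for illustration only. */
#define EXAMPLE_ATTR_BASE    0x0100u
#define EXAMPLE_ATTR_STRIDE  0x0004u

/* Register offset of table entry idx: one fixed base plus idx strides. */
#define EXAMPLE_ATTR_ADDR(idx) \
	(EXAMPLE_ATTR_BASE + (idx) * EXAMPLE_ATTR_STRIDE)

/* EXAMPLE_ATTR_ADDR(3) == 0x010c: the fourth entry in the table. */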
/drivers/crypto/ccree/
cc_aead.c
303 unsigned int idx = 0; in hmac_setkey() local
309 hw_desc_init(&desc[idx]); in hmac_setkey()
310 set_cipher_mode(&desc[idx], hash_mode); in hmac_setkey()
311 set_din_sram(&desc[idx], in hmac_setkey()
315 set_flow_mode(&desc[idx], S_DIN_to_HASH); in hmac_setkey()
316 set_setup_mode(&desc[idx], SETUP_LOAD_STATE0); in hmac_setkey()
317 idx++; in hmac_setkey()
320 hw_desc_init(&desc[idx]); in hmac_setkey()
321 set_cipher_mode(&desc[idx], hash_mode); in hmac_setkey()
322 set_din_const(&desc[idx], 0, ctx->hash_len); in hmac_setkey()
[all …]
cc_hash.c
342 int idx) in cc_fin_result() argument
350 hw_desc_init(&desc[idx]); in cc_fin_result()
351 set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode); in cc_fin_result()
352 set_dout_dlli(&desc[idx], state->digest_result_dma_addr, digestsize, in cc_fin_result()
354 set_queue_last_ind(ctx->drvdata, &desc[idx]); in cc_fin_result()
355 set_flow_mode(&desc[idx], S_HASH_to_DOUT); in cc_fin_result()
356 set_setup_mode(&desc[idx], SETUP_WRITE_STATE0); in cc_fin_result()
357 set_cipher_config1(&desc[idx], HASH_PADDING_DISABLED); in cc_fin_result()
358 cc_set_endianity(ctx->hash_mode, &desc[idx]); in cc_fin_result()
359 idx++; in cc_fin_result()
[all …]
/drivers/net/ethernet/microchip/lan966x/
lan966x_ethtool.c
300 uint idx = i * lan966x->num_stats; in lan966x_stats_update() local
308 lan966x_add_cnt(&lan966x->stats[idx++], in lan966x_stats_update()
362 u32 idx; in lan966x_get_eth_mac_stats() local
366 idx = port->chip_port * lan966x->num_stats; in lan966x_get_eth_mac_stats()
371 lan966x->stats[idx + SYS_COUNT_TX_UC] + in lan966x_get_eth_mac_stats()
372 lan966x->stats[idx + SYS_COUNT_TX_MC] + in lan966x_get_eth_mac_stats()
373 lan966x->stats[idx + SYS_COUNT_TX_BC] + in lan966x_get_eth_mac_stats()
374 lan966x->stats[idx + SYS_COUNT_TX_PMAC_UC] + in lan966x_get_eth_mac_stats()
375 lan966x->stats[idx + SYS_COUNT_TX_PMAC_MC] + in lan966x_get_eth_mac_stats()
376 lan966x->stats[idx + SYS_COUNT_TX_PMAC_BC]; in lan966x_get_eth_mac_stats()
[all …]
/drivers/net/ethernet/microchip/vcap/
vcap_api_debugfs_kunit.c
41 int idx; in test_val_keyset() local
46 for (idx = 0; idx < kslist->cnt; idx++) { in test_val_keyset()
47 if (kslist->keysets[idx] == VCAP_KFS_ETAG) in test_val_keyset()
48 return kslist->keysets[idx]; in test_val_keyset()
49 if (kslist->keysets[idx] == in test_val_keyset()
51 return kslist->keysets[idx]; in test_val_keyset()
52 if (kslist->keysets[idx] == in test_val_keyset()
54 return kslist->keysets[idx]; in test_val_keyset()
55 if (kslist->keysets[idx] == in test_val_keyset()
57 return kslist->keysets[idx]; in test_val_keyset()
[all …]
/drivers/net/ethernet/microchip/sparx5/
sparx5_calendar.c
162 u32 cal[7], value, idx, portno; in sparx5_config_auto_calendar() local
216 for (idx = 0; idx < ARRAY_SIZE(cal); idx++) in sparx5_config_auto_calendar()
217 spx5_wr(cal[idx], sparx5, QSYS_CAL_AUTO(idx)); in sparx5_config_auto_calendar()
228 for (idx = 2; idx < 5; idx++) in sparx5_config_auto_calendar()
231 HSCH_OUTB_SHARE_ENA(idx)); in sparx5_config_auto_calendar()
256 u32 idx = 0, len = 0; in sparx5_dsm_cal_len() local
258 while (idx < SPX5_DSM_CAL_LEN) { in sparx5_dsm_cal_len()
259 if (cal[idx] != SPX5_DSM_CAL_EMPTY) in sparx5_dsm_cal_len()
261 idx++; in sparx5_dsm_cal_len()
268 u32 idx = 0, tmp; in sparx5_dsm_cp_cal() local
[all …]
sparx5_main.c
223 int idx, jdx; in sparx5_create_targets() local
225 for (idx = 0, jdx = 0; jdx < ARRAY_SIZE(sparx5_main_iomap); jdx++) { in sparx5_create_targets()
228 if (idx == iomap->range) { in sparx5_create_targets()
229 range_id[idx] = jdx; in sparx5_create_targets()
230 idx++; in sparx5_create_targets()
233 for (idx = 0; idx < IO_RANGES; idx++) { in sparx5_create_targets()
234 iores[idx] = platform_get_resource(sparx5->pdev, IORESOURCE_MEM, in sparx5_create_targets()
235 idx); in sparx5_create_targets()
236 if (!iores[idx]) { in sparx5_create_targets()
240 iomem[idx] = devm_ioremap(sparx5->dev, in sparx5_create_targets()
[all …]
sparx5_vcap_impl.c
732 int size, idx; in sparx5_vcap_is_known_etype() local
752 for (idx = 0; idx < size; ++idx) in sparx5_vcap_is_known_etype()
753 if (known_etypes[idx] == etype) in sparx5_vcap_is_known_etype()
769 int idx, jdx, lookup; in sparx5_vcap_validate_keyset() local
804 for (idx = 0; idx < kslist->cnt; ++idx) in sparx5_vcap_validate_keyset()
806 if (kslist->keysets[idx] == keysets[jdx]) in sparx5_vcap_validate_keyset()
807 return kslist->keysets[idx]; in sparx5_vcap_validate_keyset()
925 int idx; in sparx5_vcap_is0_cache_write() local
933 for (idx = 0; idx < count; ++idx) { in sparx5_vcap_is0_cache_write()
935 spx5_wr(keystr[idx] & mskstr[idx], sparx5, in sparx5_vcap_is0_cache_write()
[all …]
/drivers/net/can/sja1000/
sja1000_isa.c
122 int idx = pdev->id; in sja1000_isa_probe() local
126 idx, port[idx], mem[idx], irq[idx]); in sja1000_isa_probe()
128 if (mem[idx]) { in sja1000_isa_probe()
129 if (!request_mem_region(mem[idx], iosize, DRV_NAME)) { in sja1000_isa_probe()
133 base = ioremap(mem[idx], iosize); in sja1000_isa_probe()
139 if (indirect[idx] > 0 || in sja1000_isa_probe()
140 (indirect[idx] == -1 && indirect[0] > 0)) in sja1000_isa_probe()
142 if (!request_region(port[idx], iosize, DRV_NAME)) { in sja1000_isa_probe()
155 dev->irq = irq[idx]; in sja1000_isa_probe()
157 if (mem[idx]) { in sja1000_isa_probe()
[all …]
/drivers/net/netdevsim/
macsec.c
38 int idx; in nsim_macsec_add_secy() local
43 for (idx = 0; idx < NSIM_MACSEC_MAX_SECY_COUNT; idx++) { in nsim_macsec_add_secy()
44 if (!ns->macsec.nsim_secy[idx].used) in nsim_macsec_add_secy()
48 if (idx == NSIM_MACSEC_MAX_SECY_COUNT) { in nsim_macsec_add_secy()
55 __func__, sci_to_cpu(ctx->secy->sci), idx); in nsim_macsec_add_secy()
56 ns->macsec.nsim_secy[idx].used = true; in nsim_macsec_add_secy()
57 ns->macsec.nsim_secy[idx].nsim_rxsc_count = 0; in nsim_macsec_add_secy()
58 ns->macsec.nsim_secy[idx].sci = ctx->secy->sci; in nsim_macsec_add_secy()
67 int idx; in nsim_macsec_upd_secy() local
69 idx = nsim_macsec_find_secy(ns, ctx->secy->sci); in nsim_macsec_upd_secy()
[all …]
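nsim_macsec_add_secy() above scans a fixed-size table for the first unused slot and claims it. A minimal sketch of that pattern follows; the entry type, table size, and names (struct entry, MAX_ENTRIES, entry_alloc) are simplified placeholders, not the netdevsim structures.

#include <stdbool.h>

#define MAX_ENTRIES 16   /* hypothetical table size */

struct entry {
	bool used;
	unsigned long long sci;   /* stands in for the MACsec SCI */
};

/* Return the index of the first free slot, or -1 if the table is full. */
static int entry_alloc(struct entry *tbl, unsigned long long sci)
{
	int idx;

	for (idx = 0; idx < MAX_ENTRIES; idx++)
		if (!tbl[idx].used)
			break;

	if (idx == MAX_ENTRIES)
		return -1;        /* no free slot left */

	tbl[idx].used = true;     /* claim the slot */
	tbl[idx].sci = sci;
	return idx;
}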
/drivers/net/can/cc770/
cc770_isa.c
167 int idx = pdev->id; in cc770_isa_probe() local
172 idx, port[idx], mem[idx], irq[idx]); in cc770_isa_probe()
173 if (mem[idx]) { in cc770_isa_probe()
174 if (!request_mem_region(mem[idx], iosize, KBUILD_MODNAME)) { in cc770_isa_probe()
178 base = ioremap(mem[idx], iosize); in cc770_isa_probe()
184 if (indirect[idx] > 0 || in cc770_isa_probe()
185 (indirect[idx] == -1 && indirect[0] > 0)) in cc770_isa_probe()
187 if (!request_region(port[idx], iosize, KBUILD_MODNAME)) { in cc770_isa_probe()
200 dev->irq = irq[idx]; in cc770_isa_probe()
202 if (mem[idx]) { in cc770_isa_probe()
[all …]
/drivers/gpu/drm/radeon/
evergreen_cs.c
755 unsigned idx) in evergreen_cs_track_validate_texture() argument
763 texdw[0] = radeon_get_ib_value(p, idx + 0); in evergreen_cs_track_validate_texture()
764 texdw[1] = radeon_get_ib_value(p, idx + 1); in evergreen_cs_track_validate_texture()
765 texdw[2] = radeon_get_ib_value(p, idx + 2); in evergreen_cs_track_validate_texture()
766 texdw[3] = radeon_get_ib_value(p, idx + 3); in evergreen_cs_track_validate_texture()
767 texdw[4] = radeon_get_ib_value(p, idx + 4); in evergreen_cs_track_validate_texture()
768 texdw[5] = radeon_get_ib_value(p, idx + 5); in evergreen_cs_track_validate_texture()
769 texdw[6] = radeon_get_ib_value(p, idx + 6); in evergreen_cs_track_validate_texture()
770 texdw[7] = radeon_get_ib_value(p, idx + 7); in evergreen_cs_track_validate_texture()
1049 unsigned idx, unsigned reg) in evergreen_packet0_check() argument
[all …]
r600_cs.c
837 r = radeon_cs_packet_parse(p, &wait_reg_mem, p->idx); in r600_cs_common_vline_parse()
848 wait_reg_mem_info = radeon_get_ib_value(p, wait_reg_mem.idx + 1); in r600_cs_common_vline_parse()
864 if ((radeon_get_ib_value(p, wait_reg_mem.idx + 2) << 2) != vline_status[0]) { in r600_cs_common_vline_parse()
869 if (radeon_get_ib_value(p, wait_reg_mem.idx + 5) != RADEON_VLINE_STAT) { in r600_cs_common_vline_parse()
875 r = radeon_cs_packet_parse(p, &p3reloc, p->idx + wait_reg_mem.count + 2); in r600_cs_common_vline_parse()
879 h_idx = p->idx - 2; in r600_cs_common_vline_parse()
880 p->idx += wait_reg_mem.count + 2; in r600_cs_common_vline_parse()
881 p->idx += p3reloc.count + 2; in r600_cs_common_vline_parse()
918 unsigned idx, unsigned reg) in r600_packet0_check() argument
927 idx, reg); in r600_packet0_check()
[all …]
/drivers/clk/uniphier/
clk-uniphier-sys.c
27 #define UNIPHIER_LD4_SYS_CLK_NAND(idx) \ argument
29 UNIPHIER_CLK_GATE("nand", (idx), "nand-50m", 0x2104, 2)
31 #define UNIPHIER_PRO5_SYS_CLK_NAND(idx) \ argument
33 UNIPHIER_CLK_GATE("nand", (idx), "nand-50m", 0x2104, 2)
35 #define UNIPHIER_LD11_SYS_CLK_NAND(idx) \ argument
37 UNIPHIER_CLK_GATE("nand", (idx), "nand-50m", 0x210c, 0)
39 #define UNIPHIER_SYS_CLK_NAND_4X(idx) \ argument
40 UNIPHIER_CLK_FACTOR("nand-4x", (idx), "nand", 4, 1)
42 #define UNIPHIER_LD11_SYS_CLK_EMMC(idx) \ argument
43 UNIPHIER_CLK_GATE("emmc", (idx), NULL, 0x210c, 2)
[all …]
/drivers/net/ethernet/ti/
cpsw_ale.c
109 int idx, idx2; in cpsw_ale_get_field() local
112 idx = start / 32; in cpsw_ale_get_field()
115 if (idx != idx2) { in cpsw_ale_get_field()
119 start -= idx * 32; in cpsw_ale_get_field()
120 idx = 2 - idx; /* flip */ in cpsw_ale_get_field()
121 return (hi_val + (ale_entry[idx] >> start)) & BITMASK(bits); in cpsw_ale_get_field()
127 int idx, idx2; in cpsw_ale_set_field() local
130 idx = start / 32; in cpsw_ale_set_field()
133 if (idx != idx2) { in cpsw_ale_set_field()
138 start -= idx * 32; in cpsw_ale_set_field()
[all …]
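cpsw_ale_get_field() and cpsw_ale_set_field() above handle fields that may straddle two 32-bit words of an ALE table entry. The sketch below shows the generic cross-word extraction only, assuming a plain array of words with 1 <= bits <= 32; it deliberately ignores the ALE-specific word-order flip shown in the excerpt, and the name field_get is hypothetical.

#include <stdint.h>

/* Read a field of `bits` bits starting at bit `start` from an array of
 * 32-bit words, handling the case where the field crosses a word boundary. */
static uint32_t field_get(const uint32_t *words, int start, int bits)
{
	int idx = start / 32;                 /* word holding the low bits  */
	int idx2 = (start + bits - 1) / 32;   /* word holding the high bits */
	int shift = start % 32;
	uint64_t val = words[idx] >> shift;

	if (idx2 != idx)                      /* field crosses a word boundary */
		val |= (uint64_t)words[idx2] << (32 - shift);

	return (uint32_t)(val & ((1ULL << bits) - 1));
}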
/drivers/net/ethernet/chelsio/cxgb/
fpga_defs.h
215 #define MAC_REG_ADDR(idx, reg) (MAC_REG_BASE + (idx) * 128 + (reg)) argument
217 #define MAC_REG_IDLO(idx) MAC_REG_ADDR(idx, A_GMAC_MACID_LO) argument
218 #define MAC_REG_IDHI(idx) MAC_REG_ADDR(idx, A_GMAC_MACID_HI) argument
219 #define MAC_REG_CSR(idx) MAC_REG_ADDR(idx, A_GMAC_CSR) argument
220 #define MAC_REG_IFS(idx) MAC_REG_ADDR(idx, A_GMAC_IFS) argument
221 #define MAC_REG_LARGEFRAMELENGTH(idx) MAC_REG_ADDR(idx, A_GMAC_JUMBO_FRAME_LEN) argument
222 #define MAC_REG_LINKDLY(idx) MAC_REG_ADDR(idx, A_GMAC_LNK_DLY) argument
223 #define MAC_REG_PAUSETIME(idx) MAC_REG_ADDR(idx, A_GMAC_PAUSETIME) argument
224 #define MAC_REG_CASTLO(idx) MAC_REG_ADDR(idx, A_GMAC_MCAST_LO) argument
225 #define MAC_REG_MCASTHI(idx) MAC_REG_ADDR(idx, A_GMAC_MCAST_HI) argument
[all …]
/drivers/net/wireless/mediatek/mt7601u/
main.c
49 unsigned int idx = 0; in mt7601u_add_interface() local
50 unsigned int wcid = GROUP_WCID(idx); in mt7601u_add_interface()
57 mvif->idx = idx; in mt7601u_add_interface()
65 mvif->group_wcid.idx = wcid; in mt7601u_add_interface()
76 unsigned int wcid = mvif->group_wcid.idx; in mt7601u_remove_interface()
189 int i, idx = 0; in mt76_wcid_alloc() local
192 idx = ffs(~dev->wcid_mask[i]); in mt76_wcid_alloc()
193 if (!idx) in mt76_wcid_alloc()
196 idx--; in mt76_wcid_alloc()
197 dev->wcid_mask[i] |= BIT(idx); in mt76_wcid_alloc()
[all …]
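mt76_wcid_alloc() above hands out station (WCID) indexes from a bitmap: ffs() on the inverted word finds the first free bit, which is then set to claim it. Here is a minimal user-space sketch of the same allocator; the bitmap size N_WCID_WORDS and the function name wcid_alloc are assumptions for illustration.

#include <stdint.h>
#include <strings.h>   /* ffs() */

#define N_WCID_WORDS 4   /* hypothetical bitmap size: 128 indexes */

/* Return the first free index and mark it used, or -1 if all are taken. */
static int wcid_alloc(uint32_t *mask)
{
	int i, idx;

	for (i = 0; i < N_WCID_WORDS; i++) {
		idx = ffs(~mask[i]);      /* 1-based position of first free bit */
		if (!idx)
			continue;         /* this word is fully used */
		idx--;
		mask[i] |= 1u << idx;     /* claim it */
		return i * 32 + idx;
	}
	return -1;
}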
/drivers/input/misc/
ad714x.c
213 static void ad714x_button_state_machine(struct ad714x_chip *ad714x, int idx) in ad714x_button_state_machine() argument
215 struct ad714x_button_plat *hw = &ad714x->hw->button[idx]; in ad714x_button_state_machine()
216 struct ad714x_button_drv *sw = &ad714x->sw->button[idx]; in ad714x_button_state_machine()
222 dev_dbg(ad714x->dev, "button %d touched\n", idx); in ad714x_button_state_machine()
232 dev_dbg(ad714x->dev, "button %d released\n", idx); in ad714x_button_state_machine()
248 static void ad714x_slider_cal_sensor_val(struct ad714x_chip *ad714x, int idx) in ad714x_slider_cal_sensor_val() argument
250 struct ad714x_slider_plat *hw = &ad714x->hw->slider[idx]; in ad714x_slider_cal_sensor_val()
266 static void ad714x_slider_cal_highest_stage(struct ad714x_chip *ad714x, int idx) in ad714x_slider_cal_highest_stage() argument
268 struct ad714x_slider_plat *hw = &ad714x->hw->slider[idx]; in ad714x_slider_cal_highest_stage()
269 struct ad714x_slider_drv *sw = &ad714x->sw->slider[idx]; in ad714x_slider_cal_highest_stage()
[all …]
/drivers/gpu/drm/i915/display/
intel_display_power_map.c
111 .hsw.idx = HSW_PW_CTL_IDX_GLOBAL,
145 .hsw.idx = HSW_PW_CTL_IDX_GLOBAL,
204 .vlv.idx = PUNIT_PWGT_IDX_DISP2D,
211 .vlv.idx = PUNIT_PWGT_IDX_DPIO_TX_B_LANES_01),
213 .vlv.idx = PUNIT_PWGT_IDX_DPIO_TX_B_LANES_23),
215 .vlv.idx = PUNIT_PWGT_IDX_DPIO_TX_C_LANES_01),
217 .vlv.idx = PUNIT_PWGT_IDX_DPIO_TX_C_LANES_23),
223 .vlv.idx = PUNIT_PWGT_IDX_DPIO_CMN_BC,
291 .vlv.idx = PUNIT_PWGT_IDX_DPIO_CMN_BC,
294 .vlv.idx = PUNIT_PWGT_IDX_DPIO_CMN_D,
[all …]
/drivers/media/usb/pvrusb2/
pvrusb2-encoder.c
30 unsigned int idx,addr; in pvr2_encoder_write_words() local
51 for (idx = 0; idx < chunkCnt; idx++) { in pvr2_encoder_write_words()
52 addr = idx + offs; in pvr2_encoder_write_words()
56 PVR2_DECOMPOSE_LE(hdw->cmd_buffer, bAddr,data[idx]); in pvr2_encoder_write_words()
76 unsigned int idx; in pvr2_encoder_read_words() local
110 for (idx = 0; idx < chunkCnt; idx++) { in pvr2_encoder_read_words()
111 data[idx] = PVR2_COMPOSE_LE(hdw->cmd_buffer,idx*4); in pvr2_encoder_read_words()
137 unsigned int idx; in pvr2_encoder_cmd() local
205 for (idx = 0; idx < arg_cnt_send; idx++) { in pvr2_encoder_cmd()
206 wrData[idx+4] = argp[idx]; in pvr2_encoder_cmd()
[all …]
/drivers/gpu/drm/nouveau/nvkm/subdev/clk/
gk104.c
145 read_clk(struct gk104_clk *clk, int idx) in read_clk() argument
148 u32 sctl = nvkm_rd32(device, 0x137250 + (idx * 4)); in read_clk()
151 if (idx < 7) { in read_clk()
153 if (ssel & (1 << idx)) { in read_clk()
154 sclk = read_pll(clk, 0x137000 + (idx * 0x20)); in read_clk()
157 sclk = read_div(clk, idx, 0x137160, 0x1371d0); in read_clk()
161 u32 ssrc = nvkm_rd32(device, 0x137160 + (idx * 0x04)); in read_clk()
163 sclk = read_div(clk, idx, 0x137160, 0x1371d0); in read_clk()
172 sclk = read_div(clk, idx, 0x137160, 0x1371d0); in read_clk()
223 calc_div(struct gk104_clk *clk, int idx, u32 ref, u32 freq, u32 *ddiv) in calc_div() argument
[all …]
gf100.c
133 read_clk(struct gf100_clk *clk, int idx) in read_clk() argument
136 u32 sctl = nvkm_rd32(device, 0x137250 + (idx * 4)); in read_clk()
140 if (ssel & (1 << idx)) { in read_clk()
141 if (idx < 7) in read_clk()
142 sclk = read_pll(clk, 0x137000 + (idx * 0x20)); in read_clk()
147 sclk = read_div(clk, idx, 0x137160, 0x1371d0); in read_clk()
210 calc_div(struct gf100_clk *clk, int idx, u32 ref, u32 freq, u32 *ddiv) in calc_div() argument
221 calc_src(struct gf100_clk *clk, int idx, u32 freq, u32 *dsrc, u32 *ddiv) in calc_src() argument
243 sclk = read_vco(clk, 0x137160 + (idx * 4)); in calc_src()
244 if (idx < 7) in calc_src()
[all …]
/drivers/clk/baikal-t1/
clk-ccu-pll.c
89 int idx; in ccu_pll_find_desc() local
91 for (idx = 0; idx < CCU_PLL_NUM; ++idx) { in ccu_pll_find_desc()
92 if (pll_info[idx].id == clk_id) in ccu_pll_find_desc()
93 return data->plls[idx]; in ccu_pll_find_desc()
150 int idx, ret; in ccu_pll_clk_register() local
152 for (idx = 0; idx < CCU_PLL_NUM; ++idx) { in ccu_pll_clk_register()
153 const struct ccu_pll_info *info = &pll_info[idx]; in ccu_pll_clk_register()
158 if (!data->plls[idx]) in ccu_pll_clk_register()
159 data->plls[idx] = ERR_PTR(-EPROBE_DEFER); in ccu_pll_clk_register()
173 data->plls[idx] = ccu_pll_hw_register(&init); in ccu_pll_clk_register()
[all …]
/drivers/perf/hisilicon/
hisi_pcie_pmu.c
178 #define EXT_COUNTER_IS_USED(idx) ((idx) & BIT(16)) argument
185 static u32 hisi_pcie_pmu_get_offset(u32 offset, u32 idx) in hisi_pcie_pmu_get_offset() argument
187 return offset + HISI_PCIE_REG_STEP * idx; in hisi_pcie_pmu_get_offset()
191 u32 idx) in hisi_pcie_pmu_readl() argument
193 u32 offset = hisi_pcie_pmu_get_offset(reg_offset, idx); in hisi_pcie_pmu_readl()
198 static void hisi_pcie_pmu_writel(struct hisi_pcie_pmu *pcie_pmu, u32 reg_offset, u32 idx, u32 val) in hisi_pcie_pmu_writel() argument
200 u32 offset = hisi_pcie_pmu_get_offset(reg_offset, idx); in hisi_pcie_pmu_writel()
205 static u64 hisi_pcie_pmu_readq(struct hisi_pcie_pmu *pcie_pmu, u32 reg_offset, u32 idx) in hisi_pcie_pmu_readq() argument
207 u32 offset = hisi_pcie_pmu_get_offset(reg_offset, idx); in hisi_pcie_pmu_readq()
212 static void hisi_pcie_pmu_writeq(struct hisi_pcie_pmu *pcie_pmu, u32 reg_offset, u32 idx, u64 val) in hisi_pcie_pmu_writeq() argument
[all …]
/drivers/media/dvb-core/
dvb_ringbuffer.c
252 ssize_t dvb_ringbuffer_pkt_read_user(struct dvb_ringbuffer *rbuf, size_t idx, in dvb_ringbuffer_pkt_read_user() argument
259 pktlen = rbuf->data[idx] << 8; in dvb_ringbuffer_pkt_read_user()
260 pktlen |= rbuf->data[(idx + 1) % rbuf->size]; in dvb_ringbuffer_pkt_read_user()
264 idx = (idx + DVB_RINGBUFFER_PKTHDRSIZE + offset) % rbuf->size; in dvb_ringbuffer_pkt_read_user()
266 split = ((idx + len) > rbuf->size) ? rbuf->size - idx : 0; in dvb_ringbuffer_pkt_read_user()
268 if (copy_to_user(buf, rbuf->data+idx, split)) in dvb_ringbuffer_pkt_read_user()
272 idx = 0; in dvb_ringbuffer_pkt_read_user()
274 if (copy_to_user(buf, rbuf->data+idx, todo)) in dvb_ringbuffer_pkt_read_user()
280 ssize_t dvb_ringbuffer_pkt_read(struct dvb_ringbuffer *rbuf, size_t idx, in dvb_ringbuffer_pkt_read() argument
287 pktlen = rbuf->data[idx] << 8; in dvb_ringbuffer_pkt_read()
[all …]
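dvb_ringbuffer_pkt_read() and dvb_ringbuffer_pkt_read_user() above copy a packet out of a circular buffer in at most two pieces: the tail up to the end of the buffer, then the remainder from index 0. The sketch below shows only that split/wraparound copy with plain memcpy in place of copy_to_user; types and the name ring_copy_out are simplified assumptions, and the caller is assumed to have already bounded len.

#include <stddef.h>
#include <string.h>

/* Copy len bytes starting at ring index idx into dst, wrapping once if
 * the range runs past the end of the ring. */
static void ring_copy_out(const unsigned char *ring, size_t ring_size,
			  size_t idx, unsigned char *dst, size_t len)
{
	/* bytes that fit before the copy would run off the end */
	size_t split = (idx + len > ring_size) ? ring_size - idx : 0;

	if (split > 0) {
		memcpy(dst, ring + idx, split);
		dst += split;
		len -= split;
		idx = 0;             /* wrap to the start of the buffer */
	}
	memcpy(dst, ring + idx, len);
}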
