/kernel/linux/linux-5.10/drivers/media/platform/
m2m-deinterlace.c
    142  struct dma_interleaved_template *xt;  member
    235  ctx->xt->numf = s_height / 2;  in deinterlace_issue_dma()
    236  ctx->xt->sgl[0].size = s_width;  in deinterlace_issue_dma()
    237  ctx->xt->sgl[0].icg = s_width;  in deinterlace_issue_dma()
    238  ctx->xt->src_start = p_in;  in deinterlace_issue_dma()
    239  ctx->xt->dst_start = p_out;  in deinterlace_issue_dma()
    242  ctx->xt->numf = s_height / 2;  in deinterlace_issue_dma()
    243  ctx->xt->sgl[0].size = s_width;  in deinterlace_issue_dma()
    244  ctx->xt->sgl[0].icg = s_width;  in deinterlace_issue_dma()
    245  ctx->xt->src_start = p_in + s_size / 2;  in deinterlace_issue_dma()
    [all …]

/kernel/linux/linux-5.10/net/netfilter/
x_tables.c
    58   static struct xt_af *xt;  variable
    73   mutex_lock(&xt[af].mutex);  in xt_register_target()
    74   list_add(&target->list, &xt[af].target);  in xt_register_target()
    75   mutex_unlock(&xt[af].mutex);  in xt_register_target()
    85   mutex_lock(&xt[af].mutex);  in xt_unregister_target()
    87   mutex_unlock(&xt[af].mutex);  in xt_unregister_target()
    123  mutex_lock(&xt[af].mutex);  in xt_register_match()
    124  list_add(&match->list, &xt[af].match);  in xt_register_match()
    125  mutex_unlock(&xt[af].mutex);  in xt_register_match()
    135  mutex_lock(&xt[af].mutex);  in xt_unregister_match()
    [all …]

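The x_tables.c hits above are the registry side: xt is a per-address-family array whose target/match lists are guarded by a mutex. For orientation, a minimal sketch of the client side, registering a hypothetical no-op target; the NOOP name and module scaffolding are mine, not from the tree, but xt_register_target() is exactly the call that takes &xt[af].mutex and does the list_add() shown above:

/*
 * Minimal sketch, not taken from the tree: a hypothetical no-op target.
 */
#include <linux/module.h>
#include <linux/netfilter/x_tables.h>

static unsigned int noop_tg(struct sk_buff *skb,
			    const struct xt_action_param *par)
{
	return XT_CONTINUE;	/* leave the packet alone, keep evaluating */
}

static struct xt_target noop_tg_reg __read_mostly = {
	.name     = "NOOP",		/* hypothetical target name */
	.revision = 0,
	.family   = NFPROTO_UNSPEC,	/* one registration, every family */
	.target   = noop_tg,
	.me       = THIS_MODULE,
};

static int __init noop_tg_init(void)
{
	return xt_register_target(&noop_tg_reg);
}

static void __exit noop_tg_exit(void)
{
	xt_unregister_target(&noop_tg_reg);
}

module_init(noop_tg_init);
module_exit(noop_tg_exit);
MODULE_LICENSE("GPL");
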
nft_compat.c
    60   nft_compat_set_par(struct xt_action_param *par, void *xt, const void *xt_info)  in nft_compat_set_par() argument
    62   par->target = xt;  in nft_compat_set_par()
    76   nft_compat_set_par((struct xt_action_param *)&pkt->xt, target, info);  in nft_target_eval_xt()
    78   ret = target->target(skb, &pkt->xt);  in nft_target_eval_xt()
    80   if (pkt->xt.hotdrop)  in nft_target_eval_xt()
    102  nft_compat_set_par((struct xt_action_param *)&pkt->xt, target, info);  in nft_target_eval_bridge()
    104  ret = target->target(skb, &pkt->xt);  in nft_target_eval_bridge()
    106  if (pkt->xt.hotdrop)  in nft_target_eval_bridge()
    355  nft_compat_set_par((struct xt_action_param *)&pkt->xt, match, info);  in __nft_match_eval()
    357  ret = match->match(skb, (struct xt_action_param *)&pkt->xt);  in __nft_match_eval()
    [all …]

nf_tables_trace.c
    116  nh_end = pkt->tprot_set ? pkt->xt.thoff : skb->len;  in nf_trace_fill_pkt_info()
    123  len = min_t(unsigned int, skb->len - pkt->xt.thoff,  in nf_trace_fill_pkt_info()
    126  pkt->xt.thoff, len))  in nf_trace_fill_pkt_info()

nft_payload.c
    113  offset = pkt->xt.thoff;  in nft_payload_eval()
    505  if (pkt->xt.fragoff)  in nft_payload_l4csum_offset()
    513  if (!nft_payload_udp_checksum(skb, pkt->xt.thoff))  in nft_payload_l4csum_offset()
    526  *l4csum_offset += pkt->xt.thoff;  in nft_payload_l4csum_offset()
    618  offset = pkt->xt.thoff;  in nft_payload_set_eval()
    649  if (nft_payload_csum_sctp(skb, pkt->xt.thoff))  in nft_payload_set_eval()

/kernel/linux/linux-5.10/drivers/media/platform/omap/
omap_vout_vrfb.c
    142  vout->vrfb_dma_tx.xt = kzalloc(xt_size, GFP_KERNEL);  in omap_vout_setup_vrfb_bufs()
    143  if (!vout->vrfb_dma_tx.xt) {  in omap_vout_setup_vrfb_bufs()
    187  kfree(vout->vrfb_dma_tx.xt);  in omap_vout_release_vrfb()
    236  struct dma_interleaved_template *xt = vout->vrfb_dma_tx.xt;  in omap_vout_prepare_vrfb() local
    256  xt->src_start = buf_phy_addr;  in omap_vout_prepare_vrfb()
    257  xt->dst_start = vout->vrfb_context[vb->index].paddr[0];  in omap_vout_prepare_vrfb()
    259  xt->numf = vout->pix.height;  in omap_vout_prepare_vrfb()
    260  xt->frame_size = 1;  in omap_vout_prepare_vrfb()
    261  xt->sgl[0].size = vout->pix.width * vout->bpp;  in omap_vout_prepare_vrfb()
    262  xt->sgl[0].icg = dst_icg;  in omap_vout_prepare_vrfb()
    [all …]

/kernel/linux/linux-5.10/include/net/netfilter/
nf_tables_ipv4.h
    16   pkt->xt.thoff = ip_hdrlen(pkt->skb);  in nft_set_pktinfo_ipv4()
    17   pkt->xt.fragoff = ntohs(ip->frag_off) & IP_OFFSET;  in nft_set_pktinfo_ipv4()
    43   pkt->xt.thoff = thoff;  in __nft_set_pktinfo_ipv4_validate()
    44   pkt->xt.fragoff = ntohs(iph->frag_off) & IP_OFFSET;  in __nft_set_pktinfo_ipv4_validate()
    80   pkt->xt.thoff = thoff;  in nft_set_pktinfo_ipv4_ingress()
    81   pkt->xt.fragoff = ntohs(iph->frag_off) & IP_OFFSET;  in nft_set_pktinfo_ipv4_ingress()

nf_tables_ipv6.h
    24   pkt->xt.thoff = thoff;  in nft_set_pktinfo_ipv6()
    25   pkt->xt.fragoff = frag_off;  in nft_set_pktinfo_ipv6()
    57   pkt->xt.thoff = thoff;  in __nft_set_pktinfo_ipv6_validate()
    58   pkt->xt.fragoff = frag_off;  in __nft_set_pktinfo_ipv6_validate()
    105  pkt->xt.thoff = thoff;  in nft_set_pktinfo_ipv6_ingress()
    106  pkt->xt.fragoff = frag_off;  in nft_set_pktinfo_ipv6_ingress()

nf_tables.h
    28   struct xt_action_param xt;  member
    33   return pkt->xt.state->net;  in nft_net()
    38   return pkt->xt.state->hook;  in nft_hook()
    43   return pkt->xt.state->pf;  in nft_pf()
    48   return pkt->xt.state->in;  in nft_in()
    53   return pkt->xt.state->out;  in nft_out()
    61   pkt->xt.state = state;  in nft_set_pktinfo()
    69   pkt->xt.thoff = 0;  in nft_set_pktinfo_unspec()
    70   pkt->xt.fragoff = 0;  in nft_set_pktinfo_unspec()

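nf_tables.h is where the two subsystems meet: in 5.10, struct nft_pktinfo embeds a struct xt_action_param named xt, and the nft_net()/nft_hook()/nft_pf() accessors are thin wrappers over pkt->xt.state. A sketch of how an expression eval routine consumes this; demo_expr_eval() is hypothetical, but every field access matches the headers listed above (pkt->xt.thoff is filled in by the nft_set_pktinfo_ipv4/ipv6 helpers, and nft_set_pktinfo_unspec() zeroes it when no transport header was resolved):

#include <net/netfilter/nf_tables.h>

static void demo_expr_eval(struct nft_regs *regs,
			   const struct nft_pktinfo *pkt)
{
	u8 first_byte;

	/* tprot_set mirrors the check nf_tables_trace.c makes above */
	if (!pkt->tprot_set)
		goto err;

	/* read one byte at the transport-header offset */
	if (skb_copy_bits(pkt->skb, pkt->xt.thoff, &first_byte, 1) < 0)
		goto err;

	regs->data[0] = first_byte;	/* result for the next expression */
	return;
err:
	regs->verdict.code = NFT_BREAK;	/* stop this rule, not the packet */
}
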
/kernel/linux/linux-5.10/arch/xtensa/kernel/
vmlinux.lds.S
    366  .xt.prop 0 : { KEEP(*(.xt.prop .xt.prop.* .gnu.linkonce.prop.*)) }
    367  .xt.insn 0 : { KEEP(*(.xt.insn .xt.insn.* .gnu.linkonce.x*)) }
    368  .xt.lit 0 : { KEEP(*(.xt.lit .xt.lit.* .gnu.linkonce.p*)) }

/kernel/linux/linux-5.10/drivers/dma/
dma-axi-dmac.c
    611  struct dma_chan *c, struct dma_interleaved_template *xt,  in axi_dmac_prep_interleaved() argument
    618  if (xt->frame_size != 1)  in axi_dmac_prep_interleaved()
    621  if (xt->dir != chan->direction)  in axi_dmac_prep_interleaved()
    625  if (!xt->src_inc || !axi_dmac_check_addr(chan, xt->src_start))  in axi_dmac_prep_interleaved()
    630  if (!xt->dst_inc || !axi_dmac_check_addr(chan, xt->dst_start))  in axi_dmac_prep_interleaved()
    634  dst_icg = dmaengine_get_dst_icg(xt, &xt->sgl[0]);  in axi_dmac_prep_interleaved()
    635  src_icg = dmaengine_get_src_icg(xt, &xt->sgl[0]);  in axi_dmac_prep_interleaved()
    638  if (!axi_dmac_check_len(chan, xt->sgl[0].size) ||  in axi_dmac_prep_interleaved()
    639  xt->numf == 0)  in axi_dmac_prep_interleaved()
    641  if (xt->sgl[0].size + dst_icg > chan->max_length ||  in axi_dmac_prep_interleaved()
    [all …]

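axi_dmac_prep_interleaved() opens with the parameter validation that nearly every device_prep_interleaved_dma() implementation in this directory repeats: one chunk per frame, direction match, then icg/length bounds. Distilled into one helper for reference; my_dmac_validate_xt() and the single max_len constraint are illustrative, not from any one driver:

#include <linux/dmaengine.h>

static bool my_dmac_validate_xt(struct dma_interleaved_template *xt,
				enum dma_transfer_direction dir,
				size_t max_len)
{
	size_t src_icg, dst_icg;

	/* this (hypothetical) engine handles one chunk per frame line */
	if (xt->frame_size != 1 || xt->numf == 0)
		return false;
	if (xt->dir != dir)
		return false;

	src_icg = dmaengine_get_src_icg(xt, &xt->sgl[0]);
	dst_icg = dmaengine_get_dst_icg(xt, &xt->sgl[0]);

	/* payload plus inter-chunk gap must fit the engine's stride */
	if (xt->sgl[0].size + src_icg > max_len ||
	    xt->sgl[0].size + dst_icg > max_len)
		return false;

	return true;
}
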
at_xdmac.c
    849  struct dma_interleaved_template *xt,  in at_xdmac_interleaved_queue_desc() argument
    887  if (xt->src_inc) {  in at_xdmac_interleaved_queue_desc()
    888  if (xt->src_sgl)  in at_xdmac_interleaved_queue_desc()
    894  if (xt->dst_inc) {  in at_xdmac_interleaved_queue_desc()
    895  if (xt->dst_sgl)  in at_xdmac_interleaved_queue_desc()
    915  desc->lld.mbr_sus = dmaengine_get_src_icg(xt, chunk);  in at_xdmac_interleaved_queue_desc()
    916  desc->lld.mbr_dus = dmaengine_get_dst_icg(xt, chunk);  in at_xdmac_interleaved_queue_desc()
    938  struct dma_interleaved_template *xt,  in at_xdmac_prep_interleaved() argument
    948  if (!xt || !xt->numf || (xt->dir != DMA_MEM_TO_MEM))  in at_xdmac_prep_interleaved()
    955  if ((xt->numf > 1) && (xt->frame_size > 1))  in at_xdmac_prep_interleaved()
    [all …]

imx-dma.c
    957  struct dma_chan *chan, struct dma_interleaved_template *xt,  in imxdma_prep_dma_interleaved() argument
    966  imxdmac->channel, (unsigned long long)xt->src_start,  in imxdma_prep_dma_interleaved()
    967  (unsigned long long) xt->dst_start,  in imxdma_prep_dma_interleaved()
    968  xt->src_sgl ? "true" : "false", xt->dst_sgl ? "true" : "false",  in imxdma_prep_dma_interleaved()
    969  xt->numf, xt->frame_size);  in imxdma_prep_dma_interleaved()
    975  if (xt->frame_size != 1 || xt->numf <= 0 || xt->dir != DMA_MEM_TO_MEM)  in imxdma_prep_dma_interleaved()
    981  desc->src = xt->src_start;  in imxdma_prep_dma_interleaved()
    982  desc->dest = xt->dst_start;  in imxdma_prep_dma_interleaved()
    983  desc->x = xt->sgl[0].size;  in imxdma_prep_dma_interleaved()
    984  desc->y = xt->numf;  in imxdma_prep_dma_interleaved()
    [all …]

at_hdmac.c
    662  struct dma_interleaved_template *xt,  in atc_prep_dma_interleaved() argument
    675  if (unlikely(!xt || xt->numf != 1 || !xt->frame_size))  in atc_prep_dma_interleaved()
    678  first = xt->sgl;  in atc_prep_dma_interleaved()
    682  __func__, &xt->src_start, &xt->dst_start, xt->numf,  in atc_prep_dma_interleaved()
    683  xt->frame_size, flags);  in atc_prep_dma_interleaved()
    691  for (i = 0; i < xt->frame_size; i++) {  in atc_prep_dma_interleaved()
    692  struct data_chunk *chunk = xt->sgl + i;  in atc_prep_dma_interleaved()
    694  if ((chunk->size != xt->sgl->size) ||  in atc_prep_dma_interleaved()
    695  (dmaengine_get_dst_icg(xt, chunk) != dmaengine_get_dst_icg(xt, first)) ||  in atc_prep_dma_interleaved()
    696  (dmaengine_get_src_icg(xt, chunk) != dmaengine_get_src_icg(xt, first))) {  in atc_prep_dma_interleaved()
    [all …]

sirf-dma.c
    689  struct dma_chan *chan, struct dma_interleaved_template *xt,  in sirfsoc_dma_prep_interleaved() argument
    698  if ((xt->dir != DMA_MEM_TO_DEV) && (xt->dir != DMA_DEV_TO_MEM)) {  in sirfsoc_dma_prep_interleaved()
    726  if ((xt->frame_size == 1) && (xt->numf > 0)) {  in sirfsoc_dma_prep_interleaved()
    728  sdesc->xlen = xt->sgl[0].size / SIRFSOC_DMA_WORD_LEN;  in sirfsoc_dma_prep_interleaved()
    729  sdesc->width = (xt->sgl[0].size + xt->sgl[0].icg) /  in sirfsoc_dma_prep_interleaved()
    731  sdesc->ylen = xt->numf - 1;  in sirfsoc_dma_prep_interleaved()
    732  if (xt->dir == DMA_MEM_TO_DEV) {  in sirfsoc_dma_prep_interleaved()
    733  sdesc->addr = xt->src_start;  in sirfsoc_dma_prep_interleaved()
    736  sdesc->addr = xt->dst_start;  in sirfsoc_dma_prep_interleaved()

/kernel/linux/linux-5.10/net/bridge/netfilter/
ebtable_nat.c
    57   return ebt_do_table(skb, state, state->net->xt.frame_nat);  in ebt_nat_in()
    64   return ebt_do_table(skb, state, state->net->xt.frame_nat);  in ebt_nat_out()
    91   &net->xt.frame_nat);  in frame_nat_net_init()
    101  ebt_unregister_table(net, net->xt.frame_nat);  in frame_nat_net_exit()

ebtable_filter.c
    57   return ebt_do_table(skb, state, state->net->xt.frame_filter);  in ebt_in_hook()
    64   return ebt_do_table(skb, state, state->net->xt.frame_filter);  in ebt_out_hook()
    91   &net->xt.frame_filter);  in frame_filter_net_init()
    101  ebt_unregister_table(net, net->xt.frame_filter);  in frame_filter_net_exit()

ebtable_broute.c
    61   ret = ebt_do_table(skb, &state, state.net->xt.broute_table);  in ebt_broute()
    97   &net->xt.broute_table);  in broute_net_init()
    107  ebt_unregister_table(net, net->xt.broute_table);  in broute_net_exit()

/kernel/linux/linux-5.10/drivers/media/platform/xilinx/
xilinx-dma.c
    354  dma->xt.dir = DMA_DEV_TO_MEM;  in xvip_dma_buffer_queue()
    355  dma->xt.src_sgl = false;  in xvip_dma_buffer_queue()
    356  dma->xt.dst_sgl = true;  in xvip_dma_buffer_queue()
    357  dma->xt.dst_start = addr;  in xvip_dma_buffer_queue()
    360  dma->xt.dir = DMA_MEM_TO_DEV;  in xvip_dma_buffer_queue()
    361  dma->xt.src_sgl = true;  in xvip_dma_buffer_queue()
    362  dma->xt.dst_sgl = false;  in xvip_dma_buffer_queue()
    363  dma->xt.src_start = addr;  in xvip_dma_buffer_queue()
    366  dma->xt.frame_size = 1;  in xvip_dma_buffer_queue()
    369  dma->xt.numf = dma->format.height;  in xvip_dma_buffer_queue()
    [all …]

/kernel/linux/linux-5.10/include/linux/
dmaengine.h
    914   struct dma_chan *chan, struct dma_interleaved_template *xt,
    1010  struct dma_chan *chan, struct dma_interleaved_template *xt,  in dmaengine_prep_interleaved_dma() argument
    1019  return chan->device->device_prep_interleaved_dma(chan, xt, flags);  in dmaengine_prep_interleaved_dma()
    1300  static inline size_t dmaengine_get_dst_icg(struct dma_interleaved_template *xt,  in dmaengine_get_dst_icg() argument
    1303  return dmaengine_get_icg(xt->dst_inc, xt->dst_sgl,  in dmaengine_get_dst_icg()
    1307  static inline size_t dmaengine_get_src_icg(struct dma_interleaved_template *xt,  in dmaengine_get_src_icg() argument
    1310  return dmaengine_get_icg(xt->src_inc, xt->src_sgl,  in dmaengine_get_src_icg()

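dmaengine.h is the definition site: dmaengine_prep_interleaved_dma() simply forwards to the channel's device_prep_interleaved_dma(), and the *_get_icg() helpers compute the inter-chunk gaps the drivers above read back. A sketch of the consumer sequence that the media and display drivers in this list follow; demo_queue_frame() and its parameters are hypothetical:

#include <linux/dmaengine.h>
#include <linux/slab.h>

static int demo_queue_frame(struct dma_chan *chan, dma_addr_t src,
			    size_t width, size_t height, size_t stride)
{
	struct dma_interleaved_template *xt;
	struct dma_async_tx_descriptor *desc;

	/* template plus one data_chunk, like omap_vout's xt_size kzalloc */
	xt = kzalloc(struct_size(xt, sgl, 1), GFP_KERNEL);
	if (!xt)
		return -ENOMEM;

	xt->src_start   = src;
	xt->dir         = DMA_MEM_TO_DEV;
	xt->src_inc     = true;           /* walk forward through memory */
	xt->src_sgl     = true;           /* icg applies on the memory side */
	xt->dst_sgl     = false;          /* device side has no gaps */
	xt->numf        = height;         /* frame_size == 1: numf counts lines */
	xt->frame_size  = 1;              /* a single chunk per line */
	xt->sgl[0].size = width;          /* payload bytes per line */
	xt->sgl[0].icg  = stride - width; /* gap up to the next line */

	desc = dmaengine_prep_interleaved_dma(chan, xt, DMA_PREP_INTERRUPT);
	kfree(xt);	/* providers read the template during prep */
	if (!desc)
		return -EINVAL;

	dmaengine_submit(desc);
	dma_async_issue_pending(chan);
	return 0;
}
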
/kernel/linux/linux-5.10/drivers/dma/ti/
omap-dma.c
    1284  struct dma_chan *chan, struct dma_interleaved_template *xt,  in omap_dma_prep_dma_interleaved() argument
    1294  if (is_slave_direction(xt->dir))  in omap_dma_prep_dma_interleaved()
    1297  if (xt->frame_size != 1 || xt->numf == 0)  in omap_dma_prep_dma_interleaved()
    1304  data_type = __ffs((xt->src_start | xt->dst_start | xt->sgl[0].size));  in omap_dma_prep_dma_interleaved()
    1310  d->dev_addr = xt->src_start;  in omap_dma_prep_dma_interleaved()
    1312  sg->en = xt->sgl[0].size / BIT(data_type);  in omap_dma_prep_dma_interleaved()
    1313  sg->fn = xt->numf;  in omap_dma_prep_dma_interleaved()
    1314  sg->addr = xt->dst_start;  in omap_dma_prep_dma_interleaved()
    1318  src_icg = dmaengine_get_src_icg(xt, &xt->sgl[0]);  in omap_dma_prep_dma_interleaved()
    1319  dst_icg = dmaengine_get_dst_icg(xt, &xt->sgl[0]);  in omap_dma_prep_dma_interleaved()
    [all …]

edma.c
    1280  struct dma_interleaved_template *xt,  in edma_prep_dma_interleaved() argument
    1291  if (is_slave_direction(xt->dir))  in edma_prep_dma_interleaved()
    1294  if (xt->frame_size != 1 || xt->numf == 0)  in edma_prep_dma_interleaved()
    1297  if (xt->sgl[0].size > SZ_64K || xt->numf > SZ_64K)  in edma_prep_dma_interleaved()
    1300  src_icg = dmaengine_get_src_icg(xt, &xt->sgl[0]);  in edma_prep_dma_interleaved()
    1302  src_bidx = src_icg + xt->sgl[0].size;  in edma_prep_dma_interleaved()
    1303  } else if (xt->src_inc) {  in edma_prep_dma_interleaved()
    1304  src_bidx = xt->sgl[0].size;  in edma_prep_dma_interleaved()
    1311  dst_icg = dmaengine_get_dst_icg(xt, &xt->sgl[0]);  in edma_prep_dma_interleaved()
    1313  dst_bidx = dst_icg + xt->sgl[0].size;  in edma_prep_dma_interleaved()
    [all …]

/kernel/linux/linux-5.10/drivers/dma/xilinx/
xilinx_dpdma.c
    697   struct dma_interleaved_template *xt)  in xilinx_dpdma_chan_prep_interleaved_dma() argument
    702   size_t hsize = xt->sgl[0].size;  in xilinx_dpdma_chan_prep_interleaved_dma()
    703   size_t stride = hsize + xt->sgl[0].icg;  in xilinx_dpdma_chan_prep_interleaved_dma()
    705   if (!IS_ALIGNED(xt->src_start, XILINX_DPDMA_ALIGN_BYTES)) {  in xilinx_dpdma_chan_prep_interleaved_dma()
    722   &xt->src_start, 1);  in xilinx_dpdma_chan_prep_interleaved_dma()
    726   hw_desc->xfer_size = hsize * xt->numf;  in xilinx_dpdma_chan_prep_interleaved_dma()
    1195  struct dma_interleaved_template *xt,  in xilinx_dpdma_prep_interleaved_dma() argument
    1201  if (xt->dir != DMA_MEM_TO_DEV)  in xilinx_dpdma_prep_interleaved_dma()
    1204  if (!xt->numf || !xt->sgl[0].size)  in xilinx_dpdma_prep_interleaved_dma()
    1210  desc = xilinx_dpdma_chan_prep_interleaved_dma(chan, xt);  in xilinx_dpdma_prep_interleaved_dma()

xilinx_dma.c
    1995  struct dma_interleaved_template *xt,  in xilinx_vdma_dma_prep_interleaved() argument
    2003  if (!is_slave_direction(xt->dir))  in xilinx_vdma_dma_prep_interleaved()
    2006  if (!xt->numf || !xt->sgl[0].size)  in xilinx_vdma_dma_prep_interleaved()
    2009  if (xt->frame_size != 1)  in xilinx_vdma_dma_prep_interleaved()
    2028  hw->vsize = xt->numf;  in xilinx_vdma_dma_prep_interleaved()
    2029  hw->hsize = xt->sgl[0].size;  in xilinx_vdma_dma_prep_interleaved()
    2030  hw->stride = (xt->sgl[0].icg + xt->sgl[0].size) <<  in xilinx_vdma_dma_prep_interleaved()
    2035  if (xt->dir != DMA_MEM_TO_DEV) {  in xilinx_vdma_dma_prep_interleaved()
    2037  hw->buf_addr = lower_32_bits(xt->dst_start);  in xilinx_vdma_dma_prep_interleaved()
    2038  hw->buf_addr_msb = upper_32_bits(xt->dst_start);  in xilinx_vdma_dma_prep_interleaved()
    [all …]

/kernel/linux/linux-5.10/drivers/gpu/drm/xlnx/
zynqmp_disp.c
    122   struct dma_interleaved_template xt;  member
    1114  dma->xt.numf = height;  in zynqmp_disp_layer_update()
    1117  dma->xt.src_start = paddr;  in zynqmp_disp_layer_update()
    1118  dma->xt.frame_size = 1;  in zynqmp_disp_layer_update()
    1119  dma->xt.dir = DMA_MEM_TO_DEV;  in zynqmp_disp_layer_update()
    1120  dma->xt.src_sgl = true;  in zynqmp_disp_layer_update()
    1121  dma->xt.dst_sgl = false;  in zynqmp_disp_layer_update()
    1123  desc = dmaengine_prep_interleaved_dma(dma->chan, &dma->xt,  in zynqmp_disp_layer_update()
