
Searched refs:sg (Results 1 – 25 of 661) sorted by relevance


/drivers/ata/
sata_gemini.c
126 bool gemini_sata_bridge_enabled(struct sata_gemini *sg, bool is_ata1) in gemini_sata_bridge_enabled() argument
128 if (!sg->sata_bridge) in gemini_sata_bridge_enabled()
134 if ((sg->muxmode == GEMINI_MUXMODE_2) && in gemini_sata_bridge_enabled()
137 if ((sg->muxmode == GEMINI_MUXMODE_3) && in gemini_sata_bridge_enabled()
145 enum gemini_muxmode gemini_sata_get_muxmode(struct sata_gemini *sg) in gemini_sata_get_muxmode() argument
147 return sg->muxmode; in gemini_sata_get_muxmode()
151 static int gemini_sata_setup_bridge(struct sata_gemini *sg, in gemini_sata_setup_bridge() argument
161 if (sg->muxmode == GEMINI_MUXMODE_2) in gemini_sata_setup_bridge()
163 writel(val, sg->base + GEMINI_SATA0_CTRL); in gemini_sata_setup_bridge()
167 if (sg->muxmode == GEMINI_MUXMODE_3) in gemini_sata_setup_bridge()
[all …]
/drivers/clk/sprd/
gate.c
13 static void clk_gate_toggle(const struct sprd_gate *sg, bool en) in clk_gate_toggle() argument
15 const struct sprd_clk_common *common = &sg->common; in clk_gate_toggle()
17 bool set = sg->flags & CLK_GATE_SET_TO_DISABLE ? true : false; in clk_gate_toggle()
24 reg |= sg->enable_mask; in clk_gate_toggle()
26 reg &= ~sg->enable_mask; in clk_gate_toggle()
31 static void clk_sc_gate_toggle(const struct sprd_gate *sg, bool en) in clk_sc_gate_toggle() argument
33 const struct sprd_clk_common *common = &sg->common; in clk_sc_gate_toggle()
34 bool set = sg->flags & CLK_GATE_SET_TO_DISABLE ? 1 : 0; in clk_sc_gate_toggle()
45 offset = set ? sg->sc_offset : sg->sc_offset * 2; in clk_sc_gate_toggle()
48 sg->enable_mask); in clk_sc_gate_toggle()
[all …]
/drivers/gpu/drm/i915/
i915_scatterlist.c
19 struct scatterlist *sg, *new_sg; in i915_sg_trim() local
29 for_each_sg(orig_st->sgl, sg, orig_st->nents, i) { in i915_sg_trim()
30 sg_set_page(new_sg, sg_page(sg), sg->length, 0); in i915_sg_trim()
31 sg_dma_address(new_sg) = sg_dma_address(sg); in i915_sg_trim()
32 sg_dma_len(new_sg) = sg_dma_len(sg); in i915_sg_trim()
63 struct scatterlist *sg; in i915_sg_from_mm_node() local
75 sg = st->sgl; in i915_sg_from_mm_node()
84 if (offset != prev_end || sg->length >= max_segment) { in i915_sg_from_mm_node()
86 sg = __sg_next(sg); in i915_sg_from_mm_node()
88 sg_dma_address(sg) = region_start + offset; in i915_sg_from_mm_node()
[all …]
i915_scatterlist.h
49 static inline int __sg_page_count(const struct scatterlist *sg) in __sg_page_count() argument
51 return sg->length >> PAGE_SHIFT; in __sg_page_count()
54 static inline int __sg_dma_page_count(const struct scatterlist *sg) in __sg_dma_page_count() argument
56 return sg_dma_len(sg) >> PAGE_SHIFT; in __sg_dma_page_count()
59 static inline struct scatterlist *____sg_next(struct scatterlist *sg) in ____sg_next() argument
61 ++sg; in ____sg_next()
62 if (unlikely(sg_is_chain(sg))) in ____sg_next()
63 sg = sg_chain_ptr(sg); in ____sg_next()
64 return sg; in ____sg_next()
76 static inline struct scatterlist *__sg_next(struct scatterlist *sg) in __sg_next() argument
[all …]
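The ____sg_next()/__sg_next() helpers above step past chain entries by hand. For comparison, a minimal sketch (not i915 code) of the same kind of walk using the generic for_each_sg() iterator from <linux/scatterlist.h>, which already hides chain links:

#include <linux/scatterlist.h>

/* Count the CPU pages covered by a scatterlist; sg->length is in bytes. */
static unsigned long count_sg_pages(struct scatterlist *sgl, unsigned int nents)
{
	struct scatterlist *sg;
	unsigned long npages = 0;
	unsigned int i;

	for_each_sg(sgl, sg, nents, i)
		npages += sg->length >> PAGE_SHIFT;

	return npages;
}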
/drivers/crypto/
omap-crypto.c
17 struct scatterlist **sg, in omap_crypto_copy_sg_lists() argument
20 int n = sg_nents(*sg); in omap_crypto_copy_sg_lists()
24 new_sg = kmalloc_array(n, sizeof(*sg), GFP_KERNEL); in omap_crypto_copy_sg_lists()
33 while (*sg && total) { in omap_crypto_copy_sg_lists()
34 int len = (*sg)->length; in omap_crypto_copy_sg_lists()
41 sg_set_page(tmp, sg_page(*sg), len, (*sg)->offset); in omap_crypto_copy_sg_lists()
47 *sg = sg_next(*sg); in omap_crypto_copy_sg_lists()
50 *sg = new_sg; in omap_crypto_copy_sg_lists()
55 static int omap_crypto_copy_sgs(int total, int bs, struct scatterlist **sg, in omap_crypto_copy_sgs() argument
73 scatterwalk_map_and_copy(buf, *sg, 0, total, 0); in omap_crypto_copy_sgs()
[all …]
s5p-sss.c
366 struct scatterlist *sg; member
438 const struct scatterlist *sg) in s5p_set_dma_indata() argument
440 SSS_WRITE(dev, FCBRDMAS, sg_dma_address(sg)); in s5p_set_dma_indata()
441 SSS_WRITE(dev, FCBRDMAL, sg_dma_len(sg)); in s5p_set_dma_indata()
445 const struct scatterlist *sg) in s5p_set_dma_outdata() argument
447 SSS_WRITE(dev, FCBTDMAS, sg_dma_address(sg)); in s5p_set_dma_outdata()
448 SSS_WRITE(dev, FCBTDMAL, sg_dma_len(sg)); in s5p_set_dma_outdata()
451 static void s5p_free_sg_cpy(struct s5p_aes_dev *dev, struct scatterlist **sg) in s5p_free_sg_cpy() argument
455 if (!*sg) in s5p_free_sg_cpy()
459 free_pages((unsigned long)sg_virt(*sg), get_order(len)); in s5p_free_sg_cpy()
[all …]
/drivers/dma/
dma-axi-dmac.c
116 struct axi_dmac_sg sg[]; member
208 struct axi_dmac_sg *sg; in axi_dmac_start_transfer() local
225 sg = &desc->sg[desc->num_submitted]; in axi_dmac_start_transfer()
228 if (sg->id != AXI_DMAC_SG_UNUSED) { in axi_dmac_start_transfer()
229 sg->schedule_when_free = true; in axi_dmac_start_transfer()
245 sg->id = axi_dmac_read(dmac, AXI_DMAC_REG_TRANSFER_ID); in axi_dmac_start_transfer()
248 axi_dmac_write(dmac, AXI_DMAC_REG_DEST_ADDRESS, sg->dest_addr); in axi_dmac_start_transfer()
249 axi_dmac_write(dmac, AXI_DMAC_REG_DEST_STRIDE, sg->dest_stride); in axi_dmac_start_transfer()
253 axi_dmac_write(dmac, AXI_DMAC_REG_SRC_ADDRESS, sg->src_addr); in axi_dmac_start_transfer()
254 axi_dmac_write(dmac, AXI_DMAC_REG_SRC_STRIDE, sg->src_stride); in axi_dmac_start_transfer()
[all …]
/drivers/net/ethernet/marvell/octeontx2/nic/
otx2_txrx.c
90 static void otx2_dma_unmap_skb_frags(struct otx2_nic *pfvf, struct sg_list *sg) in otx2_dma_unmap_skb_frags() argument
94 for (seg = 0; seg < sg->num_segs; seg++) { in otx2_dma_unmap_skb_frags()
95 otx2_dma_unmap_page(pfvf, sg->dma_addr[seg], in otx2_dma_unmap_skb_frags()
96 sg->size[seg], DMA_TO_DEVICE); in otx2_dma_unmap_skb_frags()
98 sg->num_segs = 0; in otx2_dma_unmap_skb_frags()
111 struct sg_list *sg; in otx2_snd_pkt_handler() local
119 sg = &sq->sg[snd_comp->sqe_id]; in otx2_snd_pkt_handler()
120 skb = (struct sk_buff *)sg->skb; in otx2_snd_pkt_handler()
138 otx2_dma_unmap_skb_frags(pfvf, sg); in otx2_snd_pkt_handler()
140 sg->skb = (u64)NULL; in otx2_snd_pkt_handler()
[all …]
/drivers/media/pci/tw68/
tw68-risc.c
38 struct scatterlist *sg; in tw68_risc_field() local
54 sg = sglist; in tw68_risc_field()
57 while (offset && offset >= sg_dma_len(sg)) { in tw68_risc_field()
58 offset -= sg_dma_len(sg); in tw68_risc_field()
59 sg = sg_next(sg); in tw68_risc_field()
61 if (bpl <= sg_dma_len(sg) - offset) { in tw68_risc_field()
65 *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset); in tw68_risc_field()
76 done = (sg_dma_len(sg) - offset); in tw68_risc_field()
80 *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset); in tw68_risc_field()
82 sg = sg_next(sg); in tw68_risc_field()
[all …]
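tw68_risc_field() first consumes whole entries until the requested byte offset falls inside the current one. A hedged, generic sketch of that skip (hypothetical helper, not part of tw68):

#include <linux/scatterlist.h>

static struct scatterlist *sg_skip_offset(struct scatterlist *sg,
					  unsigned int *offset)
{
	/* Drop whole entries until *offset lands inside the current one. */
	while (sg && *offset >= sg_dma_len(sg)) {
		*offset -= sg_dma_len(sg);
		sg = sg_next(sg);
	}
	return sg;
}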
/drivers/crypto/allwinner/sun8i-ss/
sun8i-ss-cipher.c
27 struct scatterlist *sg; in sun8i_ss_need_fallback() local
35 sg = areq->src; in sun8i_ss_need_fallback()
36 while (sg) { in sun8i_ss_need_fallback()
37 if ((sg->length % 16) != 0) in sun8i_ss_need_fallback()
39 if ((sg_dma_len(sg) % 16) != 0) in sun8i_ss_need_fallback()
41 if (!IS_ALIGNED(sg->offset, 16)) in sun8i_ss_need_fallback()
43 sg = sg_next(sg); in sun8i_ss_need_fallback()
45 sg = areq->dst; in sun8i_ss_need_fallback()
46 while (sg) { in sun8i_ss_need_fallback()
47 if ((sg->length % 16) != 0) in sun8i_ss_need_fallback()
[all …]
/drivers/target/iscsi/cxgbit/
cxgbit_ddp.c
13 struct scatterlist *sg = sg_pp ? *sg_pp : NULL; in cxgbit_set_one_ppod() local
21 if (sg) { in cxgbit_set_one_ppod()
22 addr = sg_dma_address(sg); in cxgbit_set_one_ppod()
23 len = sg_dma_len(sg); in cxgbit_set_one_ppod()
27 if (sg) { in cxgbit_set_one_ppod()
30 if (offset == (len + sg->offset)) { in cxgbit_set_one_ppod()
32 sg = sg_next(sg); in cxgbit_set_one_ppod()
33 if (sg) { in cxgbit_set_one_ppod()
34 addr = sg_dma_address(sg); in cxgbit_set_one_ppod()
35 len = sg_dma_len(sg); in cxgbit_set_one_ppod()
[all …]
/drivers/crypto/gemini/
sl3516-ce-cipher.c
28 struct scatterlist *sg; in sl3516_ce_need_fallback() local
49 sg = areq->src; in sl3516_ce_need_fallback()
50 while (sg) { in sl3516_ce_need_fallback()
51 if ((sg->length % 16) != 0) { in sl3516_ce_need_fallback()
55 if ((sg_dma_len(sg) % 16) != 0) { in sl3516_ce_need_fallback()
59 if (!IS_ALIGNED(sg->offset, 16)) { in sl3516_ce_need_fallback()
63 sg = sg_next(sg); in sl3516_ce_need_fallback()
65 sg = areq->dst; in sl3516_ce_need_fallback()
66 while (sg) { in sl3516_ce_need_fallback()
67 if ((sg->length % 16) != 0) { in sl3516_ce_need_fallback()
[all …]
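Both sun8i-ss-cipher.c and sl3516-ce-cipher.c decide on a software fallback the same way: every source and destination entry must be a whole number of cipher blocks and block-aligned. A condensed sketch of that test (hypothetical helper; AES_BLOCK_SIZE assumed as the block size):

#include <crypto/aes.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>

static bool sg_list_is_block_aligned(struct scatterlist *sg)
{
	while (sg) {
		if (!IS_ALIGNED(sg->length, AES_BLOCK_SIZE) ||
		    !IS_ALIGNED(sg->offset, AES_BLOCK_SIZE))
			return false;
		sg = sg_next(sg);
	}
	return true;
}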
/drivers/gpu/drm/i915/gem/
i915_gem_internal.c
22 struct scatterlist *sg; in internal_free_pages() local
24 for (sg = st->sgl; sg; sg = __sg_next(sg)) { in internal_free_pages()
25 if (sg_page(sg)) in internal_free_pages()
26 __free_pages(sg_page(sg), get_order(sg->length)); in internal_free_pages()
37 struct scatterlist *sg; in i915_gem_object_get_pages_internal() local
75 sg = st->sgl; in i915_gem_object_get_pages_internal()
95 sg_set_page(sg, page, PAGE_SIZE << order, 0); in i915_gem_object_get_pages_internal()
101 sg_mark_end(sg); in i915_gem_object_get_pages_internal()
105 sg = __sg_next(sg); in i915_gem_object_get_pages_internal()
123 sg_set_page(sg, NULL, 0, 0); in i915_gem_object_get_pages_internal()
[all …]
i915_gem_pages.c
55 obj->mm.page_sizes.sg = 0; in __i915_gem_object_set_pages()
58 obj->mm.page_sizes.sg |= BIT(i); in __i915_gem_object_set_pages()
60 GEM_BUG_ON(!HAS_PAGE_SIZES(i915, obj->mm.page_sizes.sg)); in __i915_gem_object_set_pages()
220 obj->mm.page_sizes.phys = obj->mm.page_sizes.sg = 0; in __i915_gem_object_unset_pages()
508 struct scatterlist *sg; in __i915_gem_object_get_sg() local
535 sg = iter->sg_pos; in __i915_gem_object_get_sg()
537 count = dma ? __sg_dma_page_count(sg) : __sg_page_count(sg); in __i915_gem_object_get_sg()
552 ret = radix_tree_insert(&iter->radix, idx, sg); in __i915_gem_object_get_sg()
564 sg = ____sg_next(sg); in __i915_gem_object_get_sg()
565 count = dma ? __sg_dma_page_count(sg) : __sg_page_count(sg); in __i915_gem_object_get_sg()
[all …]
/drivers/dma-buf/
udmabuf.c
27 struct sg_table *sg; member
64 struct sg_table *sg; in get_sg_table() local
67 sg = kzalloc(sizeof(*sg), GFP_KERNEL); in get_sg_table()
68 if (!sg) in get_sg_table()
70 ret = sg_alloc_table_from_pages(sg, ubuf->pages, ubuf->pagecount, in get_sg_table()
75 ret = dma_map_sgtable(dev, sg, direction, 0); in get_sg_table()
78 return sg; in get_sg_table()
81 sg_free_table(sg); in get_sg_table()
82 kfree(sg); in get_sg_table()
86 static void put_sg_table(struct device *dev, struct sg_table *sg, in put_sg_table() argument
[all …]
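get_sg_table() above follows the usual dma-buf exporter pattern: build a table over the pinned pages, then hand it to the DMA API. A minimal sketch of that pattern under assumed names (pages/npages are placeholders, not udmabuf fields):

#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static struct sg_table *map_pages_to_device(struct device *dev,
					    struct page **pages,
					    unsigned int npages,
					    enum dma_data_direction dir)
{
	struct sg_table *sgt;
	int ret;

	sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return ERR_PTR(-ENOMEM);

	/* Merge physically contiguous pages into fewer entries where possible. */
	ret = sg_alloc_table_from_pages(sgt, pages, npages, 0,
					(size_t)npages << PAGE_SHIFT, GFP_KERNEL);
	if (ret)
		goto err_free;

	/* Fills in sg_dma_address()/sg_dma_len() for each mapped entry. */
	ret = dma_map_sgtable(dev, sgt, dir, 0);
	if (ret)
		goto err_table;

	return sgt;

err_table:
	sg_free_table(sgt);
err_free:
	kfree(sgt);
	return ERR_PTR(ret);
}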
/drivers/crypto/qce/
dma.c
53 struct scatterlist *sg = sgt->sgl, *sg_last = NULL; in qce_sgtable_add() local
56 while (sg) { in qce_sgtable_add()
57 if (!sg_page(sg)) in qce_sgtable_add()
59 sg = sg_next(sg); in qce_sgtable_add()
62 if (!sg) in qce_sgtable_add()
65 while (new_sgl && sg && max_len) { in qce_sgtable_add()
67 sg_set_page(sg, sg_page(new_sgl), new_len, new_sgl->offset); in qce_sgtable_add()
68 sg_last = sg; in qce_sgtable_add()
69 sg = sg_next(sg); in qce_sgtable_add()
77 static int qce_dma_prep_sg(struct dma_chan *chan, struct scatterlist *sg, in qce_dma_prep_sg() argument
[all …]
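qce_dma_prep_sg() (truncated above) ultimately feeds a scatterlist to the generic dmaengine slave-sg API. A reduced sketch of that flow with the standard calls, error handling trimmed:

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

static int submit_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
			   unsigned int nents, enum dma_transfer_direction dir)
{
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;

	desc = dmaengine_prep_slave_sg(chan, sgl, nents, dir,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -EINVAL;

	cookie = dmaengine_submit(desc);
	if (dma_submit_error(cookie))
		return -EIO;

	dma_async_issue_pending(chan);
	return 0;
}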
/drivers/gpu/drm/omapdrm/
omap_gem_dmabuf.c
23 struct sg_table *sg; in omap_gem_map_dma_buf() local
27 sg = kzalloc(sizeof(*sg), GFP_KERNEL); in omap_gem_map_dma_buf()
28 if (!sg) in omap_gem_map_dma_buf()
38 ret = sg_alloc_table(sg, 1, GFP_KERNEL); in omap_gem_map_dma_buf()
42 sg_init_table(sg->sgl, 1); in omap_gem_map_dma_buf()
43 sg_dma_len(sg->sgl) = obj->size; in omap_gem_map_dma_buf()
44 sg_set_page(sg->sgl, pfn_to_page(PFN_DOWN(dma_addr)), obj->size, 0); in omap_gem_map_dma_buf()
45 sg_dma_address(sg->sgl) = dma_addr; in omap_gem_map_dma_buf()
50 return sg; in omap_gem_map_dma_buf()
52 kfree(sg); in omap_gem_map_dma_buf()
[all …]
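omap_gem_map_dma_buf() can describe the whole object with a single entry because the backing memory is already DMA-contiguous. A condensed sketch of that shape (dma_addr and size assumed known and already mapped, as in the omap driver):

#include <linux/scatterlist.h>

static int fill_contig_sgt(struct sg_table *sgt, dma_addr_t dma_addr, size_t size)
{
	int ret;

	ret = sg_alloc_table(sgt, 1, GFP_KERNEL);
	if (ret)
		return ret;

	/*
	 * One entry covers the whole physically contiguous buffer; this assumes
	 * dma_addr is also the physical address, as the omap code does.
	 */
	sg_set_page(sgt->sgl, pfn_to_page(PFN_DOWN(dma_addr)), size, 0);
	sg_dma_address(sgt->sgl) = dma_addr;
	sg_dma_len(sgt->sgl) = size;

	return 0;
}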
/drivers/crypto/cavium/nitrox/
nitrox_req.h
482 struct scatterlist *sg; member
573 static inline struct scatterlist *create_single_sg(struct scatterlist *sg, in create_single_sg() argument
576 sg_set_buf(sg, buf, buflen); in create_single_sg()
577 sg++; in create_single_sg()
578 return sg; in create_single_sg()
594 struct scatterlist *sg = to_sg; in create_multi_sg() local
602 sg_set_buf(sg, sg_virt(from_sg), sglen); in create_multi_sg()
604 sg++; in create_multi_sg()
607 return sg; in create_multi_sg()
647 struct scatterlist *sg; in nitrox_creq_set_src_sg() local
[all …]
/drivers/target/tcm_fc/
tfc_io.c
47 struct scatterlist *sg = NULL; in ft_queue_data_in() local
81 sg = se_cmd->t_data_sg; in ft_queue_data_in()
82 mem_len = sg->length; in ft_queue_data_in()
83 mem_off = sg->offset; in ft_queue_data_in()
84 page = sg_page(sg); in ft_queue_data_in()
99 sg = sg_next(sg); in ft_queue_data_in()
100 mem_len = min((size_t)sg->length, remaining); in ft_queue_data_in()
101 mem_off = sg->offset; in ft_queue_data_in()
102 page = sg_page(sg); in ft_queue_data_in()
202 struct scatterlist *sg = NULL; in ft_recv_write_data() local
[all …]
/drivers/infiniband/core/
Drw.c91 struct rdma_rw_reg_ctx *reg, struct scatterlist *sg, in rdma_rw_init_one_mr() argument
105 ret = ib_map_mr_sg(reg->mr, sg, nents, &offset, PAGE_SIZE); in rdma_rw_init_one_mr()
124 u32 port_num, struct scatterlist *sg, u32 sg_cnt, u32 offset, in rdma_rw_init_mr_wrs() argument
143 ret = rdma_rw_init_one_mr(qp, port_num, reg, sg, sg_cnt, in rdma_rw_init_mr_wrs()
175 sg = sg_next(sg); in rdma_rw_init_mr_wrs()
195 struct scatterlist *sg, u32 sg_cnt, u32 offset, in rdma_rw_init_map_wrs() argument
226 for (j = 0; j < nr_sge; j++, sg = sg_next(sg)) { in rdma_rw_init_map_wrs()
227 sge->addr = sg_dma_address(sg) + offset; in rdma_rw_init_map_wrs()
228 sge->length = sg_dma_len(sg) - offset; in rdma_rw_init_map_wrs()
251 struct scatterlist *sg, u32 offset, u64 remote_addr, u32 rkey, in rdma_rw_init_single_wr() argument
[all …]
umem_dmabuf.c
15 struct scatterlist *sg; in ib_umem_dmabuf_map_pages() local
35 for_each_sgtable_dma_sg(sgt, sg, i) { in ib_umem_dmabuf_map_pages()
36 if (start < cur + sg_dma_len(sg) && cur < end) in ib_umem_dmabuf_map_pages()
38 if (cur <= start && start < cur + sg_dma_len(sg)) { in ib_umem_dmabuf_map_pages()
41 umem_dmabuf->first_sg = sg; in ib_umem_dmabuf_map_pages()
43 sg_dma_address(sg) += offset; in ib_umem_dmabuf_map_pages()
44 sg_dma_len(sg) -= offset; in ib_umem_dmabuf_map_pages()
47 if (cur < end && end <= cur + sg_dma_len(sg)) { in ib_umem_dmabuf_map_pages()
48 unsigned long trim = cur + sg_dma_len(sg) - end; in ib_umem_dmabuf_map_pages()
50 umem_dmabuf->last_sg = sg; in ib_umem_dmabuf_map_pages()
[all …]
/drivers/staging/ralink-gdma/
ralink-gdma.c
103 struct gdma_dma_sg sg[]; member
273 struct gdma_dma_sg *sg; in rt305x_gdma_start_transfer() local
285 sg = &chan->desc->sg[chan->next_sg]; in rt305x_gdma_start_transfer()
287 src_addr = sg->src_addr; in rt305x_gdma_start_transfer()
294 dst_addr = sg->dst_addr; in rt305x_gdma_start_transfer()
303 src_addr = sg->src_addr; in rt305x_gdma_start_transfer()
304 dst_addr = sg->dst_addr; in rt305x_gdma_start_transfer()
314 ctrl0 |= (sg->len << GDMA_REG_CTRL0_TX_SHIFT) | in rt305x_gdma_start_transfer()
350 struct gdma_dma_sg *sg; in rt3883_gdma_start_transfer() local
362 sg = &chan->desc->sg[chan->next_sg]; in rt3883_gdma_start_transfer()
[all …]
/drivers/gpu/drm/i915/gem/selftests/
huge_gem_object.c
33 struct scatterlist *sg, *src, *end; in huge_get_pages() local
46 sg = pages->sgl; in huge_get_pages()
52 sg_mark_end(sg); in huge_get_pages()
56 sg_set_page(sg, page, PAGE_SIZE, 0); in huge_get_pages()
57 sg = __sg_next(sg); in huge_get_pages()
60 for (end = sg, src = pages->sgl; sg; sg = __sg_next(sg)) { in huge_get_pages()
61 sg_set_page(sg, sg_page(src), PAGE_SIZE, 0); in huge_get_pages()
/drivers/usb/storage/
protocol.c
126 struct scatterlist *sg = *sgptr; in usb_stor_access_xfer_buf() local
130 if (sg) in usb_stor_access_xfer_buf()
131 nents = sg_nents(sg); in usb_stor_access_xfer_buf()
133 sg = scsi_sglist(srb); in usb_stor_access_xfer_buf()
135 sg_miter_start(&miter, sg, nents, dir == FROM_XFER_BUF ? in usb_stor_access_xfer_buf()
150 if (*offset + len < miter.piter.sg->length) { in usb_stor_access_xfer_buf()
152 *sgptr = miter.piter.sg; in usb_stor_access_xfer_buf()
155 *sgptr = sg_next(miter.piter.sg); in usb_stor_access_xfer_buf()
173 struct scatterlist *sg = NULL; in usb_stor_set_xfer_buf() local
176 buflen = usb_stor_access_xfer_buf(buffer, buflen, srb, &sg, &offset, in usb_stor_set_xfer_buf()
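usb_stor_access_xfer_buf() maps and copies one scatterlist entry at a time through the sg_miter interface. A simplified sketch of copying a kernel buffer into a scatterlist the same way (the in-tree sg_copy_from_buffer() already provides this):

#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include <linux/string.h>

static size_t copy_buf_to_sg(struct scatterlist *sgl, unsigned int nents,
			     const u8 *buf, size_t buflen)
{
	struct sg_mapping_iter miter;
	size_t copied = 0;

	sg_miter_start(&miter, sgl, nents, SG_MITER_TO_SG);
	while (copied < buflen && sg_miter_next(&miter)) {
		size_t len = min(miter.length, buflen - copied);

		/* miter.addr is a kmapped view of the current entry. */
		memcpy(miter.addr, buf + copied, len);
		copied += len;
	}
	sg_miter_stop(&miter);

	return copied;
}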
/drivers/mmc/core/
mmc_test.c
85 struct scatterlist *sg; member
211 struct mmc_request *mrq, struct scatterlist *sg, unsigned sg_len, in mmc_test_prepare_mrq() argument
242 mrq->data->sg = sg; in mmc_test_prepare_mrq()
298 struct scatterlist sg; in mmc_test_buffer_transfer() local
304 sg_init_one(&sg, buffer, blksz); in mmc_test_buffer_transfer()
306 mmc_test_prepare_mrq(test, &mrq, &sg, 1, addr, 1, blksz, write); in mmc_test_buffer_transfer()
414 struct scatterlist *sg = NULL; in mmc_test_map_sg() local
433 if (sg) in mmc_test_map_sg()
434 sg = sg_next(sg); in mmc_test_map_sg()
436 sg = sglist; in mmc_test_map_sg()
[all …]
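mmc_test_buffer_transfer() describes one kernel buffer with a single scatterlist entry and attaches it to the data phase of the request. A minimal sketch of that setup (prepare_single_block() and its parameters are placeholders, not mmc_test symbols):

#include <linux/mmc/core.h>
#include <linux/scatterlist.h>

static void prepare_single_block(struct mmc_data *data, struct scatterlist *sg,
				 void *buf, unsigned int blksz, bool write)
{
	/* sg_init_one() = sg_init_table(sg, 1) followed by sg_set_buf(). */
	sg_init_one(sg, buf, blksz);

	data->sg = sg;
	data->sg_len = 1;
	data->blksz = blksz;
	data->blocks = 1;
	data->flags = write ? MMC_DATA_WRITE : MMC_DATA_READ;
}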
