/drivers/ata/ |
D | sata_gemini.c |
   126  bool gemini_sata_bridge_enabled(struct sata_gemini *sg, bool is_ata1)  in gemini_sata_bridge_enabled() argument
   128  if (!sg->sata_bridge)  in gemini_sata_bridge_enabled()
   134  if ((sg->muxmode == GEMINI_MUXMODE_2) &&  in gemini_sata_bridge_enabled()
   137  if ((sg->muxmode == GEMINI_MUXMODE_3) &&  in gemini_sata_bridge_enabled()
   145  enum gemini_muxmode gemini_sata_get_muxmode(struct sata_gemini *sg)  in gemini_sata_get_muxmode() argument
   147  return sg->muxmode;  in gemini_sata_get_muxmode()
   151  static int gemini_sata_setup_bridge(struct sata_gemini *sg,  in gemini_sata_setup_bridge() argument
   161  if (sg->muxmode == GEMINI_MUXMODE_2)  in gemini_sata_setup_bridge()
   163  writel(val, sg->base + GEMINI_SATA0_CTRL);  in gemini_sata_setup_bridge()
   167  if (sg->muxmode == GEMINI_MUXMODE_3)  in gemini_sata_setup_bridge()
   [all …]
|
D | pata_ftide010.c |
    50  struct sata_gemini *sg;  member
   274  struct sata_gemini *sg = ftide->sg;  in pata_ftide010_gemini_port_start() local
   284  ret = gemini_sata_start_bridge(sg, 0);  in pata_ftide010_gemini_port_start()
   290  ret = gemini_sata_start_bridge(sg, 1);  in pata_ftide010_gemini_port_start()
   297  ret = gemini_sata_start_bridge(sg, 0);  in pata_ftide010_gemini_port_start()
   304  ret = gemini_sata_start_bridge(sg, 1);  in pata_ftide010_gemini_port_start()
   317  struct sata_gemini *sg = ftide->sg;  in pata_ftide010_gemini_port_stop() local
   321  gemini_sata_stop_bridge(sg, 0);  in pata_ftide010_gemini_port_stop()
   325  gemini_sata_stop_bridge(sg, 1);  in pata_ftide010_gemini_port_stop()
   330  gemini_sata_stop_bridge(sg, 0);  in pata_ftide010_gemini_port_stop()
   [all …]
|
/drivers/clk/sprd/ |
D | gate.c |
    13  static void clk_gate_toggle(const struct sprd_gate *sg, bool en)  in clk_gate_toggle() argument
    15  const struct sprd_clk_common *common = &sg->common;  in clk_gate_toggle()
    17  bool set = sg->flags & CLK_GATE_SET_TO_DISABLE ? true : false;  in clk_gate_toggle()
    24  reg |= sg->enable_mask;  in clk_gate_toggle()
    26  reg &= ~sg->enable_mask;  in clk_gate_toggle()
    31  static void clk_sc_gate_toggle(const struct sprd_gate *sg, bool en)  in clk_sc_gate_toggle() argument
    33  const struct sprd_clk_common *common = &sg->common;  in clk_sc_gate_toggle()
    34  bool set = sg->flags & CLK_GATE_SET_TO_DISABLE ? 1 : 0;  in clk_sc_gate_toggle()
    45  offset = set ? sg->sc_offset : sg->sc_offset * 2;  in clk_sc_gate_toggle()
    48  sg->enable_mask);  in clk_sc_gate_toggle()
   [all …]
|
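The gate.c hits above all reduce to one read-modify-write of an enable mask, with the sense inverted when the gate is set-to-disable. A minimal sketch of that pattern, assuming a plain regmap in place of the driver's sprd_clk_common plumbing; my_gate and MY_GATE_SET_TO_DISABLE are illustrative names, not the sprd API:

#include <linux/bits.h>
#include <linux/regmap.h>

/* Illustrative stand-in for the sprd gate state; not the driver's type. */
struct my_gate {
        struct regmap   *map;
        u32             reg;
        u32             enable_mask;
        unsigned long   flags;
};

#define MY_GATE_SET_TO_DISABLE  BIT(0)  /* setting the bit gates the clock */

static void my_gate_toggle(const struct my_gate *g, bool en)
{
        /* For set-to-disable gates, enabling means clearing the mask. */
        bool set = (g->flags & MY_GATE_SET_TO_DISABLE) ? !en : en;

        regmap_update_bits(g->map, g->reg, g->enable_mask,
                           set ? g->enable_mask : 0);
}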
/drivers/crypto/ |
D | omap-crypto.c |
    17  struct scatterlist **sg,  in omap_crypto_copy_sg_lists() argument
    20  int n = sg_nents(*sg);  in omap_crypto_copy_sg_lists()
    24  new_sg = kmalloc_array(n, sizeof(*sg), GFP_KERNEL);  in omap_crypto_copy_sg_lists()
    33  while (*sg && total) {  in omap_crypto_copy_sg_lists()
    34  int len = (*sg)->length;  in omap_crypto_copy_sg_lists()
    41  sg_set_page(tmp, sg_page(*sg), len, (*sg)->offset);  in omap_crypto_copy_sg_lists()
    47  *sg = sg_next(*sg);  in omap_crypto_copy_sg_lists()
    50  *sg = new_sg;  in omap_crypto_copy_sg_lists()
    55  static int omap_crypto_copy_sgs(int total, int bs, struct scatterlist **sg,  in omap_crypto_copy_sgs() argument
    73  scatterwalk_map_and_copy(buf, *sg, 0, total, 0);  in omap_crypto_copy_sgs()
   [all …]
|
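omap_crypto_copy_sg_lists() above rebuilds a scatterlist so it covers exactly the bytes the hardware will touch. A minimal sketch of that cloning step, assuming the requested prefix fits within the source list; clone_sg_prefix is an illustrative name, not the driver's:

#include <linux/minmax.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static struct scatterlist *clone_sg_prefix(struct scatterlist *src, int total)
{
        int n = sg_nents(src);
        struct scatterlist *new_sg, *tmp;

        new_sg = kmalloc_array(n, sizeof(*new_sg), GFP_KERNEL);
        if (!new_sg)
                return NULL;

        sg_init_table(new_sg, n);

        tmp = new_sg;
        while (src && total) {
                int len = min_t(int, src->length, total);

                /* Point the clone at the same page/offset, clipped length. */
                sg_set_page(tmp, sg_page(src), len, src->offset);
                total -= len;
                if (!total) {
                        sg_mark_end(tmp);       /* trim to what we used */
                        break;
                }
                src = sg_next(src);
                tmp = sg_next(tmp);
        }

        return new_sg;
}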
D | s5p-sss.c |
   366  struct scatterlist *sg;  member
   438  const struct scatterlist *sg)  in s5p_set_dma_indata() argument
   440  SSS_WRITE(dev, FCBRDMAS, sg_dma_address(sg));  in s5p_set_dma_indata()
   441  SSS_WRITE(dev, FCBRDMAL, sg_dma_len(sg));  in s5p_set_dma_indata()
   445  const struct scatterlist *sg)  in s5p_set_dma_outdata() argument
   447  SSS_WRITE(dev, FCBTDMAS, sg_dma_address(sg));  in s5p_set_dma_outdata()
   448  SSS_WRITE(dev, FCBTDMAL, sg_dma_len(sg));  in s5p_set_dma_outdata()
   451  static void s5p_free_sg_cpy(struct s5p_aes_dev *dev, struct scatterlist **sg)  in s5p_free_sg_cpy() argument
   455  if (!*sg)  in s5p_free_sg_cpy()
   459  free_pages((unsigned long)sg_virt(*sg), get_order(len));  in s5p_free_sg_cpy()
   [all …]
|
/drivers/gpu/drm/i915/ |
D | i915_scatterlist.c |
    18  struct scatterlist *sg, *new_sg;  in i915_sg_trim() local
    28  for_each_sg(orig_st->sgl, sg, orig_st->nents, i) {  in i915_sg_trim()
    29  sg_set_page(new_sg, sg_page(sg), sg->length, 0);  in i915_sg_trim()
    30  sg_dma_address(new_sg) = sg_dma_address(sg);  in i915_sg_trim()
    31  sg_dma_len(new_sg) = sg_dma_len(sg);  in i915_sg_trim()
    89  struct scatterlist *sg;  in i915_rsgt_from_mm_node() local
   105  sg = st->sgl;  in i915_rsgt_from_mm_node()
   114  if (offset != prev_end || sg->length >= max_segment) {  in i915_rsgt_from_mm_node()
   116  sg = __sg_next(sg);  in i915_rsgt_from_mm_node()
   118  sg_dma_address(sg) = region_start + offset;  in i915_rsgt_from_mm_node()
   [all …]
|
D | i915_scatterlist.h |
    50  static inline int __sg_page_count(const struct scatterlist *sg)  in __sg_page_count() argument
    52  return sg->length >> PAGE_SHIFT;  in __sg_page_count()
    55  static inline int __sg_dma_page_count(const struct scatterlist *sg)  in __sg_dma_page_count() argument
    57  return sg_dma_len(sg) >> PAGE_SHIFT;  in __sg_dma_page_count()
    60  static inline struct scatterlist *____sg_next(struct scatterlist *sg)  in ____sg_next() argument
    62  ++sg;  in ____sg_next()
    63  if (unlikely(sg_is_chain(sg)))  in ____sg_next()
    64  sg = sg_chain_ptr(sg);  in ____sg_next()
    65  return sg;  in ____sg_next()
    77  static inline struct scatterlist *__sg_next(struct scatterlist *sg)  in __sg_next() argument
   [all …]
|
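The i915_scatterlist.h helpers above exist because the generic sg_next() pays for an sg_is_last() test on every step. A sketch of the chain-following core they keep, with the caveat that the caller must bound the walk itself (for i915, by a page counter); chain_aware_next and entry_page_count are illustrative names:

#include <linux/mm.h>
#include <linux/scatterlist.h>

/*
 * Step to the next entry and, if that entry is a chain link, follow
 * it into the next table.  No sg_is_last() check, unlike sg_next(),
 * so the caller must know when to stop.
 */
static struct scatterlist *chain_aware_next(struct scatterlist *sg)
{
        ++sg;
        if (unlikely(sg_is_chain(sg)))
                sg = sg_chain_ptr(sg);
        return sg;
}

/* Full pages covered by one entry, as __sg_page_count() computes it. */
static unsigned int entry_page_count(const struct scatterlist *sg)
{
        return sg->length >> PAGE_SHIFT;
}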
/drivers/net/ethernet/marvell/octeontx2/nic/ |
D | otx2_txrx.c |
   104  static void otx2_dma_unmap_skb_frags(struct otx2_nic *pfvf, struct sg_list *sg)  in otx2_dma_unmap_skb_frags() argument
   108  for (seg = 0; seg < sg->num_segs; seg++) {  in otx2_dma_unmap_skb_frags()
   109  otx2_dma_unmap_page(pfvf, sg->dma_addr[seg],  in otx2_dma_unmap_skb_frags()
   110  sg->size[seg], DMA_TO_DEVICE);  in otx2_dma_unmap_skb_frags()
   112  sg->num_segs = 0;  in otx2_dma_unmap_skb_frags()
   120  struct sg_list *sg;  in otx2_xdp_snd_pkt_handler() local
   124  sg = &sq->sg[snd_comp->sqe_id];  in otx2_xdp_snd_pkt_handler()
   126  pa = otx2_iova_to_phys(pfvf->iommu_domain, sg->dma_addr[0]);  in otx2_xdp_snd_pkt_handler()
   127  otx2_dma_unmap_page(pfvf, sg->dma_addr[0],  in otx2_xdp_snd_pkt_handler()
   128  sg->size[0], DMA_TO_DEVICE);  in otx2_xdp_snd_pkt_handler()
   [all …]
|
/drivers/dma/ |
D | dma-axi-dmac.c |
   120  struct axi_dmac_sg sg[];  member
   212  struct axi_dmac_sg *sg;  in axi_dmac_start_transfer() local
   229  sg = &desc->sg[desc->num_submitted];  in axi_dmac_start_transfer()
   232  if (sg->id != AXI_DMAC_SG_UNUSED) {  in axi_dmac_start_transfer()
   233  sg->schedule_when_free = true;  in axi_dmac_start_transfer()
   249  sg->id = axi_dmac_read(dmac, AXI_DMAC_REG_TRANSFER_ID);  in axi_dmac_start_transfer()
   252  axi_dmac_write(dmac, AXI_DMAC_REG_DEST_ADDRESS, sg->dest_addr);  in axi_dmac_start_transfer()
   253  axi_dmac_write(dmac, AXI_DMAC_REG_DEST_STRIDE, sg->dest_stride);  in axi_dmac_start_transfer()
   257  axi_dmac_write(dmac, AXI_DMAC_REG_SRC_ADDRESS, sg->src_addr);  in axi_dmac_start_transfer()
   258  axi_dmac_write(dmac, AXI_DMAC_REG_SRC_STRIDE, sg->src_stride);  in axi_dmac_start_transfer()
   [all …]
|
/drivers/media/pci/tw68/ |
D | tw68-risc.c |
    38  struct scatterlist *sg;  in tw68_risc_field() local
    54  sg = sglist;  in tw68_risc_field()
    57  while (offset && offset >= sg_dma_len(sg)) {  in tw68_risc_field()
    58  offset -= sg_dma_len(sg);  in tw68_risc_field()
    59  sg = sg_next(sg);  in tw68_risc_field()
    61  if (bpl <= sg_dma_len(sg) - offset) {  in tw68_risc_field()
    65  *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset);  in tw68_risc_field()
    76  done = (sg_dma_len(sg) - offset);  in tw68_risc_field()
    80  *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset);  in tw68_risc_field()
    82  sg = sg_next(sg);  in tw68_risc_field()
   [all …]
|
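tw68_risc_field() above first burns off a byte offset before emitting per-line DMA addresses. The skip loop isolated, assuming @offset lands inside the mapped list; sg_addr_at_offset is an illustrative name:

#include <linux/scatterlist.h>

/*
 * Advance past @offset bytes of a DMA-mapped list; the current
 * entry's bus address plus the residual offset is where the next
 * video line starts.
 */
static dma_addr_t sg_addr_at_offset(struct scatterlist *sg,
                                    unsigned int offset)
{
        while (offset && offset >= sg_dma_len(sg)) {
                offset -= sg_dma_len(sg);
                sg = sg_next(sg);
        }
        return sg_dma_address(sg) + offset;
}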
/drivers/crypto/gemini/ |
D | sl3516-ce-cipher.c |
    28  struct scatterlist *sg;  in sl3516_ce_need_fallback() local
    49  sg = areq->src;  in sl3516_ce_need_fallback()
    50  while (sg) {  in sl3516_ce_need_fallback()
    51  if ((sg->length % 16) != 0) {  in sl3516_ce_need_fallback()
    55  if ((sg_dma_len(sg) % 16) != 0) {  in sl3516_ce_need_fallback()
    59  if (!IS_ALIGNED(sg->offset, 16)) {  in sl3516_ce_need_fallback()
    63  sg = sg_next(sg);  in sl3516_ce_need_fallback()
    65  sg = areq->dst;  in sl3516_ce_need_fallback()
    66  while (sg) {  in sl3516_ce_need_fallback()
    67  if ((sg->length % 16) != 0) {  in sl3516_ce_need_fallback()
   [all …]
|
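sl3516_ce_need_fallback() above walks src and dst once each, bailing to the software fallback at the first segment the engine cannot take. The per-list test isolated, assuming the same 16-byte length and offset constraints; sgl_needs_fallback is an illustrative name:

#include <linux/align.h>
#include <linux/scatterlist.h>

/* True if any entry breaks the engine's 16-byte constraints. */
static bool sgl_needs_fallback(struct scatterlist *sg)
{
        while (sg) {
                if (sg->length % 16 || !IS_ALIGNED(sg->offset, 16))
                        return true;
                sg = sg_next(sg);
        }
        return false;
}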
/drivers/target/iscsi/cxgbit/ |
D | cxgbit_ddp.c |
    13  struct scatterlist *sg = sg_pp ? *sg_pp : NULL;  in cxgbit_set_one_ppod() local
    21  if (sg) {  in cxgbit_set_one_ppod()
    22  addr = sg_dma_address(sg);  in cxgbit_set_one_ppod()
    23  len = sg_dma_len(sg);  in cxgbit_set_one_ppod()
    27  if (sg) {  in cxgbit_set_one_ppod()
    30  if (offset == (len + sg->offset)) {  in cxgbit_set_one_ppod()
    32  sg = sg_next(sg);  in cxgbit_set_one_ppod()
    33  if (sg) {  in cxgbit_set_one_ppod()
    34  addr = sg_dma_address(sg);  in cxgbit_set_one_ppod()
    35  len = sg_dma_len(sg);  in cxgbit_set_one_ppod()
   [all …]
|
/drivers/crypto/allwinner/sun8i-ss/ |
D | sun8i-ss-cipher.c |
    30  struct scatterlist *sg;  in sun8i_ss_need_fallback() local
    45  sg = areq->src;  in sun8i_ss_need_fallback()
    46  while (sg) {  in sun8i_ss_need_fallback()
    47  todo = min(len, sg->length);  in sun8i_ss_need_fallback()
    52  if (!IS_ALIGNED(sg->offset, 16)) {  in sun8i_ss_need_fallback()
    57  sg = sg_next(sg);  in sun8i_ss_need_fallback()
    60  sg = areq->dst;  in sun8i_ss_need_fallback()
    61  while (sg) {  in sun8i_ss_need_fallback()
    62  todo = min(len, sg->length);  in sun8i_ss_need_fallback()
    67  if (!IS_ALIGNED(sg->offset, 16)) {  in sun8i_ss_need_fallback()
   [all …]
|
/drivers/infiniband/hw/mlx5/ |
D | umr.c |
   490  struct ib_sge *sg)  in mlx5r_umr_unmap_free_xlt() argument
   494  dma_unmap_single(ddev, sg->addr, sg->length, DMA_TO_DEVICE);  in mlx5r_umr_unmap_free_xlt()
   495  mlx5r_umr_free_xlt(xlt, sg->length);  in mlx5r_umr_unmap_free_xlt()
   501  static void *mlx5r_umr_create_xlt(struct mlx5_ib_dev *dev, struct ib_sge *sg,  in mlx5r_umr_create_xlt() argument
   512  sg->length = nents * ent_size;  in mlx5r_umr_create_xlt()
   513  dma = dma_map_single(ddev, xlt, sg->length, DMA_TO_DEVICE);  in mlx5r_umr_create_xlt()
   516  mlx5r_umr_free_xlt(xlt, sg->length);  in mlx5r_umr_create_xlt()
   519  sg->addr = dma;  in mlx5r_umr_create_xlt()
   520  sg->lkey = dev->umrc.pd->local_dma_lkey;  in mlx5r_umr_create_xlt()
   527  unsigned int flags, struct ib_sge *sg)  in mlx5r_umr_set_update_xlt_ctrl_seg() argument
   [all …]
|
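mlx5r_umr_create_xlt() above ends by describing the DMA-mapped XLT buffer as an ib_sge. That map-and-describe step isolated; map_buf_to_sge is an illustrative name, and @lkey would come from the UMR PD's local_dma_lkey as in the driver:

#include <linux/dma-mapping.h>
#include <rdma/ib_verbs.h>

static int map_buf_to_sge(struct device *ddev, void *buf, size_t len,
                          u32 lkey, struct ib_sge *sg)
{
        dma_addr_t dma = dma_map_single(ddev, buf, len, DMA_TO_DEVICE);

        if (dma_mapping_error(ddev, dma))
                return -ENOMEM;

        /* The sge now names the mapped buffer for the work request. */
        sg->addr = dma;
        sg->length = len;
        sg->lkey = lkey;
        return 0;
}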
/drivers/gpu/drm/i915/gem/ |
D | i915_gem_internal.c |
    22  struct scatterlist *sg;  in internal_free_pages() local
    24  for (sg = st->sgl; sg; sg = __sg_next(sg)) {  in internal_free_pages()
    25  if (sg_page(sg))  in internal_free_pages()
    26  __free_pages(sg_page(sg), get_order(sg->length));  in internal_free_pages()
    37  struct scatterlist *sg;  in i915_gem_object_get_pages_internal() local
    65  sg = st->sgl;  in i915_gem_object_get_pages_internal()
    85  sg_set_page(sg, page, PAGE_SIZE << order, 0);  in i915_gem_object_get_pages_internal()
    91  sg_mark_end(sg);  in i915_gem_object_get_pages_internal()
    95  sg = __sg_next(sg);  in i915_gem_object_get_pages_internal()
   113  sg_set_page(sg, NULL, 0, 0);  in i915_gem_object_get_pages_internal()
   [all …]
|
D | i915_gem_pages.c |
    59  obj->mm.page_sizes.sg = 0;  in __i915_gem_object_set_pages()
    62  obj->mm.page_sizes.sg |= BIT(i);  in __i915_gem_object_set_pages()
    64  GEM_BUG_ON(!HAS_PAGE_SIZES(i915, obj->mm.page_sizes.sg));  in __i915_gem_object_set_pages()
   229  obj->mm.page_sizes.phys = obj->mm.page_sizes.sg = 0;  in __i915_gem_object_unset_pages()
   519  struct scatterlist *sg;  in __i915_gem_object_get_sg() local
   546  sg = iter->sg_pos;  in __i915_gem_object_get_sg()
   548  count = dma ? __sg_dma_page_count(sg) : __sg_page_count(sg);  in __i915_gem_object_get_sg()
   563  ret = radix_tree_insert(&iter->radix, idx, sg);  in __i915_gem_object_get_sg()
   575  sg = ____sg_next(sg);  in __i915_gem_object_get_sg()
   576  count = dma ? __sg_dma_page_count(sg) : __sg_page_count(sg);  in __i915_gem_object_get_sg()
   [all …]
|
/drivers/dma-buf/ |
D | udmabuf.c |
    27  struct sg_table *sg;  member
    64  struct sg_table *sg;  in get_sg_table() local
    67  sg = kzalloc(sizeof(*sg), GFP_KERNEL);  in get_sg_table()
    68  if (!sg)  in get_sg_table()
    70  ret = sg_alloc_table_from_pages(sg, ubuf->pages, ubuf->pagecount,  in get_sg_table()
    75  ret = dma_map_sgtable(dev, sg, direction, 0);  in get_sg_table()
    78  return sg;  in get_sg_table()
    81  sg_free_table(sg);  in get_sg_table()
    82  kfree(sg);  in get_sg_table()
    86  static void put_sg_table(struct device *dev, struct sg_table *sg,  in put_sg_table() argument
   [all …]
|
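get_sg_table() above is the canonical allocate/map/unwind sequence for exporting a page array. A sketch of the same sequence with the udmabuf specifics stripped away; pages_to_mapped_sgt is an illustrative name:

#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static struct sg_table *pages_to_mapped_sgt(struct device *dev,
                                            struct page **pages,
                                            unsigned int count,
                                            enum dma_data_direction dir)
{
        struct sg_table *sgt;
        int ret;

        sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
        if (!sgt)
                return ERR_PTR(-ENOMEM);

        /* Coalesces contiguous pages into fewer, larger entries. */
        ret = sg_alloc_table_from_pages(sgt, pages, count, 0,
                                        count << PAGE_SHIFT, GFP_KERNEL);
        if (ret < 0)
                goto err_free;

        ret = dma_map_sgtable(dev, sgt, dir, 0);
        if (ret < 0)
                goto err_table;

        return sgt;

err_table:
        sg_free_table(sgt);
err_free:
        kfree(sgt);
        return ERR_PTR(ret);
}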
/drivers/crypto/qce/ |
D | dma.c |
    53  struct scatterlist *sg = sgt->sgl, *sg_last = NULL;  in qce_sgtable_add() local
    56  while (sg) {  in qce_sgtable_add()
    57  if (!sg_page(sg))  in qce_sgtable_add()
    59  sg = sg_next(sg);  in qce_sgtable_add()
    62  if (!sg)  in qce_sgtable_add()
    65  while (new_sgl && sg && max_len) {  in qce_sgtable_add()
    67  sg_set_page(sg, sg_page(new_sgl), new_len, new_sgl->offset);  in qce_sgtable_add()
    68  sg_last = sg;  in qce_sgtable_add()
    69  sg = sg_next(sg);  in qce_sgtable_add()
    77  static int qce_dma_prep_sg(struct dma_chan *chan, struct scatterlist *sg,  in qce_dma_prep_sg() argument
   [all …]
|
/drivers/crypto/cavium/nitrox/ |
D | nitrox_req.h |
   482  struct scatterlist *sg;  member
   573  static inline struct scatterlist *create_single_sg(struct scatterlist *sg,  in create_single_sg() argument
   576  sg_set_buf(sg, buf, buflen);  in create_single_sg()
   577  sg++;  in create_single_sg()
   578  return sg;  in create_single_sg()
   594  struct scatterlist *sg = to_sg;  in create_multi_sg() local
   602  sg_set_buf(sg, sg_virt(from_sg), sglen);  in create_multi_sg()
   604  sg++;  in create_multi_sg()
   607  return sg;  in create_multi_sg()
   647  struct scatterlist *sg;  in nitrox_creq_set_src_sg() local
   [all …]
|
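create_single_sg()/create_multi_sg() above rely on entries being laid out back to back inside one table, so returning sg + 1 yields the next free slot. The idiom isolated; append_buf is an illustrative name:

#include <linux/scatterlist.h>

/*
 * Fill one already-initialized entry and hand back the next slot so
 * callers can keep appending.  Valid only within a single table,
 * where entries are contiguous in memory.
 */
static struct scatterlist *append_buf(struct scatterlist *sg,
                                      void *buf, int buflen)
{
        sg_set_buf(sg, buf, buflen);
        return sg + 1;
}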
/drivers/target/tcm_fc/ |
D | tfc_io.c |
    47  struct scatterlist *sg = NULL;  in ft_queue_data_in() local
    81  sg = se_cmd->t_data_sg;  in ft_queue_data_in()
    82  mem_len = sg->length;  in ft_queue_data_in()
    83  mem_off = sg->offset;  in ft_queue_data_in()
    84  page = sg_page(sg);  in ft_queue_data_in()
    99  sg = sg_next(sg);  in ft_queue_data_in()
   100  mem_len = min((size_t)sg->length, remaining);  in ft_queue_data_in()
   101  mem_off = sg->offset;  in ft_queue_data_in()
   102  page = sg_page(sg);  in ft_queue_data_in()
   202  struct scatterlist *sg = NULL;  in ft_recv_write_data() local
   [all …]
|
/drivers/xen/ |
D | swiotlb-xen.c |
   314  struct scatterlist *sg;  in xen_swiotlb_unmap_sg() local
   319  for_each_sg(sgl, sg, nelems, i)  in xen_swiotlb_unmap_sg()
   320  xen_swiotlb_unmap_page(hwdev, sg->dma_address, sg_dma_len(sg),  in xen_swiotlb_unmap_sg()
   329  struct scatterlist *sg;  in xen_swiotlb_map_sg() local
   334  for_each_sg(sgl, sg, nelems, i) {  in xen_swiotlb_map_sg()
   335  sg->dma_address = xen_swiotlb_map_page(dev, sg_page(sg),  in xen_swiotlb_map_sg()
   336  sg->offset, sg->length, dir, attrs);  in xen_swiotlb_map_sg()
   337  if (sg->dma_address == DMA_MAPPING_ERROR)  in xen_swiotlb_map_sg()
   339  sg_dma_len(sg) = sg->length;  in xen_swiotlb_map_sg()
   353  struct scatterlist *sg;  in xen_swiotlb_sync_sg_for_cpu() local
   [all …]
|
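xen_swiotlb_map_sg() above maps each segment individually and unwinds on the first failure. The same shape using the generic streaming-DMA calls instead of the Xen wrappers; map_sg_one_by_one is an illustrative name:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int map_sg_one_by_one(struct device *dev, struct scatterlist *sgl,
                             int nelems, enum dma_data_direction dir,
                             unsigned long attrs)
{
        struct scatterlist *sg, *s;
        int i, j;

        for_each_sg(sgl, sg, nelems, i) {
                sg->dma_address = dma_map_page_attrs(dev, sg_page(sg),
                                                     sg->offset, sg->length,
                                                     dir, attrs);
                if (sg->dma_address == DMA_MAPPING_ERROR)
                        goto out_unmap;
                sg_dma_len(sg) = sg->length;
        }
        return nelems;

out_unmap:
        /* Unwind the i entries that did map before reporting failure. */
        for_each_sg(sgl, s, i, j)
                dma_unmap_page_attrs(dev, s->dma_address, sg_dma_len(s),
                                     dir, attrs);
        sg_dma_len(sgl) = 0;
        return 0;
}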
/drivers/gpu/drm/i915/gem/selftests/ |
D | huge_gem_object.c |
    33  struct scatterlist *sg, *src, *end;  in huge_get_pages() local
    46  sg = pages->sgl;  in huge_get_pages()
    52  sg_mark_end(sg);  in huge_get_pages()
    56  sg_set_page(sg, page, PAGE_SIZE, 0);  in huge_get_pages()
    57  sg = __sg_next(sg);  in huge_get_pages()
    60  for (end = sg, src = pages->sgl; sg; sg = __sg_next(sg)) {  in huge_get_pages()
    61  sg_set_page(sg, sg_page(src), PAGE_SIZE, 0);  in huge_get_pages()
|
/drivers/usb/storage/ |
D | protocol.c |
   126  struct scatterlist *sg = *sgptr;  in usb_stor_access_xfer_buf() local
   130  if (sg)  in usb_stor_access_xfer_buf()
   131  nents = sg_nents(sg);  in usb_stor_access_xfer_buf()
   133  sg = scsi_sglist(srb);  in usb_stor_access_xfer_buf()
   135  sg_miter_start(&miter, sg, nents, dir == FROM_XFER_BUF ?  in usb_stor_access_xfer_buf()
   150  if (*offset + len < miter.piter.sg->length) {  in usb_stor_access_xfer_buf()
   152  *sgptr = miter.piter.sg;  in usb_stor_access_xfer_buf()
   155  *sgptr = sg_next(miter.piter.sg);  in usb_stor_access_xfer_buf()
   173  struct scatterlist *sg = NULL;  in usb_stor_set_xfer_buf() local
   176  buflen = usb_stor_access_xfer_buf(buffer, buflen, srb, &sg, &offset,  in usb_stor_set_xfer_buf()
|
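usb_stor_access_xfer_buf() above is built on the sg_miter API, which maps one segment at a time instead of flattening the list into a bounce buffer. A minimal sketch of that walk for the copy-into-sg direction; copy_buf_to_sgl is an illustrative name:

#include <linux/minmax.h>
#include <linux/scatterlist.h>
#include <linux/string.h>

static size_t copy_buf_to_sgl(const u8 *buf, size_t buflen,
                              struct scatterlist *sgl, unsigned int nents)
{
        struct sg_mapping_iter miter;
        size_t done = 0;

        sg_miter_start(&miter, sgl, nents, SG_MITER_ATOMIC | SG_MITER_TO_SG);
        while (done < buflen && sg_miter_next(&miter)) {
                /* miter.addr/miter.length describe the mapped segment. */
                size_t len = min(miter.length, buflen - done);

                memcpy(miter.addr, buf + done, len);
                done += len;
        }
        sg_miter_stop(&miter);

        return done;
}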
/drivers/mmc/core/ |
D | mmc_test.c |
    84  struct scatterlist *sg;  member
   210  struct mmc_request *mrq, struct scatterlist *sg, unsigned sg_len,  in mmc_test_prepare_mrq() argument
   241  mrq->data->sg = sg;  in mmc_test_prepare_mrq()
   297  struct scatterlist sg;  in mmc_test_buffer_transfer() local
   303  sg_init_one(&sg, buffer, blksz);  in mmc_test_buffer_transfer()
   305  mmc_test_prepare_mrq(test, &mrq, &sg, 1, addr, 1, blksz, write);  in mmc_test_buffer_transfer()
   413  struct scatterlist *sg = NULL;  in mmc_test_map_sg() local
   432  if (sg)  in mmc_test_map_sg()
   433  sg = sg_next(sg);  in mmc_test_map_sg()
   435  sg = sglist;  in mmc_test_map_sg()
   [all …]
|
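mmc_test_buffer_transfer() above shows the smallest possible data request: one block, one sg entry. A sketch of the wiring mmc_test_prepare_mrq() does for that case; prepare_one_block is an illustrative name:

#include <linux/mmc/core.h>
#include <linux/scatterlist.h>

static void prepare_one_block(struct mmc_data *data, struct scatterlist *sg,
                              void *buffer, unsigned int blksz, bool write)
{
        /* Init the entry, set its end marker, point it at buffer. */
        sg_init_one(sg, buffer, blksz);

        data->sg = sg;
        data->sg_len = 1;
        data->blksz = blksz;
        data->blocks = 1;
        data->flags = write ? MMC_DATA_WRITE : MMC_DATA_READ;
}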
/drivers/crypto/bcm/ |
D | util.c |
    27  int spu_sg_at_offset(struct scatterlist *sg, unsigned int skip,  in spu_sg_at_offset() argument
    35  next_index = sg->length;  in spu_sg_at_offset()
    37  sg = sg_next(sg);  in spu_sg_at_offset()
    39  if (!sg)  in spu_sg_at_offset()
    41  next_index += sg->length;  in spu_sg_at_offset()
    45  *sge = sg;  in spu_sg_at_offset()
    95  struct scatterlist *sg;  in spu_sg_count() local
   102  if (spu_sg_at_offset(sg_list, skip, &sg, &offset) < 0)  in spu_sg_count()
   105  while (sg && (nbytes > 0)) {  in spu_sg_count()
   107  nbytes -= (sg->length - offset);  in spu_sg_count()
   [all …]
|
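spu_sg_at_offset() above is the usual find-entry-at-byte-offset walk, which spu_sg_count() then builds on. The walk isolated; sg_at_offset is an illustrative name:

#include <linux/errno.h>
#include <linux/scatterlist.h>

/*
 * Find the entry containing byte @skip and report the residual
 * offset into it.  Returns -EINVAL when the list is shorter.
 */
static int sg_at_offset(struct scatterlist *sg, unsigned int skip,
                        struct scatterlist **sge, unsigned int *sge_offset)
{
        unsigned int index = 0;                 /* bytes covered so far */
        unsigned int next_index = sg->length;   /* bytes covered incl. sg */

        while (next_index <= skip) {
                index = next_index;
                sg = sg_next(sg);
                if (!sg)
                        return -EINVAL;
                next_index += sg->length;
        }

        *sge_offset = skip - index;
        *sge = sg;
        return 0;
}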