/drivers/ata/ |
D | sata_gemini.c |
    125  bool gemini_sata_bridge_enabled(struct sata_gemini *sg, bool is_ata1)  in gemini_sata_bridge_enabled() argument
    127  if (!sg->sata_bridge)  in gemini_sata_bridge_enabled()
    133  if ((sg->muxmode == GEMINI_MUXMODE_2) &&  in gemini_sata_bridge_enabled()
    136  if ((sg->muxmode == GEMINI_MUXMODE_3) &&  in gemini_sata_bridge_enabled()
    144  enum gemini_muxmode gemini_sata_get_muxmode(struct sata_gemini *sg)  in gemini_sata_get_muxmode() argument
    146  return sg->muxmode;  in gemini_sata_get_muxmode()
    150  static int gemini_sata_setup_bridge(struct sata_gemini *sg,  in gemini_sata_setup_bridge() argument
    160  if (sg->muxmode == GEMINI_MUXMODE_2)  in gemini_sata_setup_bridge()
    162  writel(val, sg->base + GEMINI_SATA0_CTRL);  in gemini_sata_setup_bridge()
    166  if (sg->muxmode == GEMINI_MUXMODE_3)  in gemini_sata_setup_bridge()
    [all …]
|
D | pata_ftide010.c |
    49   struct sata_gemini *sg;  member
    273  struct sata_gemini *sg = ftide->sg;  in pata_ftide010_gemini_port_start() local
    283  ret = gemini_sata_start_bridge(sg, 0);  in pata_ftide010_gemini_port_start()
    289  ret = gemini_sata_start_bridge(sg, 1);  in pata_ftide010_gemini_port_start()
    296  ret = gemini_sata_start_bridge(sg, 0);  in pata_ftide010_gemini_port_start()
    303  ret = gemini_sata_start_bridge(sg, 1);  in pata_ftide010_gemini_port_start()
    316  struct sata_gemini *sg = ftide->sg;  in pata_ftide010_gemini_port_stop() local
    320  gemini_sata_stop_bridge(sg, 0);  in pata_ftide010_gemini_port_stop()
    324  gemini_sata_stop_bridge(sg, 1);  in pata_ftide010_gemini_port_stop()
    329  gemini_sata_stop_bridge(sg, 0);  in pata_ftide010_gemini_port_stop()
    [all …]
|
/drivers/crypto/ |
D | omap-crypto.c |
    20   struct scatterlist **sg,  in omap_crypto_copy_sg_lists() argument
    23   int n = sg_nents(*sg);  in omap_crypto_copy_sg_lists()
    27   new_sg = kmalloc_array(n, sizeof(*sg), GFP_KERNEL);  in omap_crypto_copy_sg_lists()
    36   while (*sg && total) {  in omap_crypto_copy_sg_lists()
    37   int len = (*sg)->length;  in omap_crypto_copy_sg_lists()
    44   sg_set_page(tmp, sg_page(*sg), len, (*sg)->offset);  in omap_crypto_copy_sg_lists()
    50   *sg = sg_next(*sg);  in omap_crypto_copy_sg_lists()
    53   *sg = new_sg;  in omap_crypto_copy_sg_lists()
    58   static int omap_crypto_copy_sgs(int total, int bs, struct scatterlist **sg,  in omap_crypto_copy_sgs() argument
    76   scatterwalk_map_and_copy(buf, *sg, 0, total, 0);  in omap_crypto_copy_sgs()
    [all …]
|
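The omap-crypto.c hits above all belong to one idiom: copying the page/length/offset triplets of an existing scatterlist chain into a freshly allocated array. A minimal sketch of that idiom, using only the generic <linux/scatterlist.h> API (the name clone_sg_list and its bounds handling are illustrative, not the driver's own helpers):

#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Copy up to `total` bytes worth of entries from `src` into a new array. */
static struct scatterlist *clone_sg_list(struct scatterlist *src, int total)
{
    int n = sg_nents(src);
    struct scatterlist *new_sg, *tmp;

    if (!n)
        return NULL;

    new_sg = kmalloc_array(n, sizeof(*new_sg), GFP_KERNEL);
    if (!new_sg)
        return NULL;

    sg_init_table(new_sg, n);

    for (tmp = new_sg; src && total > 0;
         src = sg_next(src), tmp = sg_next(tmp)) {
        int len = min_t(int, src->length, total);

        /* reuse the source pages; only the descriptors are duplicated */
        sg_set_page(tmp, sg_page(src), len, src->offset);
        total -= len;
    }

    return new_sg;
}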
D | s5p-sss.c |
    257  static void s5p_set_dma_indata(struct s5p_aes_dev *dev, struct scatterlist *sg)  in s5p_set_dma_indata() argument
    259  SSS_WRITE(dev, FCBRDMAS, sg_dma_address(sg));  in s5p_set_dma_indata()
    260  SSS_WRITE(dev, FCBRDMAL, sg_dma_len(sg));  in s5p_set_dma_indata()
    263  static void s5p_set_dma_outdata(struct s5p_aes_dev *dev, struct scatterlist *sg)  in s5p_set_dma_outdata() argument
    265  SSS_WRITE(dev, FCBTDMAS, sg_dma_address(sg));  in s5p_set_dma_outdata()
    266  SSS_WRITE(dev, FCBTDMAL, sg_dma_len(sg));  in s5p_set_dma_outdata()
    269  static void s5p_free_sg_cpy(struct s5p_aes_dev *dev, struct scatterlist **sg)  in s5p_free_sg_cpy() argument
    273  if (!*sg)  in s5p_free_sg_cpy()
    277  free_pages((unsigned long)sg_virt(*sg), get_order(len));  in s5p_free_sg_cpy()
    279  kfree(*sg);  in s5p_free_sg_cpy()
    [all …]
|
/drivers/staging/fsl-mc/include/ |
D | dpaa2-fd.h |
    313  static inline dma_addr_t dpaa2_sg_get_addr(const struct dpaa2_sg_entry *sg)  in dpaa2_sg_get_addr() argument
    315  return le64_to_cpu((dma_addr_t)sg->addr);  in dpaa2_sg_get_addr()
    323  static inline void dpaa2_sg_set_addr(struct dpaa2_sg_entry *sg, dma_addr_t addr)  in dpaa2_sg_set_addr() argument
    325  sg->addr = cpu_to_le64(addr);  in dpaa2_sg_set_addr()
    328  static inline bool dpaa2_sg_short_len(const struct dpaa2_sg_entry *sg)  in dpaa2_sg_short_len() argument
    330  return !!((le16_to_cpu(sg->format_offset) >> SG_SHORT_LEN_FLAG_SHIFT)  in dpaa2_sg_short_len()
    340  static inline u32 dpaa2_sg_get_len(const struct dpaa2_sg_entry *sg)  in dpaa2_sg_get_len() argument
    342  if (dpaa2_sg_short_len(sg))  in dpaa2_sg_get_len()
    343  return le32_to_cpu(sg->len) & SG_SHORT_LEN_MASK;  in dpaa2_sg_get_len()
    345  return le32_to_cpu(sg->len);  in dpaa2_sg_get_len()
    [all …]
|
/drivers/media/pci/tw68/ |
D | tw68-risc.c |
    46   struct scatterlist *sg;  in tw68_risc_field() local
    62   sg = sglist;  in tw68_risc_field()
    65   while (offset && offset >= sg_dma_len(sg)) {  in tw68_risc_field()
    66   offset -= sg_dma_len(sg);  in tw68_risc_field()
    67   sg = sg_next(sg);  in tw68_risc_field()
    69   if (bpl <= sg_dma_len(sg) - offset) {  in tw68_risc_field()
    73   *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset);  in tw68_risc_field()
    84   done = (sg_dma_len(sg) - offset);  in tw68_risc_field()
    88   *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset);  in tw68_risc_field()
    90   sg = sg_next(sg);  in tw68_risc_field()
    [all …]
|
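The tw68_risc_field() matches above show a common DMA-mapped scatterlist walk: skip a byte offset across entries before consuming data. A small sketch of just that skip step, with illustrative names:

#include <linux/scatterlist.h>

/*
 * Advance into a DMA-mapped scatterlist until `*offset` bytes have been
 * skipped.  Returns the entry containing *offset, or NULL if the offset
 * runs past the end of the list; *offset is left relative to that entry.
 */
static struct scatterlist *skip_into_sg(struct scatterlist *sg,
                                        unsigned int *offset)
{
    while (sg && *offset >= sg_dma_len(sg)) {
        *offset -= sg_dma_len(sg);
        sg = sg_next(sg);
    }
    return sg;
}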
/drivers/gpu/drm/i915/selftests/ |
D | huge_gem_object.c |
    31   struct scatterlist *sg;  in huge_free_pages() local
    33   for (sg = pages->sgl; sg && nreal--; sg = __sg_next(sg))  in huge_free_pages()
    34   __free_page(sg_page(sg));  in huge_free_pages()
    46   struct scatterlist *sg, *src, *end;  in huge_get_pages() local
    59   sg = pages->sgl;  in huge_get_pages()
    65   sg_mark_end(sg);  in huge_get_pages()
    69   sg_set_page(sg, page, PAGE_SIZE, 0);  in huge_get_pages()
    70   sg = __sg_next(sg);  in huge_get_pages()
    73   for (end = sg, src = pages->sgl; sg; sg = __sg_next(sg)) {  in huge_get_pages()
    74   sg_set_page(sg, sg_page(src), PAGE_SIZE, 0);  in huge_get_pages()
|
D | scatterlist.c |
    46   struct scatterlist *sg;  in expect_pfn_sg() local
    50   for_each_sg(pt->st.sgl, sg, pt->st.nents, n) {  in expect_pfn_sg()
    51   struct page *page = sg_page(sg);  in expect_pfn_sg()
    60   if (sg->length != npages * PAGE_SIZE) {  in expect_pfn_sg()
    62   __func__, who, npages * PAGE_SIZE, sg->length);  in expect_pfn_sg()
    205  struct scatterlist *sg;  in alloc_table() local
    213  GEM_BUG_ON(overflows_type(count * PAGE_SIZE, sg->length));  in alloc_table()
    221  sg = pt->st.sgl;  in alloc_table()
    234  sg = sg_next(sg);  in alloc_table()
    235  sg_set_page(sg, pfn_to_page(pfn), npages * PAGE_SIZE, 0);  in alloc_table()
    [all …]
|
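The i915 selftest hits above revolve around walking a whole sg_table with for_each_sg() and checking invariants per entry. A minimal, generic sketch of that shape (check_sg_table and expected_len are illustrative, not the selftest's own symbols):

#include <linux/errno.h>
#include <linux/scatterlist.h>

/* Verify that every entry of an sg_table is page-backed and sized as expected. */
static int check_sg_table(struct sg_table *st, unsigned int expected_len)
{
    struct scatterlist *sg;
    int i;

    for_each_sg(st->sgl, sg, st->nents, i) {
        struct page *page = sg_page(sg);

        if (!page)
            return -EINVAL;
        if (sg->length != expected_len)
            return -EINVAL;
    }
    return 0;
}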
/drivers/target/iscsi/cxgbit/ |
D | cxgbit_ddp.c |
    16   struct scatterlist *sg = sg_pp ? *sg_pp : NULL;  in cxgbit_set_one_ppod() local
    24   if (sg) {  in cxgbit_set_one_ppod()
    25   addr = sg_dma_address(sg);  in cxgbit_set_one_ppod()
    26   len = sg_dma_len(sg);  in cxgbit_set_one_ppod()
    30   if (sg) {  in cxgbit_set_one_ppod()
    33   if (offset == (len + sg->offset)) {  in cxgbit_set_one_ppod()
    35   sg = sg_next(sg);  in cxgbit_set_one_ppod()
    36   if (sg) {  in cxgbit_set_one_ppod()
    37   addr = sg_dma_address(sg);  in cxgbit_set_one_ppod()
    38   len = sg_dma_len(sg);  in cxgbit_set_one_ppod()
    [all …]
|
/drivers/gpu/drm/i915/ |
D | i915_gem_internal.c |
    36   struct scatterlist *sg;  in internal_free_pages() local
    38   for (sg = st->sgl; sg; sg = __sg_next(sg)) {  in internal_free_pages()
    39   if (sg_page(sg))  in internal_free_pages()
    40   __free_pages(sg_page(sg), get_order(sg->length));  in internal_free_pages()
    52   struct scatterlist *sg;  in i915_gem_object_get_pages_internal() local
    89   sg = st->sgl;  in i915_gem_object_get_pages_internal()
    107  sg_set_page(sg, page, PAGE_SIZE << order, 0);  in i915_gem_object_get_pages_internal()
    112  sg_mark_end(sg);  in i915_gem_object_get_pages_internal()
    116  sg = __sg_next(sg);  in i915_gem_object_get_pages_internal()
    138  sg_set_page(sg, NULL, 0, 0);  in i915_gem_object_get_pages_internal()
    [all …]
|
/drivers/crypto/qce/ |
D | dma.c |
    60   struct scatterlist *sg = sgt->sgl, *sg_last = NULL;  in qce_sgtable_add() local
    62   while (sg) {  in qce_sgtable_add()
    63   if (!sg_page(sg))  in qce_sgtable_add()
    65   sg = sg_next(sg);  in qce_sgtable_add()
    68   if (!sg)  in qce_sgtable_add()
    71   while (new_sgl && sg) {  in qce_sgtable_add()
    72   sg_set_page(sg, sg_page(new_sgl), new_sgl->length,  in qce_sgtable_add()
    74   sg_last = sg;  in qce_sgtable_add()
    75   sg = sg_next(sg);  in qce_sgtable_add()
    82   static int qce_dma_prep_sg(struct dma_chan *chan, struct scatterlist *sg,  in qce_dma_prep_sg() argument
    [all …]
|
/drivers/dma/ |
D | dma-axi-dmac.c |
    92   struct axi_dmac_sg sg[];  member
    184  struct axi_dmac_sg *sg;  in axi_dmac_start_transfer() local
    201  sg = &desc->sg[desc->num_submitted];  in axi_dmac_start_transfer()
    209  sg->id = axi_dmac_read(dmac, AXI_DMAC_REG_TRANSFER_ID);  in axi_dmac_start_transfer()
    212  axi_dmac_write(dmac, AXI_DMAC_REG_DEST_ADDRESS, sg->dest_addr);  in axi_dmac_start_transfer()
    213  axi_dmac_write(dmac, AXI_DMAC_REG_DEST_STRIDE, sg->dest_stride);  in axi_dmac_start_transfer()
    217  axi_dmac_write(dmac, AXI_DMAC_REG_SRC_ADDRESS, sg->src_addr);  in axi_dmac_start_transfer()
    218  axi_dmac_write(dmac, AXI_DMAC_REG_SRC_STRIDE, sg->src_stride);  in axi_dmac_start_transfer()
    228  axi_dmac_write(dmac, AXI_DMAC_REG_X_LENGTH, sg->x_len - 1);  in axi_dmac_start_transfer()
    229  axi_dmac_write(dmac, AXI_DMAC_REG_Y_LENGTH, sg->y_len - 1);  in axi_dmac_start_transfer()
    [all …]
|
/drivers/staging/android/ion/ |
D | ion_chunk_heap.c |
    43   struct scatterlist *sg;  in ion_chunk_heap_allocate() local
    63   sg = table->sgl;  in ion_chunk_heap_allocate()
    69   sg_set_page(sg, pfn_to_page(PFN_DOWN(paddr)),  in ion_chunk_heap_allocate()
    71   sg = sg_next(sg);  in ion_chunk_heap_allocate()
    78   sg = table->sgl;  in ion_chunk_heap_allocate()
    80   gen_pool_free(chunk_heap->pool, page_to_phys(sg_page(sg)),  in ion_chunk_heap_allocate()
    81   sg->length);  in ion_chunk_heap_allocate()
    82   sg = sg_next(sg);  in ion_chunk_heap_allocate()
    95   struct scatterlist *sg;  in ion_chunk_heap_free() local
    103  for_each_sg(table->sgl, sg, table->nents, i) {  in ion_chunk_heap_free()
    [all …]
|
D | ion_heap.c |
    31   struct scatterlist *sg;  in ion_heap_map_kernel() local
    48   for_each_sg(table->sgl, sg, table->nents, i) {  in ion_heap_map_kernel()
    49   int npages_this_entry = PAGE_ALIGN(sg->length) / PAGE_SIZE;  in ion_heap_map_kernel()
    50   struct page *page = sg_page(sg);  in ion_heap_map_kernel()
    77   struct scatterlist *sg;  in ion_heap_map_user() local
    81   for_each_sg(table->sgl, sg, table->nents, i) {  in ion_heap_map_user()
    82   struct page *page = sg_page(sg);  in ion_heap_map_user()
    84   unsigned long len = sg->length;  in ion_heap_map_user()
    86   if (offset >= sg->length) {  in ion_heap_map_user()
    87   offset -= sg->length;  in ion_heap_map_user()
    [all …]
|
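The ion_heap_map_kernel() hits above illustrate turning an sg_table into one contiguous kernel mapping: gather the pages behind every entry, then vmap() them. A rough sketch of that idea, assuming a table whose entries are page-backed and whose total page count is known to the caller (names are illustrative):

#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <linux/vmalloc.h>

static void *map_sg_table_kernel(struct sg_table *table, unsigned int npages)
{
    struct page **pages, **tmp;
    struct scatterlist *sg;
    void *vaddr;
    int i, j;

    pages = vmalloc(sizeof(struct page *) * npages);
    if (!pages)
        return NULL;

    tmp = pages;
    for_each_sg(table->sgl, sg, table->nents, i) {
        int n = PAGE_ALIGN(sg->length) / PAGE_SIZE;
        struct page *page = sg_page(sg);

        /* pages within one entry are physically contiguous */
        for (j = 0; j < n; j++)
            *tmp++ = page + j;
    }

    vaddr = vmap(pages, npages, VM_MAP, PAGE_KERNEL);
    vfree(pages);
    return vaddr;   /* NULL on failure; release with vunmap() */
}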
/drivers/target/tcm_fc/ |
D | tfc_io.c |
    60   struct scatterlist *sg = NULL;  in ft_queue_data_in() local
    94   sg = se_cmd->t_data_sg;  in ft_queue_data_in()
    95   mem_len = sg->length;  in ft_queue_data_in()
    96   mem_off = sg->offset;  in ft_queue_data_in()
    97   page = sg_page(sg);  in ft_queue_data_in()
    112  sg = sg_next(sg);  in ft_queue_data_in()
    113  mem_len = min((size_t)sg->length, remaining);  in ft_queue_data_in()
    114  mem_off = sg->offset;  in ft_queue_data_in()
    115  page = sg_page(sg);  in ft_queue_data_in()
    216  struct scatterlist *sg = NULL;  in ft_recv_write_data() local
    [all …]
|
/drivers/gpu/drm/omapdrm/ |
D | omap_gem_dmabuf.c |
    33   struct sg_table *sg;  in omap_gem_map_dma_buf() local
    37   sg = kzalloc(sizeof(*sg), GFP_KERNEL);  in omap_gem_map_dma_buf()
    38   if (!sg)  in omap_gem_map_dma_buf()
    48   ret = sg_alloc_table(sg, 1, GFP_KERNEL);  in omap_gem_map_dma_buf()
    52   sg_init_table(sg->sgl, 1);  in omap_gem_map_dma_buf()
    53   sg_dma_len(sg->sgl) = obj->size;  in omap_gem_map_dma_buf()
    54   sg_set_page(sg->sgl, pfn_to_page(PFN_DOWN(dma_addr)), obj->size, 0);  in omap_gem_map_dma_buf()
    55   sg_dma_address(sg->sgl) = dma_addr;  in omap_gem_map_dma_buf()
    60   return sg;  in omap_gem_map_dma_buf()
    62   kfree(sg);  in omap_gem_map_dma_buf()
    [all …]
|
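The omap_gem_map_dma_buf() hits above show the single-entry case: one physically contiguous, already-mapped buffer wrapped in an sg_table for a dma-buf importer. A minimal sketch of that construction (contig_buf_to_sgt is an illustrative name; like the driver, it assumes the DMA address is also a valid physical address for pfn_to_page()):

#include <linux/pfn.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static struct sg_table *contig_buf_to_sgt(dma_addr_t dma_addr, size_t size)
{
    struct sg_table *sgt;

    sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
    if (!sgt)
        return NULL;

    if (sg_alloc_table(sgt, 1, GFP_KERNEL)) {
        kfree(sgt);
        return NULL;
    }

    /* describe the buffer both as a page range and as a DMA segment */
    sg_set_page(sgt->sgl, pfn_to_page(PFN_DOWN(dma_addr)), size, 0);
    sg_dma_address(sgt->sgl) = dma_addr;
    sg_dma_len(sgt->sgl) = size;

    return sgt;
}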
/drivers/crypto/caam/ |
D | sg_sw_qm2.h |
    56   sg_to_qm_sg(struct scatterlist *sg, int sg_count,  in sg_to_qm_sg() argument
    59   while (sg_count && sg) {  in sg_to_qm_sg()
    60   dma_to_qm_sg_one(qm_sg_ptr, sg_dma_address(sg),  in sg_to_qm_sg()
    61   sg_dma_len(sg), offset);  in sg_to_qm_sg()
    63   sg = sg_next(sg);  in sg_to_qm_sg()
    73   static inline void sg_to_qm_sg_last(struct scatterlist *sg, int sg_count,  in sg_to_qm_sg_last() argument
    77   qm_sg_ptr = sg_to_qm_sg(sg, sg_count, qm_sg_ptr, offset);  in sg_to_qm_sg_last()
|
D | sg_sw_qm.h |
    84   sg_to_qm_sg(struct scatterlist *sg, int sg_count,  in sg_to_qm_sg() argument
    87   while (sg_count && sg) {  in sg_to_qm_sg()
    88   dma_to_qm_sg_one(qm_sg_ptr, sg_dma_address(sg),  in sg_to_qm_sg()
    89   sg_dma_len(sg), offset);  in sg_to_qm_sg()
    91   sg = sg_next(sg);  in sg_to_qm_sg()
    101  static inline void sg_to_qm_sg_last(struct scatterlist *sg, int sg_count,  in sg_to_qm_sg_last() argument
    104  qm_sg_ptr = sg_to_qm_sg(sg, sg_count, qm_sg_ptr, offset);  in sg_to_qm_sg_last()
|
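Both CAAM headers above repeat the same conversion: walk a DMA-mapped scatterlist and emit one hardware S/G descriptor per entry. The sketch below mirrors that loop; struct hw_sg_entry and hw_sg_fill() are hypothetical stand-ins for the device-specific descriptor and its setter (dma_to_qm_sg_one() in the CAAM code), and only the scatterlist calls are the real kernel API:

#include <linux/scatterlist.h>
#include <linux/types.h>

struct hw_sg_entry {            /* hypothetical hardware descriptor layout */
    __le64 addr;
    __le32 len;
    __le32 flags;
};

static void hw_sg_fill(struct hw_sg_entry *e, dma_addr_t addr, u32 len)
{
    e->addr = cpu_to_le64(addr);
    e->len = cpu_to_le32(len);
    e->flags = 0;
}

/* Returns a pointer to the first unused descriptor, like sg_to_qm_sg(). */
static struct hw_sg_entry *sg_to_hw_sg(struct scatterlist *sg, int sg_count,
                                       struct hw_sg_entry *hw)
{
    while (sg_count-- > 0 && sg) {
        hw_sg_fill(hw, sg_dma_address(sg), sg_dma_len(sg));
        hw++;
        sg = sg_next(sg);
    }
    return hw;
}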
/drivers/usb/storage/ |
D | protocol.c |
    142  struct scatterlist *sg = *sgptr;  in usb_stor_access_xfer_buf() local
    146  if (sg)  in usb_stor_access_xfer_buf()
    147  nents = sg_nents(sg);  in usb_stor_access_xfer_buf()
    149  sg = scsi_sglist(srb);  in usb_stor_access_xfer_buf()
    151  sg_miter_start(&miter, sg, nents, dir == FROM_XFER_BUF ?  in usb_stor_access_xfer_buf()
    166  if (*offset + len < miter.piter.sg->length) {  in usb_stor_access_xfer_buf()
    168  *sgptr = miter.piter.sg;  in usb_stor_access_xfer_buf()
    171  *sgptr = sg_next(miter.piter.sg);  in usb_stor_access_xfer_buf()
    189  struct scatterlist *sg = NULL;  in usb_stor_set_xfer_buf() local
    192  buflen = usb_stor_access_xfer_buf(buffer, buflen, srb, &sg, &offset,  in usb_stor_set_xfer_buf()
|
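The usb-storage hits above use the sg_mapping_iter API rather than walking entries by hand: the iterator maps one chunk at a time and exposes it as miter.addr/miter.length. A minimal sketch of copying a linear buffer into a scatterlist that way (copy_buf_to_sg is an illustrative name):

#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/types.h>

static size_t copy_buf_to_sg(const u8 *buf, size_t buflen,
                             struct scatterlist *sgl, unsigned int nents)
{
    struct sg_mapping_iter miter;
    size_t copied = 0;

    sg_miter_start(&miter, sgl, nents, SG_MITER_TO_SG);

    /* each sg_miter_next() maps the next piece of the list */
    while (copied < buflen && sg_miter_next(&miter)) {
        size_t len = min(miter.length, buflen - copied);

        memcpy(miter.addr, buf + copied, len);
        copied += len;
    }

    sg_miter_stop(&miter);
    return copied;
}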
/drivers/gpu/drm/udl/ |
D | udl_dmabuf.c |
    101  obj->sg = drm_prime_pages_to_sg(obj->pages, page_count);  in udl_map_dma_buf()
    102  if (IS_ERR(obj->sg)) {  in udl_map_dma_buf()
    104  return ERR_CAST(obj->sg);  in udl_map_dma_buf()
    109  ret = sg_alloc_table(sgt, obj->sg->orig_nents, GFP_KERNEL);  in udl_map_dma_buf()
    117  rd = obj->sg->sgl;  in udl_map_dma_buf()
    217  struct sg_table *sg,  in udl_prime_create() argument
    230  obj->sg = sg;  in udl_prime_create()
    237  drm_prime_sg_to_page_addr_arrays(sg, obj->pages, NULL, npages);  in udl_prime_create()
    247  struct sg_table *sg;  in udl_gem_prime_import() local
    261  sg = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL);  in udl_gem_prime_import()
    [all …]
|
/drivers/mmc/core/ |
D | mmc_test.c |
    88   struct scatterlist *sg;  member
    218  struct mmc_request *mrq, struct scatterlist *sg, unsigned sg_len,  in mmc_test_prepare_mrq() argument
    249  mrq->data->sg = sg;  in mmc_test_prepare_mrq()
    305  struct scatterlist sg;  in mmc_test_buffer_transfer() local
    311  sg_init_one(&sg, buffer, blksz);  in mmc_test_buffer_transfer()
    313  mmc_test_prepare_mrq(test, &mrq, &sg, 1, addr, 1, blksz, write);  in mmc_test_buffer_transfer()
    421  struct scatterlist *sg = NULL;  in mmc_test_map_sg() local
    440  if (sg)  in mmc_test_map_sg()
    441  sg = sg_next(sg);  in mmc_test_map_sg()
    443  sg = sglist;  in mmc_test_map_sg()
    [all …]
|
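The mmc_test_buffer_transfer() hits above show the simplest case of all: a single linear buffer described by one on-stack scatterlist entry via sg_init_one(). A tiny sketch, with an illustrative function name:

#include <linux/scatterlist.h>

static void describe_one_buffer(void *buffer, unsigned int blksz)
{
    struct scatterlist sg;

    /* one entry covering the whole buffer; sg_len for the request is 1 */
    sg_init_one(&sg, buffer, blksz);

    /* &sg can now be attached to a request, e.g. an mmc_data descriptor */
}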
/drivers/xen/ |
D | swiotlb-xen.c |
    540  struct scatterlist *sg;  in xen_swiotlb_unmap_sg_attrs() local
    545  for_each_sg(sgl, sg, nelems, i)  in xen_swiotlb_unmap_sg_attrs()
    546  xen_unmap_single(hwdev, sg->dma_address, sg_dma_len(sg), dir, attrs);  in xen_swiotlb_unmap_sg_attrs()
    571  struct scatterlist *sg;  in xen_swiotlb_map_sg_attrs() local
    576  for_each_sg(sgl, sg, nelems, i) {  in xen_swiotlb_map_sg_attrs()
    577  phys_addr_t paddr = sg_phys(sg);  in xen_swiotlb_map_sg_attrs()
    582  !dma_capable(hwdev, dev_addr, sg->length) ||  in xen_swiotlb_map_sg_attrs()
    583  range_straddles_page_boundary(paddr, sg->length)) {  in xen_swiotlb_map_sg_attrs()
    586  sg_phys(sg),  in xen_swiotlb_map_sg_attrs()
    587  sg->length,  in xen_swiotlb_map_sg_attrs()
    [all …]
|
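The swiotlb-xen hits above are the implementation side of the DMA API's map_sg/unmap_sg hooks. For contrast, a minimal consumer-side sketch of the same interface: map a scatterlist, walk the mapped segments, and unmap afterwards. program_hw_segment() is a hypothetical device-specific hook; the DMA and scatterlist calls are the real API:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

static int map_and_program(struct device *dev, struct scatterlist *sgl, int nents,
                           void (*program_hw_segment)(dma_addr_t addr, unsigned int len))
{
    struct scatterlist *sg;
    int i, count;

    count = dma_map_sg(dev, sgl, nents, DMA_TO_DEVICE);
    if (!count)
        return -ENOMEM;

    /* walk only the `count` segments the mapping produced */
    for_each_sg(sgl, sg, count, i)
        program_hw_segment(sg_dma_address(sg), sg_dma_len(sg));

    /* ... run the transfer ... */

    dma_unmap_sg(dev, sgl, nents, DMA_TO_DEVICE);
    return 0;
}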
/drivers/scsi/aacraid/ |
D | commctrl.c |
    557  if (user_srbcmd->sg.count > ARRAY_SIZE(sg_list)) {  in aac_send_raw_srb()
    559  user_srbcmd->sg.count));  in aac_send_raw_srb()
    563  if ((data_dir == DMA_NONE) && user_srbcmd->sg.count) {  in aac_send_raw_srb()
    569  ((user_srbcmd->sg.count & 0xff) * sizeof(struct sgentry));  in aac_send_raw_srb()
    570  actual_fibsize64 = actual_fibsize + (user_srbcmd->sg.count & 0xff) *  in aac_send_raw_srb()
    578  actual_fibsize, actual_fibsize64, user_srbcmd->sg.count,  in aac_send_raw_srb()
    623  cpu_to_le32(user_srbcmd->sg.count);  in aac_send_raw_srb()
    625  user_srbcmd->sg.count * sizeof(struct aac_hba_sgl);  in aac_send_raw_srb()
    652  struct user_sgmap *usg32 = &user_srbcmd->sg;  in aac_send_raw_srb()
    654  (struct user_sgmap64 *)&user_srbcmd->sg;  in aac_send_raw_srb()
    [all …]
|
/drivers/target/ |
D | target_core_iblock.c |
    429  struct scatterlist *sg = &cmd->t_data_sg[0];  in iblock_execute_zero_out() local
    433  buf = kmap(sg_page(sg)) + sg->offset;  in iblock_execute_zero_out()
    441  kunmap(sg_page(sg));  in iblock_execute_zero_out()
    463  struct scatterlist *sg;  in iblock_execute_write_same() local
    476  sg = &cmd->t_data_sg[0];  in iblock_execute_write_same()
    479  sg->length != cmd->se_dev->dev_attrib.block_size) {  in iblock_execute_write_same()
    481  " block_size: %u\n", cmd->t_data_nents, sg->length,  in iblock_execute_write_same()
    506  while (bio_add_page(bio, sg_page(sg), sg->length, sg->offset)  in iblock_execute_write_same()
    507  != sg->length) {  in iblock_execute_write_same()
    519  block_lba += sg->length >> IBLOCK_LBA_SHIFT;  in iblock_execute_write_same()
    [all …]
|
/drivers/crypto/bcm/ |
D | util.c |
    38   int spu_sg_at_offset(struct scatterlist *sg, unsigned int skip,  in spu_sg_at_offset() argument
    46   next_index = sg->length;  in spu_sg_at_offset()
    48   sg = sg_next(sg);  in spu_sg_at_offset()
    50   if (!sg)  in spu_sg_at_offset()
    52   next_index += sg->length;  in spu_sg_at_offset()
    56   *sge = sg;  in spu_sg_at_offset()
    106  struct scatterlist *sg;  in spu_sg_count() local
    113  if (spu_sg_at_offset(sg_list, skip, &sg, &offset) < 0)  in spu_sg_count()
    116  while (sg && (nbytes > 0)) {  in spu_sg_count()
    118  nbytes -= (sg->length - offset);  in spu_sg_count()
    [all …]
|
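The bcm/util.c hits above combine two steps: locate the scatterlist entry containing a byte offset, then count how many entries are needed to cover a byte range from there. A compact sketch of both steps in one helper, using only the generic API (the name sg_count_from_offset is illustrative):

#include <linux/kernel.h>
#include <linux/scatterlist.h>

static int sg_count_from_offset(struct scatterlist *sg, unsigned int skip,
                                unsigned int nbytes)
{
    int count = 0;

    /* step 1: find the entry containing byte offset `skip` */
    while (sg && skip >= sg->length) {
        skip -= sg->length;
        sg = sg_next(sg);
    }

    /* step 2: count entries until `nbytes` are covered */
    while (sg && nbytes > 0) {
        unsigned int avail = sg->length - skip;

        nbytes -= min(avail, nbytes);
        skip = 0;
        count++;
        sg = sg_next(sg);
    }

    return count;
}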