Searched refs:sg (Results 1 – 25 of 38) sorted by relevance

/include/linux/
scatterlist.h
30 #define sg_dma_address(sg) ((sg)->dma_address) argument
33 #define sg_dma_len(sg) ((sg)->dma_length) argument
35 #define sg_dma_len(sg) ((sg)->length) argument
67 #define sg_is_chain(sg) ((sg)->page_link & 0x01) argument
68 #define sg_is_last(sg) ((sg)->page_link & 0x02) argument
69 #define sg_chain_ptr(sg) \ argument
70 ((struct scatterlist *) ((sg)->page_link & ~0x03))
82 static inline void sg_assign_page(struct scatterlist *sg, struct page *page) in sg_assign_page() argument
84 unsigned long page_link = sg->page_link & 0x3; in sg_assign_page()
92 BUG_ON(sg->sg_magic != SG_MAGIC); in sg_assign_page()
[all …]
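
The lines above are the core accessor macros and setup helpers from scatterlist.h. As a minimal sketch (not taken from the results; 'dev', 'buf' and 'len' are hypothetical caller-supplied values), a driver typically builds a one-entry list, maps it, and reads the bus address back through sg_dma_address()/sg_dma_len():

#include <linux/dma-mapping.h>
#include <linux/printk.h>
#include <linux/scatterlist.h>

/* Sketch only: map one kernel buffer for a device-bound transfer. */
static int example_map_one(struct device *dev, void *buf, unsigned int len)
{
	struct scatterlist sg;
	dma_addr_t addr;

	sg_init_one(&sg, buf, len);	/* sg_init_table() + sg_set_buf() */

	if (!dma_map_sg(dev, &sg, 1, DMA_TO_DEVICE))
		return -ENOMEM;

	addr = sg_dma_address(&sg);	/* what the device is programmed with */
	pr_debug("mapped %u bytes at %pad\n", sg_dma_len(&sg), &addr);

	dma_unmap_sg(dev, &sg, 1, DMA_TO_DEVICE);
	return 0;
}
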
pci-dma-compat.h
65 pci_map_sg(struct pci_dev *hwdev, struct scatterlist *sg, in pci_map_sg() argument
68 …return dma_map_sg(hwdev == NULL ? NULL : &hwdev->dev, sg, nents, (enum dma_data_direction)directio… in pci_map_sg()
72 pci_unmap_sg(struct pci_dev *hwdev, struct scatterlist *sg, in pci_unmap_sg() argument
75 dma_unmap_sg(hwdev == NULL ? NULL : &hwdev->dev, sg, nents, (enum dma_data_direction)direction); in pci_unmap_sg()
93 pci_dma_sync_sg_for_cpu(struct pci_dev *hwdev, struct scatterlist *sg, in pci_dma_sync_sg_for_cpu() argument
96 …dma_sync_sg_for_cpu(hwdev == NULL ? NULL : &hwdev->dev, sg, nelems, (enum dma_data_direction)direc… in pci_dma_sync_sg_for_cpu()
100 pci_dma_sync_sg_for_device(struct pci_dev *hwdev, struct scatterlist *sg, in pci_dma_sync_sg_for_device() argument
103 …dma_sync_sg_for_device(hwdev == NULL ? NULL : &hwdev->dev, sg, nelems, (enum dma_data_direction)di… in pci_dma_sync_sg_for_device()
dma-debug.h
47 extern void debug_dma_map_sg(struct device *dev, struct scatterlist *sg,
86 struct scatterlist *sg,
90 struct scatterlist *sg,
130 static inline void debug_dma_map_sg(struct device *dev, struct scatterlist *sg, in debug_dma_map_sg() argument
192 struct scatterlist *sg, in debug_dma_sync_sg_for_cpu() argument
198 struct scatterlist *sg, in debug_dma_sync_sg_for_device() argument
swiotlb.h
82 swiotlb_map_sg(struct device *hwdev, struct scatterlist *sg, int nents,
86 swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sg, int nents,
104 swiotlb_sync_sg_for_cpu(struct device *hwdev, struct scatterlist *sg,
112 swiotlb_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg,
dma-iommu.h
53 int iommu_dma_map_sg(struct device *dev, struct scatterlist *sg,
62 void iommu_dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nents,
dma-mapping.h
96 int (*map_sg)(struct device *dev, struct scatterlist *sg,
100 struct scatterlist *sg, int nents,
116 struct scatterlist *sg, int nents,
119 struct scatterlist *sg, int nents,
216 static inline int dma_map_sg_attrs(struct device *dev, struct scatterlist *sg, in dma_map_sg_attrs() argument
224 for_each_sg(sg, s, nents, i) in dma_map_sg_attrs()
227 ents = ops->map_sg(dev, sg, nents, dir, attrs); in dma_map_sg_attrs()
229 debug_dma_map_sg(dev, sg, nents, ents, dir); in dma_map_sg_attrs()
234 static inline void dma_unmap_sg_attrs(struct device *dev, struct scatterlist *sg, in dma_unmap_sg_attrs() argument
241 debug_dma_unmap_sg(dev, sg, nents, dir); in dma_unmap_sg_attrs()
[all …]
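
dma_map_sg_attrs()/dma_unmap_sg_attrs() above are what the plain dma_map_sg()/dma_unmap_sg() wrappers expand to. A sketch of the usual streaming-DMA cycle around them, assuming the scatterlist has already been populated by the caller (all names illustrative):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Sketch only: map a caller-built sg table, walk it, then unmap it. */
static int example_stream_to_device(struct device *dev,
				    struct scatterlist *sgl, int nents)
{
	struct scatterlist *s;
	int i, mapped;

	mapped = dma_map_sg(dev, sgl, nents, DMA_TO_DEVICE);
	if (!mapped)
		return -EIO;

	/* An IOMMU may coalesce entries, so walk 'mapped', not 'nents'. */
	for_each_sg(sgl, s, mapped, i) {
		dma_addr_t addr = sg_dma_address(s);
		unsigned int len = sg_dma_len(s);

		/* program addr/len into the device's descriptor ring here */
		(void)addr;
		(void)len;
	}

	/* dma_unmap_sg() must be passed the original nents, not 'mapped' */
	dma_unmap_sg(dev, sgl, nents, DMA_TO_DEVICE);
	return 0;
}
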
tifm.h
148 int tifm_map_sg(struct tifm_dev *sock, struct scatterlist *sg, int nents,
150 void tifm_unmap_sg(struct tifm_dev *sock, struct scatterlist *sg, int nents,
virtio.h
38 struct scatterlist sg[], unsigned int num,
43 struct scatterlist sg[], unsigned int num,
iommu.h
177 struct scatterlist *sg, unsigned int nents, int prot);
230 struct scatterlist *sg,unsigned int nents,
323 unsigned long iova, struct scatterlist *sg, in iommu_map_sg() argument
326 return domain->ops->map_sg(domain, iova, sg, nents, prot); in iommu_map_sg()
414 unsigned long iova, struct scatterlist *sg, in iommu_map_sg() argument
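
iommu_map_sg() above just forwards to the domain's map_sg op and returns the number of bytes it managed to map. A hedged sketch of calling it, where the domain, IOVA, list and expected size are all assumed to come from a hypothetical caller:

#include <linux/iommu.h>
#include <linux/scatterlist.h>

/* Sketch only: map an sg list into an IOMMU domain at a chosen IOVA. */
static int example_iommu_map(struct iommu_domain *domain, unsigned long iova,
			     struct scatterlist *sgl, unsigned int nents,
			     size_t expected_bytes)
{
	size_t mapped;

	mapped = iommu_map_sg(domain, iova, sgl, nents,
			      IOMMU_READ | IOMMU_WRITE);
	if (mapped < expected_bytes) {
		if (mapped)
			iommu_unmap(domain, iova, mapped);	/* roll back */
		return -ENOMEM;
	}
	return 0;
}
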
memstick.h
247 struct scatterlist sg; member
323 const struct scatterlist *sg);
via-core.h
163 int viafb_dma_copy_out_sg(unsigned int offset, struct scatterlist *sg, int nsg);
/include/crypto/
scatterwalk.h
25 struct scatterlist *sg, in scatterwalk_crypto_chain() argument
29 head->length += sg->length; in scatterwalk_crypto_chain()
30 sg = sg_next(sg); in scatterwalk_crypto_chain()
33 if (sg) in scatterwalk_crypto_chain()
34 sg_chain(head, num, sg); in scatterwalk_crypto_chain()
42 return !(((sg_page(walk_in->sg) - sg_page(walk_out->sg)) << PAGE_SHIFT) + in scatterwalk_samebuf()
48 unsigned int len = walk->sg->offset + walk->sg->length - walk->offset; in scatterwalk_pagelen()
74 return sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT); in scatterwalk_page()
83 struct scatterlist *sg) in scatterwalk_start() argument
85 walk->sg = sg; in scatterwalk_start()
[all …]
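
scatterwalk_crypto_chain() above splices lists with sg_chain(). A sketch of plain sg chaining, assuming an architecture with sg-chaining support and purely illustrative sizes:

#include <linux/scatterlist.h>

/*
 * Sketch only: link two scatterlist arrays so sg_next()/for_each_sg()
 * walk them as one list. The last slot of the first array is consumed
 * as the chain entry, which sets the 0x01 bit that sg_is_chain()
 * in scatterlist.h tests for.
 */
static void example_chain(struct scatterlist first[4],
			  struct scatterlist second[4])
{
	sg_init_table(first, 4);	/* entries 0-2 carry data, 3 chains */
	sg_init_table(second, 4);

	sg_chain(first, 4, second);
}
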
algapi.h
80 struct scatterlist *sg; member
289 walk->in.sg = src; in blkcipher_walk_init()
290 walk->out.sg = dst; in blkcipher_walk_init()
299 walk->in.sg = src; in ablkcipher_walk_init()
300 walk->out.sg = dst; in ablkcipher_walk_init()
if_alg.h
66 struct scatterlist sg[ALG_MAX_PAGES + 1]; member
/include/linux/platform_data/
dma-ste-dma40.h
184 struct scatterlist sg; in stedma40_slave_mem() local
185 sg_init_table(&sg, 1); in stedma40_slave_mem()
186 sg.dma_address = addr; in stedma40_slave_mem()
187 sg.length = size; in stedma40_slave_mem()
189 return dmaengine_prep_slave_sg(chan, &sg, 1, direction, flags); in stedma40_slave_mem()
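
stedma40_slave_mem() above wraps a single-entry list around dmaengine_prep_slave_sg(). A sketch of the general multi-entry form, assuming the channel has been requested and the list already DMA-mapped elsewhere:

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

/* Sketch only: prepare and kick off a slave transfer from an sg list. */
static int example_submit_slave_sg(struct dma_chan *chan,
				   struct scatterlist *sgl,
				   unsigned int nents)
{
	struct dma_async_tx_descriptor *desc;

	desc = dmaengine_prep_slave_sg(chan, sgl, nents, DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT);
	if (!desc)
		return -EINVAL;

	dmaengine_submit(desc);
	dma_async_issue_pending(chan);	/* actually start the channel */
	return 0;
}
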
/include/rdma/
rw.h
63 struct scatterlist *sg, u32 sg_cnt, u32 sg_offset,
66 struct scatterlist *sg, u32 sg_cnt,
70 u8 port_num, struct scatterlist *sg, u32 sg_cnt,
75 u8 port_num, struct scatterlist *sg, u32 sg_cnt,
ib_verbs.h
1783 struct scatterlist *sg, int nents,
1786 struct scatterlist *sg, int nents,
1789 struct scatterlist *sg, int nents,
1793 struct scatterlist *sg, int nents,
2008 struct scatterlist *sg,
3030 struct scatterlist *sg, int nents, in ib_dma_map_sg() argument
3034 return dev->dma_ops->map_sg(dev, sg, nents, direction); in ib_dma_map_sg()
3035 return dma_map_sg(dev->dma_device, sg, nents, direction); in ib_dma_map_sg()
3046 struct scatterlist *sg, int nents, in ib_dma_unmap_sg() argument
3050 dev->dma_ops->unmap_sg(dev, sg, nents, direction); in ib_dma_unmap_sg()
[all …]
/include/soc/fsl/
qman.h
197 static inline dma_addr_t qm_sg_addr(const struct qm_sg_entry *sg) in qm_sg_addr() argument
199 return be64_to_cpu(sg->data) & 0xffffffffffLLU; in qm_sg_addr()
202 static inline u64 qm_sg_entry_get64(const struct qm_sg_entry *sg) in qm_sg_entry_get64() argument
204 return be64_to_cpu(sg->data) & 0xffffffffffLLU; in qm_sg_entry_get64()
207 static inline void qm_sg_entry_set64(struct qm_sg_entry *sg, u64 addr) in qm_sg_entry_set64() argument
209 sg->addr_hi = upper_32_bits(addr); in qm_sg_entry_set64()
210 sg->addr_lo = cpu_to_be32(lower_32_bits(addr)); in qm_sg_entry_set64()
213 static inline bool qm_sg_entry_is_final(const struct qm_sg_entry *sg) in qm_sg_entry_is_final() argument
215 return be32_to_cpu(sg->cfg) & QM_SG_FIN; in qm_sg_entry_is_final()
218 static inline bool qm_sg_entry_is_ext(const struct qm_sg_entry *sg) in qm_sg_entry_is_ext() argument
[all …]
/include/scsi/
scsi_cmnd.h
162 extern void *scsi_kmap_atomic_sg(struct scatterlist *sg, int sg_count,
196 #define scsi_for_each_sg(cmd, sg, nseg, __i) \ argument
197 for_each_sg(scsi_sglist(cmd), sg, nseg, __i)
323 #define scsi_for_each_prot_sg(cmd, sg, nseg, __i) \ argument
324 for_each_sg(scsi_prot_sglist(cmd), sg, nseg, __i)
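
scsi_for_each_sg() above is just for_each_sg() over scsi_sglist(cmd). A small sketch that totals the data segment lengths of a command (the helper name is made up):

#include <linux/scatterlist.h>
#include <scsi/scsi_cmnd.h>

/* Sketch only: sum the lengths of a SCSI command's data scatterlist. */
static unsigned int example_sg_total(struct scsi_cmnd *cmd)
{
	struct scatterlist *sg;
	unsigned int total = 0;
	int i;

	scsi_for_each_sg(cmd, sg, scsi_sg_count(cmd), i)
		total += sg->length;

	return total;
}
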
libiscsi_tcp.h
47 struct scatterlist *sg; member
/include/xen/
swiotlb-xen.h
42 xen_swiotlb_sync_sg_for_cpu(struct device *hwdev, struct scatterlist *sg,
50 xen_swiotlb_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg,
/include/linux/dma/
ipu-dma.h
149 struct scatterlist *sg; /* scatterlist for this */ member
163 struct scatterlist *sg[2]; /* scatterlist elements in buffer-0 and -1 */ member
/include/drm/ttm/
ttm_bo_api.h
225 struct sg_table *sg; member
484 struct sg_table *sg,
/include/linux/mmc/
dw_mmc.h
158 struct scatterlist *sg; member
/include/linux/mtd/
ubi.h
141 struct scatterlist sg[UBI_MAX_SG_COUNT]; member
