/drivers/dma/ptdma/
ptdma-pci.c:
    38  struct pt_device *pt;  [in pt_alloc_struct(), local]
    40  pt = devm_kzalloc(dev, sizeof(*pt), GFP_KERNEL);  [in pt_alloc_struct()]
    42  if (!pt)  [in pt_alloc_struct()]
    44  pt->dev = dev;  [in pt_alloc_struct()]
    46  INIT_LIST_HEAD(&pt->cmd);  [in pt_alloc_struct()]
    48  return pt;  [in pt_alloc_struct()]
    51  static int pt_get_msix_irqs(struct pt_device *pt)  [in pt_get_msix_irqs(), argument]
    53  struct pt_msix *pt_msix = pt->pt_msix;  [in pt_get_msix_irqs()]
    54  struct device *dev = pt->dev;  [in pt_get_msix_irqs()]
    66  pt->pt_irq = pt_msix->msix_entry.vector;  [in pt_get_msix_irqs()]
    [all …]
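The ptdma-pci.c hits show the usual devm-managed allocation of the driver's per-device context: devm_kzalloc() ties the structure's lifetime to the struct device, so there is no matching kfree() on unbind. A minimal sketch of that pattern follows; the context struct here is an illustrative stand-in, not the driver's real struct pt_device.

#include <linux/device.h>
#include <linux/list.h>
#include <linux/slab.h>

struct my_dev_ctx {                     /* stand-in for the per-device context */
	struct device *dev;
	struct list_head cmd;           /* pending command list */
};

static struct my_dev_ctx *my_alloc_ctx(struct device *dev)
{
	struct my_dev_ctx *ctx;

	/* Lifetime is tied to @dev: freed automatically when @dev is unbound. */
	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
	if (!ctx)
		return NULL;

	ctx->dev = dev;
	INIT_LIST_HEAD(&ctx->cmd);
	return ctx;
}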
ptdma-dev.c:
    118  static inline void pt_core_disable_queue_interrupts(struct pt_device *pt)  [in pt_core_disable_queue_interrupts(), argument]
    120  iowrite32(0, pt->cmd_q.reg_control + 0x000C);  [in pt_core_disable_queue_interrupts()]
    123  static inline void pt_core_enable_queue_interrupts(struct pt_device *pt)  [in pt_core_enable_queue_interrupts(), argument]
    125  iowrite32(SUPPORTED_INTERRUPTS, pt->cmd_q.reg_control + 0x000C);  [in pt_core_enable_queue_interrupts()]
    132  struct pt_cmd_queue *cmd_q = &cmd->pt->cmd_q;  [in pt_do_cmd_complete()]
    141  pt_log_error(cmd_q->pt, cmd_q->cmd_error);  [in pt_do_cmd_complete()]
    150  struct pt_device *pt = data;  [in pt_core_irq_handler(), local]
    151  struct pt_cmd_queue *cmd_q = &pt->cmd_q;  [in pt_core_irq_handler()]
    154  pt_core_disable_queue_interrupts(pt);  [in pt_core_irq_handler()]
    155  pt->total_interrupts++;  [in pt_core_irq_handler()]
    [all …]
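The ptdma-dev.c references outline a common MMIO interrupt flow: the hard IRQ handler masks the queue interrupt with a register write, accounts for the event, and re-enables the interrupt once handling is done. A simplified, generic sketch; the register offset, bit mask and struct layout are assumptions for illustration, not the ptdma register map.

#include <linux/bits.h>
#include <linux/interrupt.h>
#include <linux/io.h>

#define QUEUE_INT_CTRL	0x000C		/* illustrative control register offset */
#define QUEUE_INT_ALL	GENMASK(3, 0)	/* illustrative interrupt-enable bits */

struct my_queue {
	void __iomem *reg_control;
	unsigned long irq_count;
};

static irqreturn_t my_irq_handler(int irq, void *data)
{
	struct my_queue *q = data;

	/* Mask further queue interrupts while this one is processed. */
	iowrite32(0, q->reg_control + QUEUE_INT_CTRL);
	q->irq_count++;

	/* ... acknowledge and handle the completed command here ... */

	/* Unmask before returning. */
	iowrite32(QUEUE_INT_ALL, q->reg_control + QUEUE_INT_CTRL);
	return IRQ_HANDLED;
}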
ptdma-dmaengine.c:
    43  struct pt_device *pt = desc->pt;  [in pt_do_cleanup(), local]
    45  kmem_cache_free(pt->dma_desc_cache, desc);  [in pt_do_cleanup()]
    51  struct pt_device *pt;  [in pt_dma_start_desc(), local]
    58  pt = pt_cmd->pt;  [in pt_dma_start_desc()]
    59  cmd_q = &pt->cmd_q;  [in pt_dma_start_desc()]
    62  pt->tdata.cmd = pt_cmd;  [in pt_dma_start_desc()]
    162  desc = kmem_cache_zalloc(chan->pt->dma_desc_cache, GFP_NOWAIT);  [in pt_alloc_dma_desc()]
    168  desc->pt = chan->pt;  [in pt_alloc_dma_desc()]
    191  pt_cmd->pt = chan->pt;  [in pt_create_desc()]
    256  pt_stop_queue(&chan->pt->cmd_q);  [in pt_pause()]
    [all …]
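The dmaengine glue allocates its transfer descriptors from a dedicated slab cache with GFP_NOWAIT (descriptors may be requested from atomic context) and returns them with kmem_cache_free() on cleanup. A small, generic sketch of that slab-cache lifecycle with placeholder names:

#include <linux/errno.h>
#include <linux/slab.h>

struct my_desc {
	void *owner;			/* back-pointer to the owning device */
};

static struct kmem_cache *my_desc_cache;

static int my_desc_cache_init(void)
{
	my_desc_cache = kmem_cache_create("my_desc", sizeof(struct my_desc),
					  0, 0, NULL);
	return my_desc_cache ? 0 : -ENOMEM;
}

/* GFP_NOWAIT: never sleep, submission may run in atomic context. */
static struct my_desc *my_desc_alloc(void *owner)
{
	struct my_desc *desc = kmem_cache_zalloc(my_desc_cache, GFP_NOWAIT);

	if (desc)
		desc->owner = owner;
	return desc;
}

static void my_desc_free(struct my_desc *desc)
{
	kmem_cache_free(my_desc_cache, desc);
}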
ptdma-debugfs.c:
    25  struct pt_device *pt = s->private;  [in pt_debugfs_info_show(), local]
    28  seq_printf(s, "Device name: %s\n", dev_name(pt->dev));  [in pt_debugfs_info_show()]
    30  seq_printf(s, " # Cmds: %d\n", pt->cmd_count);  [in pt_debugfs_info_show()]
    32  regval = ioread32(pt->io_regs + CMD_PT_VERSION);  [in pt_debugfs_info_show()]
    48  struct pt_device *pt = s->private;  [in pt_debugfs_stats_show(), local]
    50  seq_printf(s, "Total Interrupts Handled: %ld\n", pt->total_interrupts);  [in pt_debugfs_stats_show()]
    85  void ptdma_debugfs_setup(struct pt_device *pt)  [in ptdma_debugfs_setup(), argument]
    93  debugfs_create_file("info", 0400, pt->dma_dev.dbg_dev_root, pt,  [in ptdma_debugfs_setup()]
    96  debugfs_create_file("stats", 0400, pt->dma_dev.dbg_dev_root, pt,  [in ptdma_debugfs_setup()]
    99  cmd_q = &pt->cmd_q;  [in ptdma_debugfs_setup()]
    [all …]
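The debugfs hits follow the standard seq_file pattern: the private pointer handed to debugfs_create_file() comes back as s->private in the show() callback, and DEFINE_SHOW_ATTRIBUTE() supplies the matching file_operations. A self-contained sketch with illustrative field names:

#include <linux/debugfs.h>
#include <linux/seq_file.h>

struct my_stats {
	const char *name;
	unsigned long total_interrupts;
};

static int my_stats_show(struct seq_file *s, void *unused)
{
	struct my_stats *st = s->private;	/* set via debugfs_create_file() */

	seq_printf(s, "Device name: %s\n", st->name);
	seq_printf(s, "Total Interrupts Handled: %lu\n", st->total_interrupts);
	return 0;
}
DEFINE_SHOW_ATTRIBUTE(my_stats);		/* generates my_stats_fops */

static void my_debugfs_setup(struct dentry *root, struct my_stats *st)
{
	/* 0400: readable by root only, matching the listing above. */
	debugfs_create_file("stats", 0400, root, st, &my_stats_fops);
}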
ptdma.h:
    165  struct pt_device *pt;  [member]
    177  struct pt_device *pt;  [member]
    186  struct pt_device *pt;  [member]
    190  struct pt_device *pt;  [member]
    311  int pt_dmaengine_register(struct pt_device *pt);
    312  void pt_dmaengine_unregister(struct pt_device *pt);
    314  void ptdma_debugfs_setup(struct pt_device *pt);
    315  int pt_core_init(struct pt_device *pt);
    316  void pt_core_destroy(struct pt_device *pt);
/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
base.c:
    34  struct nvkm_mmu_pt *pt;  [member]
    42  nvkm_mmu_ptp_put(struct nvkm_mmu *mmu, bool force, struct nvkm_mmu_pt *pt)  [in nvkm_mmu_ptp_put(), argument]
    44  const int slot = pt->base >> pt->ptp->shift;  [in nvkm_mmu_ptp_put()]
    45  struct nvkm_mmu_ptp *ptp = pt->ptp;  [in nvkm_mmu_ptp_put()]
    56  nvkm_mmu_ptc_put(mmu, force, &ptp->pt);  [in nvkm_mmu_ptp_put()]
    61  kfree(pt);  [in nvkm_mmu_ptp_put()]
    67  struct nvkm_mmu_pt *pt;  [in nvkm_mmu_ptp_get(), local]
    71  if (!(pt = kzalloc(sizeof(*pt), GFP_KERNEL)))  [in nvkm_mmu_ptp_get()]
    78  kfree(pt);  [in nvkm_mmu_ptp_get()]
    82  ptp->pt = nvkm_mmu_ptc_get(mmu, 0x1000, 0x1000, false);  [in nvkm_mmu_ptp_get()]
    [all …]
vmmnv44.c:
    27  nv44_vmm_pgt_fill(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  [in nv44_vmm_pgt_fill(), argument]
    33  tmp[0] = nvkm_ro32(pt->memory, pteo + 0x0);  [in nv44_vmm_pgt_fill()]
    34  tmp[1] = nvkm_ro32(pt->memory, pteo + 0x4);  [in nv44_vmm_pgt_fill()]
    35  tmp[2] = nvkm_ro32(pt->memory, pteo + 0x8);  [in nv44_vmm_pgt_fill()]
    36  tmp[3] = nvkm_ro32(pt->memory, pteo + 0xc);  [in nv44_vmm_pgt_fill()]
    66  VMM_WO032(pt, vmm, pteo + 0x0, tmp[0]);  [in nv44_vmm_pgt_fill()]
    67  VMM_WO032(pt, vmm, pteo + 0x4, tmp[1]);  [in nv44_vmm_pgt_fill()]
    68  VMM_WO032(pt, vmm, pteo + 0x8, tmp[2]);  [in nv44_vmm_pgt_fill()]
    69  VMM_WO032(pt, vmm, pteo + 0xc, tmp[3] | 0x40000000);  [in nv44_vmm_pgt_fill()]
    73  nv44_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  [in nv44_vmm_pgt_pte(), argument]
    [all …]
vmmgp100.c:
    35  struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)  [in gp100_vmm_pfn_unmap(), argument]
    40  nvkm_kmap(pt->memory);  [in gp100_vmm_pfn_unmap()]
    42  u32 datalo = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 0);  [in gp100_vmm_pfn_unmap()]
    43  u32 datahi = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 4);  [in gp100_vmm_pfn_unmap()]
    51  nvkm_done(pt->memory);  [in gp100_vmm_pfn_unmap()]
    56  struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)  [in gp100_vmm_pfn_clear(), argument]
    59  nvkm_kmap(pt->memory);  [in gp100_vmm_pfn_clear()]
    61  u32 datalo = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 0);  [in gp100_vmm_pfn_clear()]
    62  u32 datahi = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 4);  [in gp100_vmm_pfn_clear()]
    65  VMM_WO064(pt, vmm, ptei * 8, data & ~BIT_ULL(0));  [in gp100_vmm_pfn_clear()]
    [all …]
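gp100_vmm_pfn_clear() reads each 64-bit PTE back as two 32-bit words and rewrites it with bit 0 (the valid bit) cleared. The sketch below illustrates that read-modify-write on a plain CPU-mapped array; it deliberately avoids nouveau's nvkm_kmap()/nvkm_ro32()/VMM_WO064() helpers, so the accessors and layout are assumptions for illustration, not the driver's API.

#include <linux/bits.h>
#include <linux/kernel.h>
#include <linux/types.h>

/* Clear the valid bit of @ptes consecutive 64-bit PTEs starting at @ptei. */
static void pte_clear_valid(u32 *pt_base, u32 ptei, u32 ptes)
{
	while (ptes--) {
		u32 datalo = pt_base[ptei * 2 + 0];
		u32 datahi = pt_base[ptei * 2 + 1];
		u64 data = ((u64)datahi << 32) | datalo;

		data &= ~BIT_ULL(0);		/* mark the entry invalid */
		pt_base[ptei * 2 + 0] = lower_32_bits(data);
		pt_base[ptei * 2 + 1] = upper_32_bits(data);
		ptei++;
	}
}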
vmmgf100.c:
    32  gf100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  [in gf100_vmm_pgt_pte(), argument]
    44  VMM_WO064(pt, vmm, ptei++ * 8, data);  [in gf100_vmm_pgt_pte()]
    51  VMM_WO064(pt, vmm, ptei++ * 8, data);  [in gf100_vmm_pgt_pte()]
    58  gf100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  [in gf100_vmm_pgt_sgl(), argument]
    61  VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte);  [in gf100_vmm_pgt_sgl()]
    65  gf100_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  [in gf100_vmm_pgt_dma(), argument]
    70  nvkm_kmap(pt->memory);  [in gf100_vmm_pgt_dma()]
    73  VMM_WO064(pt, vmm, ptei++ * 8, data);  [in gf100_vmm_pgt_dma()]
    76  nvkm_done(pt->memory);  [in gf100_vmm_pgt_dma()]
    80  VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte);  [in gf100_vmm_pgt_dma()]
    [all …]
vmmnv41.c:
    27  nv41_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  [in nv41_vmm_pgt_pte(), argument]
    32  VMM_WO032(pt, vmm, ptei++ * 4, data);  [in nv41_vmm_pgt_pte()]
    38  nv41_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  [in nv41_vmm_pgt_sgl(), argument]
    41  VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte);  [in nv41_vmm_pgt_sgl()]
    45  nv41_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  [in nv41_vmm_pgt_dma(), argument]
    49  nvkm_kmap(pt->memory);  [in nv41_vmm_pgt_dma()]
    52  VMM_WO032(pt, vmm, ptei++ * 4, data);  [in nv41_vmm_pgt_dma()]
    54  nvkm_done(pt->memory);  [in nv41_vmm_pgt_dma()]
    56  VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte);  [in nv41_vmm_pgt_dma()]
    62  struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)  [in nv41_vmm_pgt_unmap(), argument]
    [all …]
vmmnv04.c:
    28  nv04_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  [in nv04_vmm_pgt_pte(), argument]
    33  VMM_WO032(pt, vmm, 8 + ptei++ * 4, data);  [in nv04_vmm_pgt_pte()]
    39  nv04_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  [in nv04_vmm_pgt_sgl(), argument]
    42  VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte);  [in nv04_vmm_pgt_sgl()]
    46  nv04_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  [in nv04_vmm_pgt_dma(), argument]
    50  nvkm_kmap(pt->memory);  [in nv04_vmm_pgt_dma()]
    52  VMM_WO032(pt, vmm, 8 + (ptei++ * 4), *map->dma++ | 0x00000003);  [in nv04_vmm_pgt_dma()]
    53  nvkm_done(pt->memory);  [in nv04_vmm_pgt_dma()]
    55  VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte);  [in nv04_vmm_pgt_dma()]
    61  struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)  [in nv04_vmm_pgt_unmap(), argument]
    [all …]
vmmnv50.c:
    32  nv50_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  [in nv50_vmm_pgt_pte(), argument]
    53  VMM_WO064(pt, vmm, ptei++ * 8, data);  [in nv50_vmm_pgt_pte()]
    58  nv50_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  [in nv50_vmm_pgt_sgl(), argument]
    61  VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte);  [in nv50_vmm_pgt_sgl()]
    65  nv50_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  [in nv50_vmm_pgt_dma(), argument]
    70  nvkm_kmap(pt->memory);  [in nv50_vmm_pgt_dma()]
    73  VMM_WO064(pt, vmm, ptei++ * 8, data);  [in nv50_vmm_pgt_dma()]
    76  nvkm_done(pt->memory);  [in nv50_vmm_pgt_dma()]
    80  VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte);  [in nv50_vmm_pgt_dma()]
    84  nv50_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  [in nv50_vmm_pgt_mem(), argument]
    [all …]
/drivers/gpu/drm/i915/gt/
intel_ppgtt.c:
    17  struct i915_page_table *pt;  [in alloc_pt(), local]
    19  pt = kmalloc(sizeof(*pt), I915_GFP_ALLOW_FAIL);  [in alloc_pt()]
    20  if (unlikely(!pt))  [in alloc_pt()]
    23  pt->base = vm->alloc_pt_dma(vm, I915_GTT_PAGE_SIZE_4K);  [in alloc_pt()]
    24  if (IS_ERR(pt->base)) {  [in alloc_pt()]
    25  kfree(pt);  [in alloc_pt()]
    29  atomic_set(&pt->used, 0);  [in alloc_pt()]
    30  return pt;  [in alloc_pt()]
    59  pd->pt.base = vm->alloc_pt_dma(vm, I915_GTT_PAGE_SIZE_4K);  [in alloc_pd()]
    60  if (IS_ERR(pd->pt.base)) {  [in alloc_pd()]
    [all …]
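alloc_pt() is a two-stage allocation: the CPU-side struct first, then the backing storage for the hardware table, with the first freed if the second fails. A generic sketch of that unwind order; vm->alloc_pt_dma() is i915-internal, so a plain alloc_page() stands in for it here and the struct is illustrative.

#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/gfp.h>
#include <linux/slab.h>

struct my_page_table {
	struct page *base;	/* backing storage for the hardware table */
	atomic_t used;		/* number of live entries */
};

static struct my_page_table *my_alloc_pt(void)
{
	struct my_page_table *pt;

	pt = kmalloc(sizeof(*pt), GFP_KERNEL);
	if (unlikely(!pt))
		return ERR_PTR(-ENOMEM);

	pt->base = alloc_page(GFP_KERNEL);
	if (!pt->base) {
		kfree(pt);		/* unwind the first allocation */
		return ERR_PTR(-ENOMEM);
	}

	atomic_set(&pt->used, 0);
	return pt;
}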
gen6_ppgtt.c:
    17  const struct i915_page_table *pt)  [in gen6_write_pde(), argument]
    19  dma_addr_t addr = pt ? px_dma(pt) : px_dma(ppgtt->base.vm.scratch[1]);  [in gen6_write_pde()]
    81  struct i915_page_table * const pt =  [in gen6_ppgtt_clear_range(), local]
    88  GEM_BUG_ON(count > atomic_read(&pt->used));  [in gen6_ppgtt_clear_range()]
    89  if (!atomic_sub_return(count, &pt->used))  [in gen6_ppgtt_clear_range()]
    99  vaddr = px_vaddr(pt);  [in gen6_ppgtt_clear_range()]
    149  struct i915_page_table *pt;  [in gen6_flush_pd(), local]
    157  gen6_for_each_pde(pt, pd, start, end, pde)  [in gen6_flush_pd()]
    158  gen6_write_pde(ppgtt, pde, pt);  [in gen6_flush_pd()]
    174  struct i915_page_table *pt;  [in gen6_alloc_va_range(), local]
    [all …]
gen8_ppgtt.c:
    115  #define as_pd(x) container_of((x), typeof(struct i915_page_directory), pt)
    189  free_px(vm, &pd->pt, lvl);  [in __gen8_ppgtt_cleanup()]
    222  struct i915_page_table *pt = pd->entry[idx];  [in __gen8_ppgtt_clear(), local]
    224  if (atomic_fetch_inc(&pt->used) >> gen8_pd_shift(1) &&  [in __gen8_ppgtt_clear()]
    229  __gen8_ppgtt_cleanup(vm, as_pd(pt), I915_PDES, lvl);  [in __gen8_ppgtt_clear()]
    235  start = __gen8_ppgtt_clear(vm, as_pd(pt),  [in __gen8_ppgtt_clear()]
    245  atomic_read(&pt->used));  [in __gen8_ppgtt_clear()]
    246  GEM_BUG_ON(!count || count >= atomic_read(&pt->used));  [in __gen8_ppgtt_clear()]
    248  vaddr = px_vaddr(pt);  [in __gen8_ppgtt_clear()]
    253  atomic_sub(count, &pt->used);  [in __gen8_ppgtt_clear()]
    [all …]
/drivers/gpu/drm/i915/selftests/
scatterlist.c:
    41  static noinline int expect_pfn_sg(struct pfn_table *pt,  [in expect_pfn_sg(), argument]
    50  pfn = pt->start;  [in expect_pfn_sg()]
    51  for_each_sg(pt->st.sgl, sg, pt->st.nents, n) {  [in expect_pfn_sg()]
    53  unsigned int npages = npages_fn(n, pt->st.nents, rnd);  [in expect_pfn_sg()]
    72  if (pfn != pt->end) {  [in expect_pfn_sg()]
    74  __func__, who, pt->end, pfn);  [in expect_pfn_sg()]
    81  static noinline int expect_pfn_sg_page_iter(struct pfn_table *pt,  [in expect_pfn_sg_page_iter(), argument]
    88  pfn = pt->start;  [in expect_pfn_sg_page_iter()]
    89  for_each_sg_page(pt->st.sgl, &sgiter, pt->st.nents, 0) {  [in expect_pfn_sg_page_iter()]
    103  if (pfn != pt->end) {  [in expect_pfn_sg_page_iter()]
    [all …]
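expect_pfn_sg() walks the table with for_each_sg() and verifies that the page frame numbers advance exactly by each entry's length. A simplified version of that walk; my_pfn_table is a stand-in for the selftest's pfn_table, and entry lengths are assumed to be whole pages.

#include <linux/errno.h>
#include <linux/mm.h>
#include <linux/scatterlist.h>

struct my_pfn_table {
	struct sg_table st;
	unsigned long start, end;	/* expected first and one-past-last pfn */
};

static int my_expect_contiguous(struct my_pfn_table *pt)
{
	struct scatterlist *sg;
	unsigned long pfn = pt->start;
	unsigned int n;

	for_each_sg(pt->st.sgl, sg, pt->st.nents, n) {
		/* Each entry must start where the previous one ended. */
		if (page_to_pfn(sg_page(sg)) != pfn)
			return -EINVAL;
		pfn += sg->length >> PAGE_SHIFT;
	}

	return pfn == pt->end ? 0 : -EINVAL;
}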
/drivers/gpu/drm/gma500/
mmu.c:
    223  static void psb_mmu_free_pt(struct psb_mmu_pt *pt)  [in psb_mmu_free_pt(), argument]
    225  __free_page(pt->p);  [in psb_mmu_free_pt()]
    226  kfree(pt);  [in psb_mmu_free_pt()]
    234  struct psb_mmu_pt *pt;  [in psb_mmu_free_pagedir(), local]
    247  pt = pd->tables[i];  [in psb_mmu_free_pagedir()]
    248  if (pt)  [in psb_mmu_free_pagedir()]
    249  psb_mmu_free_pt(pt);  [in psb_mmu_free_pagedir()]
    262  struct psb_mmu_pt *pt = kmalloc(sizeof(*pt), GFP_KERNEL);  [in psb_mmu_alloc_pt(), local]
    271  if (!pt)  [in psb_mmu_alloc_pt()]
    274  pt->p = alloc_page(GFP_DMA32);  [in psb_mmu_alloc_pt()]
    [all …]
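psb_mmu_alloc_pt() and psb_mmu_free_pt() pair a small kmalloc'd descriptor with one backing page, and psb_mmu_free_pagedir() releases only the directory slots that were actually populated. A generic sketch of that teardown; the directory size and names are illustrative.

#include <linux/gfp.h>
#include <linux/kernel.h>
#include <linux/slab.h>

struct my_pt {
	struct page *p;			/* one page of PTEs */
};

struct my_pd {
	struct my_pt *tables[1024];	/* illustrative directory size */
};

static void my_free_pt(struct my_pt *pt)
{
	__free_page(pt->p);
	kfree(pt);
}

static void my_free_pagedir(struct my_pd *pd)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(pd->tables); i++) {
		if (pd->tables[i])	/* only populated slots were allocated */
			my_free_pt(pd->tables[i]);
	}
	kfree(pd);
}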
/drivers/dma-buf/
sw_sync.c:
    133  struct sync_pt *pt = dma_fence_to_sync_pt(fence);  [in timeline_fence_release(), local]
    138  if (!list_empty(&pt->link)) {  [in timeline_fence_release()]
    139  list_del(&pt->link);  [in timeline_fence_release()]
    140  rb_erase(&pt->node, &parent->pt_tree);  [in timeline_fence_release()]
    195  struct sync_pt *pt, *next;  [in sync_timeline_signal(), local]
    203  list_for_each_entry_safe(pt, next, &obj->pt_list, link) {  [in sync_timeline_signal()]
    204  if (!timeline_fence_signaled(&pt->base))  [in sync_timeline_signal()]
    207  dma_fence_get(&pt->base);  [in sync_timeline_signal()]
    209  list_move_tail(&pt->link, &signalled);  [in sync_timeline_signal()]
    210  rb_erase(&pt->node, &obj->pt_tree);  [in sync_timeline_signal()]
    [all …]
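sw_sync indexes every sync point twice, on the timeline's pt_list and in its pt_tree rbtree, so releasing a point has to unlink it from both while the timeline lock is held. A reduced sketch of that double unlink with placeholder lock and field names:

#include <linux/list.h>
#include <linux/rbtree.h>
#include <linux/spinlock.h>

struct my_timeline {
	spinlock_t lock;
	struct list_head pt_list;	/* points in insertion order */
	struct rb_root pt_tree;		/* points ordered by sequence number */
};

struct my_point {
	struct list_head link;
	struct rb_node node;
};

static void my_point_remove(struct my_timeline *tl, struct my_point *pt)
{
	unsigned long flags;

	spin_lock_irqsave(&tl->lock, flags);
	if (!list_empty(&pt->link)) {		/* still indexed on the timeline? */
		list_del(&pt->link);
		rb_erase(&pt->node, &tl->pt_tree);
	}
	spin_unlock_irqrestore(&tl->lock, flags);
}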
/drivers/gpu/drm/ttm/
ttm_pool.c:
    223  static void ttm_pool_type_give(struct ttm_pool_type *pt, struct page *p)  [in ttm_pool_type_give(), argument]
    225  unsigned int i, num_pages = 1 << pt->order;  [in ttm_pool_type_give()]
    234  spin_lock(&pt->lock);  [in ttm_pool_type_give()]
    235  list_add(&p->lru, &pt->pages);  [in ttm_pool_type_give()]
    236  spin_unlock(&pt->lock);  [in ttm_pool_type_give()]
    237  atomic_long_add(1 << pt->order, &allocated_pages);  [in ttm_pool_type_give()]
    241  static struct page *ttm_pool_type_take(struct ttm_pool_type *pt)  [in ttm_pool_type_take(), argument]
    245  spin_lock(&pt->lock);  [in ttm_pool_type_take()]
    246  p = list_first_entry_or_null(&pt->pages, typeof(*p), lru);  [in ttm_pool_type_take()]
    248  atomic_long_sub(1 << pt->order, &allocated_pages);  [in ttm_pool_type_take()]
    [all …]
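ttm_pool_type_give() and ttm_pool_type_take() implement a per-order page cache: freed pages are pushed onto a spinlock-protected list and a global counter tracks how many pages all pools currently hold. A condensed sketch of the pair; the counter and structure names are illustrative, not TTM's internals.

#include <linux/atomic.h>
#include <linux/list.h>
#include <linux/mm.h>
#include <linux/spinlock.h>

static atomic_long_t my_cached_pages;	/* pages held across all pools */

struct my_pool {
	unsigned int order;		/* each entry is 1 << order pages */
	spinlock_t lock;
	struct list_head pages;
};

static void my_pool_give(struct my_pool *pool, struct page *p)
{
	spin_lock(&pool->lock);
	list_add(&p->lru, &pool->pages);
	spin_unlock(&pool->lock);
	atomic_long_add(1 << pool->order, &my_cached_pages);
}

static struct page *my_pool_take(struct my_pool *pool)
{
	struct page *p;

	spin_lock(&pool->lock);
	p = list_first_entry_or_null(&pool->pages, typeof(*p), lru);
	if (p) {
		atomic_long_sub(1 << pool->order, &my_cached_pages);
		list_del(&p->lru);
	}
	spin_unlock(&pool->lock);
	return p;
}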
/drivers/media/common/saa7146/
saa7146_core.c:
    164  void *saa7146_vmalloc_build_pgtable(struct pci_dev *pci, long length, struct saa7146_pgtable *pt)  [in saa7146_vmalloc_build_pgtable(), argument]
    173  if (!(pt->slist = vmalloc_to_sg(mem, pages)))  [in saa7146_vmalloc_build_pgtable()]
    176  if (saa7146_pgtable_alloc(pci, pt))  [in saa7146_vmalloc_build_pgtable()]
    179  pt->nents = pages;  [in saa7146_vmalloc_build_pgtable()]
    180  slen = dma_map_sg(&pci->dev, pt->slist, pt->nents, DMA_FROM_DEVICE);  [in saa7146_vmalloc_build_pgtable()]
    184  if (0 != saa7146_pgtable_build_single(pci, pt, pt->slist, slen))  [in saa7146_vmalloc_build_pgtable()]
    190  dma_unmap_sg(&pci->dev, pt->slist, pt->nents, DMA_FROM_DEVICE);  [in saa7146_vmalloc_build_pgtable()]
    192  saa7146_pgtable_free(pci, pt);  [in saa7146_vmalloc_build_pgtable()]
    194  kfree(pt->slist);  [in saa7146_vmalloc_build_pgtable()]
    195  pt->slist = NULL;  [in saa7146_vmalloc_build_pgtable()]
    [all …]
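saa7146_vmalloc_build_pgtable() builds a scatterlist over vmalloc'd memory, maps it with dma_map_sg() and, on any failure, unwinds with dma_unmap_sg() and frees the page table again. The generic part of that contract is sketched below with placeholder names: dma_map_sg() returns the number of mapped entries (0 on failure), and dma_unmap_sg() must be called with the original nents.

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

static int my_map_buffer(struct device *dev, struct scatterlist *slist,
			 int nents)
{
	int mapped;

	mapped = dma_map_sg(dev, slist, nents, DMA_FROM_DEVICE);
	if (!mapped)
		return -ENXIO;		/* nothing was mapped, nothing to unwind */

	/* ... program the device's page table from the mapped entries ... */

	return mapped;
}

static void my_unmap_buffer(struct device *dev, struct scatterlist *slist,
			    int nents)
{
	/* Pass the original nents, not the count returned by dma_map_sg(). */
	dma_unmap_sg(dev, slist, nents, DMA_FROM_DEVICE);
}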
saa7146_hlp.c:
    725  vdma1.base_page = buf->pt[0].dma | ME1 | sfmt->swap;  [in calculate_video_dma_grab_packed()]
    728  vdma1.prot_addr = buf->pt[0].offset;  [in calculate_video_dma_grab_packed()]
    729  vdma1.base_even = buf->pt[0].offset+(vdma1.pitch/2)*height;  [in calculate_video_dma_grab_packed()]
    732  vdma1.base_even = buf->pt[0].offset;  [in calculate_video_dma_grab_packed()]
    734  vdma1.prot_addr = buf->pt[0].offset+(vdma1.pitch/2)*height;  [in calculate_video_dma_grab_packed()]
    776  vdma2->prot_addr = buf->pt[1].offset;  [in calc_planar_422()]
    777  vdma2->base_even = ((vdma2->pitch/2)*height)+buf->pt[1].offset;  [in calc_planar_422()]
    780  vdma3->prot_addr = buf->pt[2].offset;  [in calc_planar_422()]
    781  vdma3->base_even = ((vdma3->pitch/2)*height)+buf->pt[2].offset;  [in calc_planar_422()]
    784  vdma3->base_even = buf->pt[2].offset;  [in calc_planar_422()]
    [all …]
/drivers/mtd/maps/
impa7.c:
    60  static struct { u_long addr; u_long size; } pt[NUM_FLASHBANKS] = {  [in init_impa7(), local]
    69  pt[i].size, pt[i].addr);  [in init_impa7()]
    71  impa7_map[i].phys = pt[i].addr;  [in init_impa7()]
    72  impa7_map[i].virt = ioremap(pt[i].addr, pt[i].size);  [in init_impa7()]
/drivers/net/ethernet/stmicro/stmmac/
stmmac_selftests.c:
    239  struct packet_type pt;  [member]
    248  struct packet_type *pt,  [in stmmac_test_loopback_validate(), argument]
    251  struct stmmac_test_priv *tpriv = pt->af_packet_priv;  [in stmmac_test_loopback_validate()]
    334  tpriv->pt.type = htons(ETH_P_IP);  [in __stmmac_test_loopback()]
    335  tpriv->pt.func = stmmac_test_loopback_validate;  [in __stmmac_test_loopback()]
    336  tpriv->pt.dev = priv->dev;  [in __stmmac_test_loopback()]
    337  tpriv->pt.af_packet_priv = tpriv;  [in __stmmac_test_loopback()]
    341  dev_add_pack(&tpriv->pt);  [in __stmmac_test_loopback()]
    364  dev_remove_pack(&tpriv->pt);  [in __stmmac_test_loopback()]
    716  struct packet_type *pt,  [in stmmac_test_flowctrl_validate(), argument]
    [all …]
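Both network selftests in this listing (stmmac and mlx5 below) use the same technique: register a packet_type handler for ETH_P_IP with dev_add_pack(), recover the private test context from pt->af_packet_priv inside the callback, and tear it down with dev_remove_pack(). A bare-bones sketch of that flow with placeholder names; the frame validation itself is elided.

#include <linux/if_ether.h>
#include <linux/netdevice.h>
#include <linux/skbuff.h>

struct my_test_priv {
	struct packet_type pt;
	bool ok;
};

static int my_loopback_validate(struct sk_buff *skb, struct net_device *ndev,
				struct packet_type *pt,
				struct net_device *orig_ndev)
{
	struct my_test_priv *tpriv = pt->af_packet_priv;

	/* ... check the received frame here, then record the result ... */
	tpriv->ok = true;
	kfree_skb(skb);
	return 0;
}

static void my_loopback_setup(struct my_test_priv *tpriv,
			      struct net_device *ndev)
{
	tpriv->pt.type = htons(ETH_P_IP);	/* only see IPv4 frames */
	tpriv->pt.func = my_loopback_validate;
	tpriv->pt.dev = ndev;			/* only frames from this device */
	tpriv->pt.af_packet_priv = tpriv;
	dev_add_pack(&tpriv->pt);
}

static void my_loopback_cleanup(struct my_test_priv *tpriv)
{
	dev_remove_pack(&tpriv->pt);
}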
/drivers/net/fddi/skfp/
pmf.c:
    310  const struct s_p_tab *pt ;  [in smt_build_pmf_response(), local]
    423  pt = smt_get_ptab(pa->p_type) ;  [in smt_build_pmf_response()]
    424  if (pt && pt->p_access == AC_GROUP && !set) {  [in smt_build_pmf_response()]
    425  pt++ ;  [in smt_build_pmf_response()]
    426  while (pt->p_access == AC_G ||  [in smt_build_pmf_response()]
    427  pt->p_access == AC_GR) {  [in smt_build_pmf_response()]
    428  smt_add_para(smc,&pcon,pt->p_num,  [in smt_build_pmf_response()]
    430  pt++ ;  [in smt_build_pmf_response()]
    455  if (pt && pt->p_access == AC_S) {  [in smt_build_pmf_response()]
    546  const struct s_p_tab *pt ;  [in smt_add_para(), local]
    [all …]
/drivers/net/ethernet/mellanox/mlx5/core/
en_selftest.c:
    173  struct packet_type pt;  [member]
    182  struct packet_type *pt,  [in mlx5e_test_loopback_validate(), argument]
    185  struct mlx5e_lbt_priv *lbtp = pt->af_packet_priv;  [in mlx5e_test_loopback_validate()]
    243  lbtp->pt.type = htons(ETH_P_IP);  [in mlx5e_test_loopback_setup()]
    244  lbtp->pt.func = mlx5e_test_loopback_validate;  [in mlx5e_test_loopback_setup()]
    245  lbtp->pt.dev = priv->netdev;  [in mlx5e_test_loopback_setup()]
    246  lbtp->pt.af_packet_priv = lbtp;  [in mlx5e_test_loopback_setup()]
    247  dev_add_pack(&lbtp->pt);  [in mlx5e_test_loopback_setup()]
    264  dev_remove_pack(&lbtp->pt);  [in mlx5e_test_loopback_cleanup()]