
Searched refs:pt (Results 1 – 25 of 105) sorted by relevance


/drivers/gpu/drm/i915/gt/
intel_ppgtt.c
15 struct i915_page_table *pt; in alloc_pt() local
17 pt = kmalloc(sizeof(*pt), I915_GFP_ALLOW_FAIL); in alloc_pt()
18 if (unlikely(!pt)) in alloc_pt()
21 pt->base = vm->alloc_pt_dma(vm, I915_GTT_PAGE_SIZE_4K); in alloc_pt()
22 if (IS_ERR(pt->base)) { in alloc_pt()
23 kfree(pt); in alloc_pt()
27 atomic_set(&pt->used, 0); in alloc_pt()
28 return pt; in alloc_pt()
57 pd->pt.base = vm->alloc_pt_dma(vm, I915_GTT_PAGE_SIZE_4K); in alloc_pd()
58 if (IS_ERR(pd->pt.base)) { in alloc_pd()
[all …]
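The intel_ppgtt.c hits above trace the usual allocate-then-unwind shape for a page table: kmalloc the descriptor, allocate the DMA backing through the vm's hook, and free the descriptor again if the backing allocation fails. A minimal sketch of that shape, assuming hypothetical names (struct my_pt, alloc_backing) rather than the real i915 API:

```c
/* Hedged sketch of the allocate/unwind shape seen in alloc_pt() above;
 * struct my_pt and the alloc_backing callback are illustrative stand-ins.
 */
#include <linux/slab.h>
#include <linux/err.h>
#include <linux/atomic.h>

struct my_pt {
	void *base;		/* backing allocation for the hardware table */
	atomic_t used;		/* number of live entries */
};

static struct my_pt *my_alloc_pt(void *(*alloc_backing)(size_t), size_t sz)
{
	struct my_pt *pt;

	pt = kmalloc(sizeof(*pt), GFP_KERNEL);
	if (unlikely(!pt))
		return ERR_PTR(-ENOMEM);

	pt->base = alloc_backing(sz);
	if (IS_ERR(pt->base)) {
		struct my_pt *err = ERR_CAST(pt->base);

		kfree(pt);	/* unwind the descriptor on backing failure */
		return err;
	}

	atomic_set(&pt->used, 0);	/* no entries mapped yet */
	return pt;
}
```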
gen6_ppgtt.c
17 const struct i915_page_table *pt) in gen6_write_pde() argument
19 dma_addr_t addr = pt ? px_dma(pt) : px_dma(ppgtt->base.vm.scratch[1]); in gen6_write_pde()
90 struct i915_page_table * const pt = in gen6_ppgtt_clear_range() local
97 GEM_BUG_ON(count > atomic_read(&pt->used)); in gen6_ppgtt_clear_range()
98 if (!atomic_sub_return(count, &pt->used)) in gen6_ppgtt_clear_range()
108 vaddr = kmap_atomic_px(pt); in gen6_ppgtt_clear_range()
161 struct i915_page_table *pt; in gen6_flush_pd() local
169 gen6_for_each_pde(pt, pd, start, end, pde) in gen6_flush_pd()
170 gen6_write_pde(ppgtt, pde, pt); in gen6_flush_pd()
186 struct i915_page_table *pt; in gen6_alloc_va_range() local
[all …]
gen8_ppgtt.c
110 #define as_pd(x) container_of((x), typeof(struct i915_page_directory), pt)
184 free_px(vm, &pd->pt, lvl); in __gen8_ppgtt_cleanup()
214 struct i915_page_table *pt = pd->entry[idx]; in __gen8_ppgtt_clear() local
216 if (atomic_fetch_inc(&pt->used) >> gen8_pd_shift(1) && in __gen8_ppgtt_clear()
221 __gen8_ppgtt_cleanup(vm, as_pd(pt), I915_PDES, lvl); in __gen8_ppgtt_clear()
227 start = __gen8_ppgtt_clear(vm, as_pd(pt), in __gen8_ppgtt_clear()
237 atomic_read(&pt->used)); in __gen8_ppgtt_clear()
238 GEM_BUG_ON(!count || count >= atomic_read(&pt->used)); in __gen8_ppgtt_clear()
240 vaddr = kmap_atomic_px(pt); in __gen8_ppgtt_clear()
246 atomic_sub(count, &pt->used); in __gen8_ppgtt_clear()
[all …]
gen6_ppgtt.h
58 #define gen6_for_each_pde(pt, pd, start, length, iter) \ argument
61 (pt = i915_pt_entry(pd, iter), true); \
66 #define gen6_for_all_pdes(pt, pd, iter) \ argument
69 (pt = i915_pt_entry(pd, iter), true); \
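The pde iterators above rely on a comma expression so the loop condition both loads the entry and stays true. A simplified sketch of the same trick, assuming a hypothetical struct my_pd with a flat entry[] array (the real i915_pt_entry() and bounds differ):

```c
/* Sketch of the comma-expression iterator pattern in gen6_for_each_pde();
 * MY_NR_PDES and struct my_pd are illustrative, not the i915 definitions.
 */
#define MY_NR_PDES 512

struct my_pd {
	void *entry[MY_NR_PDES];
};

#define my_for_each_pde(pt, pd, iter)				\
	for ((iter) = 0;					\
	     (iter) < MY_NR_PDES &&				\
	     ((pt) = (pd)->entry[(iter)], true);		\
	     (iter)++)
```

Used as `my_for_each_pde(pt, pd, i)` with `pt` a `void *` and `i` an unsigned index; the trailing `, true` keeps a NULL entry from terminating the loop, so callers skip empty slots explicitly.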
/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
base.c
34 struct nvkm_mmu_pt *pt; member
42 nvkm_mmu_ptp_put(struct nvkm_mmu *mmu, bool force, struct nvkm_mmu_pt *pt) in nvkm_mmu_ptp_put() argument
44 const int slot = pt->base >> pt->ptp->shift; in nvkm_mmu_ptp_put()
45 struct nvkm_mmu_ptp *ptp = pt->ptp; in nvkm_mmu_ptp_put()
56 nvkm_mmu_ptc_put(mmu, force, &ptp->pt); in nvkm_mmu_ptp_put()
61 kfree(pt); in nvkm_mmu_ptp_put()
67 struct nvkm_mmu_pt *pt; in nvkm_mmu_ptp_get() local
71 if (!(pt = kzalloc(sizeof(*pt), GFP_KERNEL))) in nvkm_mmu_ptp_get()
78 kfree(pt); in nvkm_mmu_ptp_get()
82 ptp->pt = nvkm_mmu_ptc_get(mmu, 0x1000, 0x1000, false); in nvkm_mmu_ptp_get()
[all …]
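nvkm_mmu_ptp_get()/nvkm_mmu_ptp_put() above sub-allocate small tables out of one 0x1000-byte parent allocation, recovering the slot index from the byte offset. A loose sketch of the release side only, with illustrative types (the real pool also recycles the parent block through nvkm_mmu_ptc_put()):

```c
/* Sketch of the slot bookkeeping in nvkm_mmu_ptp_put(); my_ptp/my_pt and
 * the free bitmap are illustrative stand-ins for the nouveau types.
 */
#include <linux/slab.h>
#include <linux/bits.h>
#include <linux/types.h>

struct my_ptp {			/* parent pool: one page-sized block */
	u32 shift;		/* log2 of the sub-table size */
	u32 free;		/* bitmap of free slots */
};

struct my_pt {
	struct my_ptp *ptp;
	u32 base;		/* byte offset inside the parent block */
};

static void my_ptp_put(struct my_pt *pt)
{
	const int slot = pt->base >> pt->ptp->shift;

	pt->ptp->free |= BIT(slot);	/* hand the slot back to the pool */
	kfree(pt);
}
```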
vmmnv44.c
27 nv44_vmm_pgt_fill(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv44_vmm_pgt_fill() argument
33 tmp[0] = nvkm_ro32(pt->memory, pteo + 0x0); in nv44_vmm_pgt_fill()
34 tmp[1] = nvkm_ro32(pt->memory, pteo + 0x4); in nv44_vmm_pgt_fill()
35 tmp[2] = nvkm_ro32(pt->memory, pteo + 0x8); in nv44_vmm_pgt_fill()
36 tmp[3] = nvkm_ro32(pt->memory, pteo + 0xc); in nv44_vmm_pgt_fill()
66 VMM_WO032(pt, vmm, pteo + 0x0, tmp[0]); in nv44_vmm_pgt_fill()
67 VMM_WO032(pt, vmm, pteo + 0x4, tmp[1]); in nv44_vmm_pgt_fill()
68 VMM_WO032(pt, vmm, pteo + 0x8, tmp[2]); in nv44_vmm_pgt_fill()
69 VMM_WO032(pt, vmm, pteo + 0xc, tmp[3] | 0x40000000); in nv44_vmm_pgt_fill()
73 nv44_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv44_vmm_pgt_pte() argument
[all …]
vmmgp100.c
35 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in gp100_vmm_pfn_unmap() argument
40 nvkm_kmap(pt->memory); in gp100_vmm_pfn_unmap()
42 u32 datalo = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 0); in gp100_vmm_pfn_unmap()
43 u32 datahi = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 4); in gp100_vmm_pfn_unmap()
51 nvkm_done(pt->memory); in gp100_vmm_pfn_unmap()
56 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in gp100_vmm_pfn_clear() argument
59 nvkm_kmap(pt->memory); in gp100_vmm_pfn_clear()
61 u32 datalo = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 0); in gp100_vmm_pfn_clear()
62 u32 datahi = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 4); in gp100_vmm_pfn_clear()
65 VMM_WO064(pt, vmm, ptei * 8, data & ~BIT_ULL(0)); in gp100_vmm_pfn_clear()
[all …]
vmmgf100.c
32 gf100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in gf100_vmm_pgt_pte() argument
44 VMM_WO064(pt, vmm, ptei++ * 8, data); in gf100_vmm_pgt_pte()
51 VMM_WO064(pt, vmm, ptei++ * 8, data); in gf100_vmm_pgt_pte()
58 gf100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in gf100_vmm_pgt_sgl() argument
61 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte); in gf100_vmm_pgt_sgl()
65 gf100_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in gf100_vmm_pgt_dma() argument
70 nvkm_kmap(pt->memory); in gf100_vmm_pgt_dma()
73 VMM_WO064(pt, vmm, ptei++ * 8, data); in gf100_vmm_pgt_dma()
76 nvkm_done(pt->memory); in gf100_vmm_pgt_dma()
80 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte); in gf100_vmm_pgt_dma()
[all …]
vmmnv41.c
27 nv41_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv41_vmm_pgt_pte() argument
32 VMM_WO032(pt, vmm, ptei++ * 4, data); in nv41_vmm_pgt_pte()
38 nv41_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv41_vmm_pgt_sgl() argument
41 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); in nv41_vmm_pgt_sgl()
45 nv41_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv41_vmm_pgt_dma() argument
49 nvkm_kmap(pt->memory); in nv41_vmm_pgt_dma()
52 VMM_WO032(pt, vmm, ptei++ * 4, data); in nv41_vmm_pgt_dma()
54 nvkm_done(pt->memory); in nv41_vmm_pgt_dma()
56 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); in nv41_vmm_pgt_dma()
62 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in nv41_vmm_pgt_unmap() argument
[all …]
vmmnv04.c
28 nv04_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv04_vmm_pgt_pte() argument
33 VMM_WO032(pt, vmm, 8 + ptei++ * 4, data); in nv04_vmm_pgt_pte()
39 nv04_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv04_vmm_pgt_sgl() argument
42 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte); in nv04_vmm_pgt_sgl()
46 nv04_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv04_vmm_pgt_dma() argument
50 nvkm_kmap(pt->memory); in nv04_vmm_pgt_dma()
52 VMM_WO032(pt, vmm, 8 + (ptei++ * 4), *map->dma++ | 0x00000003); in nv04_vmm_pgt_dma()
53 nvkm_done(pt->memory); in nv04_vmm_pgt_dma()
55 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte); in nv04_vmm_pgt_dma()
61 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in nv04_vmm_pgt_unmap() argument
[all …]
vmmnv50.c
32 nv50_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv50_vmm_pgt_pte() argument
53 VMM_WO064(pt, vmm, ptei++ * 8, data); in nv50_vmm_pgt_pte()
58 nv50_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv50_vmm_pgt_sgl() argument
61 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte); in nv50_vmm_pgt_sgl()
65 nv50_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv50_vmm_pgt_dma() argument
70 nvkm_kmap(pt->memory); in nv50_vmm_pgt_dma()
73 VMM_WO064(pt, vmm, ptei++ * 8, data); in nv50_vmm_pgt_dma()
76 nvkm_done(pt->memory); in nv50_vmm_pgt_dma()
80 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte); in nv50_vmm_pgt_dma()
84 nv50_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv50_vmm_pgt_mem() argument
[all …]
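The per-generation vmm files above (nv04, nv41, nv44, nv50, gf100, gp100) all repeat one shape: a *_pgt_pte() loop writes entries through VMM_WO032()/VMM_WO064(), and thin *_pgt_sgl()/*_pgt_dma() wrappers feed it via the VMM_MAP_ITER_* helpers. A hedged sketch of the inner loop alone, writing a plain u32 array instead of the real nvkm memory accessors, with an invented entry layout:

```c
/* Sketch of the per-generation PTE write loop (cf. nv41_vmm_pgt_pte());
 * the valid bit and address shift are illustrative, and the real code
 * writes through VMM_WO032() rather than a bare array.
 */
#include <linux/types.h>

static void my_pgt_pte(u32 *pt, u32 ptei, u32 ptes, u64 addr, u32 next)
{
	u32 data = (addr >> 7) | 0x00000001;	/* invented valid bit */

	while (ptes--) {
		pt[ptei++] = data;
		data += next;		/* step to the next physical page */
	}
}
```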
vmm.c
79 struct nvkm_vmm_pt *pt[NVKM_VMM_LEVELS_MAX]; member
142 struct nvkm_vmm_pt *pgd = it->pt[it->lvl + 1]; in nvkm_vmm_unref_pdes()
143 struct nvkm_vmm_pt *pgt = it->pt[it->lvl]; in nvkm_vmm_unref_pdes()
144 struct nvkm_mmu_pt *pt = pgt->pt[type]; in nvkm_vmm_unref_pdes() local
154 pgt->pt[type] = NULL; in nvkm_vmm_unref_pdes()
157 if (pgd->pt[0]) { in nvkm_vmm_unref_pdes()
159 func->sparse(vmm, pgd->pt[0], pdei, 1); in nvkm_vmm_unref_pdes()
162 func->unmap(vmm, pgd->pt[0], pdei, 1); in nvkm_vmm_unref_pdes()
190 nvkm_mmu_ptc_put(vmm->mmu, vmm->bootstrapped, &pt); in nvkm_vmm_unref_pdes()
244 pair->func->sparse(vmm, pgt->pt[0], pteb, ptes); in nvkm_vmm_unref_sptes()
[all …]
nv44.c
35 struct nvkm_memory *pt = mmu->vmm->pd->pt[0]->memory; in nv44_mmu_init() local
43 addr -= ((nvkm_memory_addr(pt) >> 19) + 1) << 19; in nv44_mmu_init()
47 nvkm_wr32(device, 0x100804, (nvkm_memory_size(pt) / 4) * 4096); in nv44_mmu_init()
/drivers/gpu/drm/i915/selftests/
scatterlist.c
41 static noinline int expect_pfn_sg(struct pfn_table *pt, in expect_pfn_sg() argument
50 pfn = pt->start; in expect_pfn_sg()
51 for_each_sg(pt->st.sgl, sg, pt->st.nents, n) { in expect_pfn_sg()
53 unsigned int npages = npages_fn(n, pt->st.nents, rnd); in expect_pfn_sg()
72 if (pfn != pt->end) { in expect_pfn_sg()
74 __func__, who, pt->end, pfn); in expect_pfn_sg()
81 static noinline int expect_pfn_sg_page_iter(struct pfn_table *pt, in expect_pfn_sg_page_iter() argument
88 pfn = pt->start; in expect_pfn_sg_page_iter()
89 for_each_sg_page(pt->st.sgl, &sgiter, pt->st.nents, 0) { in expect_pfn_sg_page_iter()
103 if (pfn != pt->end) { in expect_pfn_sg_page_iter()
[all …]
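expect_pfn_sg() above walks the table entry by entry with for_each_sg() and checks that the pfn advances by each entry's page count, ending exactly at pt->end; expect_pfn_sg_page_iter() repeats the check page by page. A compact sketch of the first walk, where struct my_pfn_table mirrors the selftest's fields and the helpers are the stock kernel ones:

```c
/* Sketch of the for_each_sg() verification walk in expect_pfn_sg(). */
#include <linux/scatterlist.h>
#include <linux/mm.h>

struct my_pfn_table {
	struct sg_table st;
	unsigned long start, end;	/* expected pfn range */
};

static int my_expect_pfn_sg(struct my_pfn_table *pt)
{
	struct scatterlist *sg;
	unsigned long pfn = pt->start;
	unsigned int n;

	for_each_sg(pt->st.sgl, sg, pt->st.nents, n) {
		if (page_to_pfn(sg_page(sg)) != pfn)
			return -EINVAL;		/* entry out of sequence */
		pfn += sg->length >> PAGE_SHIFT;
	}

	return pfn == pt->end ? 0 : -EINVAL;	/* must cover the range */
}
```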
/drivers/gpu/drm/gma500/
mmu.c
231 static void psb_mmu_free_pt(struct psb_mmu_pt *pt) in psb_mmu_free_pt() argument
233 __free_page(pt->p); in psb_mmu_free_pt()
234 kfree(pt); in psb_mmu_free_pt()
242 struct psb_mmu_pt *pt; in psb_mmu_free_pagedir() local
255 pt = pd->tables[i]; in psb_mmu_free_pagedir()
256 if (pt) in psb_mmu_free_pagedir()
257 psb_mmu_free_pt(pt); in psb_mmu_free_pagedir()
270 struct psb_mmu_pt *pt = kmalloc(sizeof(*pt), GFP_KERNEL); in psb_mmu_alloc_pt() local
279 if (!pt) in psb_mmu_alloc_pt()
282 pt->p = alloc_page(GFP_DMA32); in psb_mmu_alloc_pt()
[all …]
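psb_mmu_alloc_pt()/psb_mmu_free_pt() above pair a kmalloc'd descriptor with an alloc_page(GFP_DMA32) backing page, and free the two in reverse order. A minimal sketch of that pairing with illustrative names:

```c
/* Sketch of the descriptor-plus-page pairing in psb_mmu_alloc_pt() and
 * psb_mmu_free_pt(); struct my_mmu_pt is an illustrative stand-in.
 */
#include <linux/slab.h>
#include <linux/gfp.h>

struct my_mmu_pt {
	struct page *p;		/* backing page for the hardware table */
};

static struct my_mmu_pt *my_mmu_alloc_pt(void)
{
	struct my_mmu_pt *pt = kmalloc(sizeof(*pt), GFP_KERNEL);

	if (!pt)
		return NULL;

	pt->p = alloc_page(GFP_DMA32);	/* device can only address 32 bits */
	if (!pt->p) {
		kfree(pt);		/* unwind on failure */
		return NULL;
	}
	return pt;
}

static void my_mmu_free_pt(struct my_mmu_pt *pt)
{
	__free_page(pt->p);
	kfree(pt);
}
```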
/drivers/dma-buf/
sw_sync.c
133 struct sync_pt *pt = dma_fence_to_sync_pt(fence); in timeline_fence_release() local
138 if (!list_empty(&pt->link)) { in timeline_fence_release()
139 list_del(&pt->link); in timeline_fence_release()
140 rb_erase(&pt->node, &parent->pt_tree); in timeline_fence_release()
195 struct sync_pt *pt, *next; in sync_timeline_signal() local
203 list_for_each_entry_safe(pt, next, &obj->pt_list, link) { in sync_timeline_signal()
204 if (!timeline_fence_signaled(&pt->base)) in sync_timeline_signal()
207 dma_fence_get(&pt->base); in sync_timeline_signal()
209 list_move_tail(&pt->link, &signalled); in sync_timeline_signal()
210 rb_erase(&pt->node, &obj->pt_tree); in sync_timeline_signal()
[all …]
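sw_sync.c keeps pending points on both a list (in seqno order) and an rbtree; sync_timeline_signal() walks the list safely, stops at the first unsignalled point, and moves each signalled point off both structures. A reduced sketch of that loop with illustrative types (the real code also takes a dma_fence reference before moving each point):

```c
/* Sketch of the signal walk in sync_timeline_signal(); my_pt/my_timeline
 * and my_signaled() are illustrative stand-ins for the sw_sync types.
 */
#include <linux/list.h>
#include <linux/rbtree.h>
#include <linux/types.h>

struct my_pt {
	struct list_head link;	/* position on the pending list */
	struct rb_node node;	/* position in the seqno tree */
	u64 seqno;
};

struct my_timeline {
	struct list_head pt_list;
	struct rb_root pt_tree;
	u64 value;		/* current timeline value */
};

static bool my_signaled(const struct my_timeline *tl, const struct my_pt *pt)
{
	return pt->seqno <= tl->value;
}

static void my_timeline_signal(struct my_timeline *tl, struct list_head *out)
{
	struct my_pt *pt, *next;

	list_for_each_entry_safe(pt, next, &tl->pt_list, link) {
		if (!my_signaled(tl, pt))
			break;			/* list is in seqno order */

		list_move_tail(&pt->link, out);	/* hand off to the caller */
		rb_erase(&pt->node, &tl->pt_tree);
	}
}
```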
/drivers/media/common/saa7146/
saa7146_core.c
164 void *saa7146_vmalloc_build_pgtable(struct pci_dev *pci, long length, struct saa7146_pgtable *pt) in saa7146_vmalloc_build_pgtable() argument
173 if (!(pt->slist = vmalloc_to_sg(mem, pages))) in saa7146_vmalloc_build_pgtable()
176 if (saa7146_pgtable_alloc(pci, pt)) in saa7146_vmalloc_build_pgtable()
179 pt->nents = pages; in saa7146_vmalloc_build_pgtable()
180 slen = pci_map_sg(pci,pt->slist,pt->nents,PCI_DMA_FROMDEVICE); in saa7146_vmalloc_build_pgtable()
184 if (0 != saa7146_pgtable_build_single(pci, pt, pt->slist, slen)) in saa7146_vmalloc_build_pgtable()
190 pci_unmap_sg(pci, pt->slist, pt->nents, PCI_DMA_FROMDEVICE); in saa7146_vmalloc_build_pgtable()
192 saa7146_pgtable_free(pci, pt); in saa7146_vmalloc_build_pgtable()
194 kfree(pt->slist); in saa7146_vmalloc_build_pgtable()
195 pt->slist = NULL; in saa7146_vmalloc_build_pgtable()
[all …]
saa7146_hlp.c
725 vdma1.base_page = buf->pt[0].dma | ME1 | sfmt->swap; in calculate_video_dma_grab_packed()
728 vdma1.prot_addr = buf->pt[0].offset; in calculate_video_dma_grab_packed()
729 vdma1.base_even = buf->pt[0].offset+(vdma1.pitch/2)*height; in calculate_video_dma_grab_packed()
732 vdma1.base_even = buf->pt[0].offset; in calculate_video_dma_grab_packed()
734 vdma1.prot_addr = buf->pt[0].offset+(vdma1.pitch/2)*height; in calculate_video_dma_grab_packed()
776 vdma2->prot_addr = buf->pt[1].offset; in calc_planar_422()
777 vdma2->base_even = ((vdma2->pitch/2)*height)+buf->pt[1].offset; in calc_planar_422()
780 vdma3->prot_addr = buf->pt[2].offset; in calc_planar_422()
781 vdma3->base_even = ((vdma3->pitch/2)*height)+buf->pt[2].offset; in calc_planar_422()
784 vdma3->base_even = buf->pt[2].offset; in calc_planar_422()
[all …]
/drivers/mtd/maps/
impa7.c
60 static struct { u_long addr; u_long size; } pt[NUM_FLASHBANKS] = { in init_impa7() local
69 pt[i].size, pt[i].addr); in init_impa7()
71 impa7_map[i].phys = pt[i].addr; in init_impa7()
72 impa7_map[i].virt = ioremap(pt[i].addr, pt[i].size); in init_impa7()
/drivers/net/ethernet/stmicro/stmmac/
stmmac_selftests.c
239 struct packet_type pt; member
248 struct packet_type *pt, in stmmac_test_loopback_validate() argument
251 struct stmmac_test_priv *tpriv = pt->af_packet_priv; in stmmac_test_loopback_validate()
334 tpriv->pt.type = htons(ETH_P_IP); in __stmmac_test_loopback()
335 tpriv->pt.func = stmmac_test_loopback_validate; in __stmmac_test_loopback()
336 tpriv->pt.dev = priv->dev; in __stmmac_test_loopback()
337 tpriv->pt.af_packet_priv = tpriv; in __stmmac_test_loopback()
341 dev_add_pack(&tpriv->pt); in __stmmac_test_loopback()
364 dev_remove_pack(&tpriv->pt); in __stmmac_test_loopback()
716 struct packet_type *pt, in stmmac_test_flowctrl_validate() argument
[all …]
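The stmmac selftest above (and the mlx5e one further down) uses the same recipe: embed a struct packet_type, point func at a validation callback that recovers the test state through af_packet_priv, dev_add_pack() before sending, dev_remove_pack() afterwards. A minimal sketch of the registration, with illustrative names:

```c
/* Sketch of the loopback-validate registration used by the stmmac and
 * mlx5e selftests; my_validate() and struct my_test_priv are stand-ins.
 */
#include <linux/netdevice.h>
#include <linux/if_ether.h>

struct my_test_priv {
	struct packet_type pt;
	bool ok;
};

static int my_validate(struct sk_buff *skb, struct net_device *ndev,
		       struct packet_type *pt, struct net_device *orig_ndev)
{
	struct my_test_priv *tpriv = pt->af_packet_priv;

	tpriv->ok = true;	/* the looped-back frame made it home */
	kfree_skb(skb);
	return 0;
}

static void my_test_setup(struct my_test_priv *tpriv, struct net_device *ndev)
{
	tpriv->pt.type = htons(ETH_P_IP);	/* match IPv4 frames only */
	tpriv->pt.func = my_validate;
	tpriv->pt.dev = ndev;			/* restrict to this device */
	tpriv->pt.af_packet_priv = tpriv;
	dev_add_pack(&tpriv->pt);
}

static void my_test_cleanup(struct my_test_priv *tpriv)
{
	dev_remove_pack(&tpriv->pt);
}
```

Both drivers call the cleanup unconditionally, including on timeout, so the hook never outlives the test.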
/drivers/net/fddi/skfp/
pmf.c
314 const struct s_p_tab *pt ; in smt_build_pmf_response() local
427 pt = smt_get_ptab(pa->p_type) ; in smt_build_pmf_response()
428 if (pt && pt->p_access == AC_GROUP && !set) { in smt_build_pmf_response()
429 pt++ ; in smt_build_pmf_response()
430 while (pt->p_access == AC_G || in smt_build_pmf_response()
431 pt->p_access == AC_GR) { in smt_build_pmf_response()
432 smt_add_para(smc,&pcon,pt->p_num, in smt_build_pmf_response()
434 pt++ ; in smt_build_pmf_response()
459 if (pt && pt->p_access == AC_S) { in smt_build_pmf_response()
550 const struct s_p_tab *pt ; in smt_add_para() local
[all …]
/drivers/net/ethernet/mellanox/mlx5/core/
en_selftest.c
173 struct packet_type pt; member
182 struct packet_type *pt, in mlx5e_test_loopback_validate() argument
185 struct mlx5e_lbt_priv *lbtp = pt->af_packet_priv; in mlx5e_test_loopback_validate()
243 lbtp->pt.type = htons(ETH_P_IP); in mlx5e_test_loopback_setup()
244 lbtp->pt.func = mlx5e_test_loopback_validate; in mlx5e_test_loopback_setup()
245 lbtp->pt.dev = priv->netdev; in mlx5e_test_loopback_setup()
246 lbtp->pt.af_packet_priv = lbtp; in mlx5e_test_loopback_setup()
247 dev_add_pack(&lbtp->pt); in mlx5e_test_loopback_setup()
264 dev_remove_pack(&lbtp->pt); in mlx5e_test_loopback_cleanup()
/drivers/md/
dm-thin.c
2510 static bool passdown_enabled(struct pool_c *pt) in passdown_enabled() argument
2512 return pt->adjusted_pf.discard_passdown; in passdown_enabled()
2517 struct pool_c *pt = pool->ti->private; in set_discard_callbacks() local
2519 if (passdown_enabled(pt)) { in set_discard_callbacks()
2531 struct pool_c *pt = pool->ti->private; in set_pool_mode() local
2606 pool->pf.error_if_no_space = pt->requested_pf.error_if_no_space; in set_pool_mode()
2621 pt->adjusted_pf.mode = new_mode; in set_pool_mode()
2817 static bool data_dev_supports_discard(struct pool_c *pt) in data_dev_supports_discard() argument
2819 struct request_queue *q = bdev_get_queue(pt->data_dev->bdev); in data_dev_supports_discard()
2833 static void disable_passdown_if_not_supported(struct pool_c *pt) in disable_passdown_if_not_supported() argument
[all …]
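dm-thin keeps two copies of the pool feature flags: requested_pf records what the table line asked for, adjusted_pf what the data device can actually honour, and run-time checks such as passdown_enabled() consult the adjusted copy. A loose sketch of that split with illustrative types (the real disable_passdown_if_not_supported() probes the block device queue):

```c
/* Sketch of the requested-vs-adjusted feature split behind
 * passdown_enabled(); the types and supports_discard flag are stand-ins.
 */
#include <linux/types.h>

struct my_pool_features {
	bool discard_passdown;
	bool error_if_no_space;
};

struct my_pool_c {
	struct my_pool_features requested_pf;	/* what the table asked for */
	struct my_pool_features adjusted_pf;	/* what the device supports */
};

static bool my_passdown_enabled(const struct my_pool_c *pt)
{
	return pt->adjusted_pf.discard_passdown;
}

static void my_adjust_features(struct my_pool_c *pt, bool supports_discard)
{
	pt->adjusted_pf = pt->requested_pf;
	if (!supports_discard)
		pt->adjusted_pf.discard_passdown = false;	/* quietly drop */
}
```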
/drivers/staging/media/ipu3/
ipu3-mmu.c
123 u32 *pt; in imgu_mmu_alloc_page_table() local
126 pt = (u32 *)__get_free_page(GFP_KERNEL); in imgu_mmu_alloc_page_table()
127 if (!pt) in imgu_mmu_alloc_page_table()
131 pt[pte] = pteval; in imgu_mmu_alloc_page_table()
133 set_memory_uc((unsigned long)pt, IPU3_PT_ORDER); in imgu_mmu_alloc_page_table()
135 return pt; in imgu_mmu_alloc_page_table()
142 static void imgu_mmu_free_page_table(u32 *pt) in imgu_mmu_free_page_table() argument
144 set_memory_wb((unsigned long)pt, IPU3_PT_ORDER); in imgu_mmu_free_page_table()
145 free_page((unsigned long)pt); in imgu_mmu_free_page_table()
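imgu_mmu_alloc_page_table() above grabs one page with __get_free_page(), pre-fills every entry with a scratch pteval, then switches the mapping to uncached with set_memory_uc() so the device never sees stale cache lines; the free path restores write-back before returning the page. A sketch of that alloc/fill/uncache shape (the entry count and single-page size are illustrative assumptions):

```c
/* Sketch of the alloc/fill/uncache shape in imgu_mmu_alloc_page_table();
 * MY_PT_ENTRIES and the one-page size are illustrative assumptions.
 */
#include <linux/gfp.h>
#include <linux/types.h>
#include <asm/set_memory.h>

#define MY_PT_ENTRIES	(PAGE_SIZE / sizeof(u32))

static u32 *my_mmu_alloc_page_table(u32 pteval)
{
	u32 *pt;
	unsigned int pte;

	pt = (u32 *)__get_free_page(GFP_KERNEL);
	if (!pt)
		return NULL;

	for (pte = 0; pte < MY_PT_ENTRIES; pte++)
		pt[pte] = pteval;	/* point every entry at scratch */

	set_memory_uc((unsigned long)pt, 1);	/* device-visible, uncached */
	return pt;
}

static void my_mmu_free_page_table(u32 *pt)
{
	set_memory_wb((unsigned long)pt, 1);	/* restore caching first */
	free_page((unsigned long)pt);
}
```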
/drivers/phy/socionext/
phy-uniphier-usb3hs.c
84 struct uniphier_u3hsphy_trim_param *pt);
89 struct uniphier_u3hsphy_trim_param *pt) in uniphier_u3hsphy_trim_ld20() argument
92 *pconfig |= FIELD_PREP(HSPHY_CFG0_RTERM_MASK, pt->rterm); in uniphier_u3hsphy_trim_ld20()
95 *pconfig |= FIELD_PREP(HSPHY_CFG0_SEL_T_MASK, pt->sel_t); in uniphier_u3hsphy_trim_ld20()
98 *pconfig |= FIELD_PREP(HSPHY_CFG0_HS_I_MASK, pt->hs_i); in uniphier_u3hsphy_trim_ld20()
123 struct uniphier_u3hsphy_trim_param *pt) in uniphier_u3hsphy_get_nvparams() argument
127 ret = uniphier_u3hsphy_get_nvparam(priv, "rterm", &pt->rterm); in uniphier_u3hsphy_get_nvparams()
131 ret = uniphier_u3hsphy_get_nvparam(priv, "sel_t", &pt->sel_t); in uniphier_u3hsphy_get_nvparams()
135 ret = uniphier_u3hsphy_get_nvparam(priv, "hs_i", &pt->hs_i); in uniphier_u3hsphy_get_nvparams()
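The trim helpers above read per-chip calibration values out of nvmem and pack them into a PHY config word with FIELD_PREP(). A small sketch of the packing step; the masks and field widths here are invented, not the real LD20 register layout:

```c
/* Sketch of the FIELD_PREP() packing in uniphier_u3hsphy_trim_ld20();
 * the masks below are illustrative, not the real HSPHY_CFG0 layout.
 */
#include <linux/bitfield.h>
#include <linux/bits.h>
#include <linux/types.h>

#define MY_CFG_RTERM_MASK	GENMASK(7, 6)
#define MY_CFG_SEL_T_MASK	GENMASK(5, 3)
#define MY_CFG_HS_I_MASK	GENMASK(2, 0)

struct my_trim_param {
	u32 rterm, sel_t, hs_i;
};

static void my_trim_pack(u32 *pconfig, const struct my_trim_param *pt)
{
	*pconfig &= ~(MY_CFG_RTERM_MASK | MY_CFG_SEL_T_MASK | MY_CFG_HS_I_MASK);
	*pconfig |= FIELD_PREP(MY_CFG_RTERM_MASK, pt->rterm);
	*pconfig |= FIELD_PREP(MY_CFG_SEL_T_MASK, pt->sel_t);
	*pconfig |= FIELD_PREP(MY_CFG_HS_I_MASK, pt->hs_i);
}
```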
