Searched refs:pd (Results 1 – 25 of 300) sorted by relevance

/drivers/net/ethernet/smsc/
smsc9420.c
98 static inline u32 smsc9420_reg_read(struct smsc9420_pdata *pd, u32 offset) in smsc9420_reg_read() argument
100 return ioread32(pd->ioaddr + offset); in smsc9420_reg_read()
104 smsc9420_reg_write(struct smsc9420_pdata *pd, u32 offset, u32 value) in smsc9420_reg_write() argument
106 iowrite32(value, pd->ioaddr + offset); in smsc9420_reg_write()
109 static inline void smsc9420_pci_flush_write(struct smsc9420_pdata *pd) in smsc9420_pci_flush_write() argument
112 smsc9420_reg_read(pd, ID_REV); in smsc9420_pci_flush_write()
117 struct smsc9420_pdata *pd = (struct smsc9420_pdata *)bus->priv; in smsc9420_mii_read() local
122 spin_lock_irqsave(&pd->phy_lock, flags); in smsc9420_mii_read()
125 if ((smsc9420_reg_read(pd, MII_ACCESS) & MII_ACCESS_MII_BUSY_)) { in smsc9420_mii_read()
126 netif_warn(pd, drv, pd->dev, "MII is busy???\n"); in smsc9420_mii_read()
[all …]
/drivers/i2c/busses/
i2c-sh_mobile.c
153 void (*setup)(struct sh_mobile_i2c_data *pd);
197 static void iic_wr(struct sh_mobile_i2c_data *pd, int offs, unsigned char data) in iic_wr() argument
200 data |= pd->icic; in iic_wr()
202 iowrite8(data, pd->reg + offs); in iic_wr()
205 static unsigned char iic_rd(struct sh_mobile_i2c_data *pd, int offs) in iic_rd() argument
207 return ioread8(pd->reg + offs); in iic_rd()
210 static void iic_set_clr(struct sh_mobile_i2c_data *pd, int offs, in iic_set_clr() argument
213 iic_wr(pd, offs, (iic_rd(pd, offs) | set) & ~clr); in iic_set_clr()
250 static int sh_mobile_i2c_init(struct sh_mobile_i2c_data *pd) in sh_mobile_i2c_init() argument
257 clk_prepare_enable(pd->clk); in sh_mobile_i2c_init()
[all …]
i2c-simtec.c
44 struct simtec_i2c_data *pd = pw; in simtec_i2c_setsda() local
45 writeb(CMD_SET_SDA | (state ? STATE_SDA : 0), pd->reg); in simtec_i2c_setsda()
50 struct simtec_i2c_data *pd = pw; in simtec_i2c_setscl() local
51 writeb(CMD_SET_SCL | (state ? STATE_SCL : 0), pd->reg); in simtec_i2c_setscl()
56 struct simtec_i2c_data *pd = pw; in simtec_i2c_getsda() local
57 return readb(pd->reg) & STATE_SDA ? 1 : 0; in simtec_i2c_getsda()
62 struct simtec_i2c_data *pd = pw; in simtec_i2c_getscl() local
63 return readb(pd->reg) & STATE_SCL ? 1 : 0; in simtec_i2c_getscl()
70 struct simtec_i2c_data *pd; in simtec_i2c_probe() local
75 pd = kzalloc(sizeof(struct simtec_i2c_data), GFP_KERNEL); in simtec_i2c_probe()
[all …]
i2c-pca-platform.c
42 static int i2c_pca_pf_readbyte8(void *pd, int reg) in i2c_pca_pf_readbyte8() argument
44 struct i2c_pca_pf_data *i2c = pd; in i2c_pca_pf_readbyte8()
48 static int i2c_pca_pf_readbyte16(void *pd, int reg) in i2c_pca_pf_readbyte16() argument
50 struct i2c_pca_pf_data *i2c = pd; in i2c_pca_pf_readbyte16()
54 static int i2c_pca_pf_readbyte32(void *pd, int reg) in i2c_pca_pf_readbyte32() argument
56 struct i2c_pca_pf_data *i2c = pd; in i2c_pca_pf_readbyte32()
60 static void i2c_pca_pf_writebyte8(void *pd, int reg, int val) in i2c_pca_pf_writebyte8() argument
62 struct i2c_pca_pf_data *i2c = pd; in i2c_pca_pf_writebyte8()
66 static void i2c_pca_pf_writebyte16(void *pd, int reg, int val) in i2c_pca_pf_writebyte16() argument
68 struct i2c_pca_pf_data *i2c = pd; in i2c_pca_pf_writebyte16()
[all …]
/drivers/block/
pktcdvd.c
75 #define pkt_err(pd, fmt, ...) \ argument
76 pr_err("%s: " fmt, pd->name, ##__VA_ARGS__)
77 #define pkt_notice(pd, fmt, ...) \ argument
78 pr_notice("%s: " fmt, pd->name, ##__VA_ARGS__)
79 #define pkt_info(pd, fmt, ...) \ argument
80 pr_info("%s: " fmt, pd->name, ##__VA_ARGS__)
82 #define pkt_dbg(level, pd, fmt, ...) \ argument
86 pd->name, __func__, ##__VA_ARGS__); \
88 pr_notice("%s: " fmt, pd->name, ##__VA_ARGS__); \
110 static sector_t get_zone(sector_t sector, struct pktcdvd_device *pd) in get_zone() argument
[all …]
/drivers/media/pci/dt3155/
dt3155.c
139 struct dt3155_priv *pd = vb2_get_drv_priv(vq); in dt3155_queue_setup() local
140 unsigned size = pd->width * pd->height; in dt3155_queue_setup()
153 struct dt3155_priv *pd = vb2_get_drv_priv(vb->vb2_queue); in dt3155_buf_prepare() local
155 vb2_set_plane_payload(vb, 0, pd->width * pd->height); in dt3155_buf_prepare()
161 struct dt3155_priv *pd = vb2_get_drv_priv(q); in dt3155_start_streaming() local
162 struct vb2_buffer *vb = &pd->curr_buf->vb2_buf; in dt3155_start_streaming()
165 pd->sequence = 0; in dt3155_start_streaming()
167 iowrite32(dma_addr, pd->regs + EVEN_DMA_START); in dt3155_start_streaming()
168 iowrite32(dma_addr + pd->width, pd->regs + ODD_DMA_START); in dt3155_start_streaming()
169 iowrite32(pd->width, pd->regs + EVEN_DMA_STRIDE); in dt3155_start_streaming()
[all …]
/drivers/soc/samsung/
pm_domains.c
40 struct generic_pm_domain pd; member
50 struct exynos_pm_domain *pd; in exynos_pd_power() local
56 pd = container_of(domain, struct exynos_pm_domain, pd); in exynos_pd_power()
57 base = pd->base; in exynos_pd_power()
60 if (IS_ERR(pd->asb_clk[i])) in exynos_pd_power()
62 clk_prepare_enable(pd->asb_clk[i]); in exynos_pd_power()
68 if (IS_ERR(pd->clk[i])) in exynos_pd_power()
70 pd->pclk[i] = clk_get_parent(pd->clk[i]); in exynos_pd_power()
71 if (clk_set_parent(pd->clk[i], pd->oscclk)) in exynos_pd_power()
73 pd->name, i); in exynos_pd_power()
[all …]
/drivers/gpu/drm/gma500/
mmu.c
138 void psb_mmu_set_pd_context(struct psb_mmu_pd *pd, int hw_context) in psb_mmu_set_pd_context() argument
140 struct drm_device *dev = pd->driver->dev; in psb_mmu_set_pd_context()
145 down_write(&pd->driver->sem); in psb_mmu_set_pd_context()
146 PSB_WSGX32(page_to_pfn(pd->p) << PAGE_SHIFT, offset); in psb_mmu_set_pd_context()
148 psb_mmu_flush_pd_locked(pd->driver, 1); in psb_mmu_set_pd_context()
149 pd->hw_context = hw_context; in psb_mmu_set_pd_context()
150 up_write(&pd->driver->sem); in psb_mmu_set_pd_context()
178 struct psb_mmu_pd *pd = kmalloc(sizeof(*pd), GFP_KERNEL); in psb_mmu_alloc_pd() local
182 if (!pd) in psb_mmu_alloc_pd()
185 pd->p = alloc_page(GFP_DMA32); in psb_mmu_alloc_pd()
[all …]
/drivers/isdn/hysdn/
hysdn_proclog.c
71 struct procdata *pd = card->proclog; in hysdn_addlog() local
75 if (!pd) in hysdn_addlog()
78 cp = pd->logtmp; in hysdn_addlog()
88 printk(KERN_INFO "%s", pd->logtmp); in hysdn_addlog()
90 put_log_buffer(card, pd->logtmp); in hysdn_addlog()
104 struct procdata *pd = card->proclog; in put_log_buffer() local
108 if (!pd) in put_log_buffer()
114 if (pd->if_used <= 0) in put_log_buffer()
121 ib->proc_ctrl = pd; /* point to own control structure */ in put_log_buffer()
123 ib->usage_cnt = pd->if_used; in put_log_buffer()
[all …]
/drivers/soc/rockchip/
pm_domains.c
98 static bool rockchip_pmu_domain_is_idle(struct rockchip_pm_domain *pd) in rockchip_pmu_domain_is_idle() argument
100 struct rockchip_pmu *pmu = pd->pmu; in rockchip_pmu_domain_is_idle()
101 const struct rockchip_domain_info *pd_info = pd->info; in rockchip_pmu_domain_is_idle()
108 static int rockchip_pmu_set_idle_request(struct rockchip_pm_domain *pd, in rockchip_pmu_set_idle_request() argument
111 const struct rockchip_domain_info *pd_info = pd->info; in rockchip_pmu_set_idle_request()
112 struct rockchip_pmu *pmu = pd->pmu; in rockchip_pmu_set_idle_request()
127 while (rockchip_pmu_domain_is_idle(pd) != idle) in rockchip_pmu_set_idle_request()
133 static int rockchip_pmu_save_qos(struct rockchip_pm_domain *pd) in rockchip_pmu_save_qos() argument
137 for (i = 0; i < pd->num_qos; i++) { in rockchip_pmu_save_qos()
138 regmap_read(pd->qos_regmap[i], in rockchip_pmu_save_qos()
[all …]
/drivers/media/tuners/
tda18271-maps.c
25 u8 pd; /* post div */ member
37 { .lomax = 32000, .pd = 0x5f, .d = 0xf0 },
38 { .lomax = 35000, .pd = 0x5e, .d = 0xe0 },
39 { .lomax = 37000, .pd = 0x5d, .d = 0xd0 },
40 { .lomax = 41000, .pd = 0x5c, .d = 0xc0 },
41 { .lomax = 44000, .pd = 0x5b, .d = 0xb0 },
42 { .lomax = 49000, .pd = 0x5a, .d = 0xa0 },
43 { .lomax = 54000, .pd = 0x59, .d = 0x90 },
44 { .lomax = 61000, .pd = 0x58, .d = 0x80 },
45 { .lomax = 65000, .pd = 0x4f, .d = 0x78 },
[all …]
/drivers/dma/
pch_dma.c
140 #define dma_readl(pd, name) \ argument
141 readl((pd)->membase + PCH_DMA_##name)
142 #define dma_writel(pd, name, val) \ argument
143 writel((val), (pd)->membase + PCH_DMA_##name)
187 struct pch_dma *pd = to_pd(chan->device); in pdc_enable_irq() local
196 val = dma_readl(pd, CTL2); in pdc_enable_irq()
203 dma_writel(pd, CTL2, val); in pdc_enable_irq()
212 struct pch_dma *pd = to_pd(chan->device); in pdc_set_dir() local
218 val = dma_readl(pd, CTL0); in pdc_set_dir()
233 dma_writel(pd, CTL0, val); in pdc_set_dir()
[all …]
/drivers/infiniband/hw/usnic/
usnic_uiom.c
196 struct usnic_uiom_pd *pd) in usnic_uiom_unmap_sorted_intervals() argument
207 iommu_unmap(pd->domain, va, PAGE_SIZE); in usnic_uiom_unmap_sorted_intervals()
214 static void __usnic_uiom_reg_release(struct usnic_uiom_pd *pd, in __usnic_uiom_reg_release() argument
228 spin_lock(&pd->lock); in __usnic_uiom_reg_release()
229 usnic_uiom_remove_interval(&pd->rb_root, vpn_start, in __usnic_uiom_reg_release()
231 usnic_uiom_unmap_sorted_intervals(&rm_intervals, pd); in __usnic_uiom_reg_release()
241 spin_unlock(&pd->lock); in __usnic_uiom_reg_release()
255 struct usnic_uiom_pd *pd = uiomr->pd; in usnic_uiom_map_sorted_intervals() local
284 err = iommu_map(pd->domain, va_start, pa_start, in usnic_uiom_map_sorted_intervals()
301 err = iommu_map(pd->domain, va_start, pa_start, in usnic_uiom_map_sorted_intervals()
[all …]
/drivers/soc/renesas/
rcar-sysc.c
181 struct rcar_sysc_pd *pd = to_rcar_pd(genpd); in rcar_sysc_pd_power_off() local
185 if (pd->flags & PD_NO_CR) { in rcar_sysc_pd_power_off()
190 if (pd->flags & PD_BUSY) { in rcar_sysc_pd_power_off()
195 return rcar_sysc_power_down(&pd->ch); in rcar_sysc_pd_power_off()
200 struct rcar_sysc_pd *pd = to_rcar_pd(genpd); in rcar_sysc_pd_power_on() local
204 if (pd->flags & PD_NO_CR) { in rcar_sysc_pd_power_on()
209 return rcar_sysc_power_up(&pd->ch); in rcar_sysc_pd_power_on()
214 static void __init rcar_sysc_pd_setup(struct rcar_sysc_pd *pd) in rcar_sysc_pd_setup() argument
216 struct generic_pm_domain *genpd = &pd->genpd; in rcar_sysc_pd_setup()
217 const char *name = pd->genpd.name; in rcar_sysc_pd_setup()
[all …]
/drivers/infiniband/hw/mthca/
mthca_pd.c
39 int mthca_pd_alloc(struct mthca_dev *dev, int privileged, struct mthca_pd *pd) in mthca_pd_alloc() argument
43 pd->privileged = privileged; in mthca_pd_alloc()
45 atomic_set(&pd->sqp_count, 0); in mthca_pd_alloc()
46 pd->pd_num = mthca_alloc(&dev->pd_table.alloc); in mthca_pd_alloc()
47 if (pd->pd_num == -1) in mthca_pd_alloc()
51 err = mthca_mr_alloc_notrans(dev, pd->pd_num, in mthca_pd_alloc()
54 &pd->ntmr); in mthca_pd_alloc()
56 mthca_free(&dev->pd_table.alloc, pd->pd_num); in mthca_pd_alloc()
62 void mthca_pd_free(struct mthca_dev *dev, struct mthca_pd *pd) in mthca_pd_free() argument
64 if (pd->privileged) in mthca_pd_free()
[all …]
/drivers/infiniband/core/
verbs.c
233 struct ib_pd *pd; in __ib_alloc_pd() local
236 pd = device->alloc_pd(device, NULL, NULL); in __ib_alloc_pd()
237 if (IS_ERR(pd)) in __ib_alloc_pd()
238 return pd; in __ib_alloc_pd()
240 pd->device = device; in __ib_alloc_pd()
241 pd->uobject = NULL; in __ib_alloc_pd()
242 pd->__internal_mr = NULL; in __ib_alloc_pd()
243 atomic_set(&pd->usecnt, 0); in __ib_alloc_pd()
244 pd->flags = flags; in __ib_alloc_pd()
247 pd->local_dma_lkey = device->local_dma_lkey; in __ib_alloc_pd()
[all …]
/drivers/infiniband/hw/hns/
hns_roce_pd.c
64 struct hns_roce_pd *pd; in hns_roce_alloc_pd() local
67 pd = kmalloc(sizeof(*pd), GFP_KERNEL); in hns_roce_alloc_pd()
68 if (!pd) in hns_roce_alloc_pd()
71 ret = hns_roce_pd_alloc(to_hr_dev(ib_dev), &pd->pdn); in hns_roce_alloc_pd()
73 kfree(pd); in hns_roce_alloc_pd()
79 if (ib_copy_to_udata(udata, &pd->pdn, sizeof(u64))) { in hns_roce_alloc_pd()
80 hns_roce_pd_free(to_hr_dev(ib_dev), pd->pdn); in hns_roce_alloc_pd()
82 kfree(pd); in hns_roce_alloc_pd()
87 return &pd->ibpd; in hns_roce_alloc_pd()
90 int hns_roce_dealloc_pd(struct ib_pd *pd) in hns_roce_dealloc_pd() argument
[all …]
/drivers/regulator/
max8952.c
140 struct max8952_platform_data *pd; in max8952_parse_dt() local
145 pd = devm_kzalloc(dev, sizeof(*pd), GFP_KERNEL); in max8952_parse_dt()
146 if (!pd) in max8952_parse_dt()
149 pd->gpio_vid0 = of_get_named_gpio(np, "max8952,vid-gpios", 0); in max8952_parse_dt()
150 pd->gpio_vid1 = of_get_named_gpio(np, "max8952,vid-gpios", 1); in max8952_parse_dt()
151 pd->gpio_en = of_get_named_gpio(np, "max8952,en-gpio", 0); in max8952_parse_dt()
153 if (of_property_read_u32(np, "max8952,default-mode", &pd->default_mode)) in max8952_parse_dt()
157 pd->dvs_mode, ARRAY_SIZE(pd->dvs_mode)); in max8952_parse_dt()
163 for (i = 0; i < ARRAY_SIZE(pd->dvs_mode); ++i) { in max8952_parse_dt()
164 if (pd->dvs_mode[i] < 770000 || pd->dvs_mode[i] > 1400000) { in max8952_parse_dt()
[all …]
/drivers/infiniband/sw/rdmavt/
pd.c
66 struct rvt_pd *pd; in rvt_alloc_pd() local
69 pd = kmalloc(sizeof(*pd), GFP_KERNEL); in rvt_alloc_pd()
70 if (!pd) { in rvt_alloc_pd()
84 kfree(pd); in rvt_alloc_pd()
93 pd->user = udata ? 1 : 0; in rvt_alloc_pd()
95 ret = &pd->ibpd; in rvt_alloc_pd()
109 struct rvt_pd *pd = ibpd_to_rvtpd(ibpd); in rvt_dealloc_pd() local
116 kfree(pd); in rvt_dealloc_pd()
/drivers/misc/eeprom/
eeprom_93xx46.c
281 struct eeprom_93xx46_platform_data *pd = edev->pdata; in eeprom_93xx46_eral() local
322 if (pd->finish) in eeprom_93xx46_eral()
323 pd->finish(edev); in eeprom_93xx46_eral()
378 struct eeprom_93xx46_platform_data *pd; in eeprom_93xx46_probe_dt() local
384 pd = devm_kzalloc(&spi->dev, sizeof(*pd), GFP_KERNEL); in eeprom_93xx46_probe_dt()
385 if (!pd) in eeprom_93xx46_probe_dt()
395 pd->flags |= EE_ADDR8; in eeprom_93xx46_probe_dt()
397 pd->flags |= EE_ADDR16; in eeprom_93xx46_probe_dt()
404 pd->flags |= EE_READONLY; in eeprom_93xx46_probe_dt()
416 pd->select = gpio_to_desc(gpio); in eeprom_93xx46_probe_dt()
[all …]
/drivers/ata/
pata_pxa.c
51 struct pata_pxa_data *pd = d; in pxa_ata_dma_irq() local
54 status = dmaengine_tx_status(pd->dma_chan, pd->dma_cookie, NULL); in pxa_ata_dma_irq()
56 complete(&pd->dma_done); in pxa_ata_dma_irq()
64 struct pata_pxa_data *pd = qc->ap->private_data; in pxa_qc_prep() local
72 tx = dmaengine_prep_slave_sg(pd->dma_chan, qc->sg, qc->n_elem, dir, in pxa_qc_prep()
79 tx->callback_param = pd; in pxa_qc_prep()
80 pd->dma_cookie = dmaengine_submit(tx); in pxa_qc_prep()
97 struct pata_pxa_data *pd = qc->ap->private_data; in pxa_bmdma_start() local
98 init_completion(&pd->dma_done); in pxa_bmdma_start()
99 dma_async_issue_pending(pd->dma_chan); in pxa_bmdma_start()
[all …]
/drivers/hsi/controllers/
omap_ssi_core.c
360 static int ssi_get_iomem(struct platform_device *pd, in ssi_get_iomem() argument
365 struct hsi_controller *ssi = platform_get_drvdata(pd); in ssi_get_iomem()
367 mem = platform_get_resource_byname(pd, IORESOURCE_MEM, name); in ssi_get_iomem()
381 struct platform_device *pd) in ssi_add_controller() argument
388 dev_err(&pd->dev, "not enough memory for omap ssi\n"); in ssi_add_controller()
398 ssi->device.parent = &pd->dev; in ssi_add_controller()
402 err = ssi_get_iomem(pd, "sys", &omap_ssi->sys, NULL); in ssi_add_controller()
405 err = ssi_get_iomem(pd, "gdd", &omap_ssi->gdd, NULL); in ssi_add_controller()
408 err = platform_get_irq_byname(pd, "gdd_mpu"); in ssi_add_controller()
410 dev_err(&pd->dev, "GDD IRQ resource missing\n"); in ssi_add_controller()
[all …]
/drivers/input/misc/
soc_button_array.c
74 struct platform_device *pd; in soc_button_device_create() local
116 pd = platform_device_alloc("gpio-keys", PLATFORM_DEVID_AUTO); in soc_button_device_create()
117 if (!pd) { in soc_button_device_create()
122 error = platform_device_add_data(pd, gpio_keys_pdata, in soc_button_device_create()
127 error = platform_device_add(pd); in soc_button_device_create()
131 return pd; in soc_button_device_create()
134 platform_device_put(pd); in soc_button_device_create()
159 struct platform_device *pd; in soc_button_probe() local
176 pd = soc_button_device_create(pdev, button_info, i == 0); in soc_button_probe()
177 if (IS_ERR(pd)) { in soc_button_probe()
[all …]
/drivers/infiniband/hw/mlx4/
mr.c
58 struct ib_mr *mlx4_ib_get_dma_mr(struct ib_pd *pd, int acc) in mlx4_ib_get_dma_mr() argument
67 err = mlx4_mr_alloc(to_mdev(pd->device)->dev, to_mpd(pd)->pdn, 0, in mlx4_ib_get_dma_mr()
72 err = mlx4_mr_enable(to_mdev(pd->device)->dev, &mr->mmr); in mlx4_ib_get_dma_mr()
82 (void) mlx4_mr_free(to_mdev(pd->device)->dev, &mr->mmr); in mlx4_ib_get_dma_mr()
134 struct ib_mr *mlx4_ib_reg_user_mr(struct ib_pd *pd, u64 start, u64 length, in mlx4_ib_reg_user_mr() argument
138 struct mlx4_ib_dev *dev = to_mdev(pd->device); in mlx4_ib_reg_user_mr()
150 mr->umem = ib_umem_get(pd->uobject->context, start, length, in mlx4_ib_reg_user_mr()
160 err = mlx4_mr_alloc(dev->dev, to_mpd(pd)->pdn, virt_addr, length, in mlx4_ib_reg_user_mr()
178 (void) mlx4_mr_free(to_mdev(pd->device)->dev, &mr->mmr); in mlx4_ib_reg_user_mr()
191 int mr_access_flags, struct ib_pd *pd, in mlx4_ib_rereg_user_mr() argument
[all …]
/drivers/spi/
spi-lm70llp.c
82 struct pardevice *pd; member
198 struct pardevice *pd; in spi_lm70llp_attach() local
235 pd = parport_register_dev_model(p, DRVNAME, &lm70llp_cb, 0); in spi_lm70llp_attach()
237 if (!pd) { in spi_lm70llp_attach()
241 pp->pd = pd; in spi_lm70llp_attach()
243 status = parport_claim(pd); in spi_lm70llp_attach()
252 dev_warn(&pd->dev, "spi_bitbang_start failed with status %d\n", in spi_lm70llp_attach()
280 dev_warn(&pd->dev, "spi_new_device failed\n"); in spi_lm70llp_attach()
295 parport_release(pp->pd); in spi_lm70llp_attach()
297 parport_unregister_device(pd); in spi_lm70llp_attach()
[all …]
