
Searched for refs:mdev — results 1 – 25 of 89, sorted by relevance.


/drivers/infiniband/hw/mthca/
mthca_main.c
137 static int mthca_tune_pci(struct mthca_dev *mdev) in mthca_tune_pci() argument
143 if (pci_find_capability(mdev->pdev, PCI_CAP_ID_PCIX)) { in mthca_tune_pci()
144 if (pcix_set_mmrbc(mdev->pdev, pcix_get_max_mmrbc(mdev->pdev))) { in mthca_tune_pci()
145 mthca_err(mdev, "Couldn't set PCI-X max read count, " in mthca_tune_pci()
149 } else if (!(mdev->mthca_flags & MTHCA_FLAG_PCIE)) in mthca_tune_pci()
150 mthca_info(mdev, "No PCI-X capability, not setting RBC.\n"); in mthca_tune_pci()
152 if (pci_is_pcie(mdev->pdev)) { in mthca_tune_pci()
153 if (pcie_set_readrq(mdev->pdev, 4096)) { in mthca_tune_pci()
154 mthca_err(mdev, "Couldn't write PCI Express read request, " in mthca_tune_pci()
158 } else if (mdev->mthca_flags & MTHCA_FLAG_PCIE) in mthca_tune_pci()
[all …]
mthca_reset.c
41 int mthca_reset(struct mthca_dev *mdev) in mthca_reset() argument
69 if (!(mdev->mthca_flags & MTHCA_FLAG_PCIE)) { in mthca_reset()
72 while ((bridge = pci_get_device(mdev->pdev->vendor, in mthca_reset()
73 mdev->pdev->device + 2, in mthca_reset()
76 bridge->subordinate == mdev->pdev->bus) { in mthca_reset()
77 mthca_dbg(mdev, "Found bridge: %s\n", in mthca_reset()
89 mthca_warn(mdev, "No bridge found for %s\n", in mthca_reset()
90 pci_name(mdev->pdev)); in mthca_reset()
99 mthca_err(mdev, "Couldn't allocate memory to save HCA " in mthca_reset()
107 if (pci_read_config_dword(mdev->pdev, i * 4, hca_header + i)) { in mthca_reset()
[all …]
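
In the mthca hits above, mdev is the driver-private struct mthca_dev, and mthca_tune_pci() does best-effort bus tuning at probe time: a failed PCI-X or PCIe write is reported, and only some failures abort the probe. The stand-alone C sketch below models just that control flow; fake_dev, set_pcix_mmrbc() and set_pcie_readrq() are illustrative stand-ins, not the real mthca or PCI API.

/*
 * Minimal model of the mthca_tune_pci() flow: tuning is best-effort,
 * a failed write is logged and, for these two cases, aborts the probe.
 * All names are stand-ins.
 */
#include <stdio.h>
#include <stdbool.h>

struct fake_dev { bool has_pcix; bool is_pcie; };

static int set_pcix_mmrbc(struct fake_dev *d) { (void)d; return 0; }            /* pretend success */
static int set_pcie_readrq(struct fake_dev *d, int bytes) { (void)d; (void)bytes; return 0; }

static int tune_pci(struct fake_dev *mdev)
{
	if (mdev->has_pcix) {
		if (set_pcix_mmrbc(mdev)) {
			fprintf(stderr, "Couldn't set PCI-X max read count, aborting\n");
			return -1;                      /* fatal, as in mthca_tune_pci() */
		}
	} else if (!mdev->is_pcie) {
		fprintf(stderr, "No PCI-X capability, not setting RBC\n");
	}

	if (mdev->is_pcie && set_pcie_readrq(mdev, 4096)) {
		fprintf(stderr, "Couldn't write PCI Express read request, aborting\n");
		return -1;
	}
	return 0;
}

int main(void)
{
	struct fake_dev dev = { .has_pcix = false, .is_pcie = true };
	return tune_pci(&dev) ? 1 : 0;
}
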
/drivers/media/platform/s5p-tv/
mixer_drv.c
45 void mxr_get_mbus_fmt(struct mxr_device *mdev, in mxr_get_mbus_fmt() argument
51 mutex_lock(&mdev->mutex); in mxr_get_mbus_fmt()
52 sd = to_outsd(mdev); in mxr_get_mbus_fmt()
55 mutex_unlock(&mdev->mutex); in mxr_get_mbus_fmt()
58 void mxr_streamer_get(struct mxr_device *mdev) in mxr_streamer_get() argument
60 mutex_lock(&mdev->mutex); in mxr_streamer_get()
61 ++mdev->n_streamer; in mxr_streamer_get()
62 mxr_dbg(mdev, "%s(%d)\n", __func__, mdev->n_streamer); in mxr_streamer_get()
63 if (mdev->n_streamer == 1) { in mxr_streamer_get()
64 struct v4l2_subdev *sd = to_outsd(mdev); in mxr_streamer_get()
[all …]
mixer_reg.c
22 static inline u32 vp_read(struct mxr_device *mdev, u32 reg_id) in vp_read() argument
24 return readl(mdev->res.vp_regs + reg_id); in vp_read()
27 static inline void vp_write(struct mxr_device *mdev, u32 reg_id, u32 val) in vp_write() argument
29 writel(val, mdev->res.vp_regs + reg_id); in vp_write()
32 static inline void vp_write_mask(struct mxr_device *mdev, u32 reg_id, in vp_write_mask() argument
35 u32 old = vp_read(mdev, reg_id); in vp_write_mask()
38 writel(val, mdev->res.vp_regs + reg_id); in vp_write_mask()
41 static inline u32 mxr_read(struct mxr_device *mdev, u32 reg_id) in mxr_read() argument
43 return readl(mdev->res.mxr_regs + reg_id); in mxr_read()
46 static inline void mxr_write(struct mxr_device *mdev, u32 reg_id, u32 val) in mxr_write() argument
[all …]
mixer_video.c
36 struct mxr_device *mdev, char *module_name) in find_and_register_subdev() argument
45 mxr_warn(mdev, "module %s is missing\n", module_name); in find_and_register_subdev()
52 mxr_warn(mdev, "module %s provides no subdev!\n", module_name); in find_and_register_subdev()
56 ret = v4l2_device_register_subdev(&mdev->v4l2_dev, sd); in find_and_register_subdev()
58 mxr_warn(mdev, "failed to register subdev %s\n", sd->name); in find_and_register_subdev()
66 int mxr_acquire_video(struct mxr_device *mdev, in mxr_acquire_video() argument
69 struct device *dev = mdev->dev; in mxr_acquire_video()
70 struct v4l2_device *v4l2_dev = &mdev->v4l2_dev; in mxr_acquire_video()
75 strlcpy(v4l2_dev->name, dev_name(mdev->dev), sizeof(v4l2_dev->name)); in mxr_acquire_video()
79 mxr_err(mdev, "could not register v4l2 device.\n"); in mxr_acquire_video()
[all …]
mixer.h
155 struct mxr_device *mdev; member
278 static inline struct mxr_output *to_output(struct mxr_device *mdev) in to_output() argument
280 return mdev->output[mdev->current_output]; in to_output()
284 static inline struct v4l2_subdev *to_outsd(struct mxr_device *mdev) in to_outsd() argument
286 struct mxr_output *out = to_output(mdev); in to_outsd()
294 int mxr_acquire_video(struct mxr_device *mdev,
298 void mxr_release_video(struct mxr_device *mdev);
300 struct mxr_layer *mxr_graph_layer_create(struct mxr_device *mdev, int idx);
301 struct mxr_layer *mxr_vp_layer_create(struct mxr_device *mdev, int idx);
302 struct mxr_layer *mxr_base_layer_create(struct mxr_device *mdev,
[all …]
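
The mixer matches use mdev for struct mxr_device: register access goes through small readl/writel wrappers (vp_read, vp_write, vp_write_mask), and mxr_streamer_get() counts users so that the first one powers the pipeline up under mdev->mutex. Below is a minimal user-space model of those two idioms; the register array and the "enable" bit are invented for illustration, and the mutex is omitted.

/*
 * Model of the read-modify-write register helper and the first-user
 * streamer count. A plain array stands in for the iomapped registers.
 */
#include <stdint.h>
#include <stdio.h>

struct model_dev {
	uint32_t regs[16];   /* stands in for mdev->res.vp_regs */
	int n_streamer;      /* stands in for mdev->n_streamer */
};

static uint32_t vp_read(struct model_dev *mdev, unsigned reg) { return mdev->regs[reg]; }
static void vp_write(struct model_dev *mdev, unsigned reg, uint32_t val) { mdev->regs[reg] = val; }

/* read-modify-write under a mask, as in vp_write_mask() */
static void vp_write_mask(struct model_dev *mdev, unsigned reg, uint32_t val, uint32_t mask)
{
	uint32_t old = vp_read(mdev, reg);
	vp_write(mdev, reg, (old & ~mask) | (val & mask));
}

/* first caller starts the hardware, as in mxr_streamer_get() (mutex omitted) */
static void streamer_get(struct model_dev *mdev)
{
	if (++mdev->n_streamer == 1)
		vp_write_mask(mdev, 0, 1, 1);   /* invented "enable" bit */
}

int main(void)
{
	struct model_dev dev = { .regs = {0}, .n_streamer = 0 };

	streamer_get(&dev);
	printf("reg0=%#x streamers=%d\n", (unsigned)dev.regs[0], dev.n_streamer);
	return 0;
}
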
/drivers/block/drbd/
drbd_worker.c
70 struct drbd_conf *mdev; in drbd_md_io_complete() local
73 mdev = container_of(md_io, struct drbd_conf, md_io); in drbd_md_io_complete()
88 drbd_md_put_buffer(mdev); in drbd_md_io_complete()
90 wake_up(&mdev->misc_wait); in drbd_md_io_complete()
92 if (mdev->ldev) /* special case: drbd_md_read() during drbd_adm_attach() */ in drbd_md_io_complete()
93 put_ldev(mdev); in drbd_md_io_complete()
102 struct drbd_conf *mdev = peer_req->w.mdev; in drbd_endio_read_sec_final() local
104 spin_lock_irqsave(&mdev->tconn->req_lock, flags); in drbd_endio_read_sec_final()
105 mdev->read_cnt += peer_req->i.size >> 9; in drbd_endio_read_sec_final()
107 if (list_empty(&mdev->read_ee)) in drbd_endio_read_sec_final()
[all …]
drbd_actlog.c
108 void *drbd_md_get_buffer(struct drbd_conf *mdev) in drbd_md_get_buffer() argument
112 wait_event(mdev->misc_wait, in drbd_md_get_buffer()
113 (r = atomic_cmpxchg(&mdev->md_io_in_use, 0, 1)) == 0 || in drbd_md_get_buffer()
114 mdev->state.disk <= D_FAILED); in drbd_md_get_buffer()
116 return r ? NULL : page_address(mdev->md_io_page); in drbd_md_get_buffer()
119 void drbd_md_put_buffer(struct drbd_conf *mdev) in drbd_md_put_buffer() argument
121 if (atomic_dec_and_test(&mdev->md_io_in_use)) in drbd_md_put_buffer()
122 wake_up(&mdev->misc_wait); in drbd_md_put_buffer()
125 void wait_until_done_or_force_detached(struct drbd_conf *mdev, struct drbd_backing_dev *bdev, in wait_until_done_or_force_detached() argument
137 dt = wait_event_timeout(mdev->misc_wait, in wait_until_done_or_force_detached()
[all …]
drbd_main.c
169 int _get_ldev_if_state(struct drbd_conf *mdev, enum drbd_disk_state mins) in _get_ldev_if_state() argument
173 atomic_inc(&mdev->local_cnt); in _get_ldev_if_state()
174 io_allowed = (mdev->state.disk >= mins); in _get_ldev_if_state()
176 if (atomic_dec_and_test(&mdev->local_cnt)) in _get_ldev_if_state()
177 wake_up(&mdev->misc_wait); in _get_ldev_if_state()
310 void tl_abort_disk_io(struct drbd_conf *mdev) in tl_abort_disk_io() argument
312 struct drbd_tconn *tconn = mdev->tconn; in tl_abort_disk_io()
319 if (req->w.mdev != mdev) in tl_abort_disk_io()
499 struct drbd_conf *mdev; in conn_lowest_minor() local
503 mdev = idr_get_next(&tconn->volumes, &vnr); in conn_lowest_minor()
[all …]
drbd_receiver.c
66 static int drbd_disconnected(struct drbd_conf *mdev);
153 static struct page *__drbd_alloc_pages(struct drbd_conf *mdev, in __drbd_alloc_pages() argument
199 static void reclaim_finished_net_peer_reqs(struct drbd_conf *mdev, in reclaim_finished_net_peer_reqs() argument
210 list_for_each_safe(le, tle, &mdev->net_ee) { in reclaim_finished_net_peer_reqs()
218 static void drbd_kick_lo_and_reclaim_net(struct drbd_conf *mdev) in drbd_kick_lo_and_reclaim_net() argument
223 spin_lock_irq(&mdev->tconn->req_lock); in drbd_kick_lo_and_reclaim_net()
224 reclaim_finished_net_peer_reqs(mdev, &reclaimed); in drbd_kick_lo_and_reclaim_net()
225 spin_unlock_irq(&mdev->tconn->req_lock); in drbd_kick_lo_and_reclaim_net()
228 drbd_free_net_peer_req(mdev, peer_req); in drbd_kick_lo_and_reclaim_net()
243 struct page *drbd_alloc_pages(struct drbd_conf *mdev, unsigned int number, in drbd_alloc_pages() argument
[all …]
drbd_nl.c
105 struct drbd_conf *mdev; member
214 adm_ctx.mdev = minor_to_mdev(d_in->minor); in drbd_adm_prepare()
217 if (!adm_ctx.mdev && (flags & DRBD_ADM_NEED_MINOR)) { in drbd_adm_prepare()
231 if (adm_ctx.mdev) { in drbd_adm_prepare()
247 if (adm_ctx.mdev && adm_ctx.tconn && in drbd_adm_prepare()
248 adm_ctx.mdev->tconn != adm_ctx.tconn) { in drbd_adm_prepare()
251 adm_ctx.mdev->tconn->name); in drbd_adm_prepare()
255 if (adm_ctx.mdev && in drbd_adm_prepare()
257 adm_ctx.volume != adm_ctx.mdev->vnr) { in drbd_adm_prepare()
260 adm_ctx.mdev->vnr, adm_ctx.mdev->tconn->name); in drbd_adm_prepare()
[all …]
drbd_proc.c
69 static void drbd_syncer_progress(struct drbd_conf *mdev, struct seq_file *seq) in drbd_syncer_progress() argument
76 drbd_get_syncer_progress(mdev, &rs_left, &res); in drbd_syncer_progress()
88 if (mdev->state.conn == C_VERIFY_S || mdev->state.conn == C_VERIFY_T) in drbd_syncer_progress()
95 if (mdev->rs_total > (4UL << (30 - BM_BLOCK_SHIFT))) in drbd_syncer_progress()
98 (unsigned long) Bit2KB(mdev->rs_total >> 10)); in drbd_syncer_progress()
102 (unsigned long) Bit2KB(mdev->rs_total)); in drbd_syncer_progress()
117 i = (mdev->rs_last_mark + 2) % DRBD_SYNC_MARKS; in drbd_syncer_progress()
118 dt = (jiffies - mdev->rs_mark_time[i]) / HZ; in drbd_syncer_progress()
124 db = mdev->rs_mark_left[i] - rs_left; in drbd_syncer_progress()
137 i = (mdev->rs_last_mark + DRBD_SYNC_MARKS-1) % DRBD_SYNC_MARKS; in drbd_syncer_progress()
[all …]
drbd_req.c
34 static bool drbd_may_do_local_read(struct drbd_conf *mdev, sector_t sector, int size);
37 static void _drbd_start_io_acct(struct drbd_conf *mdev, struct drbd_request *req) in _drbd_start_io_acct() argument
42 part_round_stats(cpu, &mdev->vdisk->part0); in _drbd_start_io_acct()
43 part_stat_inc(cpu, &mdev->vdisk->part0, ios[rw]); in _drbd_start_io_acct()
44 part_stat_add(cpu, &mdev->vdisk->part0, sectors[rw], req->i.size >> 9); in _drbd_start_io_acct()
47 part_inc_in_flight(&mdev->vdisk->part0, rw); in _drbd_start_io_acct()
52 static void _drbd_end_io_acct(struct drbd_conf *mdev, struct drbd_request *req) in _drbd_end_io_acct() argument
58 part_stat_add(cpu, &mdev->vdisk->part0, ticks[rw], duration); in _drbd_end_io_acct()
59 part_round_stats(cpu, &mdev->vdisk->part0); in _drbd_end_io_acct()
60 part_dec_in_flight(&mdev->vdisk->part0, rw); in _drbd_end_io_acct()
[all …]
drbd_state.c
33 extern void tl_abort_disk_io(struct drbd_conf *mdev);
53 static void after_state_ch(struct drbd_conf *mdev, union drbd_state os,
58 static union drbd_state sanitize_state(struct drbd_conf *mdev, union drbd_state ns,
68 struct drbd_conf *mdev; in conn_all_vols_unconf() local
73 idr_for_each_entry(&tconn->volumes, mdev, vnr) { in conn_all_vols_unconf()
74 if (mdev->state.disk != D_DISKLESS || in conn_all_vols_unconf()
75 mdev->state.conn != C_STANDALONE || in conn_all_vols_unconf()
76 mdev->state.role != R_SECONDARY) { in conn_all_vols_unconf()
108 struct drbd_conf *mdev; in conn_highest_role() local
112 idr_for_each_entry(&tconn->volumes, mdev, vnr) in conn_highest_role()
[all …]
drbd_int.h
103 #define DEV (disk_to_dev(mdev->vdisk))
148 _drbd_insert_fault(struct drbd_conf *mdev, unsigned int type);
151 drbd_insert_fault(struct drbd_conf *mdev, unsigned int type) { in drbd_insert_fault() argument
155 _drbd_insert_fault(mdev, type); in drbd_insert_fault()
256 extern void INFO_bm_xfer_stats(struct drbd_conf *mdev,
539 struct drbd_conf *mdev; member
785 int (*io_fn)(struct drbd_conf *mdev);
786 void (*done)(struct drbd_conf *mdev, int rv);
1056 static inline unsigned int mdev_to_minor(struct drbd_conf *mdev) in mdev_to_minor() argument
1058 return mdev->minor; in mdev_to_minor()
[all …]
drbd_bitmap.c
116 static void __bm_print_lock_info(struct drbd_conf *mdev, const char *func) in __bm_print_lock_info() argument
118 struct drbd_bitmap *b = mdev->bitmap; in __bm_print_lock_info()
122 drbd_task_to_thread_name(mdev->tconn, current), in __bm_print_lock_info()
124 drbd_task_to_thread_name(mdev->tconn, b->bm_task)); in __bm_print_lock_info()
127 void drbd_bm_lock(struct drbd_conf *mdev, char *why, enum bm_flag flags) in drbd_bm_lock() argument
129 struct drbd_bitmap *b = mdev->bitmap; in drbd_bm_lock()
141 drbd_task_to_thread_name(mdev->tconn, current), in drbd_bm_lock()
143 drbd_task_to_thread_name(mdev->tconn, b->bm_task)); in drbd_bm_lock()
154 void drbd_bm_unlock(struct drbd_conf *mdev) in drbd_bm_unlock() argument
156 struct drbd_bitmap *b = mdev->bitmap; in drbd_bm_unlock()
[all …]
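
Throughout drbd, mdev is the per-volume struct drbd_conf. The drbd_actlog.c hit shows the metadata-buffer handshake: drbd_md_get_buffer() claims a single shared page with atomic_cmpxchg() inside wait_event(), and drbd_md_put_buffer() releases it and wakes waiters on mdev->misc_wait. The sketch below models only the ownership rule with C11 atomics and a busy-wait; it is not the real API and skips the wait queue and the D_FAILED escape.

/*
 * Model of the drbd_md_get_buffer()/drbd_md_put_buffer() pairing:
 * one shared metadata buffer guarded by an atomic "in use" flag.
 */
#include <stdatomic.h>
#include <stdio.h>

static atomic_int md_io_in_use;          /* 0 = free, 1 = owned */
static char md_io_page[4096];            /* the one shared buffer */

static void *md_get_buffer(void)
{
	int expected = 0;

	/* drbd: atomic_cmpxchg(&mdev->md_io_in_use, 0, 1) inside wait_event() */
	while (!atomic_compare_exchange_weak(&md_io_in_use, &expected, 1))
		expected = 0;                /* busy-wait stand-in for the wait queue */
	return md_io_page;
}

static void md_put_buffer(void)
{
	atomic_store(&md_io_in_use, 0);      /* real code also wakes mdev->misc_wait */
}

int main(void)
{
	char *buf = md_get_buffer();

	buf[0] = 0;                          /* exclusive access while held */
	md_put_buffer();
	puts("buffer cycled once");
	return 0;
}
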
/drivers/sh/maple/
maple.c
139 struct maple_device *mdev; in maple_release_device() local
142 mdev = to_maple_dev(dev); in maple_release_device()
143 mq = mdev->mq; in maple_release_device()
146 kfree(mdev); in maple_release_device()
157 int maple_add_packet(struct maple_device *mdev, u32 function, u32 command, in maple_add_packet() argument
172 mdev->mq->command = command; in maple_add_packet()
173 mdev->mq->length = length; in maple_add_packet()
176 mdev->mq->sendbuf = sendbuf; in maple_add_packet()
179 list_add_tail(&mdev->mq->list, &maple_waitq); in maple_add_packet()
186 static struct mapleq *maple_allocq(struct maple_device *mdev) in maple_allocq() argument
[all …]
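
maple_add_packet() above fills the device's single queue entry (mdev->mq) and appends it to the global maple_waitq for the bus scan to pick up. A simplified model follows, using a plain singly linked list in place of the kernel's list_head; all names are illustrative.

/*
 * Model of maple_add_packet(): fill the per-device queue entry and
 * append it to a global waiting list.
 */
#include <stdint.h>
#include <stdio.h>

struct mapleq {
	uint32_t command;
	size_t length;
	void *sendbuf;
	struct mapleq *next;
};

struct maple_device { struct mapleq mq; };

static struct mapleq *waitq_head;
static struct mapleq **waitq_tail = &waitq_head;

static int maple_add_packet_model(struct maple_device *mdev, uint32_t command,
				  size_t length, void *sendbuf)
{
	mdev->mq.command = command;
	mdev->mq.length = length;
	mdev->mq.sendbuf = sendbuf;
	mdev->mq.next = NULL;
	*waitq_tail = &mdev->mq;             /* list_add_tail(&mdev->mq->list, &maple_waitq) */
	waitq_tail = &mdev->mq.next;
	return 0;
}

int main(void)
{
	static struct maple_device dev;
	uint32_t payload = 0;

	maple_add_packet_model(&dev, 0x01, sizeof(payload), &payload);
	printf("queued command %#x, length %zu\n",
	       (unsigned)waitq_head->command, waitq_head->length);
	return 0;
}
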
/drivers/mtd/maps/
vmu-flash.c
26 struct maple_device *mdev; member
62 struct maple_device *mdev; in ofs_to_block() local
68 mdev = mpart->mdev; in ofs_to_block()
69 card = maple_get_drvdata(mdev); in ofs_to_block()
93 struct maple_device *mdev; in vmu_blockread() local
96 mdev = mq->dev; in vmu_blockread()
97 card = maple_get_drvdata(mdev); in vmu_blockread()
116 struct maple_device *mdev; in maple_vmu_read_block() local
123 mdev = mpart->mdev; in maple_vmu_read_block()
125 card = maple_get_drvdata(mdev); in maple_vmu_read_block()
[all …]
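
The vmu-flash hits show the usual drvdata round-trip: the card state is attached to the maple device at probe time and fetched back later with maple_get_drvdata(mpart->mdev). The sketch below models that pattern with stand-in types; maple_set_drvdata() here is assumed for symmetry rather than quoted from the driver.

/*
 * Model of the drvdata round-trip: store a private pointer on the
 * device, retrieve it later through the partition's mdev field.
 */
#include <stdio.h>

struct maple_device { void *drvdata; };
struct memcard { int partitions; };
struct mdev_part { struct maple_device *mdev; };

static void maple_set_drvdata(struct maple_device *mdev, void *data) { mdev->drvdata = data; }
static void *maple_get_drvdata(struct maple_device *mdev) { return mdev->drvdata; }

int main(void)
{
	struct maple_device dev = { 0 };
	struct memcard card = { .partitions = 2 };
	struct mdev_part mpart = { .mdev = &dev };
	struct memcard *found;

	maple_set_drvdata(&dev, &card);          /* done at probe time */
	found = maple_get_drvdata(mpart.mdev);   /* done in ofs_to_block() and friends */
	printf("partitions=%d\n", found->partitions);
	return 0;
}
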
/drivers/net/ethernet/mellanox/mlx4/
en_main.c
91 level, DRV_NAME, dev_name(&priv->mdev->pdev->dev), in en_print()
109 if (mlx4_is_mfunc(priv->mdev->dev) && in mlx4_en_update_loopback_state()
116 if (mlx4_is_mfunc(priv->mdev->dev) || priv->validate_loopback) in mlx4_en_update_loopback_state()
120 static int mlx4_en_get_profile(struct mlx4_en_dev *mdev) in mlx4_en_get_profile() argument
122 struct mlx4_en_profile *params = &mdev->profile; in mlx4_en_get_profile()
128 if (params->udp_rss && !(mdev->dev->caps.flags in mlx4_en_get_profile()
130 mlx4_warn(mdev, "UDP RSS is not supported on this device.\n"); in mlx4_en_get_profile()
158 struct mlx4_en_dev *mdev = (struct mlx4_en_dev *) endev_ptr; in mlx4_en_event() local
164 if (!mdev->pndev[port]) in mlx4_en_event()
166 priv = netdev_priv(mdev->pndev[port]); in mlx4_en_event()
[all …]
en_clock.c
41 struct mlx4_en_dev *mdev = priv->mdev; in mlx4_en_timestamp_config() local
45 mutex_lock(&mdev->state_lock); in mlx4_en_timestamp_config()
75 mutex_unlock(&mdev->state_lock); in mlx4_en_timestamp_config()
84 struct mlx4_en_dev *mdev = in mlx4_en_read_clock() local
86 struct mlx4_dev *dev = mdev->dev; in mlx4_en_read_clock()
102 void mlx4_en_fill_hwtstamps(struct mlx4_en_dev *mdev, in mlx4_en_fill_hwtstamps() argument
108 nsec = timecounter_cyc2time(&mdev->clock, timestamp); in mlx4_en_fill_hwtstamps()
114 void mlx4_en_init_timestamp(struct mlx4_en_dev *mdev) in mlx4_en_init_timestamp() argument
116 struct mlx4_dev *dev = mdev->dev; in mlx4_en_init_timestamp()
119 memset(&mdev->cycles, 0, sizeof(mdev->cycles)); in mlx4_en_init_timestamp()
[all …]
en_netdev.c
148 rc = mlx4_flow_detach(priv->mdev->dev, filter->reg_id); in mlx4_en_filter_work()
153 rc = mlx4_flow_attach(priv->mdev->dev, &rule, &filter->reg_id); in mlx4_en_filter_work()
217 rc = mlx4_flow_detach(priv->mdev->dev, filter->reg_id); in mlx4_en_filter_free()
296 queue_work(priv->mdev->workqueue, &filter->work); in mlx4_en_filter_rfs()
363 struct mlx4_en_dev *mdev = priv->mdev; in mlx4_en_vlan_rx_add_vid() local
372 mutex_lock(&mdev->state_lock); in mlx4_en_vlan_rx_add_vid()
373 if (mdev->device_up && priv->port_up) { in mlx4_en_vlan_rx_add_vid()
374 err = mlx4_SET_VLAN_FLTR(mdev->dev, priv); in mlx4_en_vlan_rx_add_vid()
378 if (mlx4_register_vlan(mdev->dev, priv->port, vid, &idx)) in mlx4_en_vlan_rx_add_vid()
380 mutex_unlock(&mdev->state_lock); in mlx4_en_vlan_rx_add_vid()
[all …]
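
In mlx4_en, each netdev's priv keeps a pointer to the shared struct mlx4_en_dev as priv->mdev, and operations such as mlx4_en_vlan_rx_add_vid() take mutex_lock(&mdev->state_lock) and check device_up/port_up before touching the device. The pthread-based model below shows only that locking shape; en_dev, en_priv and vlan_add() are stand-ins, and the real code also issues firmware commands.

/*
 * Model of the mlx4_en state_lock idiom: reach the shared device
 * through priv->mdev, lock its state, and act only when up.
 */
#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

struct en_dev {
	pthread_mutex_t state_lock;
	bool device_up;
};

struct en_priv {
	struct en_dev *mdev;
	bool port_up;
};

static int vlan_add(struct en_priv *priv, unsigned vid)
{
	int err = 0;

	pthread_mutex_lock(&priv->mdev->state_lock);
	if (priv->mdev->device_up && priv->port_up)
		printf("programming VLAN filter for vid %u\n", vid);  /* mlx4_SET_VLAN_FLTR() */
	else
		err = -1;                                             /* defer until the port is up */
	pthread_mutex_unlock(&priv->mdev->state_lock);
	return err;
}

int main(void)
{
	static struct en_dev dev = {
		.state_lock = PTHREAD_MUTEX_INITIALIZER,
		.device_up = true,
	};
	struct en_priv priv = { .mdev = &dev, .port_up = true };

	return vlan_add(&priv, 100) ? 1 : 0;
}
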
/drivers/media/
media-devnode.c
62 struct media_devnode *mdev = to_media_devnode(cd); in media_devnode_release() local
67 cdev_del(&mdev->cdev); in media_devnode_release()
70 clear_bit(mdev->minor, media_devnode_nums); in media_devnode_release()
75 if (mdev->release) in media_devnode_release()
76 mdev->release(mdev); in media_devnode_release()
86 struct media_devnode *mdev = media_devnode_data(filp); in media_read() local
88 if (!mdev->fops->read) in media_read()
90 if (!media_devnode_is_registered(mdev)) in media_read()
92 return mdev->fops->read(filp, buf, sz, off); in media_read()
98 struct media_devnode *mdev = media_devnode_data(filp); in media_write() local
[all …]
media-device.c
68 static struct media_entity *find_entity(struct media_device *mdev, u32 id) in find_entity() argument
75 spin_lock(&mdev->lock); in find_entity()
77 media_device_for_each_entity(entity, mdev) { in find_entity()
80 spin_unlock(&mdev->lock); in find_entity()
85 spin_unlock(&mdev->lock); in find_entity()
90 static long media_device_enum_entities(struct media_device *mdev, in media_device_enum_entities() argument
100 ent = find_entity(mdev, u_ent.id); in media_device_enum_entities()
132 static long __media_device_enum_links(struct media_device *mdev, in __media_device_enum_links() argument
137 entity = find_entity(mdev, links->entity); in __media_device_enum_links()
177 static long media_device_enum_links(struct media_device *mdev, in media_device_enum_links() argument
[all …]
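
For the media core, mdev names either a struct media_devnode or a struct media_device: each file operation checks that the node supplies the hook and is still registered before dispatching, as media_read() does. A compact model of that guarded dispatch follows, with invented types and plain error values standing in for -EINVAL/-EIO.

/*
 * Model of the media_devnode dispatch: verify the hook exists and the
 * node is still registered, then forward the call.
 */
#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

struct devnode;
struct devnode_fops {
	long (*read)(struct devnode *dn, char *buf, size_t len);
};

struct devnode {
	const struct devnode_fops *fops;
	bool registered;
};

static long devnode_read(struct devnode *mdev, char *buf, size_t len)
{
	if (!mdev->fops->read)
		return -1;                      /* media_read() returns -EINVAL here */
	if (!mdev->registered)
		return -5;                      /* and -EIO once the node is torn down */
	return mdev->fops->read(mdev, buf, len);
}

static long dummy_read(struct devnode *dn, char *buf, size_t len)
{
	(void)dn; (void)buf;
	return (long)len;                       /* pretend the whole buffer was filled */
}

int main(void)
{
	static const struct devnode_fops fops = { .read = dummy_read };
	struct devnode node = { .fops = &fops, .registered = true };
	char buf[16];

	printf("read returned %ld\n", devnode_read(&node, buf, sizeof(buf)));
	return 0;
}
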
/drivers/gpu/drm/mgag200/
mgag200_main.c
79 static int mga_probe_vram(struct mga_device *mdev, void __iomem *mem) in mga_probe_vram() argument
90 for (offset = 0x100000; offset < mdev->mc.vram_window; offset += 0x4000) { in mga_probe_vram()
117 static int mga_vram_init(struct mga_device *mdev) in mga_vram_init() argument
125 mdev->mc.vram_base = pci_resource_start(mdev->dev->pdev, 0); in mga_vram_init()
126 mdev->mc.vram_window = pci_resource_len(mdev->dev->pdev, 0); in mga_vram_init()
128 aper->ranges[0].base = mdev->mc.vram_base; in mga_vram_init()
129 aper->ranges[0].size = mdev->mc.vram_window; in mga_vram_init()
134 if (!devm_request_mem_region(mdev->dev->dev, mdev->mc.vram_base, mdev->mc.vram_window, in mga_vram_init()
140 mem = pci_iomap(mdev->dev->pdev, 0, 0); in mga_vram_init()
142 mdev->mc.vram_size = mga_probe_vram(mdev, mem); in mga_vram_init()
[all …]
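
mga_vram_init() above reads the BAR 0 base and window from PCI config space, then lets mga_probe_vram() determine the usable VRAM size by writing a test pattern at fixed steps and checking the readback. The sketch below is a loose model of that probe over a heap buffer (where the readback always succeeds), so it only illustrates the loop shape, not real iomapped access or the driver's exact pattern.

/*
 * Loose model of mga_probe_vram(): walk the aperture, keep the last
 * offset whose readback matches, report that as the VRAM size.
 */
#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

static size_t probe_vram(uint8_t *mem, size_t window)
{
	size_t offset, vram_size = 0;

	/* scaled-down walk; the driver starts at 0x100000 within the BAR */
	for (offset = 0x10000; offset < window; offset += 0x4000) {
		mem[offset] = 0xAA;             /* write a test pattern */
		if (mem[offset] != 0xAA)
			break;                  /* write did not stick: end of VRAM */
		vram_size = offset + 0x4000;
	}
	return vram_size;
}

int main(void)
{
	size_t window = 1 << 20;                /* pretend the BAR window is 1 MiB */
	uint8_t *mem = calloc(1, window);

	if (!mem)
		return 1;
	printf("probed %zu bytes of VRAM\n", probe_vram(mem, window));
	free(mem);
	return 0;
}
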
/drivers/w1/masters/
mxc_w1.c
88 struct mxc_w1_device *mdev = data; in mxc_w1_ds2_touch_bit() local
89 void __iomem *ctrl_addr = mdev->regs + MXC_W1_CONTROL; in mxc_w1_ds2_touch_bit()
108 struct mxc_w1_device *mdev; in mxc_w1_probe() local
112 mdev = devm_kzalloc(&pdev->dev, sizeof(struct mxc_w1_device), in mxc_w1_probe()
114 if (!mdev) in mxc_w1_probe()
117 mdev->clk = devm_clk_get(&pdev->dev, NULL); in mxc_w1_probe()
118 if (IS_ERR(mdev->clk)) in mxc_w1_probe()
119 return PTR_ERR(mdev->clk); in mxc_w1_probe()
121 mdev->clkdiv = (clk_get_rate(mdev->clk) / 1000000) - 1; in mxc_w1_probe()
124 mdev->regs = devm_ioremap_resource(&pdev->dev, res); in mxc_w1_probe()
[all …]
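
The mxc_w1 probe allocates its mdev with devm_kzalloc(), takes the module clock, and derives the register clock divider as (clk_get_rate(mdev->clk) / 1000000) - 1 so the 1-Wire block gets a 1 MHz time base. That arithmetic is modeled below with the clock rate passed in directly; w1_model_dev and w1_model_probe() are invented names.

/*
 * Model of the mxc_w1 clkdiv calculation: divider that brings the
 * input clock down to a 1 MHz time base.
 */
#include <stdio.h>

struct w1_model_dev { unsigned clkdiv; };

static int w1_model_probe(struct w1_model_dev *mdev, unsigned long clk_rate_hz)
{
	if (clk_rate_hz < 1000000)
		return -1;                          /* cannot reach a 1 MHz time base */
	mdev->clkdiv = (unsigned)(clk_rate_hz / 1000000) - 1;
	return 0;
}

int main(void)
{
	struct w1_model_dev dev;

	if (w1_model_probe(&dev, 66000000) == 0)    /* e.g. a 66 MHz IP clock */
		printf("clkdiv = %u\n", dev.clkdiv);
	return 0;
}
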
