
Searched refs:bdev (Results 1 – 25 of 267) sorted by relevance


/drivers/bluetooth/
btmtkuart.c
144 #define btmtkuart_is_standalone(bdev) \ argument
145 ((bdev)->data->flags & BTMTKUART_FLAG_STANDALONE_HW)
146 #define btmtkuart_is_builtin_soc(bdev) \ argument
147 !((bdev)->data->flags & BTMTKUART_FLAG_STANDALONE_HW)
152 struct btmtkuart_dev *bdev = hci_get_drvdata(hdev); in mtk_hci_wmt_sync() local
173 set_bit(BTMTKUART_TX_WAIT_VND_EVT, &bdev->tx_state); in mtk_hci_wmt_sync()
177 clear_bit(BTMTKUART_TX_WAIT_VND_EVT, &bdev->tx_state); in mtk_hci_wmt_sync()
190 err = wait_on_bit_timeout(&bdev->tx_state, BTMTKUART_TX_WAIT_VND_EVT, in mtk_hci_wmt_sync()
194 clear_bit(BTMTKUART_TX_WAIT_VND_EVT, &bdev->tx_state); in mtk_hci_wmt_sync()
200 clear_bit(BTMTKUART_TX_WAIT_VND_EVT, &bdev->tx_state); in mtk_hci_wmt_sync()
[all …]
btmtksdio.c
173 struct btmtksdio_dev *bdev = hci_get_drvdata(hdev); in mtk_hci_wmt_sync() local
192 set_bit(BTMTKSDIO_TX_WAIT_VND_EVT, &bdev->tx_state); in mtk_hci_wmt_sync()
196 clear_bit(BTMTKSDIO_TX_WAIT_VND_EVT, &bdev->tx_state); in mtk_hci_wmt_sync()
209 err = wait_on_bit_timeout(&bdev->tx_state, BTMTKSDIO_TX_WAIT_VND_EVT, in mtk_hci_wmt_sync()
213 clear_bit(BTMTKSDIO_TX_WAIT_VND_EVT, &bdev->tx_state); in mtk_hci_wmt_sync()
219 clear_bit(BTMTKSDIO_TX_WAIT_VND_EVT, &bdev->tx_state); in mtk_hci_wmt_sync()
224 wmt_evt = (struct btmtk_hci_wmt_evt *)bdev->evt_skb->data; in mtk_hci_wmt_sync()
254 kfree_skb(bdev->evt_skb); in mtk_hci_wmt_sync()
255 bdev->evt_skb = NULL; in mtk_hci_wmt_sync()
260 static int btmtksdio_tx_packet(struct btmtksdio_dev *bdev, in btmtksdio_tx_packet() argument
[all …]
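
Both MediaTek entries above hit the same vendor-event handshake in mtk_hci_wmt_sync(): the command path sets a TX_WAIT_VND_EVT bit in bdev->tx_state, sleeps on it with wait_on_bit_timeout(), and the receive path clears the bit and wakes the waiter. A minimal sketch of that bit-wait pattern follows; the MY_* names and helpers are illustrative stand-ins, not the driver's own code.

#include <linux/bitops.h>
#include <linux/jiffies.h>
#include <linux/sched.h>
#include <linux/wait_bit.h>

#define MY_TX_WAIT_VND_EVT	0

static unsigned long my_tx_state;

/* Command path: mark that we expect a vendor event, then block on the bit. */
static int my_wait_for_vendor_event(void)
{
	int err;

	set_bit(MY_TX_WAIT_VND_EVT, &my_tx_state);
	/* ... queue the WMT command for transmission here ... */
	err = wait_on_bit_timeout(&my_tx_state, MY_TX_WAIT_VND_EVT,
				  TASK_INTERRUPTIBLE,
				  msecs_to_jiffies(1000));
	if (err)	/* signal or timeout: nobody will clear the bit for us */
		clear_bit(MY_TX_WAIT_VND_EVT, &my_tx_state);
	return err;
}

/* Receive path: clear the bit and wake whoever is sleeping on it. */
static void my_vendor_event_received(void)
{
	if (test_and_clear_bit(MY_TX_WAIT_VND_EVT, &my_tx_state))
		wake_up_bit(&my_tx_state, MY_TX_WAIT_VND_EVT);
}
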
hci_bcm.c
310 struct bcm_device *bdev = data; in bcm_host_wake() local
312 bt_dev_dbg(bdev, "Host wake IRQ"); in bcm_host_wake()
314 pm_runtime_get(bdev->dev); in bcm_host_wake()
315 pm_runtime_mark_last_busy(bdev->dev); in bcm_host_wake()
316 pm_runtime_put_autosuspend(bdev->dev); in bcm_host_wake()
323 struct bcm_device *bdev = bcm->dev; in bcm_request_irq() local
327 if (!bcm_device_exists(bdev)) { in bcm_request_irq()
332 if (bdev->irq <= 0) { in bcm_request_irq()
337 err = devm_request_irq(bdev->dev, bdev->irq, bcm_host_wake, in bcm_request_irq()
338 bdev->irq_active_low ? IRQF_TRIGGER_FALLING : in bcm_request_irq()
[all …]
/drivers/dma/qcom/
bam_dma.c
350 struct bam_device *bdev; member
407 static inline void __iomem *bam_addr(struct bam_device *bdev, u32 pipe, in bam_addr() argument
410 const struct reg_offset_data r = bdev->layout[reg]; in bam_addr()
412 return bdev->regs + r.base_offset + in bam_addr()
415 r.ee_mult * bdev->ee; in bam_addr()
426 struct bam_device *bdev = bchan->bdev; in bam_reset_channel() local
431 writel_relaxed(1, bam_addr(bdev, bchan->id, BAM_P_RST)); in bam_reset_channel()
432 writel_relaxed(0, bam_addr(bdev, bchan->id, BAM_P_RST)); in bam_reset_channel()
451 struct bam_device *bdev = bchan->bdev; in bam_chan_init_hw() local
462 bam_addr(bdev, bchan->id, BAM_P_DESC_FIFO_ADDR)); in bam_chan_init_hw()
[all …]
/drivers/s390/block/
scm_blk.c
147 static bool scm_permit_request(struct scm_blk_dev *bdev, struct request *req) in scm_permit_request() argument
149 return rq_data_dir(req) != WRITE || bdev->state != SCM_WR_PROHIBIT; in scm_permit_request()
182 struct scm_blk_dev *bdev = scmrq->bdev; in scm_request_prepare() local
183 struct scm_device *scmdev = bdev->gendisk->private_data; in scm_request_prepare()
219 static inline void scm_request_init(struct scm_blk_dev *bdev, in scm_request_init() argument
228 aobrq->scmdev = bdev->scmdev; in scm_request_init()
231 scmrq->bdev = bdev; in scm_request_init()
240 struct scm_blk_dev *bdev = scmrq->bdev; in scm_request_requeue() local
246 atomic_dec(&bdev->queued_reqs); in scm_request_requeue()
248 blk_mq_kick_requeue_list(bdev->rq); in scm_request_requeue()
[all …]
scm_drv.c
19 struct scm_blk_dev *bdev = dev_get_drvdata(&scmdev->dev); in scm_notify() local
31 scm_blk_set_available(bdev); in scm_notify()
38 struct scm_blk_dev *bdev; in scm_probe() local
47 bdev = kzalloc(sizeof(*bdev), GFP_KERNEL); in scm_probe()
48 if (!bdev) in scm_probe()
51 dev_set_drvdata(&scmdev->dev, bdev); in scm_probe()
52 ret = scm_blk_dev_setup(bdev, scmdev); in scm_probe()
55 kfree(bdev); in scm_probe()
65 struct scm_blk_dev *bdev = dev_get_drvdata(&scmdev->dev); in scm_remove() local
67 scm_blk_dev_cleanup(bdev); in scm_remove()
[all …]
dasd_genhd.c
105 struct block_device *bdev; in dasd_scan_partitions() local
108 bdev = blkdev_get_by_dev(disk_devt(block->gdp), FMODE_READ, NULL); in dasd_scan_partitions()
109 if (IS_ERR(bdev)) { in dasd_scan_partitions()
112 PTR_ERR(bdev)); in dasd_scan_partitions()
133 block->bdev = bdev; in dasd_scan_partitions()
143 struct block_device *bdev; in dasd_destroy_partitions() local
149 bdev = block->bdev; in dasd_destroy_partitions()
150 block->bdev = NULL; in dasd_destroy_partitions()
152 mutex_lock(&bdev->bd_disk->open_mutex); in dasd_destroy_partitions()
153 bdev_disk_changed(bdev->bd_disk, true); in dasd_destroy_partitions()
[all …]
/drivers/staging/media/atomisp/pci/hmm/
hmm_bo.c
58 static int __bo_init(struct hmm_bo_device *bdev, struct hmm_buffer_object *bo, in __bo_init() argument
61 check_bodev_null_return(bdev, -EINVAL); in __bo_init()
62 var_equal_return(hmm_bo_device_inited(bdev), 0, -EINVAL, in __bo_init()
76 bo->bdev = bdev; in __bo_init()
79 bo->start = bdev->start; in __bo_init()
122 rb_erase(&this->node, &this->bdev->free_rbtree); in __bo_search_and_remove_from_free_rbtree()
247 static struct hmm_buffer_object *__bo_break_up(struct hmm_bo_device *bdev, in __bo_break_up() argument
255 new_bo = kmem_cache_alloc(bdev->bo_cache, GFP_KERNEL); in __bo_break_up()
260 ret = __bo_init(bdev, new_bo, pgnr); in __bo_break_up()
263 kmem_cache_free(bdev->bo_cache, new_bo); in __bo_break_up()
[all …]
/drivers/gpu/drm/ttm/
ttm_device.c
126 struct ttm_device *bdev; in ttm_global_swapout() local
130 list_for_each_entry(bdev, &glob->device_list, device_list) { in ttm_global_swapout()
131 ret = ttm_device_swapout(bdev, ctx, gfp_flags); in ttm_global_swapout()
133 list_move_tail(&bdev->device_list, &glob->device_list); in ttm_global_swapout()
142 int ttm_device_swapout(struct ttm_device *bdev, struct ttm_operation_ctx *ctx, in ttm_device_swapout() argument
150 spin_lock(&bdev->lru_lock); in ttm_device_swapout()
152 man = ttm_manager_type(bdev, i); in ttm_device_swapout()
169 spin_unlock(&bdev->lru_lock); in ttm_device_swapout()
176 struct ttm_device *bdev = in ttm_device_delayed_workqueue() local
179 if (!ttm_bo_delayed_delete(bdev, false)) in ttm_device_delayed_workqueue()
[all …]
ttm_bo.c
67 man = ttm_manager_type(bo->bdev, mem_type); in ttm_bo_mem_space_debug()
74 struct ttm_device *bdev = bo->bdev; in ttm_bo_del_from_lru() local
78 if (bdev->funcs->del_from_lru_notify) in ttm_bo_del_from_lru()
79 bdev->funcs->del_from_lru_notify(bo); in ttm_bo_del_from_lru()
94 struct ttm_device *bdev = bo->bdev; in ttm_bo_move_to_lru_tail() local
108 man = ttm_manager_type(bdev, mem->mem_type); in ttm_bo_move_to_lru_tail()
111 if (bdev->funcs->del_from_lru_notify) in ttm_bo_move_to_lru_tail()
112 bdev->funcs->del_from_lru_notify(bo); in ttm_bo_move_to_lru_tail()
142 man = ttm_manager_type(pos->first->bdev, TTM_PL_TT); in ttm_bo_bulk_move_lru_tail()
157 man = ttm_manager_type(pos->first->bdev, TTM_PL_VRAM); in ttm_bo_bulk_move_lru_tail()
[all …]
ttm_tt.c
60 struct ttm_device *bdev = bo->bdev; in ttm_tt_create() local
83 bo->ttm = bdev->funcs->ttm_tt_create(bo, page_flags); in ttm_tt_create()
125 void ttm_tt_destroy_common(struct ttm_device *bdev, struct ttm_tt *ttm) in ttm_tt_destroy_common() argument
127 ttm_tt_unpopulate(bdev, ttm); in ttm_tt_destroy_common()
136 void ttm_tt_destroy(struct ttm_device *bdev, struct ttm_tt *ttm) in ttm_tt_destroy() argument
138 bdev->funcs->ttm_tt_destroy(bdev, ttm); in ttm_tt_destroy()
250 int ttm_tt_swapout(struct ttm_device *bdev, struct ttm_tt *ttm, in ttm_tt_swapout() argument
285 ttm_tt_unpopulate(bdev, ttm); in ttm_tt_swapout()
297 static void ttm_tt_add_mapping(struct ttm_device *bdev, struct ttm_tt *ttm) in ttm_tt_add_mapping() argument
305 ttm->pages[i]->mapping = bdev->dev_mapping; in ttm_tt_add_mapping()
[all …]
ttm_bo_util.c
50 int ttm_mem_io_reserve(struct ttm_device *bdev, in ttm_mem_io_reserve() argument
57 if (!bdev->funcs->io_mem_reserve) in ttm_mem_io_reserve()
60 return bdev->funcs->io_mem_reserve(bdev, mem); in ttm_mem_io_reserve()
63 void ttm_mem_io_free(struct ttm_device *bdev, in ttm_mem_io_free() argument
72 if (bdev->funcs->io_mem_free) in ttm_mem_io_free()
73 bdev->funcs->io_mem_free(bdev, mem); in ttm_mem_io_free()
139 struct ttm_device *bdev = bo->bdev; in ttm_bo_move_memcpy() local
141 ttm_manager_type(bo->bdev, dst_mem->mem_type); in ttm_bo_move_memcpy()
145 ttm_manager_type(bdev, src_mem->mem_type); in ttm_bo_move_memcpy()
155 ret = ttm_tt_populate(bdev, ttm, ctx); in ttm_bo_move_memcpy()
[all …]
/drivers/comedi/drivers/
comedi_bond.c
81 struct bonded_device *bdev = *devs++; in bonding_dio_insn_bits() local
83 if (base_chan < bdev->nchans) { in bonding_dio_insn_bits()
92 b_chans = bdev->nchans - base_chan; in bonding_dio_insn_bits()
100 ret = comedi_dio_bitfield2(bdev->dev, bdev->subdev, in bonding_dio_insn_bits()
117 base_chan -= bdev->nchans; in bonding_dio_insn_bits()
131 struct bonded_device *bdev; in bonding_dio_insn_config() local
138 for (bdev = *devs++; chan >= bdev->nchans; bdev = *devs++) in bonding_dio_insn_config()
139 chan -= bdev->nchans; in bonding_dio_insn_config()
154 ret = comedi_dio_config(bdev->dev, bdev->subdev, chan, data[0]); in bonding_dio_insn_config()
157 ret = comedi_dio_get_config(bdev->dev, bdev->subdev, chan, in bonding_dio_insn_config()
[all …]
/drivers/input/misc/
cobalt_btns.c
37 struct buttons_dev *bdev = input_get_drvdata(input); in handle_buttons() local
41 status = ~readl(bdev->reg) >> 24; in handle_buttons()
43 for (i = 0; i < ARRAY_SIZE(bdev->keymap); i++) { in handle_buttons()
45 if (++bdev->count[i] == BUTTONS_COUNT_THRESHOLD) { in handle_buttons()
47 input_report_key(input, bdev->keymap[i], 1); in handle_buttons()
51 if (bdev->count[i] >= BUTTONS_COUNT_THRESHOLD) { in handle_buttons()
53 input_report_key(input, bdev->keymap[i], 0); in handle_buttons()
56 bdev->count[i] = 0; in handle_buttons()
63 struct buttons_dev *bdev; in cobalt_buttons_probe() local
68 bdev = devm_kzalloc(&pdev->dev, sizeof(*bdev), GFP_KERNEL); in cobalt_buttons_probe()
[all …]
sgi_btns.c
54 struct buttons_dev *bdev = input_get_drvdata(input); in handle_buttons() local
60 for (i = 0; i < ARRAY_SIZE(bdev->keymap); i++) { in handle_buttons()
62 if (++bdev->count[i] == BUTTONS_COUNT_THRESHOLD) { in handle_buttons()
64 input_report_key(input, bdev->keymap[i], 1); in handle_buttons()
68 if (bdev->count[i] >= BUTTONS_COUNT_THRESHOLD) { in handle_buttons()
70 input_report_key(input, bdev->keymap[i], 0); in handle_buttons()
73 bdev->count[i] = 0; in handle_buttons()
80 struct buttons_dev *bdev; in sgi_buttons_probe() local
84 bdev = devm_kzalloc(&pdev->dev, sizeof(*bdev), GFP_KERNEL); in sgi_buttons_probe()
85 if (!bdev) in sgi_buttons_probe()
[all …]
/drivers/hid/
hid-picolcd_backlight.c
17 static int picolcd_get_brightness(struct backlight_device *bdev) argument
19 struct picolcd_data *data = bl_get_data(bdev); in picolcd_get_brightness()
23 static int picolcd_set_brightness(struct backlight_device *bdev) in picolcd_set_brightness() argument
25 struct picolcd_data *data = bl_get_data(bdev); in picolcd_set_brightness()
32 data->lcd_brightness = bdev->props.brightness & 0x0ff; in picolcd_set_brightness()
33 data->lcd_power = bdev->props.power; in picolcd_set_brightness()
42 static int picolcd_check_bl_fb(struct backlight_device *bdev, struct fb_info *fb) in picolcd_check_bl_fb() argument
44 return fb && fb == picolcd_fbinfo((struct picolcd_data *)bl_get_data(bdev)); in picolcd_check_bl_fb()
56 struct backlight_device *bdev; in picolcd_init_backlight() local
69 bdev = backlight_device_register(dev_name(dev), dev, data, in picolcd_init_backlight()
[all …]
/drivers/input/keyboard/
gpio_keys_polled.c
48 struct gpio_keys_polled_dev *bdev = input_get_drvdata(input); in gpio_keys_button_event() local
54 __set_bit(button->code, bdev->rel_axis_seen); in gpio_keys_button_event()
59 __set_bit(button->code, bdev->abs_axis_seen); in gpio_keys_button_event()
89 struct gpio_keys_polled_dev *bdev = input_get_drvdata(input); in gpio_keys_polled_poll() local
90 const struct gpio_keys_platform_data *pdata = bdev->pdata; in gpio_keys_polled_poll()
93 memset(bdev->rel_axis_seen, 0, sizeof(bdev->rel_axis_seen)); in gpio_keys_polled_poll()
94 memset(bdev->abs_axis_seen, 0, sizeof(bdev->abs_axis_seen)); in gpio_keys_polled_poll()
97 struct gpio_keys_button_data *bdata = &bdev->data[i]; in gpio_keys_polled_poll()
110 if (!test_bit(i, bdev->rel_axis_seen)) in gpio_keys_polled_poll()
115 if (!test_bit(i, bdev->abs_axis_seen)) in gpio_keys_polled_poll()
[all …]
/drivers/gpu/drm/radeon/
radeon_ttm.c
56 static int radeon_ttm_tt_bind(struct ttm_device *bdev, struct ttm_tt *ttm,
58 static void radeon_ttm_tt_unbind(struct ttm_device *bdev, struct ttm_tt *ttm);
60 struct radeon_device *radeon_get_rdev(struct ttm_device *bdev) in radeon_get_rdev() argument
65 mman = container_of(bdev, struct radeon_mman, bdev); in radeon_get_rdev()
72 return ttm_range_man_init(&rdev->mman.bdev, TTM_PL_VRAM, in radeon_ttm_init_vram()
78 return ttm_range_man_init(&rdev->mman.bdev, TTM_PL_TT, in radeon_ttm_init_gtt()
150 rdev = radeon_get_rdev(bo->bdev); in radeon_move_blit()
205 r = radeon_ttm_tt_bind(bo->bdev, bo->ttm, new_mem); in radeon_bo_move()
222 rdev = radeon_get_rdev(bo->bdev); in radeon_bo_move()
235 radeon_ttm_tt_unbind(bo->bdev, bo->ttm); in radeon_bo_move()
[all …]
/drivers/pci/controller/
pcie-iproc-bcma.c
29 struct bcma_device *bdev = container_of(pcie->dev, struct bcma_device, dev); in iproc_pcie_bcma_map_irq() local
31 return bcma_core_irq(bdev, 5); in iproc_pcie_bcma_map_irq()
34 static int iproc_pcie_bcma_probe(struct bcma_device *bdev) in iproc_pcie_bcma_probe() argument
36 struct device *dev = &bdev->dev; in iproc_pcie_bcma_probe()
50 pcie->base = bdev->io_addr; in iproc_pcie_bcma_probe()
56 pcie->base_addr = bdev->addr; in iproc_pcie_bcma_probe()
58 pcie->mem.start = bdev->addr_s[0]; in iproc_pcie_bcma_probe()
59 pcie->mem.end = bdev->addr_s[0] + SZ_128M - 1; in iproc_pcie_bcma_probe()
69 bcma_set_drvdata(bdev, pcie); in iproc_pcie_bcma_probe()
74 static void iproc_pcie_bcma_remove(struct bcma_device *bdev) in iproc_pcie_bcma_remove() argument
[all …]
/drivers/md/
dm-linear.c
92 bio_set_dev(bio, lc->dev->bdev); in linear_map_bio()
128 static int linear_prepare_ioctl(struct dm_target *ti, struct block_device **bdev) in linear_prepare_ioctl() argument
133 *bdev = dev->bdev; in linear_prepare_ioctl()
139 ti->len != i_size_read(dev->bdev->bd_inode) >> SECTOR_SHIFT) in linear_prepare_ioctl()
150 return dm_report_zones(lc->dev->bdev, lc->start, in linear_report_zones()
172 struct block_device *bdev = lc->dev->bdev; in linear_dax_direct_access() local
177 ret = bdev_dax_pgoff(bdev, dev_sector, nr_pages * PAGE_SIZE, &pgoff); in linear_dax_direct_access()
187 struct block_device *bdev = lc->dev->bdev; in linear_dax_copy_from_iter() local
192 if (bdev_dax_pgoff(bdev, dev_sector, ALIGN(bytes, PAGE_SIZE), &pgoff)) in linear_dax_copy_from_iter()
201 struct block_device *bdev = lc->dev->bdev; in linear_dax_copy_to_iter() local
[all …]
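
The dm-linear hits boil down to one remap step: point the bio at the underlying block_device and shift its start sector by the target's offset before resubmission. A hedged sketch of that step, using an illustrative context struct rather than dm-linear's own linear_c/dm_target types:

#include <linux/bio.h>
#include <linux/blkdev.h>

struct my_linear {
	struct block_device *bdev;	/* underlying device */
	sector_t start;			/* offset into that device */
};

/* Remap @bio, which entered the target at @target_begin, onto lc->bdev. */
static void my_linear_map_bio(struct my_linear *lc, sector_t target_begin,
			      struct bio *bio)
{
	bio_set_dev(bio, lc->bdev);
	if (bio_sectors(bio))
		bio->bi_iter.bi_sector =
			lc->start + (bio->bi_iter.bi_sector - target_begin);
}
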
/drivers/nvme/target/
zns.c
37 u8 zasl = nvmet_zasl(bdev_max_zone_append_sectors(ns->bdev)); in nvmet_bdev_zns_enable()
38 struct gendisk *bd_disk = ns->bdev->bd_disk; in nvmet_bdev_zns_enable()
52 if (get_capacity(bd_disk) & (bdev_zone_sectors(ns->bdev) - 1)) in nvmet_bdev_zns_enable()
60 if (ns->bdev->bd_disk->queue->conv_zones_bitmap) in nvmet_bdev_zns_enable()
63 ret = blkdev_report_zones(ns->bdev, 0, blkdev_nr_zones(bd_disk), in nvmet_bdev_zns_enable()
68 ns->blksize_shift = blksize_bits(bdev_logical_block_size(ns->bdev)); in nvmet_bdev_zns_enable()
127 if (!bdev_is_zoned(req->ns->bdev)) { in nvmet_execute_identify_cns_cs_ns()
133 zsze = (bdev_zone_sectors(req->ns->bdev) << 9) >> in nvmet_execute_identify_cns_cs_ns()
137 mor = bdev_max_open_zones(req->ns->bdev); in nvmet_execute_identify_cns_cs_ns()
144 mar = bdev_max_active_zones(req->ns->bdev); in nvmet_execute_identify_cns_cs_ns()
[all …]
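
The zns.c hits probe the zoned geometry of the namespace's bdev (zone size, open/active zone limits) before enabling ZNS emulation. A hedged sketch of those block-layer queries on an already opened device; only the bdev_* accessors are the ones the hits use, the helper itself is made up:

#include <linux/blkdev.h>

static void my_report_zone_geometry(struct block_device *bdev)
{
	if (!bdev_is_zoned(bdev)) {
		pr_info("%pg: not a zoned device\n", bdev);
		return;
	}
	pr_info("%pg: zone size %llu sectors, max open %u, max active %u\n",
		bdev, (unsigned long long)bdev_zone_sectors(bdev),
		bdev_max_open_zones(bdev), bdev_max_active_zones(bdev));
}
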
io-cmd-bdev.c
11 void nvmet_bdev_set_limits(struct block_device *bdev, struct nvme_id_ns *id) in nvmet_bdev_set_limits() argument
13 const struct queue_limits *ql = &bdev_get_queue(bdev)->limits; in nvmet_bdev_set_limits()
52 if (ns->bdev) { in nvmet_bdev_ns_disable()
53 blkdev_put(ns->bdev, FMODE_WRITE | FMODE_READ); in nvmet_bdev_ns_disable()
54 ns->bdev = NULL; in nvmet_bdev_ns_disable()
60 struct blk_integrity *bi = bdev_get_integrity(ns->bdev); in nvmet_bdev_ns_enable_integrity()
78 ns->bdev = blkdev_get_by_path(ns->device_path, in nvmet_bdev_ns_enable()
80 if (IS_ERR(ns->bdev)) { in nvmet_bdev_ns_enable()
81 ret = PTR_ERR(ns->bdev); in nvmet_bdev_ns_enable()
84 ns->device_path, PTR_ERR(ns->bdev)); in nvmet_bdev_ns_enable()
[all …]
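
The io-cmd-bdev.c hits follow the usual backend open/close pattern: resolve the configured path to a struct block_device with blkdev_get_by_path() when the namespace is enabled, stash the pointer, and release it with blkdev_put() using the same mode flags on disable. A self-contained sketch of that pattern; struct my_ns and the function names are hypothetical, not nvmet's:

#include <linux/blkdev.h>
#include <linux/err.h>

struct my_ns {
	const char *device_path;
	struct block_device *bdev;
};

static int my_ns_enable(struct my_ns *ns)
{
	ns->bdev = blkdev_get_by_path(ns->device_path,
				      FMODE_READ | FMODE_WRITE, NULL);
	if (IS_ERR(ns->bdev)) {
		int ret = PTR_ERR(ns->bdev);

		ns->bdev = NULL;
		return ret;
	}
	return 0;
}

static void my_ns_disable(struct my_ns *ns)
{
	if (ns->bdev) {
		blkdev_put(ns->bdev, FMODE_READ | FMODE_WRITE);
		ns->bdev = NULL;
	}
}
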
/drivers/staging/media/atomisp/include/hmm/
hmm_bo.h
33 #define check_bodev_null_return(bdev, exp) \ argument
34 check_null_return(bdev, exp, \
37 #define check_bodev_null_return_void(bdev) \ argument
38 check_null_return_void(bdev, \
130 struct hmm_bo_device *bdev; member
162 struct hmm_buffer_object *hmm_bo_alloc(struct hmm_bo_device *bdev,
167 int hmm_bo_device_init(struct hmm_bo_device *bdev,
174 void hmm_bo_device_exit(struct hmm_bo_device *bdev);
179 int hmm_bo_device_inited(struct hmm_bo_device *bdev);
291 struct hmm_bo_device *bdev, ia_css_ptr vaddr);
[all …]
/drivers/block/rnbd/
rnbd-srv-dev.h
16 struct block_device *bdev; member
51 return queue_max_segments(bdev_get_queue(dev->bdev)); in rnbd_dev_get_max_segs()
56 return queue_max_hw_sectors(bdev_get_queue(dev->bdev)); in rnbd_dev_get_max_hw_sects()
61 return blk_queue_secure_erase(bdev_get_queue(dev->bdev)); in rnbd_dev_get_secure_discard()
66 if (!blk_queue_discard(bdev_get_queue(dev->bdev))) in rnbd_dev_get_max_discard_sects()
69 return blk_queue_get_max_sectors(bdev_get_queue(dev->bdev), in rnbd_dev_get_max_discard_sects()
75 return bdev_get_queue(dev->bdev)->limits.discard_granularity; in rnbd_dev_get_discard_granularity()
80 return bdev_get_queue(dev->bdev)->limits.discard_alignment; in rnbd_dev_get_discard_alignment()
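
The rnbd-srv-dev.h helpers above are thin wrappers: each one goes from the opened block_device to its request_queue via bdev_get_queue() and reads one limit that the server later advertises to clients. A small sketch of the same lookups bundled into one made-up helper:

#include <linux/blkdev.h>

static void my_print_queue_limits(struct block_device *bdev)
{
	struct request_queue *q = bdev_get_queue(bdev);

	pr_info("%pg: max_segments=%u max_hw_sectors=%u discard=%d granularity=%u\n",
		bdev, queue_max_segments(q), queue_max_hw_sectors(q),
		blk_queue_discard(q), q->limits.discard_granularity);
}
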
/drivers/gpu/drm/qxl/
qxl_ttm.c
40 static struct qxl_device *qxl_get_qdev(struct ttm_device *bdev) in qxl_get_qdev() argument
45 mman = container_of(bdev, struct qxl_mman, bdev); in qxl_get_qdev()
73 int qxl_ttm_io_mem_reserve(struct ttm_device *bdev, in qxl_ttm_io_mem_reserve() argument
76 struct qxl_device *qdev = qxl_get_qdev(bdev); in qxl_ttm_io_mem_reserve()
102 static void qxl_ttm_backend_destroy(struct ttm_device *bdev, struct ttm_tt *ttm) in qxl_ttm_backend_destroy() argument
104 ttm_tt_destroy_common(bdev, ttm); in qxl_ttm_backend_destroy()
179 return ttm_range_man_init(&qdev->mman.bdev, type, false, size); in qxl_ttm_init_mem_type()
188 r = ttm_device_init(&qdev->mman.bdev, &qxl_bo_driver, NULL, in qxl_ttm_init()
220 ttm_range_man_fini(&qdev->mman.bdev, TTM_PL_VRAM); in qxl_ttm_fini()
221 ttm_range_man_fini(&qdev->mman.bdev, TTM_PL_PRIV); in qxl_ttm_fini()
[all …]
