
Searched refs:device (Results 1 – 25 of 6149) sorted by relevance


/drivers/block/drbd/
drbd_actlog.c
82 void *drbd_md_get_buffer(struct drbd_device *device, const char *intent) in drbd_md_get_buffer() argument
86 wait_event(device->misc_wait, in drbd_md_get_buffer()
87 (r = atomic_cmpxchg(&device->md_io.in_use, 0, 1)) == 0 || in drbd_md_get_buffer()
88 device->state.disk <= D_FAILED); in drbd_md_get_buffer()
93 device->md_io.current_use = intent; in drbd_md_get_buffer()
94 device->md_io.start_jif = jiffies; in drbd_md_get_buffer()
95 device->md_io.submit_jif = device->md_io.start_jif - 1; in drbd_md_get_buffer()
96 return page_address(device->md_io.page); in drbd_md_get_buffer()
99 void drbd_md_put_buffer(struct drbd_device *device) in drbd_md_put_buffer() argument
101 if (atomic_dec_and_test(&device->md_io.in_use)) in drbd_md_put_buffer()
[all …]
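The drbd_md_get_buffer()/drbd_md_put_buffer() hits above serialize access to DRBD's single metadata I/O buffer: an atomic flag is claimed with atomic_cmpxchg() inside wait_event(), and the release side wakes the next waiter (the real code additionally gives up the wait once the disk drops to D_FAILED). A minimal sketch of that claim/release pattern, with illustrative names:

#include <linux/atomic.h>
#include <linux/wait.h>

static atomic_t md_buf_in_use = ATOMIC_INIT(0);
static DECLARE_WAIT_QUEUE_HEAD(md_buf_wait);

/* Sleep until the single buffer is free, then claim it atomically. */
static void md_buf_claim(void)
{
        wait_event(md_buf_wait,
                   atomic_cmpxchg(&md_buf_in_use, 0, 1) == 0);
}

/* Drop the claim and wake the next waiter blocked in md_buf_claim(). */
static void md_buf_release(void)
{
        if (atomic_dec_and_test(&md_buf_in_use))
                wake_up(&md_buf_wait);
}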
drbd_worker.c
52 struct drbd_device *device; in drbd_md_endio() local
54 device = bio->bi_private; in drbd_md_endio()
55 device->md_io.error = blk_status_to_errno(bio->bi_status); in drbd_md_endio()
58 if (device->ldev) in drbd_md_endio()
59 put_ldev(device); in drbd_md_endio()
73 drbd_md_put_buffer(device); in drbd_md_endio()
74 device->md_io.done = 1; in drbd_md_endio()
75 wake_up(&device->misc_wait); in drbd_md_endio()
85 struct drbd_device *device = peer_device->device; in drbd_endio_read_sec_final() local
87 spin_lock_irqsave(&device->resource->req_lock, flags); in drbd_endio_read_sec_final()
[all …]
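drbd_md_endio() is a standard bio completion callback: it recovers its context from bio->bi_private, translates the block-layer status to an errno, publishes a done flag, and wakes the submitter. A hedged sketch of that shape (struct md_io is invented here; the submitter is assumed to set bi_private/bi_end_io before submit_bio() and then wait_event() on done):

#include <linux/bio.h>
#include <linux/wait.h>

struct md_io {
        int error;
        int done;
        wait_queue_head_t wait;
};

/* Runs when the metadata bio completes, possibly in interrupt context. */
static void md_endio(struct bio *bio)
{
        struct md_io *io = bio->bi_private;

        io->error = blk_status_to_errno(bio->bi_status);
        io->done = 1;           /* publish the result ... */
        wake_up(&io->wait);     /* ... and wake the submitter */
}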
/drivers/s390/char/
tape_core.c
94 tape_medium_state_show(struct device *dev, struct device_attribute *attr, char *buf) in tape_medium_state_show()
106 tape_first_minor_show(struct device *dev, struct device_attribute *attr, char *buf) in tape_first_minor_show()
118 tape_state_show(struct device *dev, struct device_attribute *attr, char *buf) in tape_state_show()
131 tape_operation_show(struct device *dev, struct device_attribute *attr, char *buf) in tape_operation_show()
158 tape_blocksize_show(struct device *dev, struct device_attribute *attr, char *buf) in tape_blocksize_show()
187 tape_state_set(struct tape_device *device, enum tape_state newstate) in tape_state_set() argument
191 if (device->tape_state == TS_NOT_OPER) { in tape_state_set()
195 DBF_EVENT(4, "ts. dev: %x\n", device->first_minor); in tape_state_set()
197 if (device->tape_state < TS_SIZE && device->tape_state >=0 ) in tape_state_set()
198 str = tape_state_verbose[device->tape_state]; in tape_state_set()
[all …]
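The tape_*_show() hits are sysfs attribute show callbacks: each formats one value into the caller-supplied PAGE_SIZE buffer. A minimal sketch of one read-only attribute, assuming a hypothetical my_tape driver struct (tape_core.c itself predates sysfs_emit() and formats with sprintf()):

#include <linux/device.h>
#include <linux/sysfs.h>

struct my_tape { int state; };          /* illustrative stand-in */

/* sysfs read: format one value into the PAGE_SIZE buffer buf. */
static ssize_t state_show(struct device *dev,
                          struct device_attribute *attr, char *buf)
{
        struct my_tape *t = dev_get_drvdata(dev);

        return sysfs_emit(buf, "%d\n", t->state);
}
static DEVICE_ATTR_RO(state);           /* exposes dev_attr_state */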
tape_char.c
64 tapechar_setup_device(struct tape_device * device) in tapechar_setup_device() argument
68 sprintf(device_name, "ntibm%i", device->first_minor / 2); in tapechar_setup_device()
69 device->nt = register_tape_dev( in tapechar_setup_device()
70 &device->cdev->dev, in tapechar_setup_device()
71 MKDEV(tapechar_major, device->first_minor), in tapechar_setup_device()
77 device->rt = register_tape_dev( in tapechar_setup_device()
78 &device->cdev->dev, in tapechar_setup_device()
79 MKDEV(tapechar_major, device->first_minor + 1), in tapechar_setup_device()
89 tapechar_cleanup_device(struct tape_device *device) in tapechar_cleanup_device() argument
91 unregister_tape_dev(&device->cdev->dev, device->rt); in tapechar_cleanup_device()
[all …]
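tapechar_setup_device() registers two character-device nodes per tape at consecutive minors, the non-rewinding "ntibm%i" node and its rewinding sibling, both built with MKDEV(). register_tape_dev() is a driver-local helper rather than core API; a generic sketch of the char-device step it wraps, assuming a tape_major allocated elsewhere:

#include <linux/cdev.h>
#include <linux/fs.h>

static int tape_major;                          /* assumed: allocated elsewhere */
static const struct file_operations tape_fops;  /* assumed: open/read/write/... */

/* Each tape owns two consecutive minors: non-rewinding and rewinding. */
static int tape_add_nodes(struct cdev *cdev, int first_minor)
{
        cdev_init(cdev, &tape_fops);
        return cdev_add(cdev, MKDEV(tape_major, first_minor), 2);
}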
/drivers/gpu/drm/nouveau/nvkm/engine/gr/
nv40.c
36 return nvkm_rd32(gr->engine.subdev.device, 0x1540); in nv40_gr_units()
47 int ret = nvkm_gpuobj_new(object->engine->subdev.device, 20, align, in nv40_gr_object_bind()
79 int ret = nvkm_gpuobj_new(gr->base.engine.subdev.device, gr->size, in nv40_gr_chan_bind()
84 nv40_grctx_fill(gr->base.engine.subdev.device, *pgpuobj); in nv40_gr_chan_bind()
97 struct nvkm_device *device = subdev->device; in nv40_gr_chan_fini() local
101 nvkm_mask(device, 0x400720, 0x00000001, 0x00000000); in nv40_gr_chan_fini()
103 if (nvkm_rd32(device, 0x40032c) == inst) { in nv40_gr_chan_fini()
105 nvkm_wr32(device, 0x400720, 0x00000000); in nv40_gr_chan_fini()
106 nvkm_wr32(device, 0x400784, inst); in nv40_gr_chan_fini()
107 nvkm_mask(device, 0x400310, 0x00000020, 0x00000020); in nv40_gr_chan_fini()
[all …]
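Nearly every nouveau hit here uses the same three MMIO accessors: nvkm_rd32()/nvkm_wr32() for plain reads and writes, and nvkm_mask() for read-modify-write. A sketch of what an nvkm_mask()-style helper does, assuming the register window is a plain __iomem mapping (the real accessors route through nvkm's device structure):

#include <linux/io.h>
#include <linux/types.h>

/* Read-modify-write: clear the bits in mask, OR in value, return the old word. */
static u32 mmio_mask(void __iomem *regs, u32 addr, u32 mask, u32 value)
{
        u32 old = readl(regs + addr);

        writel((old & ~mask) | value, regs + addr);
        return old;
}

Read this way, nvkm_mask(device, 0x400720, 0x00000001, 0x00000000) in nv40_gr_chan_fini() clears bit 0 of register 0x400720.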
nv50.c
35 return nvkm_rd32(gr->engine.subdev.device, 0x1540); in nv50_gr_units()
46 int ret = nvkm_gpuobj_new(object->engine->subdev.device, 16, in nv50_gr_object_bind()
73 int ret = nvkm_gpuobj_new(gr->base.engine.subdev.device, gr->size, in nv50_gr_chan_bind()
77 nv50_grctx_fill(gr->base.engine.subdev.device, *pgpuobj); in nv50_gr_chan_bind()
243 struct nvkm_device *device = subdev->device; in nv50_gr_prop_trap() local
244 u32 e0c = nvkm_rd32(device, ustatus_addr + 0x04); in nv50_gr_prop_trap()
245 u32 e10 = nvkm_rd32(device, ustatus_addr + 0x08); in nv50_gr_prop_trap()
246 u32 e14 = nvkm_rd32(device, ustatus_addr + 0x0c); in nv50_gr_prop_trap()
247 u32 e18 = nvkm_rd32(device, ustatus_addr + 0x10); in nv50_gr_prop_trap()
248 u32 e1c = nvkm_rd32(device, ustatus_addr + 0x14); in nv50_gr_prop_trap()
[all …]
nv20.c
34 struct nvkm_device *device = gr->base.engine.subdev.device; in nv20_gr_chan_fini() local
38 nvkm_mask(device, 0x400720, 0x00000001, 0x00000000); in nv20_gr_chan_fini()
39 if (nvkm_rd32(device, 0x400144) & 0x00010000) in nv20_gr_chan_fini()
40 chid = (nvkm_rd32(device, 0x400148) & 0x1f000000) >> 24; in nv20_gr_chan_fini()
42 nvkm_wr32(device, 0x400784, inst >> 4); in nv20_gr_chan_fini()
43 nvkm_wr32(device, 0x400788, 0x00000002); in nv20_gr_chan_fini()
44 nvkm_msec(device, 2000, in nv20_gr_chan_fini()
45 if (!nvkm_rd32(device, 0x400700)) in nv20_gr_chan_fini()
48 nvkm_wr32(device, 0x400144, 0x10000000); in nv20_gr_chan_fini()
49 nvkm_mask(device, 0x400148, 0xff000000, 0x1f000000); in nv20_gr_chan_fini()
[all …]
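nvkm_msec(device, 2000, ...) in the nv20 block is a poll-with-timeout construct: the body re-executes until it breaks out or roughly 2000 ms elapse. An equivalent open-coded wait, illustrative rather than nvkm's actual implementation:

#include <linux/delay.h>
#include <linux/errno.h>
#include <linux/io.h>
#include <linux/ktime.h>
#include <linux/types.h>

/* Poll until the register reads zero or timeout_ms expires. */
static int poll_idle(void __iomem *regs, u32 addr, unsigned int timeout_ms)
{
        ktime_t end = ktime_add_ms(ktime_get(), timeout_ms);

        while (readl(regs + addr)) {
                if (ktime_after(ktime_get(), end))
                        return -ETIMEDOUT;
                udelay(1);
        }
        return 0;
}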
nv04.c
445 nv04_gr_set_ctx1(struct nvkm_device *device, u32 inst, u32 mask, u32 value) in nv04_gr_set_ctx1() argument
447 int subc = (nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7; in nv04_gr_set_ctx1()
450 tmp = nvkm_rd32(device, 0x700000 + inst); in nv04_gr_set_ctx1()
453 nvkm_wr32(device, 0x700000 + inst, tmp); in nv04_gr_set_ctx1()
455 nvkm_wr32(device, NV04_PGRAPH_CTX_SWITCH1, tmp); in nv04_gr_set_ctx1()
456 nvkm_wr32(device, NV04_PGRAPH_CTX_CACHE1 + (subc << 2), tmp); in nv04_gr_set_ctx1()
460 nv04_gr_set_ctx_val(struct nvkm_device *device, u32 inst, u32 mask, u32 value) in nv04_gr_set_ctx_val() argument
465 ctx1 = nvkm_rd32(device, 0x700000 + inst); in nv04_gr_set_ctx_val()
469 tmp = nvkm_rd32(device, 0x70000c + inst); in nv04_gr_set_ctx_val()
472 nvkm_wr32(device, 0x70000c + inst, tmp); in nv04_gr_set_ctx_val()
[all …]
nv10.c
417 nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, addr); \
419 state[__i] = nvkm_rd32(device, NV10_PGRAPH_PIPE_DATA); \
425 nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, addr); \
427 nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, state[__i]); \
433 struct nvkm_device *device = chan->object.engine->subdev.device; in nv17_gr_mthd_lma_window() local
447 PIPE_SAVE(device, pipe_0x0040, 0x0040); in nv17_gr_mthd_lma_window()
448 PIPE_SAVE(device, pipe->pipe_0x0200, 0x0200); in nv17_gr_mthd_lma_window()
450 PIPE_RESTORE(device, chan->lma_window, 0x6790); in nv17_gr_mthd_lma_window()
454 xfmode0 = nvkm_rd32(device, NV10_PGRAPH_XFMODE0); in nv17_gr_mthd_lma_window()
455 xfmode1 = nvkm_rd32(device, NV10_PGRAPH_XFMODE1); in nv17_gr_mthd_lma_window()
[all …]
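The nv10 PIPE_SAVE/PIPE_RESTORE macros capture and replay a window of PGRAPH pipe state through an address/data register pair: write the base address once, then stream words through the auto-incrementing data port. A hedged sketch of that indirect-access idiom (register offsets illustrative):

#include <linux/io.h>
#include <linux/types.h>

#define PIPE_ADDRESS 0x400f50   /* illustrative offsets */
#define PIPE_DATA    0x400f54

/* Save len words of pipe state starting at pipe address addr. */
static void pipe_save(void __iomem *regs, u32 addr, u32 *state, int len)
{
        int i;

        writel(addr, regs + PIPE_ADDRESS);
        for (i = 0; i < len; i++)
                state[i] = readl(regs + PIPE_DATA);
}

/* Replay a previously saved window at the same pipe address. */
static void pipe_restore(void __iomem *regs, u32 addr, const u32 *state, int len)
{
        int i;

        writel(addr, regs + PIPE_ADDRESS);
        for (i = 0; i < len; i++)
                writel(state[i], regs + PIPE_DATA);
}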
gf100.c
51 struct nvkm_device *device = gr->base.engine.subdev.device; in gf100_gr_zbc_clear_color() local
53 nvkm_wr32(device, 0x405804, gr->zbc_color[zbc].ds[0]); in gf100_gr_zbc_clear_color()
54 nvkm_wr32(device, 0x405808, gr->zbc_color[zbc].ds[1]); in gf100_gr_zbc_clear_color()
55 nvkm_wr32(device, 0x40580c, gr->zbc_color[zbc].ds[2]); in gf100_gr_zbc_clear_color()
56 nvkm_wr32(device, 0x405810, gr->zbc_color[zbc].ds[3]); in gf100_gr_zbc_clear_color()
58 nvkm_wr32(device, 0x405814, gr->zbc_color[zbc].format); in gf100_gr_zbc_clear_color()
59 nvkm_wr32(device, 0x405820, zbc); in gf100_gr_zbc_clear_color()
60 nvkm_wr32(device, 0x405824, 0x00000004); /* TRIGGER | WRITE | COLOR */ in gf100_gr_zbc_clear_color()
67 struct nvkm_ltc *ltc = gr->base.engine.subdev.device->ltc; in gf100_gr_zbc_color_get()
102 struct nvkm_device *device = gr->base.engine.subdev.device; in gf100_gr_zbc_clear_depth() local
[all …]
/drivers/firewire/
core-device.c
157 static const struct ieee1394_device_id *unit_match(struct device *dev, in unit_match()
173 static bool is_fw_unit(struct device *dev);
175 static int fw_unit_match(struct device *dev, struct device_driver *drv) in fw_unit_match()
181 static int fw_unit_probe(struct device *dev) in fw_unit_probe()
189 static int fw_unit_remove(struct device *dev) in fw_unit_remove()
208 static int fw_unit_uevent(struct device *dev, struct kobj_uevent_env *env) in fw_unit_uevent()
229 int fw_device_enable_phys_dma(struct fw_device *device) in fw_device_enable_phys_dma() argument
231 int generation = device->generation; in fw_device_enable_phys_dma()
236 return device->card->driver->enable_phys_dma(device->card, in fw_device_enable_phys_dma()
237 device->node_id, in fw_device_enable_phys_dma()
[all …]
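The core-device.c hits are the driver-model plumbing for the firewire bus: fw_unit_match() decides whether a driver's ieee1394_device_id table covers a unit, and the probe/remove/uevent callbacks hang off the bus type. A generic sketch of match-by-ID-table; struct my_unit_id and my_unit_fits() are invented stand-ins for the ieee1394 types:

#include <linux/device.h>
#include <linux/types.h>

struct my_unit_id {
        u32 match_flags;        /* zero terminates the table */
        u32 vendor_id;
};

struct my_driver {
        struct device_driver driver;
        const struct my_unit_id *id_table;
};

/* Hypothetical predicate: does one table entry describe this device? */
static bool my_unit_fits(struct device *dev, const struct my_unit_id *id)
{
        return false;           /* real code compares vendor/specifier/version IDs */
}

/* bus_type.match: scan the driver's zero-terminated ID table. */
static int my_bus_match(struct device *dev, struct device_driver *drv)
{
        const struct my_unit_id *id =
                container_of(drv, struct my_driver, driver)->id_table;

        for (; id->match_flags; id++)
                if (my_unit_fits(dev, id))
                        return 1;
        return 0;
}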
/drivers/s390/block/
dasd.c
103 struct dasd_device *device; in dasd_alloc_device() local
105 device = kzalloc(sizeof(struct dasd_device), GFP_ATOMIC); in dasd_alloc_device()
106 if (!device) in dasd_alloc_device()
110 device->ccw_mem = (void *) __get_free_pages(GFP_ATOMIC | GFP_DMA, 1); in dasd_alloc_device()
111 if (!device->ccw_mem) { in dasd_alloc_device()
112 kfree(device); in dasd_alloc_device()
116 device->erp_mem = (void *) get_zeroed_page(GFP_ATOMIC | GFP_DMA); in dasd_alloc_device()
117 if (!device->erp_mem) { in dasd_alloc_device()
118 free_pages((unsigned long) device->ccw_mem, 1); in dasd_alloc_device()
119 kfree(device); in dasd_alloc_device()
[all …]
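dasd_alloc_device() is the classic staged-allocation pattern: each failure unwinds everything acquired so far, and GFP_DMA keeps the CCW memory where the s390 channel subsystem can address it. A condensed sketch mirroring the hits, with my_dev standing in for struct dasd_device:

#include <linux/err.h>
#include <linux/gfp.h>
#include <linux/slab.h>

struct my_dev {
        void *ccw_mem;          /* two DMA-capable pages */
        void *erp_mem;          /* one zeroed DMA page */
};

static struct my_dev *my_alloc_device(void)
{
        struct my_dev *dev = kzalloc(sizeof(*dev), GFP_ATOMIC);

        if (!dev)
                return ERR_PTR(-ENOMEM);
        dev->ccw_mem = (void *)__get_free_pages(GFP_ATOMIC | GFP_DMA, 1);
        if (!dev->ccw_mem)
                goto out_free_dev;
        dev->erp_mem = (void *)get_zeroed_page(GFP_ATOMIC | GFP_DMA);
        if (!dev->erp_mem)
                goto out_free_pages;
        return dev;

out_free_pages:                 /* unwind in reverse order of acquisition */
        free_pages((unsigned long)dev->ccw_mem, 1);
out_free_dev:
        kfree(dev);
        return ERR_PTR(-ENOMEM);
}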
dasd_devmap.c
52 struct dasd_device *device; member
431 new->device = NULL; in dasd_add_busid()
485 BUG_ON(devmap->device != NULL); in dasd_forget_ranges()
500 struct dasd_device *device; in dasd_device_from_devindex() local
512 if (devmap && devmap->device) { in dasd_device_from_devindex()
513 device = devmap->device; in dasd_device_from_devindex()
514 dasd_get_device(device); in dasd_device_from_devindex()
516 device = ERR_PTR(-ENODEV); in dasd_device_from_devindex()
518 return device; in dasd_device_from_devindex()
544 struct dasd_device *device; in dasd_create_device() local
[all …]
dasd_3990_erp.c
74 struct dasd_device *device = erp->startdev; in dasd_3990_erp_block_queue() local
77 DBF_DEV_EVENT(DBF_INFO, device, in dasd_3990_erp_block_queue()
80 spin_lock_irqsave(get_ccwdev_lock(device->cdev), flags); in dasd_3990_erp_block_queue()
81 dasd_device_set_stop_bits(device, DASD_STOPPED_PENDING); in dasd_3990_erp_block_queue()
82 spin_unlock_irqrestore(get_ccwdev_lock(device->cdev), flags); in dasd_3990_erp_block_queue()
87 dasd_device_set_timer(device, expires); in dasd_3990_erp_block_queue()
106 struct dasd_device *device = erp->startdev; in dasd_3990_erp_int_req() local
119 dev_err(&device->cdev->dev, in dasd_3990_erp_int_req()
147 struct dasd_device *device = erp->startdev; in dasd_3990_erp_alternate_path() local
152 spin_lock_irqsave(get_ccwdev_lock(device->cdev), flags); in dasd_3990_erp_alternate_path()
[all …]
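dasd_3990_erp_block_queue() shows the convention for touching per-device state that interrupt handlers also touch: take the ccw device lock with spin_lock_irqsave(), flip the stop bits, release, then arm a timer to retry later. The irqsave step in isolation:

#include <linux/spinlock.h>

/* Set a status bit under a lock that is also taken in IRQ context. */
static void set_stopped_bit(spinlock_t *lock, unsigned long *status,
                            unsigned long bit)
{
        unsigned long flags;

        spin_lock_irqsave(lock, flags);         /* also masks local IRQs */
        *status |= bit;
        spin_unlock_irqrestore(lock, flags);
}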
/drivers/base/power/
power.h
4 static inline void device_pm_init_common(struct device *dev) in device_pm_init_common()
15 static inline void pm_runtime_early_init(struct device *dev) in pm_runtime_early_init()
21 extern void pm_runtime_init(struct device *dev);
22 extern void pm_runtime_reinit(struct device *dev);
23 extern void pm_runtime_remove(struct device *dev);
24 extern u64 pm_runtime_active_time(struct device *dev);
35 struct device *dev;
43 extern void dev_pm_enable_wake_irq_check(struct device *dev,
45 extern void dev_pm_disable_wake_irq_check(struct device *dev, bool cond_disable);
46 extern void dev_pm_enable_wake_irq_complete(struct device *dev);
[all …]
/drivers/gpu/drm/nouveau/nvkm/engine/disp/
vga.c
27 nvkm_rdport(struct nvkm_device *device, int head, u16 port) in nvkm_rdport() argument
29 if (device->card_type >= NV_50) in nvkm_rdport()
30 return nvkm_rd08(device, 0x601000 + port); in nvkm_rdport()
35 return nvkm_rd08(device, 0x601000 + (head * 0x2000) + port); in nvkm_rdport()
40 if (device->card_type < NV_40) in nvkm_rdport()
42 return nvkm_rd08(device, 0x0c0000 + (head * 0x2000) + port); in nvkm_rdport()
49 nvkm_wrport(struct nvkm_device *device, int head, u16 port, u8 data) in nvkm_wrport() argument
51 if (device->card_type >= NV_50) in nvkm_wrport()
52 nvkm_wr08(device, 0x601000 + port, data); in nvkm_wrport()
57 nvkm_wr08(device, 0x601000 + (head * 0x2000) + port, data); in nvkm_wrport()
[all …]
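nvkm_rdport()/nvkm_wrport() choose the MMIO window for legacy VGA ports by chipset generation: a flat window from NV50 on, per-head 0x2000-stride windows on NV40, and an older base before that. A compressed sketch of the generation dispatch; the real function also branches on which port is accessed, so treat this as illustrative only:

#include <linux/io.h>
#include <linux/types.h>

enum card_type { NV_04, NV_40, NV_50 };         /* abbreviated */

static u8 rd_vga_port(void __iomem *regs, enum card_type type,
                      int head, u16 port)
{
        if (type >= NV_50)
                return readb(regs + 0x601000 + port);
        if (type >= NV_40)
                return readb(regs + 0x601000 + (head * 0x2000) + port);
        return readb(regs + 0x0c0000 + (head * 0x2000) + port);
}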
gv100.c
34 struct nvkm_device *device = disp->engine.subdev.device; in gv100_disp_wndw_cnt() local
35 *pmask = nvkm_rd32(device, 0x610064); in gv100_disp_wndw_cnt()
36 return (nvkm_rd32(device, 0x610074) & 0x03f00000) >> 20; in gv100_disp_wndw_cnt()
45 struct nvkm_device *device = subdev->device; in gv100_disp_super() local
47 u32 stat = nvkm_rd32(device, 0x6107a8); in gv100_disp_super()
52 mask[head->id] = nvkm_rd32(device, 0x6107ac + (head->id * 4)); in gv100_disp_super()
92 nvkm_wr32(device, 0x6107ac + (head->id * 4), 0x00000000); in gv100_disp_super()
93 nvkm_wr32(device, 0x6107a8, 0x80000000); in gv100_disp_super()
100 struct nvkm_device *device = subdev->device; in gv100_disp_exception() local
101 u32 stat = nvkm_rd32(device, 0x611020 + (chid * 12)); in gv100_disp_exception()
[all …]
sorg94.c
31 struct nvkm_device *device = sor->disp->engine.subdev.device; in g94_sor_dp_watermark() local
33 nvkm_mask(device, 0x61c128 + loff, 0x0000003f, watermark); in g94_sor_dp_watermark()
40 struct nvkm_device *device = sor->disp->engine.subdev.device; in g94_sor_dp_activesym() local
42 nvkm_mask(device, 0x61c10c + loff, 0x000001fc, TU << 2); in g94_sor_dp_activesym()
43 nvkm_mask(device, 0x61c128 + loff, 0x010f7f00, VTUa << 24 | in g94_sor_dp_activesym()
51 struct nvkm_device *device = sor->disp->engine.subdev.device; in g94_sor_dp_audio_sym() local
53 nvkm_mask(device, 0x61c1e8 + soff, 0x0000ffff, h); in g94_sor_dp_audio_sym()
54 nvkm_mask(device, 0x61c1ec + soff, 0x00ffffff, v); in g94_sor_dp_audio_sym()
60 struct nvkm_device *device = sor->disp->engine.subdev.device; in g94_sor_dp_drive() local
65 data[0] = nvkm_rd32(device, 0x61c118 + loff) & ~(0x000000ff << shift); in g94_sor_dp_drive()
[all …]
/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
nv04.c
52 struct nvkm_device *device = fifo->base.engine.subdev.device; in nv04_fifo_pause() local
58 nvkm_wr32(device, NV03_PFIFO_CACHES, 0x00000000); in nv04_fifo_pause()
59 nvkm_mask(device, NV04_PFIFO_CACHE1_PULL0, 0x00000001, 0x00000000); in nv04_fifo_pause()
70 nvkm_msec(device, 2000, in nv04_fifo_pause()
71 u32 tmp = nvkm_rd32(device, NV04_PFIFO_CACHE1_PULL0); in nv04_fifo_pause()
76 if (nvkm_rd32(device, NV04_PFIFO_CACHE1_PULL0) & in nv04_fifo_pause()
78 nvkm_wr32(device, NV03_PFIFO_INTR_0, NV_PFIFO_INTR_CACHE_ERROR); in nv04_fifo_pause()
80 nvkm_wr32(device, NV04_PFIFO_CACHE1_HASH, 0x00000000); in nv04_fifo_pause()
88 struct nvkm_device *device = fifo->base.engine.subdev.device; in nv04_fifo_start() local
91 nvkm_mask(device, NV04_PFIFO_CACHE1_PULL0, 0x00000001, 0x00000001); in nv04_fifo_start()
[all …]
/drivers/hid/
hid-roccat.c
45 struct device *dev;
62 struct roccat_device *device; member
77 struct roccat_device *device = reader->device; in roccat_read() local
82 mutex_lock(&device->cbuf_lock); in roccat_read()
85 if (reader->cbuf_start == device->cbuf_end) { in roccat_read()
86 add_wait_queue(&device->wait, &wait); in roccat_read()
90 while (reader->cbuf_start == device->cbuf_end) { in roccat_read()
99 if (!device->exist) { in roccat_read()
104 mutex_unlock(&device->cbuf_lock); in roccat_read()
106 mutex_lock(&device->cbuf_lock); in roccat_read()
[all …]
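roccat_read() is the canonical blocking reader over a shared circular buffer: under the mutex, test for data; if empty, queue on the wait queue, drop the mutex so the writer can make progress, schedule, reacquire, and re-test (also re-checking that the device still exists). A skeleton of that loop with the buffer-specific tests abstracted into callbacks:

#include <linux/errno.h>
#include <linux/mutex.h>
#include <linux/sched/signal.h>
#include <linux/wait.h>

/* 0 when data is available, -ERESTARTSYS on signal, -ENODEV if unplugged. */
static int wait_for_data(struct mutex *lock, wait_queue_head_t *wq,
                         bool (*empty)(void *), bool (*gone)(void *), void *ctx)
{
        DECLARE_WAITQUEUE(wait, current);
        int err = 0;

        add_wait_queue(wq, &wait);
        set_current_state(TASK_INTERRUPTIBLE);
        while (empty(ctx)) {
                if (gone(ctx)) {
                        err = -ENODEV;
                        break;
                }
                if (signal_pending(current)) {
                        err = -ERESTARTSYS;
                        break;
                }
                mutex_unlock(lock);     /* let the producer run */
                schedule();
                mutex_lock(lock);
                set_current_state(TASK_INTERRUPTIBLE);
        }
        set_current_state(TASK_RUNNING);
        remove_wait_queue(wq, &wait);
        return err;
}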
/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/
gt215.c
34 struct nvkm_device *device = subdev->device; in gt215_pmu_send() local
39 addr = nvkm_rd32(device, 0x10a4a0); in gt215_pmu_send()
40 if (nvkm_msec(device, 2000, in gt215_pmu_send()
41 u32 tmp = nvkm_rd32(device, 0x10a4b0); in gt215_pmu_send()
60 nvkm_wr32(device, 0x10a580, 0x00000001); in gt215_pmu_send()
61 } while (nvkm_rd32(device, 0x10a580) != 0x00000001); in gt215_pmu_send()
64 nvkm_wr32(device, 0x10a1c0, 0x01000000 | (((addr & 0x07) << 4) + in gt215_pmu_send()
66 nvkm_wr32(device, 0x10a1c4, process); in gt215_pmu_send()
67 nvkm_wr32(device, 0x10a1c4, message); in gt215_pmu_send()
68 nvkm_wr32(device, 0x10a1c4, data0); in gt215_pmu_send()
[all …]
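The do/while around register 0x10a580 in gt215_pmu_send() reads like a hardware-semaphore acquire: keep writing your ID until a readback confirms the host owns the PMU's message interface. The idiom in isolation (semantics as interpreted from the hits, so treat as a sketch):

#include <linux/io.h>
#include <linux/types.h>

/* Spin until the device accepts our ID as the semaphore owner. */
static void hw_sem_acquire(void __iomem *regs, u32 addr, u32 id)
{
        do {
                writel(id, regs + addr);
        } while (readl(regs + addr) != id);
}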
gk104.c
34 magic_(struct nvkm_device *device, u32 ctrl, int size) in magic_() argument
36 nvkm_wr32(device, 0x00c800, 0x00000000); in magic_()
37 nvkm_wr32(device, 0x00c808, 0x00000000); in magic_()
38 nvkm_wr32(device, 0x00c800, ctrl); in magic_()
39 nvkm_msec(device, 2000, in magic_()
40 if (nvkm_rd32(device, 0x00c800) & 0x40000000) { in magic_()
42 nvkm_wr32(device, 0x00c804, 0x00000000); in magic_()
46 nvkm_wr32(device, 0x00c800, 0x00000000); in magic_()
50 magic(struct nvkm_device *device, u32 ctrl) in magic() argument
52 magic_(device, 0x8000a41f | ctrl, 6); in magic()
[all …]
/drivers/staging/pi433/
pi433_if.c
71 struct device *dev;
109 struct pi433_device *device; member
118 struct pi433_device *device = dev_id; in DIO0_irq_handler() local
120 if (device->irq_state[DIO0] == DIO_PACKET_SENT) { in DIO0_irq_handler()
121 device->free_in_fifo = FIFO_SIZE; in DIO0_irq_handler()
122 dev_dbg(device->dev, "DIO0 irq: Packet sent\n"); in DIO0_irq_handler()
123 wake_up_interruptible(&device->fifo_wait_queue); in DIO0_irq_handler()
124 } else if (device->irq_state[DIO0] == DIO_RSSI_DIO0) { in DIO0_irq_handler()
125 dev_dbg(device->dev, "DIO0 irq: RSSI level over threshold\n"); in DIO0_irq_handler()
126 wake_up_interruptible(&device->rx_wait_queue); in DIO0_irq_handler()
[all …]
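DIO0_irq_handler() is the producer half of a sleep/wake rendezvous: the interrupt handler checks which event the DIO0 pin is configured to signal, updates driver state, and wakes the sleeper with wake_up_interruptible(). A minimal sketch (struct radio is illustrative; the wait queue is assumed initialized at probe time):

#include <linux/interrupt.h>
#include <linux/wait.h>

struct radio {
        int packet_sent;
        wait_queue_head_t tx_wait;      /* init_waitqueue_head() at probe */
};

static irqreturn_t dio0_irq(int irq, void *dev_id)
{
        struct radio *r = dev_id;       /* cookie passed to request_irq() */

        r->packet_sent = 1;                     /* publish the event ... */
        wake_up_interruptible(&r->tx_wait);     /* ... and wake the sleeper */
        return IRQ_HANDLED;
}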
/drivers/dma/
dmaengine.c
154 static struct dma_chan *dev_to_dma_chan(struct device *dev) in dev_to_dma_chan()
158 chan_dev = container_of(dev, typeof(*chan_dev), device); in dev_to_dma_chan()
162 static ssize_t memcpy_count_show(struct device *dev, in memcpy_count_show()
184 static ssize_t bytes_transferred_show(struct device *dev, in bytes_transferred_show()
206 static ssize_t in_use_show(struct device *dev, struct device_attribute *attr, in in_use_show()
232 static void chan_dev_release(struct device *dev) in chan_dev_release()
236 chan_dev = container_of(dev, typeof(*chan_dev), device); in chan_dev_release()
304 int node = dev_to_node(chan->device->dev); in dma_chan_is_local()
322 struct dma_device *device; in min_chan() local
327 list_for_each_entry(device, &dma_device_list, global_node) { in min_chan()
[all …]
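dev_to_dma_chan() and chan_dev_release() both recover the driver structure that embeds a struct device using container_of(): given a pointer to the member, step back to the enclosing object. The idiom in isolation, with an illustrative wrapper type:

#include <linux/device.h>

struct chan_dev {
        int chan_id;            /* illustrative payload */
        struct device device;   /* embedded by value, not a pointer */
};

static struct chan_dev *to_chan_dev(struct device *dev)
{
        /* dev points at chan_dev.device; recover the container. */
        return container_of(dev, struct chan_dev, device);
}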
/drivers/gpu/host1x/
bus.c
37 static int host1x_subdev_add(struct host1x_device *device, in host1x_subdev_add() argument
52 mutex_lock(&device->subdevs_lock); in host1x_subdev_add()
53 list_add_tail(&subdev->list, &device->subdevs); in host1x_subdev_add()
54 mutex_unlock(&device->subdevs_lock); in host1x_subdev_add()
60 err = host1x_subdev_add(device, driver, child); in host1x_subdev_add()
88 static int host1x_device_parse_dt(struct host1x_device *device, in host1x_device_parse_dt() argument
94 for_each_child_of_node(device->dev.parent->of_node, np) { in host1x_device_parse_dt()
97 err = host1x_subdev_add(device, driver, np); in host1x_device_parse_dt()
108 static void host1x_subdev_register(struct host1x_device *device, in host1x_subdev_register() argument
119 mutex_lock(&device->subdevs_lock); in host1x_subdev_register()
[all …]
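host1x_subdev_add() and host1x_subdev_register() share one discipline: every walk or mutation of the device's subdevs list happens under subdevs_lock. The pattern in isolation:

#include <linux/list.h>
#include <linux/mutex.h>

struct subdev {
        struct list_head list;
};

struct bus_dev {
        struct mutex subdevs_lock;      /* guards the subdevs list */
        struct list_head subdevs;
};

/* Append a subdev; the mutex orders concurrent add/register/remove. */
static void subdev_add(struct bus_dev *bus, struct subdev *sd)
{
        mutex_lock(&bus->subdevs_lock);
        list_add_tail(&sd->list, &bus->subdevs);
        mutex_unlock(&bus->subdevs_lock);
}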
