
Searched refs:device (Results 1 – 25 of 4863) sorted by relevance

/drivers/block/drbd/
drbd_actlog.c
94 void *drbd_md_get_buffer(struct drbd_device *device, const char *intent) in drbd_md_get_buffer() argument
98 wait_event(device->misc_wait, in drbd_md_get_buffer()
99 (r = atomic_cmpxchg(&device->md_io.in_use, 0, 1)) == 0 || in drbd_md_get_buffer()
100 device->state.disk <= D_FAILED); in drbd_md_get_buffer()
105 device->md_io.current_use = intent; in drbd_md_get_buffer()
106 device->md_io.start_jif = jiffies; in drbd_md_get_buffer()
107 device->md_io.submit_jif = device->md_io.start_jif - 1; in drbd_md_get_buffer()
108 return page_address(device->md_io.page); in drbd_md_get_buffer()
111 void drbd_md_put_buffer(struct drbd_device *device) in drbd_md_put_buffer() argument
113 if (atomic_dec_and_test(&device->md_io.in_use)) in drbd_md_put_buffer()
[all …]
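
The pair above is DRBD's gate on its single metadata I/O page: drbd_md_get_buffer() sleeps in wait_event() until an atomic_cmpxchg() flips md_io.in_use from 0 to 1 (or the disk has failed), and drbd_md_put_buffer() releases it with atomic_dec_and_test() plus a wake_up(). A minimal sketch of the same acquire/release gate, with hypothetical names (my_dev, my_get_buffer) and the disk-state escape hatch dropped:

#include <linux/atomic.h>
#include <linux/wait.h>

/* hypothetical stand-in for struct drbd_device's md_io fields */
struct my_dev {
	atomic_t in_use;              /* 0 = buffer free, 1 = held */
	wait_queue_head_t misc_wait;  /* sleepers waiting for the buffer */
	void *page;                   /* the single shared buffer */
};

/* acquire: sleep until we win the 0 -> 1 cmpxchg race */
static void *my_get_buffer(struct my_dev *d)
{
	wait_event(d->misc_wait,
		   atomic_cmpxchg(&d->in_use, 0, 1) == 0);
	return d->page;
}

/* release: drop back to 0 and wake the next waiter */
static void my_put_buffer(struct my_dev *d)
{
	if (atomic_dec_and_test(&d->in_use))
		wake_up(&d->misc_wait);
}
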
drbd_worker.c
63 struct drbd_device *device; in drbd_md_endio() local
65 device = bio->bi_private; in drbd_md_endio()
66 device->md_io.error = blk_status_to_errno(bio->bi_status); in drbd_md_endio()
69 if (device->ldev) in drbd_md_endio()
70 put_ldev(device); in drbd_md_endio()
84 drbd_md_put_buffer(device); in drbd_md_endio()
85 device->md_io.done = 1; in drbd_md_endio()
86 wake_up(&device->misc_wait); in drbd_md_endio()
96 struct drbd_device *device = peer_device->device; in drbd_endio_read_sec_final() local
98 spin_lock_irqsave(&device->resource->req_lock, flags); in drbd_endio_read_sec_final()
[all …]
/drivers/s390/char/
tape_core.c
93 tape_medium_state_show(struct device *dev, struct device_attribute *attr, char *buf) in tape_medium_state_show()
105 tape_first_minor_show(struct device *dev, struct device_attribute *attr, char *buf) in tape_first_minor_show()
117 tape_state_show(struct device *dev, struct device_attribute *attr, char *buf) in tape_state_show()
130 tape_operation_show(struct device *dev, struct device_attribute *attr, char *buf) in tape_operation_show()
157 tape_blocksize_show(struct device *dev, struct device_attribute *attr, char *buf) in tape_blocksize_show()
186 tape_state_set(struct tape_device *device, enum tape_state newstate) in tape_state_set() argument
190 if (device->tape_state == TS_NOT_OPER) { in tape_state_set()
194 DBF_EVENT(4, "ts. dev: %x\n", device->first_minor); in tape_state_set()
196 if (device->tape_state < TS_SIZE && device->tape_state >=0 ) in tape_state_set()
197 str = tape_state_verbose[device->tape_state]; in tape_state_set()
[all …]
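
The tape_*_show() functions above all follow the standard sysfs show() shape: format one value into the page-sized buf and return the byte count. A minimal sketch of that pattern (the attribute name and the constant it reports are made up for illustration):

#include <linux/device.h>
#include <linux/kernel.h>

/* hypothetical read-only attribute, visible as .../blocksize in sysfs */
static ssize_t blocksize_show(struct device *dev,
			      struct device_attribute *attr, char *buf)
{
	/* scnprintf() bounds the write to the one page sysfs hands us */
	return scnprintf(buf, PAGE_SIZE, "%d\n", 4096);
}
static DEVICE_ATTR_RO(blocksize);

DEVICE_ATTR_RO(blocksize) expects the callback to be named blocksize_show; the attribute is then registered with device_create_file() or through an attribute group.
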
tape_char.c
64 tapechar_setup_device(struct tape_device * device) in tapechar_setup_device() argument
68 sprintf(device_name, "ntibm%i", device->first_minor / 2); in tapechar_setup_device()
69 device->nt = register_tape_dev( in tapechar_setup_device()
70 &device->cdev->dev, in tapechar_setup_device()
71 MKDEV(tapechar_major, device->first_minor), in tapechar_setup_device()
77 device->rt = register_tape_dev( in tapechar_setup_device()
78 &device->cdev->dev, in tapechar_setup_device()
79 MKDEV(tapechar_major, device->first_minor + 1), in tapechar_setup_device()
89 tapechar_cleanup_device(struct tape_device *device) in tapechar_cleanup_device() argument
91 unregister_tape_dev(&device->cdev->dev, device->rt); in tapechar_cleanup_device()
[all …]
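
tapechar_setup_device() registers two character-device nodes per tape at consecutive minors: first_minor for the non-rewinding ntibm node and first_minor + 1 for the rewinding one. A generic sketch of carving consecutive minors out of one major with MKDEV() (names are illustrative, not the s390 tape helpers; error unwind is trimmed):

#include <linux/cdev.h>
#include <linux/fs.h>
#include <linux/kdev_t.h>

static int my_major;                         /* assumed allocated elsewhere */
static struct cdev my_cdev[2];               /* one cdev per node */
extern const struct file_operations my_fops; /* assumed defined elsewhere */

static int my_setup_pair(int first_minor)
{
	int i, ret;

	for (i = 0; i < 2; i++) {
		cdev_init(&my_cdev[i], &my_fops);
		/* consecutive minors under the same major */
		ret = cdev_add(&my_cdev[i],
			       MKDEV(my_major, first_minor + i), 1);
		if (ret)
			return ret;  /* real code would cdev_del() node 0 */
	}
	return 0;
}
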
/drivers/gpu/drm/nouveau/nvkm/engine/gr/
nv40.c
36 return nvkm_rd32(gr->engine.subdev.device, 0x1540); in nv40_gr_units()
47 int ret = nvkm_gpuobj_new(object->engine->subdev.device, 20, align, in nv40_gr_object_bind()
79 int ret = nvkm_gpuobj_new(gr->base.engine.subdev.device, gr->size, in nv40_gr_chan_bind()
84 nv40_grctx_fill(gr->base.engine.subdev.device, *pgpuobj); in nv40_gr_chan_bind()
97 struct nvkm_device *device = subdev->device; in nv40_gr_chan_fini() local
101 nvkm_mask(device, 0x400720, 0x00000001, 0x00000000); in nv40_gr_chan_fini()
103 if (nvkm_rd32(device, 0x40032c) == inst) { in nv40_gr_chan_fini()
105 nvkm_wr32(device, 0x400720, 0x00000000); in nv40_gr_chan_fini()
106 nvkm_wr32(device, 0x400784, inst); in nv40_gr_chan_fini()
107 nvkm_mask(device, 0x400310, 0x00000020, 0x00000020); in nv40_gr_chan_fini()
[all …]
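
The nv40 code above drives the GPU entirely through nvkm_rd32()/nvkm_wr32()/nvkm_mask(): 32-bit MMIO reads, writes, and read-modify-write updates keyed off struct nvkm_device. The listing doesn't show nvkm_mask() itself, so as an assumption about the pattern rather than nouveau's implementation, here is what a mask-style helper does in terms of the generic readl()/writel() accessors:

#include <linux/io.h>
#include <linux/types.h>

/*
 * Read-modify-write one 32-bit register: clear the bits in @mask,
 * OR in @data, and return the old value so callers can inspect it.
 */
static u32 my_mmio_mask(void __iomem *base, u32 addr, u32 mask, u32 data)
{
	u32 old = readl(base + addr);

	writel((old & ~mask) | data, base + addr);
	return old;
}
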
nv50.c
35 return nvkm_rd32(gr->engine.subdev.device, 0x1540); in nv50_gr_units()
46 int ret = nvkm_gpuobj_new(object->engine->subdev.device, 16, in nv50_gr_object_bind()
73 int ret = nvkm_gpuobj_new(gr->base.engine.subdev.device, gr->size, in nv50_gr_chan_bind()
77 nv50_grctx_fill(gr->base.engine.subdev.device, *pgpuobj); in nv50_gr_chan_bind()
243 struct nvkm_device *device = subdev->device; in nv50_gr_prop_trap() local
244 u32 e0c = nvkm_rd32(device, ustatus_addr + 0x04); in nv50_gr_prop_trap()
245 u32 e10 = nvkm_rd32(device, ustatus_addr + 0x08); in nv50_gr_prop_trap()
246 u32 e14 = nvkm_rd32(device, ustatus_addr + 0x0c); in nv50_gr_prop_trap()
247 u32 e18 = nvkm_rd32(device, ustatus_addr + 0x10); in nv50_gr_prop_trap()
248 u32 e1c = nvkm_rd32(device, ustatus_addr + 0x14); in nv50_gr_prop_trap()
[all …]
gm200.c
38 return nvkm_rd32(gr->base.engine.subdev.device, 0x12006c); in gm200_gr_rops()
44 struct nvkm_device *device = gr->base.engine.subdev.device; in gm200_gr_init_gpc_mmu() local
46 nvkm_wr32(device, 0x418880, nvkm_rd32(device, 0x100c80) & 0xf0001fff); in gm200_gr_init_gpc_mmu()
47 nvkm_wr32(device, 0x418890, 0x00000000); in gm200_gr_init_gpc_mmu()
48 nvkm_wr32(device, 0x418894, 0x00000000); in gm200_gr_init_gpc_mmu()
50 nvkm_wr32(device, 0x4188b4, nvkm_rd32(device, 0x100cc8)); in gm200_gr_init_gpc_mmu()
51 nvkm_wr32(device, 0x4188b8, nvkm_rd32(device, 0x100ccc)); in gm200_gr_init_gpc_mmu()
52 nvkm_wr32(device, 0x4188b0, nvkm_rd32(device, 0x100cc4)); in gm200_gr_init_gpc_mmu()
58 struct nvkm_device *device = gr->base.engine.subdev.device; in gm200_gr_init_rop_active_fbps() local
59 const u32 fbp_count = nvkm_rd32(device, 0x12006c); in gm200_gr_init_rop_active_fbps()
[all …]
nv20.c
34 struct nvkm_device *device = gr->base.engine.subdev.device; in nv20_gr_chan_fini() local
38 nvkm_mask(device, 0x400720, 0x00000001, 0x00000000); in nv20_gr_chan_fini()
39 if (nvkm_rd32(device, 0x400144) & 0x00010000) in nv20_gr_chan_fini()
40 chid = (nvkm_rd32(device, 0x400148) & 0x1f000000) >> 24; in nv20_gr_chan_fini()
42 nvkm_wr32(device, 0x400784, inst >> 4); in nv20_gr_chan_fini()
43 nvkm_wr32(device, 0x400788, 0x00000002); in nv20_gr_chan_fini()
44 nvkm_msec(device, 2000, in nv20_gr_chan_fini()
45 if (!nvkm_rd32(device, 0x400700)) in nv20_gr_chan_fini()
48 nvkm_wr32(device, 0x400144, 0x10000000); in nv20_gr_chan_fini()
49 nvkm_mask(device, 0x400148, 0xff000000, 0x1f000000); in nv20_gr_chan_fini()
[all …]
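
nv20_gr_chan_fini() uses nvkm_msec(device, 2000, ...) to poll register 0x400700 until it reads zero or 2000 ms elapse. A self-contained sketch of such a bounded poll using jiffies (generic kernel style; nvkm_msec's real implementation is not part of this listing):

#include <linux/io.h>
#include <linux/jiffies.h>
#include <linux/types.h>
#include <asm/processor.h>   /* cpu_relax() */

/* poll until the register at @addr reads zero, or ~2000 ms pass */
static bool my_poll_idle(void __iomem *base, u32 addr)
{
	unsigned long timeout = jiffies + msecs_to_jiffies(2000);

	while (time_before(jiffies, timeout)) {
		if (!readl(base + addr))
			return true;   /* engine went idle */
		cpu_relax();           /* be polite while spinning */
	}
	return false;                  /* timed out */
}
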
gp100.c
36 struct nvkm_device *device = gr->base.engine.subdev.device; in gp100_gr_init_rop_active_fbps() local
38 const u32 fbp_count = nvkm_rd32(device, 0x12006c) & 0x0000000f; in gp100_gr_init_rop_active_fbps()
39 nvkm_mask(device, 0x408850, 0x0000000f, fbp_count); /* zrop */ in gp100_gr_init_rop_active_fbps()
40 nvkm_mask(device, 0x408958, 0x0000000f, fbp_count); /* crop */ in gp100_gr_init_rop_active_fbps()
46 struct nvkm_device *device = gr->base.engine.subdev.device; in gp100_gr_init_num_active_ltcs() local
48 nvkm_wr32(device, GPC_BCAST(0x08ac), nvkm_rd32(device, 0x100800)); in gp100_gr_init_num_active_ltcs()
49 nvkm_wr32(device, GPC_BCAST(0x033c), nvkm_rd32(device, 0x100804)); in gp100_gr_init_num_active_ltcs()
55 struct nvkm_device *device = gr->base.engine.subdev.device; in gp100_gr_init() local
66 nvkm_wr32(device, GPC_UNIT(0, 0x3018), 0x00000001); in gp100_gr_init()
79 nvkm_wr32(device, GPC_BCAST(0x0980), data[0]); in gp100_gr_init()
[all …]
gk104.c
185 struct nvkm_device *device = gr->base.engine.subdev.device; in gk104_gr_init_rop_active_fbps() local
186 const u32 fbp_count = nvkm_rd32(device, 0x120074); in gk104_gr_init_rop_active_fbps()
187 nvkm_mask(device, 0x408850, 0x0000000f, fbp_count); /* zrop */ in gk104_gr_init_rop_active_fbps()
188 nvkm_mask(device, 0x408958, 0x0000000f, fbp_count); /* crop */ in gk104_gr_init_rop_active_fbps()
194 struct nvkm_device *device = gr->base.engine.subdev.device; in gk104_gr_init_ppc_exceptions() local
201 nvkm_wr32(device, PPC_UNIT(gpc, ppc, 0x038), 0xc0000000); in gk104_gr_init_ppc_exceptions()
209 struct nvkm_device *device = gr->base.engine.subdev.device; in gk104_gr_init() local
210 struct nvkm_fb *fb = device->fb; in gk104_gr_init()
217 nvkm_wr32(device, GPC_BCAST(0x0880), 0x00000000); in gk104_gr_init()
218 nvkm_wr32(device, GPC_BCAST(0x08a4), 0x00000000); in gk104_gr_init()
[all …]
gf100.c
49 struct nvkm_device *device = gr->base.engine.subdev.device; in gf100_gr_zbc_clear_color() local
51 nvkm_wr32(device, 0x405804, gr->zbc_color[zbc].ds[0]); in gf100_gr_zbc_clear_color()
52 nvkm_wr32(device, 0x405808, gr->zbc_color[zbc].ds[1]); in gf100_gr_zbc_clear_color()
53 nvkm_wr32(device, 0x40580c, gr->zbc_color[zbc].ds[2]); in gf100_gr_zbc_clear_color()
54 nvkm_wr32(device, 0x405810, gr->zbc_color[zbc].ds[3]); in gf100_gr_zbc_clear_color()
56 nvkm_wr32(device, 0x405814, gr->zbc_color[zbc].format); in gf100_gr_zbc_clear_color()
57 nvkm_wr32(device, 0x405820, zbc); in gf100_gr_zbc_clear_color()
58 nvkm_wr32(device, 0x405824, 0x00000004); /* TRIGGER | WRITE | COLOR */ in gf100_gr_zbc_clear_color()
65 struct nvkm_ltc *ltc = gr->base.engine.subdev.device->ltc; in gf100_gr_zbc_color_get()
100 struct nvkm_device *device = gr->base.engine.subdev.device; in gf100_gr_zbc_clear_depth() local
[all …]
nv04.c
445 nv04_gr_set_ctx1(struct nvkm_device *device, u32 inst, u32 mask, u32 value) in nv04_gr_set_ctx1() argument
447 int subc = (nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7; in nv04_gr_set_ctx1()
450 tmp = nvkm_rd32(device, 0x700000 + inst); in nv04_gr_set_ctx1()
453 nvkm_wr32(device, 0x700000 + inst, tmp); in nv04_gr_set_ctx1()
455 nvkm_wr32(device, NV04_PGRAPH_CTX_SWITCH1, tmp); in nv04_gr_set_ctx1()
456 nvkm_wr32(device, NV04_PGRAPH_CTX_CACHE1 + (subc << 2), tmp); in nv04_gr_set_ctx1()
460 nv04_gr_set_ctx_val(struct nvkm_device *device, u32 inst, u32 mask, u32 value) in nv04_gr_set_ctx_val() argument
465 ctx1 = nvkm_rd32(device, 0x700000 + inst); in nv04_gr_set_ctx_val()
469 tmp = nvkm_rd32(device, 0x70000c + inst); in nv04_gr_set_ctx_val()
472 nvkm_wr32(device, 0x70000c + inst, tmp); in nv04_gr_set_ctx_val()
[all …]
/drivers/firewire/
core-device.c
171 static const struct ieee1394_device_id *unit_match(struct device *dev, in unit_match()
187 static bool is_fw_unit(struct device *dev);
189 static int fw_unit_match(struct device *dev, struct device_driver *drv) in fw_unit_match()
195 static int fw_unit_probe(struct device *dev) in fw_unit_probe()
203 static int fw_unit_remove(struct device *dev) in fw_unit_remove()
222 static int fw_unit_uevent(struct device *dev, struct kobj_uevent_env *env) in fw_unit_uevent()
243 int fw_device_enable_phys_dma(struct fw_device *device) in fw_device_enable_phys_dma() argument
245 int generation = device->generation; in fw_device_enable_phys_dma()
250 return device->card->driver->enable_phys_dma(device->card, in fw_device_enable_phys_dma()
251 device->node_id, in fw_device_enable_phys_dma()
[all …]
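
core-device.c is the driver-model glue for the firewire bus: unit_match() compares a device against a driver's ieee1394_device_id table, and fw_unit_probe()/fw_unit_remove()/fw_unit_uevent() are the bus callbacks. A skeletal sketch of a bus_type wired up that way (bus name and bodies are hypothetical; the field set matches the older driver model this listing reflects, where remove() still returned int):

#include <linux/device.h>

static int my_bus_match(struct device *dev, struct device_driver *drv)
{
	/* nonzero means this driver can handle this device;
	 * real code walks the driver's ID table here */
	return 1;
}

static int my_bus_probe(struct device *dev)
{
	return 0;	/* bind succeeded */
}

static int my_bus_remove(struct device *dev)
{
	return 0;
}

struct bus_type my_bus = {
	.name   = "mybus",
	.match  = my_bus_match,
	.probe  = my_bus_probe,
	.remove = my_bus_remove,
};
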
/drivers/base/power/
power.h
4 static inline void device_pm_init_common(struct device *dev) in device_pm_init_common()
15 static inline void pm_runtime_early_init(struct device *dev) in pm_runtime_early_init()
21 extern void pm_runtime_init(struct device *dev);
22 extern void pm_runtime_reinit(struct device *dev);
23 extern void pm_runtime_remove(struct device *dev);
31 struct device *dev;
38 extern void dev_pm_enable_wake_irq_check(struct device *dev,
40 extern void dev_pm_disable_wake_irq_check(struct device *dev);
44 extern int device_wakeup_attach_irq(struct device *dev,
46 extern void device_wakeup_detach_irq(struct device *dev);
[all …]
/drivers/s390/block/
dasd.c
94 struct dasd_device *device; in dasd_alloc_device() local
96 device = kzalloc(sizeof(struct dasd_device), GFP_ATOMIC); in dasd_alloc_device()
97 if (!device) in dasd_alloc_device()
101 device->ccw_mem = (void *) __get_free_pages(GFP_ATOMIC | GFP_DMA, 1); in dasd_alloc_device()
102 if (!device->ccw_mem) { in dasd_alloc_device()
103 kfree(device); in dasd_alloc_device()
107 device->erp_mem = (void *) get_zeroed_page(GFP_ATOMIC | GFP_DMA); in dasd_alloc_device()
108 if (!device->erp_mem) { in dasd_alloc_device()
109 free_pages((unsigned long) device->ccw_mem, 1); in dasd_alloc_device()
110 kfree(device); in dasd_alloc_device()
[all …]
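
dasd_alloc_device() allocates several DMA-capable regions with GFP_ATOMIC | GFP_DMA and unwinds in reverse order when any step fails. A minimal sketch of that allocate-and-unwind shape (struct and field names are hypothetical):

#include <linux/err.h>
#include <linux/gfp.h>
#include <linux/slab.h>

struct my_device {
	void *ccw_mem;	/* two contiguous DMA pages */
	void *erp_mem;	/* one zeroed DMA page */
};

static struct my_device *my_alloc_device(void)
{
	struct my_device *d;

	d = kzalloc(sizeof(*d), GFP_ATOMIC);
	if (!d)
		return ERR_PTR(-ENOMEM);

	/* order-1 allocation: 2^1 pages, reachable by DMA */
	d->ccw_mem = (void *)__get_free_pages(GFP_ATOMIC | GFP_DMA, 1);
	if (!d->ccw_mem)
		goto out_free_dev;

	d->erp_mem = (void *)get_zeroed_page(GFP_ATOMIC | GFP_DMA);
	if (!d->erp_mem)
		goto out_free_ccw;

	return d;

out_free_ccw:			/* unwind in reverse order */
	free_pages((unsigned long)d->ccw_mem, 1);
out_free_dev:
	kfree(d);
	return ERR_PTR(-ENOMEM);
}
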
dasd_devmap.c
51 struct dasd_device *device; member
430 new->device = NULL; in dasd_add_busid()
484 BUG_ON(devmap->device != NULL); in dasd_forget_ranges()
499 struct dasd_device *device; in dasd_device_from_devindex() local
511 if (devmap && devmap->device) { in dasd_device_from_devindex()
512 device = devmap->device; in dasd_device_from_devindex()
513 dasd_get_device(device); in dasd_device_from_devindex()
515 device = ERR_PTR(-ENODEV); in dasd_device_from_devindex()
517 return device; in dasd_device_from_devindex()
543 struct dasd_device *device; in dasd_create_device() local
[all …]
dasd_3990_erp.c
74 struct dasd_device *device = erp->startdev; in dasd_3990_erp_block_queue() local
77 DBF_DEV_EVENT(DBF_INFO, device, in dasd_3990_erp_block_queue()
80 spin_lock_irqsave(get_ccwdev_lock(device->cdev), flags); in dasd_3990_erp_block_queue()
81 dasd_device_set_stop_bits(device, DASD_STOPPED_PENDING); in dasd_3990_erp_block_queue()
82 spin_unlock_irqrestore(get_ccwdev_lock(device->cdev), flags); in dasd_3990_erp_block_queue()
87 dasd_device_set_timer(device, expires); in dasd_3990_erp_block_queue()
106 struct dasd_device *device = erp->startdev; in dasd_3990_erp_int_req() local
119 dev_err(&device->cdev->dev, in dasd_3990_erp_int_req()
147 struct dasd_device *device = erp->startdev; in dasd_3990_erp_alternate_path() local
152 spin_lock_irqsave(get_ccwdev_lock(device->cdev), flags); in dasd_3990_erp_alternate_path()
[all …]
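
dasd_3990_erp_block_queue() brackets the stop-bit update with spin_lock_irqsave()/spin_unlock_irqrestore() on the per-CCW-device lock before arming a timer; the irqsave variant is the standard choice for a lock that interrupt handlers may also take. A minimal sketch (lock and bit names are hypothetical):

#include <linux/spinlock.h>

static DEFINE_SPINLOCK(my_lock);
static unsigned int my_stop_bits;

/* safe to call from both process and interrupt context */
static void my_set_stop_bits(unsigned int bits)
{
	unsigned long flags;

	spin_lock_irqsave(&my_lock, flags);	/* also masks local IRQs */
	my_stop_bits |= bits;
	spin_unlock_irqrestore(&my_lock, flags);
}
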
/drivers/gpu/drm/nouveau/nvkm/engine/disp/
vga.c
27 nvkm_rdport(struct nvkm_device *device, int head, u16 port) in nvkm_rdport() argument
29 if (device->card_type >= NV_50) in nvkm_rdport()
30 return nvkm_rd08(device, 0x601000 + port); in nvkm_rdport()
35 return nvkm_rd08(device, 0x601000 + (head * 0x2000) + port); in nvkm_rdport()
40 if (device->card_type < NV_40) in nvkm_rdport()
42 return nvkm_rd08(device, 0x0c0000 + (head * 0x2000) + port); in nvkm_rdport()
49 nvkm_wrport(struct nvkm_device *device, int head, u16 port, u8 data) in nvkm_wrport() argument
51 if (device->card_type >= NV_50) in nvkm_wrport()
52 nvkm_wr08(device, 0x601000 + port, data); in nvkm_wrport()
57 nvkm_wr08(device, 0x601000 + (head * 0x2000) + port, data); in nvkm_wrport()
[all …]
/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
nv04.c
52 struct nvkm_device *device = fifo->base.engine.subdev.device; in nv04_fifo_pause() local
58 nvkm_wr32(device, NV03_PFIFO_CACHES, 0x00000000); in nv04_fifo_pause()
59 nvkm_mask(device, NV04_PFIFO_CACHE1_PULL0, 0x00000001, 0x00000000); in nv04_fifo_pause()
70 nvkm_msec(device, 2000, in nv04_fifo_pause()
71 u32 tmp = nvkm_rd32(device, NV04_PFIFO_CACHE1_PULL0); in nv04_fifo_pause()
76 if (nvkm_rd32(device, NV04_PFIFO_CACHE1_PULL0) & in nv04_fifo_pause()
78 nvkm_wr32(device, NV03_PFIFO_INTR_0, NV_PFIFO_INTR_CACHE_ERROR); in nv04_fifo_pause()
80 nvkm_wr32(device, NV04_PFIFO_CACHE1_HASH, 0x00000000); in nv04_fifo_pause()
88 struct nvkm_device *device = fifo->base.engine.subdev.device; in nv04_fifo_start() local
91 nvkm_mask(device, NV04_PFIFO_CACHE1_PULL0, 0x00000001, 0x00000001); in nv04_fifo_start()
[all …]
/drivers/infiniband/core/
device.c
97 static int ib_device_check_mandatory(struct ib_device *device) in ib_device_check_mandatory() argument
128 if (!*(void **) ((void *) device + mandatory_table[i].offset)) { in ib_device_check_mandatory()
130 device->name, mandatory_table[i].name); in ib_device_check_mandatory()
140 struct ib_device *device; in __ib_device_get_by_index() local
142 list_for_each_entry(device, &device_list, core_list) in __ib_device_get_by_index()
143 if (device->index == index) in __ib_device_get_by_index()
144 return device; in __ib_device_get_by_index()
154 struct ib_device *device; in ib_device_get_by_index() local
157 device = __ib_device_get_by_index(index); in ib_device_get_by_index()
158 if (device) in ib_device_get_by_index()
[all …]
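
__ib_device_get_by_index() walks the global device_list with list_for_each_entry() and matches on an integer index; the ib_device_get_by_index() wrapper takes the lock and grabs a reference. A generic sketch of the unlocked lookup (types and list head are illustrative):

#include <linux/list.h>
#include <linux/types.h>

struct my_ib_device {
	u32 index;
	struct list_head core_list;	/* linkage on the global list */
};

static LIST_HEAD(my_device_list);

/* caller must hold whatever lock protects my_device_list */
static struct my_ib_device *my_get_by_index(u32 index)
{
	struct my_ib_device *device;

	list_for_each_entry(device, &my_device_list, core_list)
		if (device->index == index)
			return device;
	return NULL;
}
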
/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/
gt215.c
34 struct nvkm_device *device = subdev->device; in gt215_pmu_send() local
39 addr = nvkm_rd32(device, 0x10a4a0); in gt215_pmu_send()
40 if (nvkm_msec(device, 2000, in gt215_pmu_send()
41 u32 tmp = nvkm_rd32(device, 0x10a4b0); in gt215_pmu_send()
60 nvkm_wr32(device, 0x10a580, 0x00000001); in gt215_pmu_send()
61 } while (nvkm_rd32(device, 0x10a580) != 0x00000001); in gt215_pmu_send()
64 nvkm_wr32(device, 0x10a1c0, 0x01000000 | (((addr & 0x07) << 4) + in gt215_pmu_send()
66 nvkm_wr32(device, 0x10a1c4, process); in gt215_pmu_send()
67 nvkm_wr32(device, 0x10a1c4, message); in gt215_pmu_send()
68 nvkm_wr32(device, 0x10a1c4, data0); in gt215_pmu_send()
[all …]
gk104.c
34 magic_(struct nvkm_device *device, u32 ctrl, int size) in magic_() argument
36 nvkm_wr32(device, 0x00c800, 0x00000000); in magic_()
37 nvkm_wr32(device, 0x00c808, 0x00000000); in magic_()
38 nvkm_wr32(device, 0x00c800, ctrl); in magic_()
39 nvkm_msec(device, 2000, in magic_()
40 if (nvkm_rd32(device, 0x00c800) & 0x40000000) { in magic_()
42 nvkm_wr32(device, 0x00c804, 0x00000000); in magic_()
46 nvkm_wr32(device, 0x00c800, 0x00000000); in magic_()
50 magic(struct nvkm_device *device, u32 ctrl) in magic() argument
52 magic_(device, 0x8000a41f | ctrl, 6); in magic()
[all …]
/drivers/gpu/host1x/
bus.c
46 static int host1x_subdev_add(struct host1x_device *device, in host1x_subdev_add() argument
61 mutex_lock(&device->subdevs_lock); in host1x_subdev_add()
62 list_add_tail(&subdev->list, &device->subdevs); in host1x_subdev_add()
63 mutex_unlock(&device->subdevs_lock); in host1x_subdev_add()
69 err = host1x_subdev_add(device, driver, child); in host1x_subdev_add()
97 static int host1x_device_parse_dt(struct host1x_device *device, in host1x_device_parse_dt() argument
103 for_each_child_of_node(device->dev.parent->of_node, np) { in host1x_device_parse_dt()
106 err = host1x_subdev_add(device, driver, np); in host1x_device_parse_dt()
117 static void host1x_subdev_register(struct host1x_device *device, in host1x_subdev_register() argument
128 mutex_lock(&device->subdevs_lock); in host1x_subdev_register()
[all …]
/drivers/staging/pi433/
pi433_if.c
79 struct device *dev;
118 struct pi433_device *device; member
134 struct pi433_device *device = dev_id; in DIO0_irq_handler() local
136 if (device->irq_state[DIO0] == DIO_PacketSent) in DIO0_irq_handler()
138 device->free_in_fifo = FIFO_SIZE; in DIO0_irq_handler()
140 wake_up_interruptible(&device->fifo_wait_queue); in DIO0_irq_handler()
142 else if (device->irq_state[DIO0] == DIO_Rssi_DIO0) in DIO0_irq_handler()
145 wake_up_interruptible(&device->rx_wait_queue); in DIO0_irq_handler()
147 else if (device->irq_state[DIO0] == DIO_PayloadReady) in DIO0_irq_handler()
150 device->free_in_fifo = 0; in DIO0_irq_handler()
[all …]
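
DIO0_irq_handler() demultiplexes one GPIO interrupt by consulting software state (irq_state[DIO0]) and waking whichever wait queue matches the radio's current mode. A minimal sketch of an IRQ handler in that shape (names are hypothetical):

#include <linux/interrupt.h>
#include <linux/wait.h>

enum my_role { ROLE_TX_DONE, ROLE_RX_READY };

struct my_radio {
	enum my_role role;	/* what the DIO0 line currently signals */
	wait_queue_head_t tx_wait, rx_wait;
};

static irqreturn_t my_dio0_irq(int irq, void *dev_id)
{
	struct my_radio *r = dev_id;

	/* same line, different meaning depending on configured role */
	switch (r->role) {
	case ROLE_TX_DONE:
		wake_up_interruptible(&r->tx_wait);
		break;
	case ROLE_RX_READY:
		wake_up_interruptible(&r->rx_wait);
		break;
	}
	return IRQ_HANDLED;
}
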
/drivers/hid/
hid-roccat.c
48 struct device *dev;
65 struct roccat_device *device; member
80 struct roccat_device *device = reader->device; in roccat_read() local
85 mutex_lock(&device->cbuf_lock); in roccat_read()
88 if (reader->cbuf_start == device->cbuf_end) { in roccat_read()
89 add_wait_queue(&device->wait, &wait); in roccat_read()
93 while (reader->cbuf_start == device->cbuf_end) { in roccat_read()
102 if (!device->exist) { in roccat_read()
107 mutex_unlock(&device->cbuf_lock); in roccat_read()
109 mutex_lock(&device->cbuf_lock); in roccat_read()
[all …]
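
roccat_read() is the classic blocking-reader shape: under a mutex it checks whether its ring-buffer cursor has caught up with the writer and, if so, drops the lock and sleeps on the wait queue until data arrives or the device disappears. A compact sketch using wait_event_interruptible() in place of the open-coded wait loop (names are hypothetical):

#include <linux/errno.h>
#include <linux/mutex.h>
#include <linux/types.h>
#include <linux/wait.h>

struct my_ring {
	struct mutex lock;
	wait_queue_head_t wait;
	int head, tail;		/* writer / reader cursors */
	bool exist;		/* cleared when the device goes away */
};

static int my_wait_for_data(struct my_ring *r)
{
	int err;

	mutex_lock(&r->lock);
	while (r->tail == r->head) {		/* buffer empty */
		mutex_unlock(&r->lock);
		/* sleep until the writer advances or the device vanishes */
		err = wait_event_interruptible(r->wait,
				r->tail != r->head || !r->exist);
		if (err)
			return err;		/* -ERESTARTSYS on a signal */
		if (!r->exist)
			return -ENODEV;		/* device was unplugged */
		mutex_lock(&r->lock);
	}
	/* ...consume one record at r->tail, advance it, then... */
	mutex_unlock(&r->lock);
	return 0;
}
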
