
Searched refs:request (Results 1 – 25 of 624) sorted by relevance


/drivers/s390/char/
tape_std.c:37 struct tape_request * request; in tape_std_assign_timeout() local
41 request = (struct tape_request *) data; in tape_std_assign_timeout()
42 device = request->device; in tape_std_assign_timeout()
47 rc = tape_cancel_io(device, request); in tape_std_assign_timeout()
58 struct tape_request *request; in tape_std_assign() local
60 request = tape_alloc_request(2, 11); in tape_std_assign()
61 if (IS_ERR(request)) in tape_std_assign()
62 return PTR_ERR(request); in tape_std_assign()
64 request->op = TO_ASSIGN; in tape_std_assign()
65 tape_ccw_cc(request->cpaddr, ASSIGN, 11, request->cpdata); in tape_std_assign()
[all …]
tape_34xx.c:55 static void __tape_34xx_medium_sense(struct tape_request *request) in __tape_34xx_medium_sense() argument
57 struct tape_device *device = request->device; in __tape_34xx_medium_sense()
60 if (request->rc == 0) { in __tape_34xx_medium_sense()
61 sense = request->cpdata; in __tape_34xx_medium_sense()
80 request->rc); in __tape_34xx_medium_sense()
81 tape_free_request(request); in __tape_34xx_medium_sense()
86 struct tape_request *request; in tape_34xx_medium_sense() local
89 request = tape_alloc_request(1, 32); in tape_34xx_medium_sense()
90 if (IS_ERR(request)) { in tape_34xx_medium_sense()
92 return PTR_ERR(request); in tape_34xx_medium_sense()
[all …]
tape_3590.c:203 struct tape_request *request; in tape_3592_kekl_query() local
212 request = tape_alloc_request(2, sizeof(*order)); in tape_3592_kekl_query()
213 if (IS_ERR(request)) { in tape_3592_kekl_query()
214 rc = PTR_ERR(request); in tape_3592_kekl_query()
217 order = request->cpdata; in tape_3592_kekl_query()
221 request->op = TO_KEKL_QUERY; in tape_3592_kekl_query()
222 tape_ccw_cc(request->cpaddr, PERF_SUBSYS_FUNC, sizeof(*order), order); in tape_3592_kekl_query()
223 tape_ccw_end(request->cpaddr + 1, READ_SS_DATA, sizeof(*int_kekls), in tape_3592_kekl_query()
225 rc = tape_do_io(device, request); in tape_3592_kekl_query()
232 tape_free_request(request); in tape_3592_kekl_query()
[all …]
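
The tape driver hits above all follow one request lifecycle: allocate a tape_request sized for the channel program, fill in the opcode and CCWs, submit it, and free it. A minimal sketch of that lifecycle, pieced together from the calls visible in the listing (tape_alloc_request, tape_ccw_cc, tape_ccw_end, tape_do_io, tape_free_request); the wrapper name do_assign_sketch() and the trailing NOP CCW are illustrative assumptions, and the s390 tape driver's internal headers are assumed to be available.

#include <linux/err.h>
/* assumes the driver-internal headers for struct tape_device/tape_request,
 * TO_ASSIGN, ASSIGN, NOP and the tape_* helpers seen in the hits above */

static int do_assign_sketch(struct tape_device *device)
{
	struct tape_request *request;
	int rc;

	/* two CCWs and 11 bytes of channel-program data, as in tape_std_assign() */
	request = tape_alloc_request(2, 11);
	if (IS_ERR(request))
		return PTR_ERR(request);

	request->op = TO_ASSIGN;
	/* first CCW command-chains into the second, which terminates the program */
	tape_ccw_cc(request->cpaddr, ASSIGN, 11, request->cpdata);
	tape_ccw_end(request->cpaddr + 1, NOP, 0, NULL);	/* assumed terminator */

	rc = tape_do_io(device, request);	/* synchronous submit, as in tape_3592_kekl_query() */
	tape_free_request(request);
	return rc;
}
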
tape_core.c:291 __tape_cancel_io(struct tape_device *device, struct tape_request *request) in __tape_cancel_io() argument
297 if (request->callback == NULL) in __tape_cancel_io()
302 rc = ccw_device_clear(device->cdev, (long) request); in __tape_cancel_io()
306 request->status = TAPE_REQUEST_DONE; in __tape_cancel_io()
309 request->status = TAPE_REQUEST_CANCEL; in __tape_cancel_io()
641 struct tape_request * request; in __tape_discard_requests() local
645 request = list_entry(l, struct tape_request, list); in __tape_discard_requests()
646 if (request->status == TAPE_REQUEST_IN_IO) in __tape_discard_requests()
647 request->status = TAPE_REQUEST_DONE; in __tape_discard_requests()
648 list_del(&request->list); in __tape_discard_requests()
[all …]
sclp_vt220.c:99 static int __sclp_vt220_emit(struct sclp_vt220_request *request);
120 sclp_vt220_process_queue(struct sclp_vt220_request *request) in sclp_vt220_process_queue() argument
127 page = request->sclp_req.sccb; in sclp_vt220_process_queue()
130 list_del(&request->list); in sclp_vt220_process_queue()
133 request = NULL; in sclp_vt220_process_queue()
135 request = list_entry(sclp_vt220_outqueue.next, in sclp_vt220_process_queue()
137 if (!request || sclp_vt220_suspended) { in sclp_vt220_process_queue()
143 } while (__sclp_vt220_emit(request)); in sclp_vt220_process_queue()
144 if (request == NULL && sclp_vt220_flush_later) in sclp_vt220_process_queue()
156 sclp_vt220_callback(struct sclp_req *request, void *data) in sclp_vt220_callback() argument
[all …]
sclp_sdias.c:52 static void sdias_callback(struct sclp_req *request, void *data) in sdias_callback() argument
106 struct sclp_req request; in sclp_sdias_blk_count() local
112 memset(&request, 0, sizeof(request)); in sclp_sdias_blk_count()
122 request.sccb = &sccb; in sclp_sdias_blk_count()
123 request.command = SCLP_CMDW_WRITE_EVENT_DATA; in sclp_sdias_blk_count()
124 request.status = SCLP_REQ_FILLED; in sclp_sdias_blk_count()
125 request.callback = sdias_callback; in sclp_sdias_blk_count()
127 rc = sdias_sclp_send(&request); in sclp_sdias_blk_count()
165 struct sclp_req request; in sclp_sdias_copy() local
171 memset(&request, 0, sizeof(request)); in sclp_sdias_copy()
[all …]
sclp_async.c:25 static struct sclp_req *request; variable
132 request->command = SCLP_CMDW_WRITE_EVENT_DATA; in sclp_async_send_wait()
133 request->sccb = sccb; in sclp_async_send_wait()
134 request->status = SCLP_REQ_FILLED; in sclp_async_send_wait()
145 rc = sclp_add_request(request); in sclp_async_send_wait()
149 while (request->status != SCLP_REQ_DONE && in sclp_async_send_wait()
150 request->status != SCLP_REQ_FAILED) { in sclp_async_send_wait()
154 if (request->status != SCLP_REQ_DONE) in sclp_async_send_wait()
157 request->sccb)->header.response_code; in sclp_async_send_wait()
179 request = kzalloc(sizeof(struct sclp_req), GFP_KERNEL); in sclp_async_init()
[all …]
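
The three SCLP hits share one submission idiom: zero an sclp_req, point it at an SCCB, mark it SCLP_REQ_FILLED, hand it to the SCLP core, and learn the outcome through the callback or the final status (SCLP_REQ_DONE / SCLP_REQ_FAILED). A condensed sketch of that idiom; the placeholder SCCB buffer and the use of a completion to wait are assumptions (sclp_sdias waits inside its own sdias_sclp_send() wrapper, while sclp_async polls the status field), and the SCLP core's internal sclp.h header is assumed.

#include <linux/completion.h>
#include <linux/errno.h>
#include <linux/string.h>
/* assumes the SCLP core's internal sclp.h for struct sclp_req, SCLP_CMDW_*,
 * SCLP_REQ_* and sclp_add_request() */

static u8 sccb_buf[4096] __aligned(4096);	/* placeholder; real callers build a proper event-data SCCB */
static DECLARE_COMPLETION(req_done);

static void req_callback(struct sclp_req *request, void *data)
{
	/* called by the SCLP core once request->status is final */
	complete(&req_done);
}

static int send_event_data_sketch(void)
{
	struct sclp_req request;
	int rc;

	memset(&request, 0, sizeof(request));
	request.sccb = sccb_buf;
	request.command = SCLP_CMDW_WRITE_EVENT_DATA;
	request.status = SCLP_REQ_FILLED;
	request.callback = req_callback;

	rc = sclp_add_request(&request);
	if (rc)
		return rc;

	wait_for_completion(&req_done);
	return request.status == SCLP_REQ_DONE ? 0 : -EIO;
}
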
/drivers/gpu/drm/radeon/
radeon_ioc32.c:137 drm_radeon_stipple_t __user *request; in compat_radeon_cp_stipple() local
143 request = compat_alloc_user_space(sizeof(*request)); in compat_radeon_cp_stipple()
144 if (!access_ok(VERIFY_WRITE, request, sizeof(*request)) in compat_radeon_cp_stipple()
146 &request->mask)) in compat_radeon_cp_stipple()
149 return drm_ioctl(file, DRM_IOCTL_RADEON_STIPPLE, (unsigned long)request); in compat_radeon_cp_stipple()
171 drm_radeon_texture_t __user *request; in compat_radeon_cp_texture() local
183 request = compat_alloc_user_space(sizeof(*request) + sizeof(*image)); in compat_radeon_cp_texture()
184 if (!access_ok(VERIFY_WRITE, request, in compat_radeon_cp_texture()
185 sizeof(*request) + sizeof(*image))) in compat_radeon_cp_texture()
187 image = (drm_radeon_tex_image_t __user *) (request + 1); in compat_radeon_cp_texture()
[all …]
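
radeon_ioc32.c (like drm_ioc32.c and i915_ioc32.c further down) is a 32-bit compat shim: it reads the 32-bit ioctl argument, builds a native-layout copy in user space with compat_alloc_user_space(), rewrites the pointer-sized fields with __put_user(), and forwards to the regular ioctl path. A minimal sketch of that shim shape; the drm_example32_t / struct drm_example layouts are purely illustrative, and only the compat_alloc_user_space / access_ok / __put_user / drm_ioctl sequence is taken from the hits above.

#include <linux/compat.h>
#include <linux/errno.h>
#include <linux/types.h>
#include <linux/uaccess.h>
#include <drm/drmP.h>		/* drm_ioctl() in this kernel era */
#include <drm/radeon_drm.h>	/* DRM_IOCTL_RADEON_STIPPLE */

/* illustrative 32-bit and native layouts of the same request */
typedef struct { u32 mask; } drm_example32_t;
struct drm_example { unsigned int __user *mask; };

static int compat_example_ioctl(struct file *file, unsigned long arg)
{
	drm_example32_t req32;
	struct drm_example __user *request;

	if (copy_from_user(&req32, (void __user *)arg, sizeof(req32)))
		return -EFAULT;

	/* rebuild the request with native field sizes in user space */
	request = compat_alloc_user_space(sizeof(*request));
	if (!access_ok(VERIFY_WRITE, request, sizeof(*request)) ||
	    __put_user((unsigned int __user *)(unsigned long)req32.mask,
		       &request->mask))
		return -EFAULT;

	/* forward to the native ioctl handler */
	return drm_ioctl(file, DRM_IOCTL_RADEON_STIPPLE, (unsigned long)request);
}
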
/drivers/staging/lustre/lustre/ptlrpc/
niobuf.c:484 int ptl_send_rpc(struct ptlrpc_request *request, int noreply) in ptl_send_rpc() argument
492 struct obd_device *obd = request->rq_import->imp_obd; in ptl_send_rpc()
497 LASSERT(request->rq_type == PTL_RPC_MSG_REQUEST); in ptl_send_rpc()
498 LASSERT(request->rq_wait_ctx == 0); in ptl_send_rpc()
502 LASSERT(!request->rq_receiving_reply); in ptl_send_rpc()
503 LASSERT(!((lustre_msg_get_flags(request->rq_reqmsg) & MSG_REPLAY) && in ptl_send_rpc()
504 (request->rq_import->imp_state == LUSTRE_IMP_FULL))); in ptl_send_rpc()
510 spin_lock(&request->rq_lock); in ptl_send_rpc()
511 request->rq_err = 1; in ptl_send_rpc()
512 spin_unlock(&request->rq_lock); in ptl_send_rpc()
[all …]
client.c:542 struct ptlrpc_request *request; in ptlrpc_prep_req_from_pool() local
561 request = list_entry(pool->prp_req_list.next, struct ptlrpc_request, in ptlrpc_prep_req_from_pool()
563 list_del_init(&request->rq_list); in ptlrpc_prep_req_from_pool()
566 LASSERT(request->rq_reqbuf); in ptlrpc_prep_req_from_pool()
567 LASSERT(request->rq_pool); in ptlrpc_prep_req_from_pool()
569 reqbuf = request->rq_reqbuf; in ptlrpc_prep_req_from_pool()
570 memset(request, 0, sizeof(*request)); in ptlrpc_prep_req_from_pool()
571 request->rq_reqbuf = reqbuf; in ptlrpc_prep_req_from_pool()
572 request->rq_reqbuf_len = pool->prp_rq_size; in ptlrpc_prep_req_from_pool()
573 request->rq_pool = pool; in ptlrpc_prep_req_from_pool()
[all …]
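
ptlrpc_prep_req_from_pool() above illustrates recycling a pooled request: take it off the pool's free list, save the preallocated rq_reqbuf pointer, zero the whole structure, then restore the buffer, its size, and the owning pool. A generic sketch of that "reset but keep the preallocated buffer" idiom; the demo_pool / demo_request types are hypothetical stand-ins for ptlrpc_request_pool and ptlrpc_request, and any pool locking is omitted.

#include <linux/list.h>
#include <linux/string.h>

/* hypothetical stand-ins for the ptlrpc structures in the listing */
struct demo_pool {
	struct list_head prp_req_list;	/* free, preallocated requests */
	int prp_rq_size;		/* size of each request's buffer */
};

struct demo_request {
	struct list_head rq_list;
	void *rq_reqbuf;		/* buffer allocated with the request */
	int rq_reqbuf_len;
	struct demo_pool *rq_pool;
};

static struct demo_request *take_from_pool(struct demo_pool *pool)
{
	struct demo_request *request;
	void *reqbuf;

	if (list_empty(&pool->prp_req_list))
		return NULL;

	request = list_entry(pool->prp_req_list.next, struct demo_request, rq_list);
	list_del_init(&request->rq_list);

	/* wipe the request but keep the buffer that was allocated with it */
	reqbuf = request->rq_reqbuf;
	memset(request, 0, sizeof(*request));
	request->rq_reqbuf = reqbuf;
	request->rq_reqbuf_len = pool->prp_rq_size;
	request->rq_pool = pool;

	return request;
}
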
/drivers/usb/musb/
musb_gadget.c:56 static inline void map_dma_buffer(struct musb_request *request, in map_dma_buffer() argument
62 request->map_state = UN_MAPPED; in map_dma_buffer()
73 musb_ep->packet_sz, request->request.buf, in map_dma_buffer()
74 request->request.length); in map_dma_buffer()
78 if (request->request.dma == DMA_ADDR_INVALID) { in map_dma_buffer()
84 request->request.buf, in map_dma_buffer()
85 request->request.length, in map_dma_buffer()
86 request->tx in map_dma_buffer()
93 request->request.dma = dma_addr; in map_dma_buffer()
94 request->map_state = MUSB_MAPPED; in map_dma_buffer()
[all …]
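
The truncated map_dma_buffer() hit is the usual gadget-driver step of DMA-mapping a usb_request's buffer before handing it to the controller, with the direction chosen by whether the endpoint transmits or receives. A generic sketch of that step using dma_map_single(); the dev and is_tx parameters are stand-ins for what musb derives from its own structures (it keys the direction off request->tx and records the result in request->map_state), so this is an assumption about the truncated body, not a copy of it.

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/types.h>
#include <linux/usb/gadget.h>

/* sketch: map a gadget request's buffer for DMA before queuing it to hardware */
static int map_request_buffer(struct device *dev, struct usb_request *req,
			      bool is_tx)
{
	req->dma = dma_map_single(dev, req->buf, req->length,
				  is_tx ? DMA_TO_DEVICE : DMA_FROM_DEVICE);
	if (dma_mapping_error(dev, req->dma))
		return -ENOMEM;	/* leave req->dma untrusted on failure */
	return 0;
}
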
musb_gadget_ep0.c:265 struct musb_request *request; in service_zero_data_request() local
308 request = next_request(musb_ep); in service_zero_data_request()
309 if (!musb_ep->busy && request) { in service_zero_data_request()
311 musb_ep_restart(musb, request); in service_zero_data_request()
492 struct musb_request *request; in ep0_rxstate() local
496 request = next_ep0_request(musb); in ep0_rxstate()
497 req = &request->request; in ep0_rxstate()
550 struct usb_request *request; in ep0_txstate() local
561 request = &req->request; in ep0_txstate()
564 fifo_src = (u8 *) request->buf + request->actual; in ep0_txstate()
[all …]
/drivers/gpu/drm/
drm_ioc32.c:440 struct drm_buf_info __user *request; in compat_drm_infobufs() local
457 nbytes = sizeof(*request) + count * sizeof(struct drm_buf_desc); in compat_drm_infobufs()
458 request = compat_alloc_user_space(nbytes); in compat_drm_infobufs()
459 if (!request) in compat_drm_infobufs()
461 list = (struct drm_buf_desc *) (request + 1); in compat_drm_infobufs()
463 if (__put_user(count, &request->count) in compat_drm_infobufs()
464 || __put_user(list, &request->list)) in compat_drm_infobufs()
467 err = drm_ioctl(file, DRM_IOCTL_INFO_BUFS, (unsigned long)request); in compat_drm_infobufs()
471 if (__get_user(actual, &request->count)) in compat_drm_infobufs()
504 struct drm_buf_map __user *request; in compat_drm_mapbufs() local
[all …]
drm_agpsupport.c:197 int drm_agp_alloc(struct drm_device *dev, struct drm_agp_buffer *request) in drm_agp_alloc() argument
209 pages = (request->size + PAGE_SIZE - 1) / PAGE_SIZE; in drm_agp_alloc()
210 type = (u32) request->type; in drm_agp_alloc()
222 request->handle = entry->handle; in drm_agp_alloc()
223 request->physical = memory->physical; in drm_agp_alloc()
233 struct drm_agp_buffer *request = data; in drm_agp_alloc_ioctl() local
235 return drm_agp_alloc(dev, request); in drm_agp_alloc_ioctl()
271 int drm_agp_unbind(struct drm_device *dev, struct drm_agp_binding *request) in drm_agp_unbind() argument
278 if (!(entry = drm_agp_lookup_entry(dev, request->handle))) in drm_agp_unbind()
293 struct drm_agp_binding *request = data; in drm_agp_unbind_ioctl() local
[all …]
drm_bufs.c:517 struct drm_map *request = data; in drm_legacy_rmmap_ioctl() local
525 r_list->user_token == (unsigned long)request->handle && in drm_legacy_rmmap_ioctl()
600 struct drm_buf_desc *request) in drm_legacy_addbufs_agp() argument
621 count = request->count; in drm_legacy_addbufs_agp()
622 order = order_base_2(request->size); in drm_legacy_addbufs_agp()
625 alignment = (request->flags & _DRM_PAGE_ALIGN) in drm_legacy_addbufs_agp()
631 agp_offset = dev->agp->base + request->agp_start; in drm_legacy_addbufs_agp()
752 request->count = entry->buf_count; in drm_legacy_addbufs_agp()
753 request->size = size; in drm_legacy_addbufs_agp()
764 struct drm_buf_desc *request) in drm_legacy_addbufs_pci() argument
[all …]
/drivers/net/hyperv/
rndis_filter.c:81 struct rndis_request *request; in get_rndis_request() local
86 request = kzalloc(sizeof(struct rndis_request), GFP_KERNEL); in get_rndis_request()
87 if (!request) in get_rndis_request()
90 init_completion(&request->wait_event); in get_rndis_request()
92 rndis_msg = &request->request_msg; in get_rndis_request()
96 request->pkt.q_idx = 0; in get_rndis_request()
108 list_add_tail(&request->list_ent, &dev->req_list); in get_rndis_request()
111 return request; in get_rndis_request()
248 struct rndis_request *request) in rndis_set_link_state() argument
253 query_complete = &request->response_msg.msg.query_complete; in rndis_set_link_state()
[all …]
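
get_rndis_request() above is the common "allocate a tracked request, arm a completion, queue it on the device's request list" idiom; the issuer later waits on the completion while the response handler looks the request up on the list. A condensed sketch with hypothetical demo_* types; the spinlock protecting the list is an assumption, since the locking lines fall outside the snippet.

#include <linux/completion.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

/* hypothetical request-tracking structures, mirroring get_rndis_request() */
struct demo_dev {
	spinlock_t request_lock;	/* assumed: protects req_list */
	struct list_head req_list;
};

struct demo_request {
	struct list_head list_ent;
	struct completion wait_event;
};

static struct demo_request *get_request(struct demo_dev *dev)
{
	struct demo_request *request;
	unsigned long flags;

	request = kzalloc(sizeof(*request), GFP_KERNEL);
	if (!request)
		return NULL;

	/* the issuer blocks on this until the response handler completes it */
	init_completion(&request->wait_event);

	spin_lock_irqsave(&dev->request_lock, flags);
	list_add_tail(&request->list_ent, &dev->req_list);
	spin_unlock_irqrestore(&dev->request_lock, flags);

	return request;
}
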
/drivers/staging/rtl8712/
usb_ops.c:39 u8 request; in usb_read8() local
47 request = 0x05; in usb_read8()
52 r8712_usbctrl_vendorreq(pintfpriv, request, wvalue, index, &data, len, in usb_read8()
59 u8 request; in usb_read16() local
67 request = 0x05; in usb_read16()
72 r8712_usbctrl_vendorreq(pintfpriv, request, wvalue, index, &data, len, in usb_read16()
79 u8 request; in usb_read32() local
87 request = 0x05; in usb_read32()
92 r8712_usbctrl_vendorreq(pintfpriv, request, wvalue, index, &data, len, in usb_read32()
99 u8 request; in usb_write8() local
[all …]
/drivers/firewire/
core-transaction.c:633 struct fw_request *request; in free_response_callback() local
635 request = container_of(packet, struct fw_request, response); in free_response_callback()
636 kfree(request); in free_response_callback()
751 struct fw_request *request; in allocate_request() local
784 request = kmalloc(sizeof(*request) + length, GFP_ATOMIC); in allocate_request()
785 if (request == NULL) in allocate_request()
788 request->response.speed = p->speed; in allocate_request()
789 request->response.timestamp = in allocate_request()
791 request->response.generation = p->generation; in allocate_request()
792 request->response.ack = 0; in allocate_request()
[all …]
/drivers/s390/cio/
chsc_sch.c:60 struct chsc_request *request = private->request; in chsc_subchannel_irq() local
68 if (!request) { in chsc_subchannel_irq()
73 private->request = NULL; in chsc_subchannel_irq()
74 memcpy(&request->irb, irb, sizeof(*irb)); in chsc_subchannel_irq()
76 complete(&request->completion); in chsc_subchannel_irq()
114 if (private->request) { in chsc_subchannel_remove()
115 complete(&private->request->completion); in chsc_subchannel_remove()
243 struct chsc_request *request) in chsc_async() argument
255 if (private->request) { in chsc_async()
273 private->request = request; in chsc_async()
[all …]
/drivers/infiniband/hw/mthca/
mthca_profile.c:65 struct mthca_profile *request, in mthca_make_profile() argument
99 profile[MTHCA_RES_UARC].size = request->uarc_size; in mthca_make_profile()
101 profile[MTHCA_RES_QP].num = request->num_qp; in mthca_make_profile()
102 profile[MTHCA_RES_SRQ].num = request->num_srq; in mthca_make_profile()
103 profile[MTHCA_RES_EQP].num = request->num_qp; in mthca_make_profile()
104 profile[MTHCA_RES_RDB].num = request->num_qp * request->rdb_per_qp; in mthca_make_profile()
105 profile[MTHCA_RES_CQ].num = request->num_cq; in mthca_make_profile()
107 profile[MTHCA_RES_MCG].num = request->num_mcg; in mthca_make_profile()
108 profile[MTHCA_RES_MPT].num = request->num_mpt; in mthca_make_profile()
109 profile[MTHCA_RES_MTT].num = request->num_mtt; in mthca_make_profile()
[all …]
/drivers/scsi/
storvsc_drv.c:617 struct storvsc_cmd_request *request; in handle_multichannel_storage() local
636 request = &stor_device->init_request; in handle_multichannel_storage()
637 vstor_packet = &request->vstor_packet; in handle_multichannel_storage()
657 memset(request, 0, sizeof(struct storvsc_cmd_request)); in handle_multichannel_storage()
658 init_completion(&request->wait_event); in handle_multichannel_storage()
666 (unsigned long)request, in handle_multichannel_storage()
673 t = wait_for_completion_timeout(&request->wait_event, 10*HZ); in handle_multichannel_storage()
692 struct storvsc_cmd_request *request; in storvsc_channel_init() local
702 request = &stor_device->init_request; in storvsc_channel_init()
703 vstor_packet = &request->vstor_packet; in storvsc_channel_init()
[all …]
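
Both storvsc paths above reuse the device's init_request the same way: reset it, arm its completion, send it to the host, then bound the wait with wait_for_completion_timeout(). A stripped-down sketch of that sequence; send_packet() is a hypothetical stand-in for the vmbus send call, whose arguments are truncated in the listing.

#include <linux/completion.h>
#include <linux/errno.h>
#include <linux/jiffies.h>
#include <linux/string.h>

struct demo_cmd_request {
	struct completion wait_event;
	/* command payload elided */
};

/* hypothetical transport hook standing in for the truncated vmbus send */
int send_packet(struct demo_cmd_request *request);

static int issue_and_wait(struct demo_cmd_request *request)
{
	unsigned long t;
	int ret;

	memset(request, 0, sizeof(*request));
	init_completion(&request->wait_event);

	ret = send_packet(request);
	if (ret)
		return ret;

	/* handle_multichannel_storage() gives the host 10 seconds to answer */
	t = wait_for_completion_timeout(&request->wait_event, 10 * HZ);
	if (t == 0)
		return -ETIMEDOUT;

	return 0;
}
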
/drivers/gpu/drm/i915/
i915_ioc32.c:52 drm_i915_getparam_t __user *request; in compat_i915_getparam() local
57 request = compat_alloc_user_space(sizeof(*request)); in compat_i915_getparam()
58 if (!access_ok(VERIFY_WRITE, request, sizeof(*request)) in compat_i915_getparam()
59 || __put_user(req32.param, &request->param) in compat_i915_getparam()
61 &request->value)) in compat_i915_getparam()
65 (unsigned long)request); in compat_i915_getparam()
/drivers/isdn/hardware/eicon/
dadapter.c:112 if (d->request) { in diva_didd_add_descriptor()
113 MAdapter.request = d->request; in diva_didd_add_descriptor()
114 dprintf = (DIVA_DI_PRINTF)d->request; in diva_didd_add_descriptor()
116 DBG_TRC(("DIMAINT registered, dprintf=%08x", d->request)) in diva_didd_add_descriptor()
120 MAdapter.request = (IDI_CALL)no_printf; in diva_didd_add_descriptor()
132 DBG_TRC(("Add adapter[%d], request=%08x", (i + 1), d->request)) in diva_didd_add_descriptor()
145 static int diva_didd_remove_descriptor(IDI_CALL request) { in diva_didd_remove_descriptor() argument
148 if (request == MAdapter.request) { in diva_didd_remove_descriptor()
152 MAdapter.request = (IDI_CALL)no_printf; in diva_didd_remove_descriptor()
156 if (HandleTable[i].request == request) { in diva_didd_remove_descriptor()
[all …]
/drivers/s390/block/
scm_blk.c:46 kfree(scmrq->request); in __scm_free_rq()
80 scmrq->request = kcalloc(nr_requests_per_io, sizeof(scmrq->request[0]), in __scm_alloc_rq()
82 if (!scmrq->request) in __scm_alloc_rq()
134 for (i = 0; i < nr_requests_per_io && scmrq->request[i]; i++) { in scm_request_done()
148 static bool scm_permit_request(struct scm_blk_dev *bdev, struct request *req) in scm_permit_request()
185 int pos = scmrq->aob->request.msb_count; in scm_request_prepare()
187 struct request *req = scmrq->request[pos]; in scm_request_prepare()
197 scmrq->aob->request.msb_count++; in scm_request_prepare()
215 struct request *req) in scm_request_set()
217 scmrq->request[scmrq->aob->request.msb_count] = req; in scm_request_set()
[all …]
scm_blk_cluster.c:60 static bool clusters_intersect(struct request *A, struct request *B) in clusters_intersect()
77 struct request *req = scmrq->request[scmrq->aob->request.msb_count]; in scm_reserve_cluster()
95 for (pos = 0; pos < iter->aob->request.msb_count; pos++) { in scm_reserve_cluster()
96 if (clusters_intersect(req, iter->request[pos]) && in scm_reserve_cluster()
98 rq_data_dir(iter->request[pos]) == WRITE)) { in scm_reserve_cluster()
134 struct request *req = scmrq->request[0]; in scm_prepare_cluster_request()
164 scmrq->aob->request.msb_count = 1; in scm_prepare_cluster_request()
199 int pos = scmrq->aob->request.msb_count; in scm_need_cluster_request()
201 if (rq_data_dir(scmrq->request[pos]) == READ) in scm_need_cluster_request()
204 return blk_rq_bytes(scmrq->request[pos]) < CLUSTER_SIZE; in scm_need_cluster_request()
