/drivers/net/ethernet/brocade/bna/ |
D | bfa_ioc_ct.c |
     51  static enum bfa_status bfa_ioc_ct_pll_init(void __iomem *rb,
     53  static enum bfa_status bfa_ioc_ct2_pll_init(void __iomem *rb,
    245  void __iomem *rb;  in bfa_ioc_ct_reg_init()  local
    248  rb = bfa_ioc_bar0(ioc);  in bfa_ioc_ct_reg_init()
    250  ioc->ioc_regs.hfn_mbox = rb + ct_fnreg[pcifn].hfn_mbox;  in bfa_ioc_ct_reg_init()
    251  ioc->ioc_regs.lpu_mbox = rb + ct_fnreg[pcifn].lpu_mbox;  in bfa_ioc_ct_reg_init()
    252  ioc->ioc_regs.host_page_num_fn = rb + ct_fnreg[pcifn].hfn_pgn;  in bfa_ioc_ct_reg_init()
    255  ioc->ioc_regs.heartbeat = rb + BFA_IOC0_HBEAT_REG;  in bfa_ioc_ct_reg_init()
    256  ioc->ioc_regs.ioc_fwstate = rb + BFA_IOC0_STATE_REG;  in bfa_ioc_ct_reg_init()
    257  ioc->ioc_regs.alt_ioc_fwstate = rb + BFA_IOC1_STATE_REG;  in bfa_ioc_ct_reg_init()
    [all …]
|
/drivers/scsi/bfa/ |
D | bfa_ioc_ct.c |
    186  void __iomem *rb;  in bfa_ioc_ct_reg_init()  local
    189  rb = bfa_ioc_bar0(ioc);  in bfa_ioc_ct_reg_init()
    191  ioc->ioc_regs.hfn_mbox = rb + ct_fnreg[pcifn].hfn_mbox;  in bfa_ioc_ct_reg_init()
    192  ioc->ioc_regs.lpu_mbox = rb + ct_fnreg[pcifn].lpu_mbox;  in bfa_ioc_ct_reg_init()
    193  ioc->ioc_regs.host_page_num_fn = rb + ct_fnreg[pcifn].hfn_pgn;  in bfa_ioc_ct_reg_init()
    196  ioc->ioc_regs.heartbeat = rb + BFA_IOC0_HBEAT_REG;  in bfa_ioc_ct_reg_init()
    197  ioc->ioc_regs.ioc_fwstate = rb + BFA_IOC0_STATE_REG;  in bfa_ioc_ct_reg_init()
    198  ioc->ioc_regs.alt_ioc_fwstate = rb + BFA_IOC1_STATE_REG;  in bfa_ioc_ct_reg_init()
    199  ioc->ioc_regs.hfn_mbox_cmd = rb + ct_p0reg[pcifn].hfn;  in bfa_ioc_ct_reg_init()
    200  ioc->ioc_regs.lpu_mbox_cmd = rb + ct_p0reg[pcifn].lpu;  in bfa_ioc_ct_reg_init()
    [all …]
|
D | bfa_ioc_cb.c |
    110  void __iomem *rb;  in bfa_ioc_cb_reg_init()  local
    113  rb = bfa_ioc_bar0(ioc);  in bfa_ioc_cb_reg_init()
    115  ioc->ioc_regs.hfn_mbox = rb + iocreg_fnreg[pcifn].hfn_mbox;  in bfa_ioc_cb_reg_init()
    116  ioc->ioc_regs.lpu_mbox = rb + iocreg_fnreg[pcifn].lpu_mbox;  in bfa_ioc_cb_reg_init()
    117  ioc->ioc_regs.host_page_num_fn = rb + iocreg_fnreg[pcifn].hfn_pgn;  in bfa_ioc_cb_reg_init()
    120  ioc->ioc_regs.heartbeat = rb + BFA_IOC0_HBEAT_REG;  in bfa_ioc_cb_reg_init()
    121  ioc->ioc_regs.ioc_fwstate = rb + BFA_IOC0_STATE_REG;  in bfa_ioc_cb_reg_init()
    122  ioc->ioc_regs.alt_ioc_fwstate = rb + BFA_IOC1_STATE_REG;  in bfa_ioc_cb_reg_init()
    124  ioc->ioc_regs.heartbeat = (rb + BFA_IOC1_HBEAT_REG);  in bfa_ioc_cb_reg_init()
    125  ioc->ioc_regs.ioc_fwstate = (rb + BFA_IOC1_STATE_REG);  in bfa_ioc_cb_reg_init()
    [all …]
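All of the bfa matches above share one idiom: `rb` holds the ioremapped BAR 0 base returned by bfa_ioc_bar0(), and individual register addresses are formed by adding fixed offsets to it. A minimal sketch of that idiom, assuming made-up offsets and structure names (only `void __iomem *`, readl() and <linux/io.h> are real kernel interfaces):

```c
#include <linux/io.h>
#include <linux/types.h>

/* Hypothetical per-IOC register offsets (illustrative values only). */
#define DEMO_HBEAT_REG   0x0100
#define DEMO_FWSTATE_REG 0x0104

struct demo_regs {
	void __iomem *heartbeat;
	void __iomem *fwstate;
};

static void demo_reg_init(struct demo_regs *regs, void __iomem *rb)
{
	/* rb is the ioremapped BAR base; each register is base + offset. */
	regs->heartbeat = rb + DEMO_HBEAT_REG;
	regs->fwstate   = rb + DEMO_FWSTATE_REG;
}

static u32 demo_read_fwstate(struct demo_regs *regs)
{
	return readl(regs->fwstate);	/* MMIO read through the __iomem cookie */
}
```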
|
/drivers/block/drbd/ |
D | drbd_interval.c |
     11  struct drbd_interval *this = rb_entry(node, struct drbd_interval, rb);  in interval_end()
     27  if (node->rb.rb_left) {  in compute_subtree_last()
     28  sector_t left = interval_end(node->rb.rb_left);  in compute_subtree_last()
     32  if (node->rb.rb_right) {  in compute_subtree_last()
     33  sector_t right = interval_end(node->rb.rb_right);  in compute_subtree_last()
     40  static void augment_propagate(struct rb_node *rb, struct rb_node *stop)  in augment_propagate()  argument
     42  while (rb != stop) {  in augment_propagate()
     43  struct drbd_interval *node = rb_entry(rb, struct drbd_interval, rb);  in augment_propagate()
     48  rb = rb_parent(&node->rb);  in augment_propagate()
     54  struct drbd_interval *old = rb_entry(rb_old, struct drbd_interval, rb);  in augment_copy()
    [all …]
|
D | drbd_interval.h |
      8  struct rb_node rb;  member
     18  RB_CLEAR_NODE(&i->rb);  in drbd_clear_interval()
     23  return RB_EMPTY_NODE(&i->rb);  in drbd_interval_empty()
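drbd_interval.h shows the usual way an object participates in an rb-tree: it embeds a struct rb_node and uses RB_CLEAR_NODE()/RB_EMPTY_NODE() to track whether it is currently linked into a tree. A minimal sketch of that pattern with an illustrative struct (names other than the rbtree macros are assumptions):

```c
#include <linux/rbtree.h>
#include <linux/types.h>

/* An object embeds its own rb_node; clearing the node marks it as
 * "not currently in any tree", which RB_EMPTY_NODE() can then test. */
struct example_interval {
	struct rb_node rb;	/* link into the containing rb-tree */
	sector_t sector;	/* illustrative key */
};

static inline void example_clear(struct example_interval *i)
{
	RB_CLEAR_NODE(&i->rb);
}

static inline bool example_empty(struct example_interval *i)
{
	return RB_EMPTY_NODE(&i->rb);
}
```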
|
/drivers/xen/xenbus/ |
D | xenbus_dev_frontend.c |
    124  struct read_buffer *rb;  in xenbus_file_read()  local
    142  rb = list_entry(u->read_buffers.next, struct read_buffer, list);  in xenbus_file_read()
    145  unsigned sz = min((unsigned)len - i, rb->len - rb->cons);  in xenbus_file_read()
    147  ret = copy_to_user(ubuf + i, &rb->msg[rb->cons], sz);  in xenbus_file_read()
    150  rb->cons += sz - ret;  in xenbus_file_read()
    159  if (rb->cons == rb->len) {  in xenbus_file_read()
    160  list_del(&rb->list);  in xenbus_file_read()
    161  kfree(rb);  in xenbus_file_read()
    164  rb = list_entry(u->read_buffers.next,  in xenbus_file_read()
    185  struct read_buffer *rb;  in queue_reply()  local
    [all …]
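The xenbus_file_read() matches show the common consumer loop for a queued read buffer: copy what fits, advance the `cons` index by the bytes actually copied (copy_to_user() returns the count that could *not* be copied), and free the buffer once drained. A hedged sketch with a simplified buffer type and no locking (all names here are illustrative, not the xenbus ones):

```c
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/uaccess.h>

struct demo_read_buffer {
	struct list_head list;
	unsigned int cons;	/* bytes already handed to userspace */
	unsigned int len;	/* total bytes stored in msg[] */
	char msg[];
};

static ssize_t demo_consume(struct list_head *queue, char __user *ubuf,
			    size_t len)
{
	struct demo_read_buffer *rb;
	unsigned int sz;
	unsigned long not_copied;

	if (list_empty(queue))
		return 0;

	rb = list_first_entry(queue, struct demo_read_buffer, list);
	sz = min_t(unsigned int, len, rb->len - rb->cons);

	/* copy_to_user() returns the number of bytes NOT copied, so the
	 * consumer index advances only by what actually reached userspace. */
	not_copied = copy_to_user(ubuf, &rb->msg[rb->cons], sz);
	rb->cons += sz - not_copied;

	if (rb->cons == rb->len) {	/* buffer fully drained: release it */
		list_del(&rb->list);
		kfree(rb);
	}

	return not_copied ? -EFAULT : sz;
}
```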
|
/drivers/target/iscsi/ |
D | iscsi_target_configfs.c |
     60  ssize_t rb;  in lio_target_np_show_sctp()  local
     64  rb = sprintf(page, "1\n");  in lio_target_np_show_sctp()
     66  rb = sprintf(page, "0\n");  in lio_target_np_show_sctp()
     68  return rb;  in lio_target_np_show_sctp()
    135  ssize_t rb;  in lio_target_np_show_iser()  local
    139  rb = sprintf(page, "1\n");  in lio_target_np_show_iser()
    141  rb = sprintf(page, "0\n");  in lio_target_np_show_iser()
    143  return rb;  in lio_target_np_show_iser()
    582  ssize_t rb; \
    587  rb = snprintf(page, PAGE_SIZE, \
    [all …]
|
/drivers/tty/hvc/ |
D | hvc_iucv.c |
    219  struct iucv_tty_buffer *rb;  in hvc_iucv_write()  local
    237  rb = list_first_entry(&priv->tty_inqueue, struct iucv_tty_buffer, list);  in hvc_iucv_write()
    240  if (!rb->mbuf) { /* message not yet received ... */  in hvc_iucv_write()
    243  rb->mbuf = kmalloc(rb->msg.length, GFP_ATOMIC | GFP_DMA);  in hvc_iucv_write()
    244  if (!rb->mbuf)  in hvc_iucv_write()
    247  rc = __iucv_message_receive(priv->path, &rb->msg, 0,  in hvc_iucv_write()
    248  rb->mbuf, rb->msg.length, NULL);  in hvc_iucv_write()
    260  if (rc || (rb->mbuf->version != MSG_VERSION) ||  in hvc_iucv_write()
    261  (rb->msg.length != MSG_SIZE(rb->mbuf->datalen)))  in hvc_iucv_write()
    265  switch (rb->mbuf->type) {  in hvc_iucv_write()
    [all …]
|
/drivers/mtd/ubi/ |
D | attach.c |
    268  av = rb_entry(parent, struct ubi_ainf_volume, rb);  in add_volume()
    295  rb_link_node(&av->rb, parent, p);  in add_volume()
    296  rb_insert_color(&av->rb, &ai->volumes);  in add_volume()
    479  aeb = rb_entry(parent, struct ubi_ainf_peb, u.rb);  in ubi_add_to_av()
    590  rb_link_node(&aeb->u.rb, parent, p);  in ubi_add_to_av()
    591  rb_insert_color(&aeb->u.rb, &av->root);  in ubi_add_to_av()
    610  av = rb_entry(p, struct ubi_ainf_volume, rb);  in ubi_find_av()
    631  struct rb_node *rb;  in ubi_remove_av()  local
    636  while ((rb = rb_first(&av->root))) {  in ubi_remove_av()
    637  aeb = rb_entry(rb, struct ubi_ainf_peb, u.rb);  in ubi_remove_av()
    [all …]
|
D | wl.c |
    196  e1 = rb_entry(parent, struct ubi_wl_entry, u.rb);  in wl_tree_add()
    211  rb_link_node(&e->u.rb, parent, p);  in wl_tree_add()
    212  rb_insert_color(&e->u.rb, root);  in wl_tree_add()
    305  e1 = rb_entry(p, struct ubi_wl_entry, u.rb);  in in_wl_tree()
    365  e = rb_entry(rb_first(root), struct ubi_wl_entry, u.rb);  in find_wl_entry()
    372  e1 = rb_entry(p, struct ubi_wl_entry, u.rb);  in find_wl_entry()
    406  first = rb_entry(rb_first(root), struct ubi_wl_entry, u.rb);  in find_mean_wl_entry()
    407  last = rb_entry(rb_last(root), struct ubi_wl_entry, u.rb);  in find_mean_wl_entry()
    410  e = rb_entry(root->rb_node, struct ubi_wl_entry, u.rb);  in find_mean_wl_entry()
    419  struct ubi_wl_entry, u.rb);  in find_mean_wl_entry()
    [all …]
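wl_tree_add() uses the canonical rb-tree insertion idiom: walk down from the root comparing keys to choose a slot, then rb_link_node() followed by rb_insert_color() to rebalance. A self-contained sketch of that idiom with an illustrative entry type (not UBI's real structures):

```c
#include <linux/rbtree.h>

struct demo_entry {
	struct rb_node rb;
	int key;	/* illustrative sort key, e.g. an erase counter */
};

static void demo_tree_add(struct rb_root *root, struct demo_entry *e)
{
	struct rb_node **p = &root->rb_node, *parent = NULL;

	/* Walk down the tree to find the insertion point. */
	while (*p) {
		struct demo_entry *cur;

		parent = *p;
		cur = rb_entry(parent, struct demo_entry, rb);
		if (e->key < cur->key)
			p = &parent->rb_left;
		else
			p = &parent->rb_right;
	}

	/* Link the new node at the found slot, then rebalance/recolor. */
	rb_link_node(&e->rb, parent, p);
	rb_insert_color(&e->rb, root);
}
```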
|
D | ubi.h |
    167  struct rb_node rb;  member
    189  struct rb_node rb;  member
    615  struct rb_node rb;  member
    650  struct rb_node rb;  member
    874  #define ubi_rb_for_each_entry(rb, pos, root, member) \  argument
    875          for (rb = rb_first(root), \
    876               pos = (rb ? container_of(rb, typeof(*pos), member) : NULL); \
    877               rb; \
    878               rb = rb_next(rb), \
    879               pos = (rb ? container_of(rb, typeof(*pos), member) : NULL))
    [all …]
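ubi_rb_for_each_entry() is just the rb_first()/rb_next() walk wrapped with container_of(). A hypothetical usage fragment, iterating wear-leveling entries through their u.rb link inside some UBI function; the tree root (&ubi->used) and the printed fields are assumptions based on the other UBI excerpts:

```c
/* Illustrative fragment only: assumes a struct ubi_device *ubi with a
 * populated 'used' rb_root of struct ubi_wl_entry, as in wl.c. */
struct rb_node *rb;
struct ubi_wl_entry *e;

ubi_rb_for_each_entry(rb, e, &ubi->used, u.rb)
	pr_info("PEB %d: erase counter %d\n", e->pnum, e->ec);
```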
|
D | eba.c |
    105  le = rb_entry(p, struct ubi_ltree_entry, rb);  in ltree_lookup()
    171  le1 = rb_entry(parent, struct ubi_ltree_entry, rb);  in ltree_add_entry()
    186  rb_link_node(&le->rb, parent, p);  in ltree_add_entry()
    187  rb_insert_color(&le->rb, &ubi->ltree);  in ltree_add_entry()
    232  rb_erase(&le->rb, &ubi->ltree);  in leb_read_unlock()
    284  rb_erase(&le->rb, &ubi->ltree);  in leb_write_trylock()
    308  rb_erase(&le->rb, &ubi->ltree);  in leb_write_unlock()
   1234  struct rb_node *rb;  in self_check_eba()  local
   1274  ubi_rb_for_each_entry(rb, aeb, &av->root, u.rb)  in self_check_eba()
   1281  ubi_rb_for_each_entry(rb, aeb, &av->root, u.rb)  in self_check_eba()
    [all …]
|
D | fastmap.c |
    126  av = rb_entry(parent, struct ubi_ainf_volume, rb);  in add_vol()
    149  rb_link_node(&av->rb, parent, p);  in add_vol()
    150  rb_insert_color(&av->rb, &ai->volumes);  in add_vol()
    174  tmp_aeb = rb_entry(parent, struct ubi_ainf_peb, u.rb);  in assign_aeb_to_av()
    189  rb_link_node(&aeb->u.rb, parent, p);  in assign_aeb_to_av()
    190  rb_insert_color(&aeb->u.rb, &av->root);  in assign_aeb_to_av()
    213  aeb = rb_entry(parent, struct ubi_ainf_peb, u.rb);  in update_vol()
    284  rb_link_node(&new_aeb->u.rb, parent, p);  in update_vol()
    285  rb_insert_color(&new_aeb->u.rb, &av->root);  in update_vol()
    317  tmp_av = rb_entry(parent, struct ubi_ainf_volume, rb);  in process_pool_aeb()
    [all …]
|
/drivers/spi/ |
D | spi-mpc52xx-psc.c |
    138  unsigned rb = 0; /* number of bytes receieved */  in mpc52xx_psc_spi_transfer_rxtx()  local
    152  while (rb < t->len) {  in mpc52xx_psc_spi_transfer_rxtx()
    153  if (t->len - rb > MPC52xx_PSC_BUFSIZE) {  in mpc52xx_psc_spi_transfer_rxtx()
    158  rfalarm = MPC52xx_PSC_BUFSIZE - (t->len - rb);  in mpc52xx_psc_spi_transfer_rxtx()
    180  if (t->len - rb == 1) {  in mpc52xx_psc_spi_transfer_rxtx()
    193  for (; recv_at_once; rb++, recv_at_once--)  in mpc52xx_psc_spi_transfer_rxtx()
    194  rx_buf[rb] = in_8(&psc->mpc52xx_psc_buffer_8);  in mpc52xx_psc_spi_transfer_rxtx()
    196  for (; recv_at_once; rb++, recv_at_once--)  in mpc52xx_psc_spi_transfer_rxtx()
|
/drivers/i2c/busses/ |
D | i2c-cpm.c |
    198  u_char *rb;  in cpm_i2c_parse_message()  local
    209  rb = cpm->rxbuf[rx];  in cpm_i2c_parse_message()
    212  rb = (u_char *) (((ulong) rb + 1) & ~1);  in cpm_i2c_parse_message()
    258  u_char *rb;  in cpm_i2c_check_message()  local
    265  rb = cpm->rxbuf[rx];  in cpm_i2c_check_message()
    268  rb = (u_char *) (((uint) rb + 1) & ~1);  in cpm_i2c_check_message()
    288  memcpy(pmsg->buf, rb, pmsg->len);  in cpm_i2c_check_message()
|
/drivers/media/dvb-frontends/ |
D | dib3000mb.c |
     56  u8 rb[2];  in dib3000_read_reg()  local
     59  { .addr = state->config.demod_address, .flags = I2C_M_RD, .buf = rb, .len = 2 },  in dib3000_read_reg()
     66  (rb[0] << 8) | rb[1],(rb[0] << 8) | rb[1]);  in dib3000_read_reg()
     68  return (rb[0] << 8) | rb[1];  in dib3000_read_reg()
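dib3000_read_reg() is the classic two-message i2c_transfer() register read: write the register address, read two bytes back, and combine them big-endian as (rb[0] << 8) | rb[1]. A hedged sketch of that shape (the function name, buffer names and error policy are illustrative; only the i2c_msg/i2c_transfer interfaces are real):

```c
#include <linux/i2c.h>
#include <linux/types.h>

static u16 demo_read_reg16(struct i2c_adapter *adap, u8 dev_addr, u8 reg)
{
	u8 wb[1] = { reg };	/* register address to read from */
	u8 rb[2];		/* two-byte big-endian register value */
	struct i2c_msg msg[] = {
		{ .addr = dev_addr, .flags = 0,        .buf = wb, .len = 1 },
		{ .addr = dev_addr, .flags = I2C_M_RD, .buf = rb, .len = 2 },
	};

	/* One transaction: address write followed by a 2-byte read. */
	if (i2c_transfer(adap, msg, 2) != 2)
		return 0;	/* error handling kept minimal for the sketch */

	return (rb[0] << 8) | rb[1];
}
```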
|
/drivers/mtd/ |
D | mtdswap.c |
     83  struct rb_node rb;  member
     93  rb)->erase_count)
     95  rb)->erase_count)
    214  rb_erase(&eb->rb, eb->root);  in mtdswap_eb_detach()
    226  cur = rb_entry(parent, struct swap_eb, rb);  in __mtdswap_rb_add()
    233  rb_link_node(&eb->rb, parent, p);  in __mtdswap_rb_add()
    234  rb_insert_color(&eb->rb, root);  in __mtdswap_rb_add()
    453  median = rb_entry(medrb, struct swap_eb, rb)->erase_count;  in mtdswap_check_counts()
    466  rb_erase(&eb->rb, &hist_root);  in mtdswap_check_counts()
    625  eb = rb_entry(rb_first(clean_root), struct swap_eb, rb);  in mtdswap_map_free_block()
    [all …]
|
/drivers/usb/class/ |
D | cdc-acm.c |
    417  struct acm_rb *rb = urb->context;  in acm_read_bulk_callback()  local
    418  struct acm *acm = rb->instance;  in acm_read_bulk_callback()
    422  rb->index, urb->actual_length);  in acm_read_bulk_callback()
    423  set_bit(rb->index, &acm->read_urbs_free);  in acm_read_bulk_callback()
    443  acm_submit_read_urb(acm, rb->index, GFP_ATOMIC);  in acm_read_bulk_callback()
   1215  struct acm_rb *rb = &(acm->read_buffers[i]);  in acm_probe()  local
   1218  rb->base = usb_alloc_coherent(acm->dev, readsize, GFP_KERNEL,  in acm_probe()
   1219  &rb->dma);  in acm_probe()
   1220  if (!rb->base) {  in acm_probe()
   1225  rb->index = i;  in acm_probe()
    [all …]
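acm_probe() allocates each read buffer from DMA-coherent memory with usb_alloc_coherent() and records its index so acm_read_bulk_callback() can mark the corresponding URB free again via set_bit(). A reduced sketch of the allocation step (structure and function names are illustrative, error unwinding omitted; usb_alloc_coherent() is the real API):

```c
#include <linux/errno.h>
#include <linux/usb.h>

struct demo_rb {
	u8 *base;		/* CPU address of the read buffer */
	dma_addr_t dma;		/* matching DMA address for the HC */
	unsigned int index;	/* which URB this buffer belongs to */
};

static int demo_alloc_read_buffers(struct usb_device *udev,
				   struct demo_rb *bufs, unsigned int n,
				   size_t readsize)
{
	unsigned int i;

	for (i = 0; i < n; i++) {
		struct demo_rb *rb = &bufs[i];

		/* DMA-coherent buffer shared with the host controller. */
		rb->base = usb_alloc_coherent(udev, readsize, GFP_KERNEL,
					      &rb->dma);
		if (!rb->base)
			return -ENOMEM;	/* caller frees what was allocated */
		rb->index = i;
	}
	return 0;
}
```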
|
/drivers/media/pci/ngene/ |
D | ngene-core.c |
    779  static void free_ringbuffer(struct ngene *dev, struct SRingBufferDescriptor *rb)  in free_ringbuffer()  argument
    781  struct SBufferHeader *Cur = rb->Head;  in free_ringbuffer()
    787  for (j = 0; j < rb->NumBuffers; j++, Cur = Cur->Next) {  in free_ringbuffer()
    790  rb->Buffer1Length,  in free_ringbuffer()
    796  rb->Buffer2Length,  in free_ringbuffer()
    801  if (rb->SCListMem)  in free_ringbuffer()
    802  pci_free_consistent(dev->pci_dev, rb->SCListMemSize,  in free_ringbuffer()
    803  rb->SCListMem, rb->PASCListMem);  in free_ringbuffer()
    805  pci_free_consistent(dev->pci_dev, rb->MemSize, rb->Head, rb->PAHead);  in free_ringbuffer()
    809  struct SRingBufferDescriptor *rb,  in free_idlebuffer()  argument
    [all …]
|
/drivers/iio/ |
D | industrialio-buffer.c |
     56  struct iio_buffer *rb = indio_dev->buffer;  in iio_buffer_read_first_n_outer()  local
     58  if (!rb || !rb->access->read_first_n)  in iio_buffer_read_first_n_outer()
     60  return rb->access->read_first_n(rb, n, buf);  in iio_buffer_read_first_n_outer()
     70  struct iio_buffer *rb = indio_dev->buffer;  in iio_buffer_poll()  local
     72  poll_wait(filp, &rb->pollq, wait);  in iio_buffer_poll()
     73  if (rb->stufftoread)  in iio_buffer_poll()
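iio_buffer_poll() follows the standard character-device poll pattern: register the wait queue with poll_wait(), then report readability only if data is already pending. A generic sketch of that shape; the buffer type and its fields mirror the excerpt but are illustrative, and the handler assumes the buffer was stashed in filp->private_data at open():

```c
#include <linux/fs.h>
#include <linux/poll.h>
#include <linux/types.h>
#include <linux/wait.h>

struct demo_buffer {
	wait_queue_head_t pollq;	/* woken when new data arrives */
	bool stufftoread;		/* set by the producer side */
};

static unsigned int demo_poll(struct file *filp, poll_table *wait)
{
	struct demo_buffer *rb = filp->private_data;	/* set at open() */

	poll_wait(filp, &rb->pollq, wait);	/* registers, never blocks */
	if (rb->stufftoread)
		return POLLIN | POLLRDNORM;	/* data is ready right now */
	return 0;
}
```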
|
/drivers/gpu/drm/ |
D | drm_modes.c |
   1102  bool yres_specified = false, cvt = false, rb = false;  in drm_mode_parse_command_line_for_connector()  local
   1123  !yres_specified && !cvt && !rb && was_digit) {  in drm_mode_parse_command_line_for_connector()
   1132  !rb && was_digit) {  in drm_mode_parse_command_line_for_connector()
   1156  if (yres_specified || cvt || rb || was_digit)  in drm_mode_parse_command_line_for_connector()
   1158  rb = true;  in drm_mode_parse_command_line_for_connector()
   1235  mode->rb = rb;  in drm_mode_parse_command_line_for_connector()
   1255  cmd->rb, cmd->interlace,  in drm_mode_create_from_cmdline_mode()
|
/drivers/usb/gadget/ |
D | uvc_queue.c |
    149  struct v4l2_requestbuffers *rb)  in uvc_alloc_buffers()  argument
    154  ret = vb2_reqbufs(&queue->queue, rb);  in uvc_alloc_buffers()
    157  return ret ? ret : rb->count;  in uvc_alloc_buffers()
|
/drivers/media/usb/gspca/ |
D | gspca.c |
   1360  struct v4l2_requestbuffers *rb)  in vidioc_reqbufs()  argument
   1365  i = rb->memory; /* (avoid compilation warning) */  in vidioc_reqbufs()
   1379  && gspca_dev->memory != rb->memory) {  in vidioc_reqbufs()
   1413  if (rb->count == 0) /* unrequest */  in vidioc_reqbufs()
   1415  ret = frame_alloc(gspca_dev, file, rb->memory, rb->count);  in vidioc_reqbufs()
   1417  rb->count = gspca_dev->nframes;  in vidioc_reqbufs()
   1423  PDEBUG(D_STREAM, "reqbufs st:%d c:%d", ret, rb->count);  in vidioc_reqbufs()
   1811  struct v4l2_requestbuffers rb;  in read_alloc()  local
   1813  memset(&rb, 0, sizeof rb);  in read_alloc()
   1814  rb.count = gspca_dev->nbufread;  in read_alloc()
    [all …]
|
/drivers/video/ |
D | modedb.c |
    592  int yres_specified = 0, cvt = 0, rb = 0, interlace = 0;  in fb_find_mode()  local
    605  if (cvt || rb)  in fb_find_mode()
    616  if (cvt || rb)  in fb_find_mode()
    637  rb = 1;  in fb_find_mode()
    662  (rb) ? " reduced blanking" : "",  in fb_find_mode()
    676  ret = fb_find_mode_cvt(&cvt_mode, margins, rb);  in fb_find_mode()
|
/drivers/media/usb/uvc/ |
D | uvc_queue.c |
    170  struct v4l2_requestbuffers *rb)  in uvc_alloc_buffers()  argument
    175  ret = vb2_reqbufs(&queue->queue, rb);  in uvc_alloc_buffers()
    178  return ret ? ret : rb->count;  in uvc_alloc_buffers()
|