
Searched for refs:buffer (results 1 – 25 of 1461), sorted by relevance


/drivers/s390/char/
sclp_rw.c
43 struct sclp_buffer *buffer; in sclp_make_buffer() local
51 buffer = ((struct sclp_buffer *) ((addr_t) sccb + PAGE_SIZE)) - 1; in sclp_make_buffer()
52 buffer->sccb = sccb; in sclp_make_buffer()
53 buffer->retry_count = 0; in sclp_make_buffer()
54 buffer->messages = 0; in sclp_make_buffer()
55 buffer->char_sum = 0; in sclp_make_buffer()
56 buffer->current_line = NULL; in sclp_make_buffer()
57 buffer->current_length = 0; in sclp_make_buffer()
58 buffer->columns = columns; in sclp_make_buffer()
59 buffer->htab = htab; in sclp_make_buffer()
[all …]
/drivers/gpu/drm/etnaviv/
etnaviv_buffer.c
25 static inline void OUT(struct etnaviv_cmdbuf *buffer, u32 data) in OUT() argument
27 u32 *vaddr = (u32 *)buffer->vaddr; in OUT()
29 BUG_ON(buffer->user_size >= buffer->size); in OUT()
31 vaddr[buffer->user_size / 4] = data; in OUT()
32 buffer->user_size += 4; in OUT()
35 static inline void CMD_LOAD_STATE(struct etnaviv_cmdbuf *buffer, in CMD_LOAD_STATE() argument
40 buffer->user_size = ALIGN(buffer->user_size, 8); in CMD_LOAD_STATE()
43 OUT(buffer, VIV_FE_LOAD_STATE_HEADER_OP_LOAD_STATE | in CMD_LOAD_STATE()
46 OUT(buffer, value); in CMD_LOAD_STATE()
49 static inline void CMD_END(struct etnaviv_cmdbuf *buffer) in CMD_END() argument
[all …]
/drivers/media/tuners/
tea5767.c
133 unsigned char *buffer) in tea5767_status_dump() argument
137 if (TEA5767_READY_FLAG_MASK & buffer[0]) in tea5767_status_dump()
142 if (TEA5767_BAND_LIMIT_MASK & buffer[0]) in tea5767_status_dump()
147 div = ((buffer[0] & 0x3f) << 8) | buffer[1]; in tea5767_status_dump()
164 buffer[0] = (div >> 8) & 0x3f; in tea5767_status_dump()
165 buffer[1] = div & 0xff; in tea5767_status_dump()
170 if (TEA5767_STEREO_MASK & buffer[2]) in tea5767_status_dump()
175 tuner_info("IF Counter = %d\n", buffer[2] & TEA5767_IF_CNTR_MASK); in tea5767_status_dump()
178 (buffer[3] & TEA5767_ADC_LEVEL_MASK) >> 4); in tea5767_status_dump()
180 tuner_info("Chip ID = %d\n", (buffer[3] & TEA5767_CHIP_ID_MASK)); in tea5767_status_dump()
[all …]
tea5761.c
123 static void tea5761_status_dump(unsigned char *buffer) in tea5761_status_dump() argument
127 div = ((buffer[2] & 0x3f) << 8) | buffer[3]; in tea5761_status_dump()
142 unsigned char buffer[7] = {0, 0, 0, 0, 0, 0, 0 }; in __set_radio_freq() local
150 buffer[5] |= TEA5761_TNCTRL_MU; in __set_radio_freq()
152 buffer[4] |= TEA5761_TNCTRL_PUPD_0; in __set_radio_freq()
158 buffer[5] |= TEA5761_TNCTRL_MST; in __set_radio_freq()
164 buffer[1] = (div >> 8) & 0x3f; in __set_radio_freq()
165 buffer[2] = div & 0xff; in __set_radio_freq()
168 tea5761_status_dump(buffer); in __set_radio_freq()
170 if (7 != (rc = tuner_i2c_xfer_send(&priv->i2c_props, buffer, 7))) in __set_radio_freq()
[all …]
/drivers/dma-buf/heaps/
cma_heap.c
53 struct cma_heap_buffer *buffer = dmabuf->priv; in cma_heap_attach() local
61 ret = sg_alloc_table_from_pages(&a->table, buffer->pages, in cma_heap_attach()
62 buffer->pagecount, 0, in cma_heap_attach()
63 buffer->pagecount << PAGE_SHIFT, in cma_heap_attach()
76 mutex_lock(&buffer->lock); in cma_heap_attach()
77 list_add(&a->list, &buffer->attachments); in cma_heap_attach()
78 mutex_unlock(&buffer->lock); in cma_heap_attach()
86 struct cma_heap_buffer *buffer = dmabuf->priv; in cma_heap_detach() local
89 mutex_lock(&buffer->lock); in cma_heap_detach()
91 mutex_unlock(&buffer->lock); in cma_heap_detach()
[all …]
system_heap.c
94 struct system_heap_buffer *buffer = dmabuf->priv; in system_heap_attach() local
102 table = dup_sg_table(&buffer->sg_table); in system_heap_attach()
112 a->uncached = buffer->uncached; in system_heap_attach()
115 mutex_lock(&buffer->lock); in system_heap_attach()
116 list_add(&a->list, &buffer->attachments); in system_heap_attach()
117 mutex_unlock(&buffer->lock); in system_heap_attach()
125 struct system_heap_buffer *buffer = dmabuf->priv; in system_heap_detach() local
128 mutex_lock(&buffer->lock); in system_heap_detach()
130 mutex_unlock(&buffer->lock); in system_heap_detach()
172 struct system_heap_buffer *buffer = dmabuf->priv; in system_heap_dma_buf_begin_cpu_access() local
[all …]
/drivers/pnp/
interface.c
26 char *buffer; /* pointer to begin of buffer */ member
37 static int pnp_printf(pnp_info_buffer_t * buffer, char *fmt, ...) in pnp_printf() argument
42 if (buffer->stop || buffer->error) in pnp_printf()
45 res = vsnprintf(buffer->curr, buffer->len - buffer->size, fmt, args); in pnp_printf()
47 if (buffer->size + res >= buffer->len) { in pnp_printf()
48 buffer->stop = 1; in pnp_printf()
51 buffer->curr += res; in pnp_printf()
52 buffer->size += res; in pnp_printf()
56 static void pnp_print_port(pnp_info_buffer_t * buffer, char *space, in pnp_print_port() argument
59 pnp_printf(buffer, "%sport %#llx-%#llx, align %#llx, size %#llx, " in pnp_print_port()
[all …]
/drivers/android/
binder_alloc.c
51 static struct binder_buffer *binder_buffer_next(struct binder_buffer *buffer) in binder_buffer_next() argument
53 return list_entry(buffer->entry.next, struct binder_buffer, entry); in binder_buffer_next()
56 static struct binder_buffer *binder_buffer_prev(struct binder_buffer *buffer) in binder_buffer_prev() argument
58 return list_entry(buffer->entry.prev, struct binder_buffer, entry); in binder_buffer_prev()
62 struct binder_buffer *buffer) in binder_alloc_buffer_size() argument
64 if (list_is_last(&buffer->entry, &alloc->buffers)) in binder_alloc_buffer_size()
65 return alloc->buffer + alloc->buffer_size - buffer->user_data; in binder_alloc_buffer_size()
66 return binder_buffer_next(buffer)->user_data - buffer->user_data; in binder_alloc_buffer_size()
74 struct binder_buffer *buffer; in binder_insert_free_buffer() local
88 buffer = rb_entry(parent, struct binder_buffer, rb_node); in binder_insert_free_buffer()
[all …]
/drivers/iio/
industrialio-buffer.c
110 struct iio_buffer *rb = ib->buffer; in iio_buffer_read()
179 struct iio_buffer *rb = ib->buffer; in iio_buffer_write()
242 struct iio_buffer *rb = ib->buffer; in iio_buffer_poll()
268 struct iio_buffer *rb = ib->buffer; in iio_buffer_read_wrapper()
281 struct iio_buffer *rb = ib->buffer; in iio_buffer_write_wrapper()
294 struct iio_buffer *rb = ib->buffer; in iio_buffer_poll_wrapper()
313 struct iio_buffer *buffer; in iio_buffer_wakeup_poll() local
317 buffer = iio_dev_opaque->attached_buffers[i]; in iio_buffer_wakeup_poll()
318 wake_up(&buffer->pollq); in iio_buffer_wakeup_poll()
322 int iio_pop_from_buffer(struct iio_buffer *buffer, void *data) in iio_pop_from_buffer() argument
[all …]
/drivers/s390/net/
qeth_core_mpc.h
18 #define QETH_IPA_PDU_LEN_TOTAL(buffer) (buffer + 0x0e) argument
19 #define QETH_IPA_PDU_LEN_PDU1(buffer) (buffer + 0x26) argument
20 #define QETH_IPA_PDU_LEN_PDU2(buffer) (buffer + 0x29) argument
21 #define QETH_IPA_PDU_LEN_PDU3(buffer) (buffer + 0x3a) argument
23 #define QETH_IPA_CMD_DEST_ADDR(buffer) (buffer + 0x2c) argument
523 char *buffer; member
877 #define QETH_CM_ENABLE_ISSUER_RM_TOKEN(buffer) (buffer + 0x2c) argument
878 #define QETH_CM_ENABLE_FILTER_TOKEN(buffer) (buffer + 0x53) argument
879 #define QETH_CM_ENABLE_USER_DATA(buffer) (buffer + 0x5b) argument
881 #define QETH_CM_ENABLE_RESP_FILTER_TOKEN(buffer) \ argument
[all …]
/drivers/staging/media/deprecated/cpia2/
cpia2_core.c
167 cmd.buffer.block_data[0] = param; in cpia2_do_command()
178 cmd.buffer.block_data[0] = param; in cpia2_do_command()
186 cmd.buffer.block_data[0] = param; in cpia2_do_command()
197 cmd.buffer.block_data[0] = param; in cpia2_do_command()
205 cmd.buffer.block_data[0] = param; in cpia2_do_command()
213 cmd.buffer.block_data[0] = param; in cpia2_do_command()
221 cmd.buffer.block_data[0] = param; in cpia2_do_command()
233 cmd.buffer.block_data[0] = param; in cpia2_do_command()
236 cmd.buffer.block_data[0] = param; in cpia2_do_command()
247 cmd.buffer.registers[0].index = CPIA2_VC_ST_CTRL; in cpia2_do_command()
[all …]
/drivers/gpu/drm/
drm_client.c
255 static void drm_client_buffer_delete(struct drm_client_buffer *buffer) in drm_client_buffer_delete() argument
257 struct drm_device *dev = buffer->client->dev; in drm_client_buffer_delete()
259 drm_gem_vunmap(buffer->gem, &buffer->map); in drm_client_buffer_delete()
261 if (buffer->gem) in drm_client_buffer_delete()
262 drm_gem_object_put(buffer->gem); in drm_client_buffer_delete()
264 if (buffer->handle) in drm_client_buffer_delete()
265 drm_mode_destroy_dumb(dev, buffer->handle, buffer->client->file); in drm_client_buffer_delete()
267 kfree(buffer); in drm_client_buffer_delete()
276 struct drm_client_buffer *buffer; in drm_client_buffer_create() local
280 buffer = kzalloc(sizeof(*buffer), GFP_KERNEL); in drm_client_buffer_create()
[all …]
/drivers/scsi/
sr_vendor.c
126 unsigned char *buffer; /* the buffer for the ioctl */ in sr_set_blocklength() local
134 buffer = kmalloc(512, GFP_KERNEL); in sr_set_blocklength()
135 if (!buffer) in sr_set_blocklength()
145 modesel = (struct ccs_modesel_head *) buffer; in sr_set_blocklength()
151 cgc.buffer = buffer; in sr_set_blocklength()
164 kfree(buffer); in sr_set_blocklength()
175 unsigned char *buffer; /* the buffer for the ioctl */ in sr_cd_check() local
182 buffer = kmalloc(512, GFP_KERNEL); in sr_cd_check()
183 if (!buffer) in sr_cd_check()
198 cgc.buffer = buffer; in sr_cd_check()
[all …]
/drivers/gpu/drm/nouveau/nvkm/subdev/fault/
gv100.c
31 gv100_fault_buffer_process(struct nvkm_fault_buffer *buffer) in gv100_fault_buffer_process() argument
33 struct nvkm_device *device = buffer->fault->subdev.device; in gv100_fault_buffer_process()
34 struct nvkm_memory *mem = buffer->mem; in gv100_fault_buffer_process()
35 u32 get = nvkm_rd32(device, buffer->get); in gv100_fault_buffer_process()
36 u32 put = nvkm_rd32(device, buffer->put); in gv100_fault_buffer_process()
42 const u32 base = get * buffer->fault->func->buffer.entry_size; in gv100_fault_buffer_process()
53 if (++get == buffer->entries) in gv100_fault_buffer_process()
55 nvkm_wr32(device, buffer->get, get); in gv100_fault_buffer_process()
74 gv100_fault_buffer_intr(struct nvkm_fault_buffer *buffer, bool enable) in gv100_fault_buffer_intr() argument
76 struct nvkm_device *device = buffer->fault->subdev.device; in gv100_fault_buffer_intr()
[all …]
gp100.c
30 gp100_fault_buffer_intr(struct nvkm_fault_buffer *buffer, bool enable) in gp100_fault_buffer_intr() argument
32 struct nvkm_device *device = buffer->fault->subdev.device; in gp100_fault_buffer_intr()
37 gp100_fault_buffer_fini(struct nvkm_fault_buffer *buffer) in gp100_fault_buffer_fini() argument
39 struct nvkm_device *device = buffer->fault->subdev.device; in gp100_fault_buffer_fini()
44 gp100_fault_buffer_init(struct nvkm_fault_buffer *buffer) in gp100_fault_buffer_init() argument
46 struct nvkm_device *device = buffer->fault->subdev.device; in gp100_fault_buffer_init()
47 nvkm_wr32(device, 0x002a74, upper_32_bits(buffer->addr)); in gp100_fault_buffer_init()
48 nvkm_wr32(device, 0x002a70, lower_32_bits(buffer->addr)); in gp100_fault_buffer_init()
52 u64 gp100_fault_buffer_pin(struct nvkm_fault_buffer *buffer) in gp100_fault_buffer_pin() argument
54 return nvkm_memory_bar2(buffer->mem); in gp100_fault_buffer_pin()
[all …]
tu102.c
32 tu102_fault_buffer_intr(struct nvkm_fault_buffer *buffer, bool enable) in tu102_fault_buffer_intr() argument
38 struct nvkm_device *device = buffer->fault->subdev.device; in tu102_fault_buffer_intr()
44 tu102_fault_buffer_fini(struct nvkm_fault_buffer *buffer) in tu102_fault_buffer_fini() argument
46 struct nvkm_device *device = buffer->fault->subdev.device; in tu102_fault_buffer_fini()
47 const u32 foff = buffer->id * 0x20; in tu102_fault_buffer_fini()
57 tu102_fault_buffer_init(struct nvkm_fault_buffer *buffer) in tu102_fault_buffer_init() argument
59 struct nvkm_device *device = buffer->fault->subdev.device; in tu102_fault_buffer_init()
60 const u32 foff = buffer->id * 0x20; in tu102_fault_buffer_init()
67 nvkm_wr32(device, 0xb83004 + foff, upper_32_bits(buffer->addr)); in tu102_fault_buffer_init()
68 nvkm_wr32(device, 0xb83000 + foff, lower_32_bits(buffer->addr)); in tu102_fault_buffer_init()
[all …]
base.c
31 fault->func->buffer.intr(fault->buffer[index], false); in nvkm_fault_ntfy_fini()
38 fault->func->buffer.intr(fault->buffer[index], true); in nvkm_fault_ntfy_init()
45 struct nvkm_fault_buffer *buffer = nvkm_fault_buffer(object); in nvkm_fault_ntfy_ctor() local
49 notify->index = buffer->id; in nvkm_fault_ntfy_ctor()
92 struct nvkm_fault_buffer *buffer; in nvkm_fault_oneinit_buffer() local
95 if (!(buffer = kzalloc(sizeof(*buffer), GFP_KERNEL))) in nvkm_fault_oneinit_buffer()
97 buffer->fault = fault; in nvkm_fault_oneinit_buffer()
98 buffer->id = id; in nvkm_fault_oneinit_buffer()
99 fault->func->buffer.info(buffer); in nvkm_fault_oneinit_buffer()
100 fault->buffer[id] = buffer; in nvkm_fault_oneinit_buffer()
[all …]
user.c
34 struct nvkm_fault_buffer *buffer = nvkm_fault_buffer(object); in nvkm_ufault_map() local
35 struct nvkm_device *device = buffer->fault->subdev.device; in nvkm_ufault_map()
37 *addr = device->func->resource_addr(device, 3) + buffer->addr; in nvkm_ufault_map()
38 *size = nvkm_memory_size(buffer->mem); in nvkm_ufault_map()
46 struct nvkm_fault_buffer *buffer = nvkm_fault_buffer(object); in nvkm_ufault_ntfy() local
48 *pevent = &buffer->fault->event; in nvkm_ufault_ntfy()
57 struct nvkm_fault_buffer *buffer = nvkm_fault_buffer(object); in nvkm_ufault_fini() local
58 buffer->fault->func->buffer.fini(buffer); in nvkm_ufault_fini()
65 struct nvkm_fault_buffer *buffer = nvkm_fault_buffer(object); in nvkm_ufault_init() local
66 buffer->fault->func->buffer.init(buffer); in nvkm_ufault_init()
[all …]
/drivers/usb/misc/
cytherm.c
81 unsigned char *buffer; in brightness_store() local
84 buffer = kmalloc(8, GFP_KERNEL); in brightness_store()
85 if (!buffer) in brightness_store()
97 cytherm->brightness, buffer, 8); in brightness_store()
102 0x01, buffer, 8); in brightness_store()
106 kfree(buffer); in brightness_store()
123 unsigned char *buffer; in temp_show() local
127 buffer = kmalloc(8, GFP_KERNEL); in temp_show()
128 if (!buffer) in temp_show()
132 retval = vendor_command(cytherm->udev, READ_RAM, TEMP, 0, buffer, 8); in temp_show()
[all …]
/drivers/net/ethernet/mellanox/mlx5/core/en/
port_buffer.c
41 void *buffer; in mlx5e_port_query_buffer() local
55 buffer = MLX5_ADDR_OF(pbmc_reg, out, buffer[i]); in mlx5e_port_query_buffer()
56 port_buffer->buffer[i].lossy = in mlx5e_port_query_buffer()
57 MLX5_GET(bufferx_reg, buffer, lossy); in mlx5e_port_query_buffer()
58 port_buffer->buffer[i].epsb = in mlx5e_port_query_buffer()
59 MLX5_GET(bufferx_reg, buffer, epsb); in mlx5e_port_query_buffer()
60 port_buffer->buffer[i].size = in mlx5e_port_query_buffer()
61 MLX5_GET(bufferx_reg, buffer, size) * port_buff_cell_sz; in mlx5e_port_query_buffer()
62 port_buffer->buffer[i].xon = in mlx5e_port_query_buffer()
63 MLX5_GET(bufferx_reg, buffer, xon_threshold) * port_buff_cell_sz; in mlx5e_port_query_buffer()
[all …]
/drivers/net/ethernet/sfc/siena/
tx_common.c
38 tx_queue->buffer = kcalloc(entries, sizeof(*tx_queue->buffer), in efx_siena_probe_tx_queue()
40 if (!tx_queue->buffer) in efx_siena_probe_tx_queue()
62 kfree(tx_queue->buffer); in efx_siena_probe_tx_queue()
63 tx_queue->buffer = NULL; in efx_siena_probe_tx_queue()
101 if (!tx_queue->buffer) in efx_siena_remove_tx_queue()
116 kfree(tx_queue->buffer); in efx_siena_remove_tx_queue()
117 tx_queue->buffer = NULL; in efx_siena_remove_tx_queue()
122 struct efx_tx_buffer *buffer, in efx_dequeue_buffer() argument
126 if (buffer->unmap_len) { in efx_dequeue_buffer()
128 dma_addr_t unmap_addr = buffer->dma_addr - buffer->dma_offset; in efx_dequeue_buffer()
[all …]
/drivers/acpi/acpica/
exstorob.c
34 u8 *buffer; in acpi_ex_store_buffer_to_buffer() local
46 buffer = ACPI_CAST_PTR(u8, source_desc->buffer.pointer); in acpi_ex_store_buffer_to_buffer()
47 length = source_desc->buffer.length; in acpi_ex_store_buffer_to_buffer()
53 if ((target_desc->buffer.length == 0) || in acpi_ex_store_buffer_to_buffer()
55 target_desc->buffer.pointer = ACPI_ALLOCATE(length); in acpi_ex_store_buffer_to_buffer()
56 if (!target_desc->buffer.pointer) { in acpi_ex_store_buffer_to_buffer()
60 target_desc->buffer.length = length; in acpi_ex_store_buffer_to_buffer()
65 if (length <= target_desc->buffer.length) { in acpi_ex_store_buffer_to_buffer()
69 memset(target_desc->buffer.pointer, 0, in acpi_ex_store_buffer_to_buffer()
70 target_desc->buffer.length); in acpi_ex_store_buffer_to_buffer()
[all …]
/drivers/net/ethernet/sfc/
tx_common.c
38 tx_queue->buffer = kcalloc(entries, sizeof(*tx_queue->buffer), in efx_probe_tx_queue()
40 if (!tx_queue->buffer) in efx_probe_tx_queue()
62 kfree(tx_queue->buffer); in efx_probe_tx_queue()
63 tx_queue->buffer = NULL; in efx_probe_tx_queue()
99 struct efx_tx_buffer *buffer; in efx_fini_tx_queue() local
106 if (!tx_queue->buffer) in efx_fini_tx_queue()
114 buffer = &tx_queue->buffer[tx_queue->read_count & tx_queue->ptr_mask]; in efx_fini_tx_queue()
115 efx_dequeue_buffer(tx_queue, buffer, &pkts_compl, &bytes_compl, in efx_fini_tx_queue()
128 if (!tx_queue->buffer) in efx_remove_tx_queue()
143 kfree(tx_queue->buffer); in efx_remove_tx_queue()
[all …]
/drivers/net/ethernet/sfc/falcon/
tx.c
26 struct ef4_tx_buffer *buffer) in ef4_tx_get_copy_buffer() argument
38 buffer->dma_addr = page_buf->dma_addr + offset; in ef4_tx_get_copy_buffer()
39 buffer->unmap_len = 0; in ef4_tx_get_copy_buffer()
44 struct ef4_tx_buffer *buffer, size_t len) in ef4_tx_get_copy_buffer_limited() argument
48 return ef4_tx_get_copy_buffer(tx_queue, buffer); in ef4_tx_get_copy_buffer_limited()
52 struct ef4_tx_buffer *buffer, in ef4_dequeue_buffer() argument
56 if (buffer->unmap_len) { in ef4_dequeue_buffer()
58 dma_addr_t unmap_addr = buffer->dma_addr - buffer->dma_offset; in ef4_dequeue_buffer()
59 if (buffer->flags & EF4_TX_BUF_MAP_SINGLE) in ef4_dequeue_buffer()
60 dma_unmap_single(dma_dev, unmap_addr, buffer->unmap_len, in ef4_dequeue_buffer()
[all …]
/drivers/firewire/
nosy.c
50 } buffer[13]; member
107 struct packet_buffer buffer; member
115 packet_buffer_init(struct packet_buffer *buffer, size_t capacity) in packet_buffer_init() argument
117 buffer->data = kmalloc(capacity, GFP_KERNEL); in packet_buffer_init()
118 if (buffer->data == NULL) in packet_buffer_init()
120 buffer->head = (struct packet *) buffer->data; in packet_buffer_init()
121 buffer->tail = (struct packet *) buffer->data; in packet_buffer_init()
122 buffer->capacity = capacity; in packet_buffer_init()
123 buffer->lost_packet_count = 0; in packet_buffer_init()
124 atomic_set(&buffer->size, 0); in packet_buffer_init()
[all …]
