
Lines with full-word matches for "buffer"

9  * Handling of buffer allocation / resizing.
28 #include <linux/iio/buffer.h>
65 /* drain the buffer if it was disabled */ in iio_buffer_ready()
94 * iio_buffer_read_first_n_outer() - chrdev read for buffer access
96 * @buf: Destination buffer for iio buffer read
100 * This function relies on all buffer implementations having an
110 struct iio_buffer *rb = indio_dev->buffer; in iio_buffer_read_first_n_outer()
126 * buffer, so signal end of file now. in iio_buffer_read_first_n_outer()
164 * iio_buffer_poll() - poll the buffer to find out if it has data
176 struct iio_buffer *rb = indio_dev->buffer; in iio_buffer_poll()
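The two chrdev entry points above (iio_buffer_read_first_n_outer() and iio_buffer_poll()) are what userspace reaches through /dev/iio:deviceX. A minimal userspace sketch, assuming the node is /dev/iio:device0 and the buffer has already been configured and enabled via sysfs:

        #include <fcntl.h>
        #include <poll.h>
        #include <stdio.h>
        #include <unistd.h>

        int main(void)
        {
                char data[4096];
                struct pollfd pfd;
                ssize_t n;

                /* Hypothetical device node; pick the right /dev/iio:deviceX. */
                pfd.fd = open("/dev/iio:device0", O_RDONLY | O_NONBLOCK);
                if (pfd.fd < 0) {
                        perror("open");
                        return 1;
                }
                pfd.events = POLLIN;

                /* iio_buffer_poll() reports the fd readable once at least
                 * 'watermark' scans are queued; read() then copies whole
                 * scans out of the buffer. */
                if (poll(&pfd, 1, 5000) > 0 && (pfd.revents & POLLIN)) {
                        n = read(pfd.fd, data, sizeof(data));
                        if (n > 0)
                                printf("read %zd bytes of scan data\n", n);
                }

                close(pfd.fd);
                return 0;
        }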
188 * iio_buffer_wakeup_poll - Wakes up the buffer waitqueue
196 if (!indio_dev->buffer) in iio_buffer_wakeup_poll()
199 wake_up(&indio_dev->buffer->pollq); in iio_buffer_wakeup_poll()
202 void iio_buffer_init(struct iio_buffer *buffer) in iio_buffer_init() argument
204 INIT_LIST_HEAD(&buffer->demux_list); in iio_buffer_init()
205 INIT_LIST_HEAD(&buffer->buffer_list); in iio_buffer_init()
206 init_waitqueue_head(&buffer->pollq); in iio_buffer_init()
207 kref_init(&buffer->ref); in iio_buffer_init()
208 if (!buffer->watermark) in iio_buffer_init()
209 buffer->watermark = 1; in iio_buffer_init()
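iio_buffer_init() is called by buffer implementations (kfifo, DMA, hardware FIFO front ends) when they allocate their backing structure; it initialises the demux list, the buffer_list node, the poll waitqueue, the kref, and a default watermark of 1. A minimal sketch of such an implementation, assuming the buffer_impl.h layout of this kernel era; my_buffer and my_buffer_alloc() are hypothetical names:

        #include <linux/slab.h>
        #include <linux/iio/buffer_impl.h>

        struct my_buffer {
                struct iio_buffer buffer;       /* core state, set up below */
                /* implementation-private fields would follow */
        };

        static struct iio_buffer *my_buffer_alloc(void)
        {
                struct my_buffer *mb;

                mb = kzalloc(sizeof(*mb), GFP_KERNEL);
                if (!mb)
                        return NULL;

                iio_buffer_init(&mb->buffer);
                /* mb->buffer.access = &my_buffer_access_funcs;
                 * (see the access-funcs sketch further down) */

                return &mb->buffer;
        }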
214 * iio_buffer_set_attrs - Set buffer specific attributes
215 * @buffer: The buffer for which we are setting attributes
218 void iio_buffer_set_attrs(struct iio_buffer *buffer, in iio_buffer_set_attrs() argument
221 buffer->attrs = attrs; in iio_buffer_set_attrs()
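iio_buffer_set_attrs() lets a driver hang extra sysfs attributes off the device's buffer/ directory; they are merged into the buffer group when iio_buffer_alloc_sysfs_and_mask() (further down) builds it. A hedged sketch, where hwfifo_flush and my_hwfifo_flush_store() are made-up names for illustration:

        #include <linux/iio/iio.h>
        #include <linux/iio/sysfs.h>
        #include <linux/iio/buffer.h>

        /* Hypothetical write-only attribute that would appear as
         * /sys/bus/iio/devices/iio:deviceX/buffer/hwfifo_flush. */
        static ssize_t my_hwfifo_flush_store(struct device *dev,
                                             struct device_attribute *attr,
                                             const char *buf, size_t len)
        {
                /* flush the (hypothetical) hardware FIFO here */
                return len;
        }

        static IIO_DEVICE_ATTR(hwfifo_flush, 0200, NULL, my_hwfifo_flush_store, 0);

        static const struct attribute *my_buffer_attrs[] = {
                &iio_dev_attr_hwfifo_flush.dev_attr.attr,
                NULL,
        };

        static void my_setup_buffer_attrs(struct iio_dev *indio_dev)
        {
                iio_buffer_set_attrs(indio_dev->buffer, my_buffer_attrs);
        }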
272 indio_dev->buffer->scan_mask); in iio_scan_el_show()
310 * @buffer: the buffer whose scan mask we are interested in
318 struct iio_buffer *buffer, int bit) in iio_scan_mask_set() argument
328 WARN(1, "Trying to set scanmask prior to registering buffer\n"); in iio_scan_mask_set()
331 bitmap_copy(trialmask, buffer->scan_mask, indio_dev->masklength); in iio_scan_mask_set()
344 bitmap_copy(buffer->scan_mask, trialmask, indio_dev->masklength); in iio_scan_mask_set()
355 static int iio_scan_mask_clear(struct iio_buffer *buffer, int bit) in iio_scan_mask_clear() argument
357 clear_bit(bit, buffer->scan_mask); in iio_scan_mask_clear()
362 struct iio_buffer *buffer, int bit) in iio_scan_mask_query() argument
367 if (!buffer->scan_mask) in iio_scan_mask_query()
371 return !!test_bit(bit, buffer->scan_mask); in iio_scan_mask_query()
382 struct iio_buffer *buffer = indio_dev->buffer; in iio_scan_el_store() local
389 if (iio_buffer_is_active(indio_dev->buffer)) { in iio_scan_el_store()
393 ret = iio_scan_mask_query(indio_dev, buffer, this_attr->address); in iio_scan_el_store()
397 ret = iio_scan_mask_clear(buffer, this_attr->address); in iio_scan_el_store()
401 ret = iio_scan_mask_set(indio_dev, buffer, this_attr->address); in iio_scan_el_store()
418 return sprintf(buf, "%d\n", indio_dev->buffer->scan_timestamp); in iio_scan_el_ts_show()
435 if (iio_buffer_is_active(indio_dev->buffer)) { in iio_scan_el_ts_store()
439 indio_dev->buffer->scan_timestamp = state; in iio_scan_el_ts_store()
450 struct iio_buffer *buffer = indio_dev->buffer; in iio_buffer_add_channel_sysfs() local
459 &buffer->scan_el_dev_attr_list); in iio_buffer_add_channel_sysfs()
470 &buffer->scan_el_dev_attr_list); in iio_buffer_add_channel_sysfs()
482 &buffer->scan_el_dev_attr_list); in iio_buffer_add_channel_sysfs()
491 &buffer->scan_el_dev_attr_list); in iio_buffer_add_channel_sysfs()
504 struct iio_buffer *buffer = indio_dev->buffer; in iio_buffer_read_length() local
506 return sprintf(buf, "%d\n", buffer->length); in iio_buffer_read_length()
514 struct iio_buffer *buffer = indio_dev->buffer; in iio_buffer_write_length() local
522 if (val == buffer->length) in iio_buffer_write_length()
526 if (iio_buffer_is_active(indio_dev->buffer)) { in iio_buffer_write_length()
529 buffer->access->set_length(buffer, val); in iio_buffer_write_length()
534 if (buffer->length && buffer->length < buffer->watermark) in iio_buffer_write_length()
535 buffer->watermark = buffer->length; in iio_buffer_write_length()
547 return sprintf(buf, "%d\n", iio_buffer_is_active(indio_dev->buffer)); in iio_buffer_show_enable()
596 struct iio_buffer *buffer) in iio_buffer_activate() argument
598 iio_buffer_get(buffer); in iio_buffer_activate()
599 list_add(&buffer->buffer_list, &indio_dev->buffer_list); in iio_buffer_activate()
602 static void iio_buffer_deactivate(struct iio_buffer *buffer) in iio_buffer_deactivate() argument
604 list_del_init(&buffer->buffer_list); in iio_buffer_deactivate()
605 wake_up_interruptible(&buffer->pollq); in iio_buffer_deactivate()
606 iio_buffer_put(buffer); in iio_buffer_deactivate()
611 struct iio_buffer *buffer, *_buffer; in iio_buffer_deactivate_all() local
613 list_for_each_entry_safe(buffer, _buffer, in iio_buffer_deactivate_all()
615 iio_buffer_deactivate(buffer); in iio_buffer_deactivate_all()
618 static int iio_buffer_enable(struct iio_buffer *buffer, in iio_buffer_enable() argument
621 if (!buffer->access->enable) in iio_buffer_enable()
623 return buffer->access->enable(buffer, indio_dev); in iio_buffer_enable()
626 static int iio_buffer_disable(struct iio_buffer *buffer, in iio_buffer_disable() argument
629 if (!buffer->access->disable) in iio_buffer_disable()
631 return buffer->access->disable(buffer, indio_dev); in iio_buffer_disable()
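iio_buffer_enable()/iio_buffer_disable() only forward to the optional enable/disable hooks of the implementation's iio_buffer_access_funcs, so simple kfifo-style buffers can omit them. A sketch of how a DMA-style implementation might wire them up; the mandatory callbacks (.store_to, .read_first_n, .data_available, .release) are left out here and all names are hypothetical:

        #include <linux/iio/iio.h>
        #include <linux/iio/buffer_impl.h>

        static int my_buffer_enable(struct iio_buffer *buffer,
                                    struct iio_dev *indio_dev)
        {
                /* e.g. submit the first batch of DMA descriptors */
                return 0;
        }

        static int my_buffer_disable(struct iio_buffer *buffer,
                                     struct iio_dev *indio_dev)
        {
                /* e.g. stop the DMA engine and drain in-flight blocks */
                return 0;
        }

        static const struct iio_buffer_access_funcs my_buffer_access_funcs = {
                /* mandatory callbacks omitted for brevity */
                .enable = my_buffer_enable,
                .disable = my_buffer_disable,
                .modes = INDIO_BUFFER_HARDWARE,
        };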
635 struct iio_buffer *buffer) in iio_buffer_update_bytes_per_datum() argument
639 if (!buffer->access->set_bytes_per_datum) in iio_buffer_update_bytes_per_datum()
642 bytes = iio_compute_scan_bytes(indio_dev, buffer->scan_mask, in iio_buffer_update_bytes_per_datum()
643 buffer->scan_timestamp); in iio_buffer_update_bytes_per_datum()
645 buffer->access->set_bytes_per_datum(buffer, bytes); in iio_buffer_update_bytes_per_datum()
649 struct iio_buffer *buffer) in iio_buffer_request_update() argument
653 iio_buffer_update_bytes_per_datum(indio_dev, buffer); in iio_buffer_request_update()
654 if (buffer->access->request_update) { in iio_buffer_request_update()
655 ret = buffer->access->request_update(buffer); in iio_buffer_request_update()
658 "Buffer not started: buffer parameter update failed (%d)\n", in iio_buffer_request_update()
690 struct iio_buffer *buffer; in iio_verify_update() local
698 * If there is just one buffer and we are removing it there is nothing in iio_verify_update()
707 list_for_each_entry(buffer, &indio_dev->buffer_list, buffer_list) { in iio_verify_update()
708 if (buffer == remove_buffer) in iio_verify_update()
710 modes &= buffer->access->modes; in iio_verify_update()
711 config->watermark = min(config->watermark, buffer->watermark); in iio_verify_update()
725 * Keep things simple for now and only allow a single buffer to in iio_verify_update()
735 /* Can only occur on first buffer */ in iio_verify_update()
737 dev_dbg(&indio_dev->dev, "Buffer not started: no trigger\n"); in iio_verify_update()
749 list_for_each_entry(buffer, &indio_dev->buffer_list, buffer_list) { in iio_verify_update()
750 if (buffer == remove_buffer) in iio_verify_update()
752 bitmap_or(compound_mask, compound_mask, buffer->scan_mask, in iio_verify_update()
754 scan_timestamp |= buffer->scan_timestamp; in iio_verify_update()
797 static void iio_buffer_demux_free(struct iio_buffer *buffer) in iio_buffer_demux_free() argument
800 list_for_each_entry_safe(p, q, &buffer->demux_list, l) { in iio_buffer_demux_free()
806 static int iio_buffer_add_demux(struct iio_buffer *buffer, in iio_buffer_add_demux() argument
821 list_add_tail(&(*p)->l, &buffer->demux_list); in iio_buffer_add_demux()
828 struct iio_buffer *buffer) in iio_buffer_update_demux() argument
835 iio_buffer_demux_free(buffer); in iio_buffer_update_demux()
836 kfree(buffer->demux_bounce); in iio_buffer_update_demux()
837 buffer->demux_bounce = NULL; in iio_buffer_update_demux()
841 buffer->scan_mask, in iio_buffer_update_demux()
847 buffer->scan_mask, in iio_buffer_update_demux()
863 ret = iio_buffer_add_demux(buffer, &p, in_loc, out_loc, length); in iio_buffer_update_demux()
870 if (buffer->scan_timestamp) { in iio_buffer_update_demux()
874 ret = iio_buffer_add_demux(buffer, &p, in_loc, out_loc, length); in iio_buffer_update_demux()
880 buffer->demux_bounce = kzalloc(out_loc, GFP_KERNEL); in iio_buffer_update_demux()
881 if (buffer->demux_bounce == NULL) { in iio_buffer_update_demux()
888 iio_buffer_demux_free(buffer); in iio_buffer_update_demux()
895 struct iio_buffer *buffer; in iio_update_demux() local
898 list_for_each_entry(buffer, &indio_dev->buffer_list, buffer_list) { in iio_update_demux()
899 ret = iio_buffer_update_demux(indio_dev, buffer); in iio_update_demux()
906 list_for_each_entry(buffer, &indio_dev->buffer_list, buffer_list) in iio_update_demux()
907 iio_buffer_demux_free(buffer); in iio_update_demux()
915 struct iio_buffer *buffer; in iio_enable_buffers() local
929 "Buffer not started: buffer preenable failed (%d)\n", ret); in iio_enable_buffers()
940 "Buffer not started: update scan mode failed (%d)\n", in iio_enable_buffers()
950 list_for_each_entry(buffer, &indio_dev->buffer_list, buffer_list) { in iio_enable_buffers()
951 ret = iio_buffer_enable(buffer, indio_dev); in iio_enable_buffers()
962 "Buffer not started: postenable failed (%d)\n", ret); in iio_enable_buffers()
970 list_for_each_entry_continue_reverse(buffer, &indio_dev->buffer_list, in iio_enable_buffers()
972 iio_buffer_disable(buffer, indio_dev); in iio_enable_buffers()
985 struct iio_buffer *buffer; in iio_disable_buffers() local
1006 list_for_each_entry(buffer, &indio_dev->buffer_list, buffer_list) { in iio_disable_buffers()
1007 ret2 = iio_buffer_disable(buffer, indio_dev); in iio_disable_buffers()
1140 inlist = iio_buffer_is_active(indio_dev->buffer); in iio_buffer_store_enable()
1147 indio_dev->buffer, NULL); in iio_buffer_store_enable()
1150 NULL, indio_dev->buffer); in iio_buffer_store_enable()
1164 struct iio_buffer *buffer = indio_dev->buffer; in iio_buffer_show_watermark() local
1166 return sprintf(buf, "%u\n", buffer->watermark); in iio_buffer_show_watermark()
1175 struct iio_buffer *buffer = indio_dev->buffer; in iio_buffer_store_watermark() local
1187 if (val > buffer->length) { in iio_buffer_store_watermark()
1192 if (iio_buffer_is_active(indio_dev->buffer)) { in iio_buffer_store_watermark()
1197 buffer->watermark = val; in iio_buffer_store_watermark()
1211 bytes = iio_buffer_data_available(indio_dev->buffer); in iio_dma_show_data_available()
1240 struct iio_buffer *buffer = indio_dev->buffer; in iio_buffer_alloc_sysfs_and_mask() local
1253 if (!buffer) in iio_buffer_alloc_sysfs_and_mask()
1257 if (buffer->attrs) { in iio_buffer_alloc_sysfs_and_mask()
1258 while (buffer->attrs[attrcount] != NULL) in iio_buffer_alloc_sysfs_and_mask()
1268 if (!buffer->access->set_length) in iio_buffer_alloc_sysfs_and_mask()
1271 if (buffer->access->flags & INDIO_BUFFER_FLAG_FIXED_WATERMARK) in iio_buffer_alloc_sysfs_and_mask()
1274 if (buffer->attrs) in iio_buffer_alloc_sysfs_and_mask()
1275 memcpy(&attr[ARRAY_SIZE(iio_buffer_attrs)], buffer->attrs, in iio_buffer_alloc_sysfs_and_mask()
1280 buffer->buffer_group.name = "buffer"; in iio_buffer_alloc_sysfs_and_mask()
1281 buffer->buffer_group.attrs = attr; in iio_buffer_alloc_sysfs_and_mask()
1283 indio_dev->groups[indio_dev->groupcounter++] = &buffer->buffer_group; in iio_buffer_alloc_sysfs_and_mask()
1285 if (buffer->scan_el_attrs != NULL) { in iio_buffer_alloc_sysfs_and_mask()
1286 attr = buffer->scan_el_attrs->attrs; in iio_buffer_alloc_sysfs_and_mask()
1291 INIT_LIST_HEAD(&buffer->scan_el_dev_attr_list); in iio_buffer_alloc_sysfs_and_mask()
1308 if (indio_dev->masklength && buffer->scan_mask == NULL) { in iio_buffer_alloc_sysfs_and_mask()
1309 buffer->scan_mask = kcalloc(BITS_TO_LONGS(indio_dev->masklength), in iio_buffer_alloc_sysfs_and_mask()
1310 sizeof(*buffer->scan_mask), in iio_buffer_alloc_sysfs_and_mask()
1312 if (buffer->scan_mask == NULL) { in iio_buffer_alloc_sysfs_and_mask()
1319 buffer->scan_el_group.name = iio_scan_elements_group_name; in iio_buffer_alloc_sysfs_and_mask()
1321 buffer->scan_el_group.attrs = kcalloc(attrcount + 1, in iio_buffer_alloc_sysfs_and_mask()
1322 sizeof(buffer->scan_el_group.attrs[0]), in iio_buffer_alloc_sysfs_and_mask()
1324 if (buffer->scan_el_group.attrs == NULL) { in iio_buffer_alloc_sysfs_and_mask()
1328 if (buffer->scan_el_attrs) in iio_buffer_alloc_sysfs_and_mask()
1329 memcpy(buffer->scan_el_group.attrs, buffer->scan_el_attrs, in iio_buffer_alloc_sysfs_and_mask()
1330 sizeof(buffer->scan_el_group.attrs[0])*attrcount_orig); in iio_buffer_alloc_sysfs_and_mask()
1333 list_for_each_entry(p, &buffer->scan_el_dev_attr_list, l) in iio_buffer_alloc_sysfs_and_mask()
1334 buffer->scan_el_group.attrs[attrn++] = &p->dev_attr.attr; in iio_buffer_alloc_sysfs_and_mask()
1335 indio_dev->groups[indio_dev->groupcounter++] = &buffer->scan_el_group; in iio_buffer_alloc_sysfs_and_mask()
1340 kfree(buffer->scan_mask); in iio_buffer_alloc_sysfs_and_mask()
1342 iio_free_chan_devattr_list(&buffer->scan_el_dev_attr_list); in iio_buffer_alloc_sysfs_and_mask()
1343 kfree(indio_dev->buffer->buffer_group.attrs); in iio_buffer_alloc_sysfs_and_mask()
1350 if (!indio_dev->buffer) in iio_buffer_free_sysfs_and_mask()
1353 kfree(indio_dev->buffer->scan_mask); in iio_buffer_free_sysfs_and_mask()
1354 kfree(indio_dev->buffer->buffer_group.attrs); in iio_buffer_free_sysfs_and_mask()
1355 kfree(indio_dev->buffer->scan_el_group.attrs); in iio_buffer_free_sysfs_and_mask()
1356 iio_free_chan_devattr_list(&indio_dev->buffer->scan_el_dev_attr_list); in iio_buffer_free_sysfs_and_mask()
1375 static const void *iio_demux(struct iio_buffer *buffer, in iio_demux() argument
1380 if (list_empty(&buffer->demux_list)) in iio_demux()
1382 list_for_each_entry(t, &buffer->demux_list, l) in iio_demux()
1383 memcpy(buffer->demux_bounce + t->to, in iio_demux()
1386 return buffer->demux_bounce; in iio_demux()
1389 static int iio_push_to_buffer(struct iio_buffer *buffer, const void *data) in iio_push_to_buffer() argument
1391 const void *dataout = iio_demux(buffer, data); in iio_push_to_buffer()
1394 ret = buffer->access->store_to(buffer, dataout); in iio_push_to_buffer()
1402 wake_up_interruptible_poll(&buffer->pollq, EPOLLIN | EPOLLRDNORM); in iio_push_to_buffer()
1407 * iio_push_to_buffers() - push to a registered buffer.
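On the producer side, a driver's trigger handler typically fills one scan and pushes it with iio_push_to_buffers_with_timestamp(), which lands in iio_push_to_buffer() above: the scan is demuxed into every buffer on buffer_list and the pollq is woken. A hedged sketch of such a bottom half; my_state, its scan layout, and my_read_scan() are hypothetical:

        #include <linux/interrupt.h>
        #include <linux/iio/iio.h>
        #include <linux/iio/buffer.h>
        #include <linux/iio/trigger_consumer.h>

        struct my_state {
                /* scan layout is an assumption: 4 channels + aligned timestamp */
                struct {
                        __le16 channels[4];
                        s64 timestamp __aligned(8);
                } scan;
        };

        static irqreturn_t my_trigger_handler(int irq, void *p)
        {
                struct iio_poll_func *pf = p;
                struct iio_dev *indio_dev = pf->indio_dev;
                struct my_state *st = iio_priv(indio_dev);

                my_read_scan(st);       /* hypothetical helper: fill st->scan.channels */

                iio_push_to_buffers_with_timestamp(indio_dev, &st->scan,
                                                   iio_get_time_ns(indio_dev));

                iio_trigger_notify_done(indio_dev->trig);
                return IRQ_HANDLED;
        }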
1427 * iio_buffer_release() - Free a buffer's resources
1430 * This function is called when the last reference to the buffer has been
1431 * dropped. It will typically free all resources allocated by the buffer. Do not
1433 * buffer.
1437 struct iio_buffer *buffer = container_of(ref, struct iio_buffer, ref); in iio_buffer_release() local
1439 buffer->access->release(buffer); in iio_buffer_release()
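The matching .release hook lives in the implementation's access functions and is the only place the backing memory may be freed; by the time iio_buffer_release() runs, no reference holders remain. A sketch reusing the hypothetical struct my_buffer from the iio_buffer_init() example above:

        #include <linux/slab.h>
        #include <linux/iio/buffer_impl.h>

        static void my_buffer_release(struct iio_buffer *buffer)
        {
                struct my_buffer *mb = container_of(buffer, struct my_buffer, buffer);

                /* never touch *buffer after this point */
                kfree(mb);
        }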
1443 * iio_buffer_get() - Grab a reference to the buffer
1444 * @buffer: The buffer to grab a reference for, may be NULL
1446 * Returns the pointer to the buffer that was passed into the function.
1448 struct iio_buffer *iio_buffer_get(struct iio_buffer *buffer) in iio_buffer_get() argument
1450 if (buffer) in iio_buffer_get()
1451 kref_get(&buffer->ref); in iio_buffer_get()
1453 return buffer; in iio_buffer_get()
1458 * iio_buffer_put() - Release the reference to the buffer
1459 * @buffer: The buffer to release the reference for, may be NULL
1461 void iio_buffer_put(struct iio_buffer *buffer) in iio_buffer_put() argument
1463 if (buffer) in iio_buffer_put()
1464 kref_put(&buffer->ref, iio_buffer_release); in iio_buffer_put()
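Any code that keeps a buffer pointer beyond the lifetime of the reference it was handed takes its own reference with iio_buffer_get() and drops it with iio_buffer_put(); both accept NULL. A minimal sketch with hypothetical wrappers:

        #include <linux/iio/iio.h>
        #include <linux/iio/buffer_impl.h>

        static struct iio_buffer *my_grab_buffer(struct iio_dev *indio_dev)
        {
                /* may return NULL if no buffer is attached to the device */
                return iio_buffer_get(indio_dev->buffer);
        }

        static void my_drop_buffer(struct iio_buffer *buffer)
        {
                /* the last put frees the buffer via its .release callback */
                iio_buffer_put(buffer);
        }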
1469  * iio_device_attach_buffer - Attach a buffer to an IIO device
1470 * @indio_dev: The device the buffer should be attached to
1471 * @buffer: The buffer to attach to the device
1473  * This function attaches a buffer to an IIO device. The buffer stays attached to
1478 struct iio_buffer *buffer) in iio_device_attach_buffer() argument
1480 indio_dev->buffer = iio_buffer_get(buffer); in iio_device_attach_buffer()
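Putting it together, a driver's probe path allocates a buffer implementation and hands it over with iio_device_attach_buffer(), which takes its own reference (last line above). A hedged sketch using the kfifo buffer API as it existed in kernels of this vintage; my_sensor_setup_buffer() is a made-up helper:

        #include <linux/iio/iio.h>
        #include <linux/iio/buffer.h>
        #include <linux/iio/kfifo_buf.h>

        static int my_sensor_setup_buffer(struct device *dev,
                                          struct iio_dev *indio_dev)
        {
                struct iio_buffer *buffer;

                buffer = devm_iio_kfifo_allocate(dev);
                if (!buffer)
                        return -ENOMEM;

                iio_device_attach_buffer(indio_dev, buffer);
                indio_dev->modes |= INDIO_BUFFER_SOFTWARE;
                /* indio_dev->setup_ops = &my_setup_ops;  (hypothetical) */

                return 0;
        }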