Lines matching refs: buffer
110 struct iio_buffer *rb = indio_dev->buffer; in iio_buffer_read_first_n_outer()
176 struct iio_buffer *rb = indio_dev->buffer; in iio_buffer_poll()
196 if (!indio_dev->buffer) in iio_buffer_wakeup_poll()
199 wake_up(&indio_dev->buffer->pollq); in iio_buffer_wakeup_poll()
202 void iio_buffer_init(struct iio_buffer *buffer) in iio_buffer_init() argument
204 INIT_LIST_HEAD(&buffer->demux_list); in iio_buffer_init()
205 INIT_LIST_HEAD(&buffer->buffer_list); in iio_buffer_init()
206 init_waitqueue_head(&buffer->pollq); in iio_buffer_init()
207 kref_init(&buffer->ref); in iio_buffer_init()
208 if (!buffer->watermark) in iio_buffer_init()
209 buffer->watermark = 1; in iio_buffer_init()
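
The iio_buffer_init() references above cover the common setup every buffer gets: the demux and buffer-list heads, the poll waitqueue, the kref, and a default watermark of 1. A minimal sketch of how a buffer implementation might call it from its allocation path, using hypothetical my_buffer/my_access names (struct iio_buffer and iio_buffer_init() live in <linux/iio/buffer.h> in kernels of this vintage; newer kernels move the struct definition to buffer_impl.h):

	#include <linux/slab.h>
	#include <linux/iio/buffer.h>

	struct my_buffer {
		struct iio_buffer buffer;	/* embedded; the core only ever sees this part */
		/* backing storage, e.g. a kfifo, would live here */
	};

	/* hypothetical ops table: store_to, read_first_n, request_update, release, ... */
	static const struct iio_buffer_access_funcs my_access;

	static struct iio_buffer *my_buffer_allocate(void)
	{
		struct my_buffer *mb = kzalloc(sizeof(*mb), GFP_KERNEL);

		if (!mb)
			return NULL;

		iio_buffer_init(&mb->buffer);	/* list heads, pollq, kref, watermark = 1 */
		mb->buffer.access = &my_access;
		return &mb->buffer;
	}
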
218 void iio_buffer_set_attrs(struct iio_buffer *buffer, in iio_buffer_set_attrs() argument
221 buffer->attrs = attrs; in iio_buffer_set_attrs()
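
iio_buffer_set_attrs() only stores the driver-supplied, NULL-terminated attribute array in buffer->attrs; the entries are merged into the buffer sysfs group later, in iio_buffer_alloc_sysfs_and_mask(). A hedged example of handing over one extra buffer attribute (the hwfifo_watermark name, its show routine and my_add_fifo_attrs() are made up for illustration):

	#include <linux/iio/iio.h>
	#include <linux/iio/sysfs.h>
	#include <linux/iio/buffer.h>

	static ssize_t my_hwfifo_watermark_show(struct device *dev,
						struct device_attribute *attr, char *buf)
	{
		return sprintf(buf, "%d\n", 32);	/* hypothetical fixed hardware watermark */
	}

	static IIO_DEVICE_ATTR(hwfifo_watermark, 0444, my_hwfifo_watermark_show, NULL, 0);

	static const struct attribute *my_fifo_attrs[] = {
		&iio_dev_attr_hwfifo_watermark.dev_attr.attr,
		NULL,
	};

	/* called from probe, once the buffer has been allocated and attached */
	static void my_add_fifo_attrs(struct iio_dev *indio_dev)
	{
		iio_buffer_set_attrs(indio_dev->buffer, my_fifo_attrs);
	}
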
272 indio_dev->buffer->scan_mask); in iio_scan_el_show()
318 struct iio_buffer *buffer, int bit) in iio_scan_mask_set() argument
331 bitmap_copy(trialmask, buffer->scan_mask, indio_dev->masklength); in iio_scan_mask_set()
344 bitmap_copy(buffer->scan_mask, trialmask, indio_dev->masklength); in iio_scan_mask_set()
355 static int iio_scan_mask_clear(struct iio_buffer *buffer, int bit) in iio_scan_mask_clear() argument
357 clear_bit(bit, buffer->scan_mask); in iio_scan_mask_clear()
362 struct iio_buffer *buffer, int bit) in iio_scan_mask_query() argument
367 if (!buffer->scan_mask) in iio_scan_mask_query()
371 return !!test_bit(bit, buffer->scan_mask); in iio_scan_mask_query()
382 struct iio_buffer *buffer = indio_dev->buffer; in iio_scan_el_store() local
389 if (iio_buffer_is_active(indio_dev->buffer)) { in iio_scan_el_store()
393 ret = iio_scan_mask_query(indio_dev, buffer, this_attr->address); in iio_scan_el_store()
397 ret = iio_scan_mask_clear(buffer, this_attr->address); in iio_scan_el_store()
401 ret = iio_scan_mask_set(indio_dev, buffer, this_attr->address); in iio_scan_el_store()
418 return sprintf(buf, "%d\n", indio_dev->buffer->scan_timestamp); in iio_scan_el_ts_show()
435 if (iio_buffer_is_active(indio_dev->buffer)) { in iio_scan_el_ts_store()
439 indio_dev->buffer->scan_timestamp = state; in iio_scan_el_ts_store()
450 struct iio_buffer *buffer = indio_dev->buffer; in iio_buffer_add_channel_sysfs() local
459 &buffer->scan_el_dev_attr_list); in iio_buffer_add_channel_sysfs()
470 &buffer->scan_el_dev_attr_list); in iio_buffer_add_channel_sysfs()
482 &buffer->scan_el_dev_attr_list); in iio_buffer_add_channel_sysfs()
491 &buffer->scan_el_dev_attr_list); in iio_buffer_add_channel_sysfs()
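
iio_buffer_add_channel_sysfs() creates the per-channel scan_elements/*_index, *_type and *_en attributes from the driver's channel table; a channel only takes part in the scan when it declares a scan_index and scan_type. A hypothetical channel definition showing the fields those attributes are derived from:

	static const struct iio_chan_spec my_channels[] = {
		{
			.type = IIO_VOLTAGE,
			.indexed = 1,
			.channel = 0,
			.scan_index = 0,		/* position of this sample within the scan */
			.scan_type = {
				.sign = 's',
				.realbits = 12,
				.storagebits = 16,	/* bits this sample occupies in the buffer */
				.shift = 4,
				.endianness = IIO_BE,
			},
		},
		IIO_CHAN_SOFT_TIMESTAMP(1),
	};
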
504 struct iio_buffer *buffer = indio_dev->buffer; in iio_buffer_read_length() local
506 return sprintf(buf, "%d\n", buffer->length); in iio_buffer_read_length()
514 struct iio_buffer *buffer = indio_dev->buffer; in iio_buffer_write_length() local
522 if (val == buffer->length) in iio_buffer_write_length()
526 if (iio_buffer_is_active(indio_dev->buffer)) { in iio_buffer_write_length()
529 buffer->access->set_length(buffer, val); in iio_buffer_write_length()
534 if (buffer->length && buffer->length < buffer->watermark) in iio_buffer_write_length()
535 buffer->watermark = buffer->length; in iio_buffer_write_length()
547 return sprintf(buf, "%d\n", iio_buffer_is_active(indio_dev->buffer)); in iio_buffer_show_enable()
596 struct iio_buffer *buffer) in iio_buffer_activate() argument
598 iio_buffer_get(buffer); in iio_buffer_activate()
599 list_add(&buffer->buffer_list, &indio_dev->buffer_list); in iio_buffer_activate()
602 static void iio_buffer_deactivate(struct iio_buffer *buffer) in iio_buffer_deactivate() argument
604 list_del_init(&buffer->buffer_list); in iio_buffer_deactivate()
605 wake_up_interruptible(&buffer->pollq); in iio_buffer_deactivate()
606 iio_buffer_put(buffer); in iio_buffer_deactivate()
611 struct iio_buffer *buffer, *_buffer; in iio_buffer_deactivate_all() local
613 list_for_each_entry_safe(buffer, _buffer, in iio_buffer_deactivate_all()
615 iio_buffer_deactivate(buffer); in iio_buffer_deactivate_all()
618 static int iio_buffer_enable(struct iio_buffer *buffer, in iio_buffer_enable() argument
621 if (!buffer->access->enable) in iio_buffer_enable()
623 return buffer->access->enable(buffer, indio_dev); in iio_buffer_enable()
626 static int iio_buffer_disable(struct iio_buffer *buffer, in iio_buffer_disable() argument
629 if (!buffer->access->disable) in iio_buffer_disable()
631 return buffer->access->disable(buffer, indio_dev); in iio_buffer_disable()
635 struct iio_buffer *buffer) in iio_buffer_update_bytes_per_datum() argument
639 if (!buffer->access->set_bytes_per_datum) in iio_buffer_update_bytes_per_datum()
642 bytes = iio_compute_scan_bytes(indio_dev, buffer->scan_mask, in iio_buffer_update_bytes_per_datum()
643 buffer->scan_timestamp); in iio_buffer_update_bytes_per_datum()
645 buffer->access->set_bytes_per_datum(buffer, bytes); in iio_buffer_update_bytes_per_datum()
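
iio_buffer_update_bytes_per_datum() recomputes the size of one scan from the buffer's scan_mask and scan_timestamp flag (via iio_compute_scan_bytes()) and pushes the result into the implementation's set_bytes_per_datum() callback. A simplified model of that size computation, assuming the per-channel storage sizes have already been looked up (the real helper walks indio_dev->channels):

	static unsigned int scan_bytes_model(const unsigned int *storage_bytes,
					     int num_enabled, bool timestamp)
	{
		unsigned int bytes = 0;
		int i;

		for (i = 0; i < num_enabled; i++) {
			/* each sample is naturally aligned to its own storage size */
			bytes = ALIGN(bytes, storage_bytes[i]);
			bytes += storage_bytes[i];
		}
		if (timestamp) {
			bytes = ALIGN(bytes, sizeof(s64));	/* timestamp is an s64 at the end */
			bytes += sizeof(s64);
		}
		return bytes;
	}
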
649 struct iio_buffer *buffer) in iio_buffer_request_update() argument
653 iio_buffer_update_bytes_per_datum(indio_dev, buffer); in iio_buffer_request_update()
654 if (buffer->access->request_update) { in iio_buffer_request_update()
655 ret = buffer->access->request_update(buffer); in iio_buffer_request_update()
690 struct iio_buffer *buffer; in iio_verify_update() local
707 list_for_each_entry(buffer, &indio_dev->buffer_list, buffer_list) { in iio_verify_update()
708 if (buffer == remove_buffer) in iio_verify_update()
710 modes &= buffer->access->modes; in iio_verify_update()
711 config->watermark = min(config->watermark, buffer->watermark); in iio_verify_update()
749 list_for_each_entry(buffer, &indio_dev->buffer_list, buffer_list) { in iio_verify_update()
750 if (buffer == remove_buffer) in iio_verify_update()
752 bitmap_or(compound_mask, compound_mask, buffer->scan_mask, in iio_verify_update()
754 scan_timestamp |= buffer->scan_timestamp; in iio_verify_update()
797 static void iio_buffer_demux_free(struct iio_buffer *buffer) in iio_buffer_demux_free() argument
800 list_for_each_entry_safe(p, q, &buffer->demux_list, l) { in iio_buffer_demux_free()
806 static int iio_buffer_add_demux(struct iio_buffer *buffer, in iio_buffer_add_demux() argument
821 list_add_tail(&(*p)->l, &buffer->demux_list); in iio_buffer_add_demux()
828 struct iio_buffer *buffer) in iio_buffer_update_demux() argument
835 iio_buffer_demux_free(buffer); in iio_buffer_update_demux()
836 kfree(buffer->demux_bounce); in iio_buffer_update_demux()
837 buffer->demux_bounce = NULL; in iio_buffer_update_demux()
841 buffer->scan_mask, in iio_buffer_update_demux()
847 buffer->scan_mask, in iio_buffer_update_demux()
863 ret = iio_buffer_add_demux(buffer, &p, in_loc, out_loc, length); in iio_buffer_update_demux()
870 if (buffer->scan_timestamp) { in iio_buffer_update_demux()
874 ret = iio_buffer_add_demux(buffer, &p, in_loc, out_loc, length); in iio_buffer_update_demux()
880 buffer->demux_bounce = kzalloc(out_loc, GFP_KERNEL); in iio_buffer_update_demux()
881 if (buffer->demux_bounce == NULL) { in iio_buffer_update_demux()
888 iio_buffer_demux_free(buffer); in iio_buffer_update_demux()
895 struct iio_buffer *buffer; in iio_update_demux() local
898 list_for_each_entry(buffer, &indio_dev->buffer_list, buffer_list) { in iio_update_demux()
899 ret = iio_buffer_update_demux(indio_dev, buffer); in iio_update_demux()
906 list_for_each_entry(buffer, &indio_dev->buffer_list, buffer_list) in iio_update_demux()
907 iio_buffer_demux_free(buffer); in iio_update_demux()
915 struct iio_buffer *buffer; in iio_enable_buffers() local
950 list_for_each_entry(buffer, &indio_dev->buffer_list, buffer_list) { in iio_enable_buffers()
951 ret = iio_buffer_enable(buffer, indio_dev); in iio_enable_buffers()
970 list_for_each_entry_continue_reverse(buffer, &indio_dev->buffer_list, in iio_enable_buffers()
972 iio_buffer_disable(buffer, indio_dev); in iio_enable_buffers()
985 struct iio_buffer *buffer; in iio_disable_buffers() local
1006 list_for_each_entry(buffer, &indio_dev->buffer_list, buffer_list) { in iio_disable_buffers()
1007 ret2 = iio_buffer_disable(buffer, indio_dev); in iio_disable_buffers()
1140 inlist = iio_buffer_is_active(indio_dev->buffer); in iio_buffer_store_enable()
1147 indio_dev->buffer, NULL); in iio_buffer_store_enable()
1150 NULL, indio_dev->buffer); in iio_buffer_store_enable()
1164 struct iio_buffer *buffer = indio_dev->buffer; in iio_buffer_show_watermark() local
1166 return sprintf(buf, "%u\n", buffer->watermark); in iio_buffer_show_watermark()
1175 struct iio_buffer *buffer = indio_dev->buffer; in iio_buffer_store_watermark() local
1187 if (val > buffer->length) { in iio_buffer_store_watermark()
1192 if (iio_buffer_is_active(indio_dev->buffer)) { in iio_buffer_store_watermark()
1197 buffer->watermark = val; in iio_buffer_store_watermark()
1225 struct iio_buffer *buffer = indio_dev->buffer; in iio_buffer_alloc_sysfs_and_mask() local
1238 if (!buffer) in iio_buffer_alloc_sysfs_and_mask()
1242 if (buffer->attrs) { in iio_buffer_alloc_sysfs_and_mask()
1243 while (buffer->attrs[attrcount] != NULL) in iio_buffer_alloc_sysfs_and_mask()
1253 if (!buffer->access->set_length) in iio_buffer_alloc_sysfs_and_mask()
1256 if (buffer->access->flags & INDIO_BUFFER_FLAG_FIXED_WATERMARK) in iio_buffer_alloc_sysfs_and_mask()
1259 if (buffer->attrs) in iio_buffer_alloc_sysfs_and_mask()
1260 memcpy(&attr[ARRAY_SIZE(iio_buffer_attrs)], buffer->attrs, in iio_buffer_alloc_sysfs_and_mask()
1265 buffer->buffer_group.name = "buffer"; in iio_buffer_alloc_sysfs_and_mask()
1266 buffer->buffer_group.attrs = attr; in iio_buffer_alloc_sysfs_and_mask()
1268 indio_dev->groups[indio_dev->groupcounter++] = &buffer->buffer_group; in iio_buffer_alloc_sysfs_and_mask()
1270 if (buffer->scan_el_attrs != NULL) { in iio_buffer_alloc_sysfs_and_mask()
1271 attr = buffer->scan_el_attrs->attrs; in iio_buffer_alloc_sysfs_and_mask()
1276 INIT_LIST_HEAD(&buffer->scan_el_dev_attr_list); in iio_buffer_alloc_sysfs_and_mask()
1293 if (indio_dev->masklength && buffer->scan_mask == NULL) { in iio_buffer_alloc_sysfs_and_mask()
1294 buffer->scan_mask = kcalloc(BITS_TO_LONGS(indio_dev->masklength), in iio_buffer_alloc_sysfs_and_mask()
1295 sizeof(*buffer->scan_mask), in iio_buffer_alloc_sysfs_and_mask()
1297 if (buffer->scan_mask == NULL) { in iio_buffer_alloc_sysfs_and_mask()
1304 buffer->scan_el_group.name = iio_scan_elements_group_name; in iio_buffer_alloc_sysfs_and_mask()
1306 buffer->scan_el_group.attrs = kcalloc(attrcount + 1, in iio_buffer_alloc_sysfs_and_mask()
1307 sizeof(buffer->scan_el_group.attrs[0]), in iio_buffer_alloc_sysfs_and_mask()
1309 if (buffer->scan_el_group.attrs == NULL) { in iio_buffer_alloc_sysfs_and_mask()
1313 if (buffer->scan_el_attrs) in iio_buffer_alloc_sysfs_and_mask()
1314 memcpy(buffer->scan_el_group.attrs, buffer->scan_el_attrs, in iio_buffer_alloc_sysfs_and_mask()
1315 sizeof(buffer->scan_el_group.attrs[0])*attrcount_orig); in iio_buffer_alloc_sysfs_and_mask()
1318 list_for_each_entry(p, &buffer->scan_el_dev_attr_list, l) in iio_buffer_alloc_sysfs_and_mask()
1319 buffer->scan_el_group.attrs[attrn++] = &p->dev_attr.attr; in iio_buffer_alloc_sysfs_and_mask()
1320 indio_dev->groups[indio_dev->groupcounter++] = &buffer->scan_el_group; in iio_buffer_alloc_sysfs_and_mask()
1325 kfree(buffer->scan_mask); in iio_buffer_alloc_sysfs_and_mask()
1327 iio_free_chan_devattr_list(&buffer->scan_el_dev_attr_list); in iio_buffer_alloc_sysfs_and_mask()
1328 kfree(indio_dev->buffer->buffer_group.attrs); in iio_buffer_alloc_sysfs_and_mask()
1335 if (!indio_dev->buffer) in iio_buffer_free_sysfs_and_mask()
1338 kfree(indio_dev->buffer->scan_mask); in iio_buffer_free_sysfs_and_mask()
1339 kfree(indio_dev->buffer->buffer_group.attrs); in iio_buffer_free_sysfs_and_mask()
1340 kfree(indio_dev->buffer->scan_el_group.attrs); in iio_buffer_free_sysfs_and_mask()
1341 iio_free_chan_devattr_list(&indio_dev->buffer->scan_el_dev_attr_list); in iio_buffer_free_sysfs_and_mask()
1360 static const void *iio_demux(struct iio_buffer *buffer, in iio_demux() argument
1365 if (list_empty(&buffer->demux_list)) in iio_demux()
1367 list_for_each_entry(t, &buffer->demux_list, l) in iio_demux()
1368 memcpy(buffer->demux_bounce + t->to, in iio_demux()
1371 return buffer->demux_bounce; in iio_demux()
1374 static int iio_push_to_buffer(struct iio_buffer *buffer, const void *data) in iio_push_to_buffer() argument
1376 const void *dataout = iio_demux(buffer, data); in iio_push_to_buffer()
1379 ret = buffer->access->store_to(buffer, dataout); in iio_push_to_buffer()
1387 wake_up_interruptible_poll(&buffer->pollq, POLLIN | POLLRDNORM); in iio_push_to_buffer()
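
iio_push_to_buffer() is the internal per-buffer leg of iio_push_to_buffers(): it runs the demux for that buffer, hands the sample to the implementation's store_to() callback and wakes poll()/read() waiters on buffer->pollq. From a driver's perspective the usual entry point is iio_push_to_buffers_with_timestamp() inside a triggered-buffer handler, roughly like this (struct my_state and its scan layout are hypothetical; pf->timestamp assumes iio_pollfunc_store_time() was used as the top half):

	#include <linux/interrupt.h>
	#include <linux/iio/iio.h>
	#include <linux/iio/buffer.h>
	#include <linux/iio/trigger_consumer.h>

	struct my_state {
		struct {
			s16 channels[4];		/* samples for the enabled channels */
			s64 timestamp __aligned(8);	/* room for the appended timestamp */
		} scan;
	};

	static irqreturn_t my_trigger_handler(int irq, void *p)
	{
		struct iio_poll_func *pf = p;
		struct iio_dev *indio_dev = pf->indio_dev;
		struct my_state *st = iio_priv(indio_dev);

		/* ... read the enabled channels into st->scan.channels ... */

		iio_push_to_buffers_with_timestamp(indio_dev, &st->scan,
						   pf->timestamp);

		iio_trigger_notify_done(indio_dev->trig);
		return IRQ_HANDLED;
	}
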
1422 struct iio_buffer *buffer = container_of(ref, struct iio_buffer, ref); in iio_buffer_release() local
1424 buffer->access->release(buffer); in iio_buffer_release()
1433 struct iio_buffer *iio_buffer_get(struct iio_buffer *buffer) in iio_buffer_get() argument
1435 if (buffer) in iio_buffer_get()
1436 kref_get(&buffer->ref); in iio_buffer_get()
1438 return buffer; in iio_buffer_get()
1446 void iio_buffer_put(struct iio_buffer *buffer) in iio_buffer_put() argument
1448 if (buffer) in iio_buffer_put()
1449 kref_put(&buffer->ref, iio_buffer_release); in iio_buffer_put()
1463 struct iio_buffer *buffer) in iio_device_attach_buffer() argument
1465 indio_dev->buffer = iio_buffer_get(buffer); in iio_device_attach_buffer()
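
The last three groups show the buffer lifetime rules: iio_buffer_get()/iio_buffer_put() manage the kref initialised in iio_buffer_init(), iio_buffer_release() calls the implementation's release() callback when the last reference drops, and iio_device_attach_buffer() takes its own reference on the buffer it stores in indio_dev->buffer. A hedged probe-time sketch using the kfifo buffer implementation (names hypothetical, error handling trimmed):

	#include <linux/iio/iio.h>
	#include <linux/iio/buffer.h>
	#include <linux/iio/kfifo_buf.h>

	static int my_setup_buffer(struct iio_dev *indio_dev)
	{
		struct iio_buffer *buffer;

		buffer = iio_kfifo_allocate();
		if (!buffer)
			return -ENOMEM;

		iio_device_attach_buffer(indio_dev, buffer);	/* core takes its own reference */
		indio_dev->modes |= INDIO_BUFFER_SOFTWARE;
		return 0;
	}

	/* on remove the driver drops its allocation reference: */
	/*	iio_kfifo_free(indio_dev->buffer);		*/
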