// SPDX-License-Identifier: GPL-2.0-only
/* The industrial I/O callback buffer
 */

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/err.h>
#include <linux/export.h>
#include <linux/iio/iio.h>
#include <linux/iio/buffer_impl.h>
#include <linux/iio/consumer.h>

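/*
 * Typical consumer usage (a minimal sketch only; "my_cb", "priv" and "dev"
 * are illustrative names and error handling is omitted):
 *
 *	static int my_cb(const void *data, void *private)
 *	{
 *		// "data" is one scan, laid out per the enabled scan mask
 *		return 0;
 *	}
 *
 *	cb_buff = iio_channel_get_all_cb(dev, my_cb, priv);
 *	iio_channel_cb_set_buffer_watermark(cb_buff, 1);
 *	iio_channel_start_all_cb(cb_buff);
 *	...
 *	iio_channel_stop_all_cb(cb_buff);
 *	iio_channel_release_all_cb(cb_buff);
 */
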
struct iio_cb_buffer {
	struct iio_buffer buffer;	/* embedded IIO buffer */
	int (*cb)(const void *data, void *private); /* consumer callback */
	void *private;			/* opaque data passed to cb */
	struct iio_channel *channels;	/* channels from iio_channel_get_all() */
	struct iio_dev *indio_dev;	/* device all channels belong to */
};

static struct iio_cb_buffer *buffer_to_cb_buffer(struct iio_buffer *buffer)
{
	return container_of(buffer, struct iio_cb_buffer, buffer);
}

static int iio_buffer_cb_store_to(struct iio_buffer *buffer, const void *data)
{
	struct iio_cb_buffer *cb_buff = buffer_to_cb_buffer(buffer);
	return cb_buff->cb(data, cb_buff->private);
}

static void iio_buffer_cb_release(struct iio_buffer *buffer)
{
	struct iio_cb_buffer *cb_buff = buffer_to_cb_buffer(buffer);

	bitmap_free(cb_buff->buffer.scan_mask);
	kfree(cb_buff);
}

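/*
 * Buffer access functions: there is no read() hook, so data reaches the
 * consumer only through the registered callback.
 */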
static const struct iio_buffer_access_funcs iio_cb_access = {
	.store_to = &iio_buffer_cb_store_to,
	.release = &iio_buffer_cb_release,

	.modes = INDIO_BUFFER_SOFTWARE | INDIO_BUFFER_TRIGGERED,
};

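/**
 * iio_channel_get_all_cb() - set up a callback buffer for all channels
 *			      mapped to a consumer device
 * @dev:	Consumer device the channel mappings belong to.
 * @cb:		Callback invoked for every scan pushed into the buffer.
 * @private:	Opaque pointer handed to @cb on each invocation.
 *
 * All mapped channels must belong to the same IIO device, since the
 * callback buffer attaches to a single producer; otherwise
 * ERR_PTR(-EINVAL) is returned. Returns the new callback buffer on
 * success or an ERR_PTR() on failure.
 */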
struct iio_cb_buffer *iio_channel_get_all_cb(struct device *dev,
					     int (*cb)(const void *data,
						       void *private),
					     void *private)
{
	int ret;
	struct iio_cb_buffer *cb_buff;
	struct iio_channel *chan;

	if (!cb) {
		dev_err(dev, "Invalid arguments: A callback must be provided!\n");
		return ERR_PTR(-EINVAL);
	}

	cb_buff = kzalloc(sizeof(*cb_buff), GFP_KERNEL);
	if (cb_buff == NULL)
		return ERR_PTR(-ENOMEM);

	iio_buffer_init(&cb_buff->buffer);

	cb_buff->private = private;
	cb_buff->cb = cb;
	cb_buff->buffer.access = &iio_cb_access;
	INIT_LIST_HEAD(&cb_buff->buffer.demux_list);

	cb_buff->channels = iio_channel_get_all(dev);
	if (IS_ERR(cb_buff->channels)) {
		ret = PTR_ERR(cb_buff->channels);
		goto error_free_cb_buff;
	}

	/* All channels must share one IIO device; build its scan mask. */
	cb_buff->indio_dev = cb_buff->channels[0].indio_dev;
	cb_buff->buffer.scan_mask = bitmap_zalloc(cb_buff->indio_dev->masklength,
						  GFP_KERNEL);
	if (cb_buff->buffer.scan_mask == NULL) {
		ret = -ENOMEM;
		goto error_release_channels;
	}
	chan = &cb_buff->channels[0];
	while (chan->indio_dev) {
		if (chan->indio_dev != cb_buff->indio_dev) {
			ret = -EINVAL;
			goto error_free_scan_mask;
		}
		set_bit(chan->channel->scan_index,
			cb_buff->buffer.scan_mask);
		chan++;
	}

	return cb_buff;

error_free_scan_mask:
	bitmap_free(cb_buff->buffer.scan_mask);
error_release_channels:
	iio_channel_release_all(cb_buff->channels);
error_free_cb_buff:
	kfree(cb_buff);
	return ERR_PTR(ret);
}
EXPORT_SYMBOL_GPL(iio_channel_get_all_cb);

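/**
 * iio_channel_cb_set_buffer_watermark() - set the watermark of the
 *					   underlying buffer
 * @cb_buff:	Callback buffer returned by iio_channel_get_all_cb().
 * @watermark:	New watermark; must be non-zero.
 *
 * Returns 0 on success or -EINVAL if @watermark is zero.
 */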
int iio_channel_cb_set_buffer_watermark(struct iio_cb_buffer *cb_buff,
					size_t watermark)
{
	if (!watermark)
		return -EINVAL;
	cb_buff->buffer.watermark = watermark;

	return 0;
}
EXPORT_SYMBOL_GPL(iio_channel_cb_set_buffer_watermark);

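/**
 * iio_channel_start_all_cb() - start streaming data into the callback buffer
 * @cb_buff:	Callback buffer returned by iio_channel_get_all_cb().
 *
 * Attaches the buffer to the producing IIO device so that captured scans
 * are delivered to the registered callback. Returns 0 on success or a
 * negative error code.
 */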
int iio_channel_start_all_cb(struct iio_cb_buffer *cb_buff)
{
	return iio_update_buffers(cb_buff->indio_dev, &cb_buff->buffer,
				  NULL);
}
EXPORT_SYMBOL_GPL(iio_channel_start_all_cb);

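/**
 * iio_channel_stop_all_cb() - stop streaming data into the callback buffer
 * @cb_buff:	Callback buffer returned by iio_channel_get_all_cb().
 *
 * Detaches the buffer from the producing IIO device; the callback is no
 * longer invoked afterwards.
 */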
void iio_channel_stop_all_cb(struct iio_cb_buffer *cb_buff)
{
	iio_update_buffers(cb_buff->indio_dev, NULL, &cb_buff->buffer);
}
EXPORT_SYMBOL_GPL(iio_channel_stop_all_cb);

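/**
 * iio_channel_release_all_cb() - release the callback buffer and its channels
 * @cb_buff:	Callback buffer returned by iio_channel_get_all_cb().
 *
 * Releases the channel mappings and drops the buffer reference; the
 * buffer itself is freed once the last reference is gone.
 */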
void iio_channel_release_all_cb(struct iio_cb_buffer *cb_buff)
{
	iio_channel_release_all(cb_buff->channels);
	iio_buffer_put(&cb_buff->buffer);
}
EXPORT_SYMBOL_GPL(iio_channel_release_all_cb);

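/**
 * iio_channel_cb_get_channels() - get the channels backing a callback buffer
 * @cb_buffer:	Callback buffer returned by iio_channel_get_all_cb().
 *
 * Returns the channel array obtained from iio_channel_get_all(),
 * terminated by an entry whose indio_dev is NULL.
 */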
struct iio_channel
*iio_channel_cb_get_channels(const struct iio_cb_buffer *cb_buffer)
{
	return cb_buffer->channels;
}
EXPORT_SYMBOL_GPL(iio_channel_cb_get_channels);

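/**
 * iio_channel_cb_get_iio_dev() - get the IIO device behind a callback buffer
 * @cb_buffer:	Callback buffer returned by iio_channel_get_all_cb().
 *
 * Returns the IIO device that all of the buffer's channels belong to.
 */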
struct iio_dev
*iio_channel_cb_get_iio_dev(const struct iio_cb_buffer *cb_buffer)
{
	return cb_buffer->indio_dev;
}
EXPORT_SYMBOL_GPL(iio_channel_cb_get_iio_dev);

MODULE_AUTHOR("Jonathan Cameron <jic23@kernel.org>");
MODULE_DESCRIPTION("Industrial I/O callback buffer");
MODULE_LICENSE("GPL");