/*
 * Industrial I/O DMA buffer infrastructure — shared definitions for
 * DMA-capable IIO buffer implementations.
 *
 * NOTE(review): the original file header comment (license/copyright) was
 * lost in extraction — restore it from the upstream source.
 */
0007 #ifndef __INDUSTRIALIO_DMA_BUFFER_H__
0008 #define __INDUSTRIALIO_DMA_BUFFER_H__
0009
0010 #include <linux/list.h>
0011 #include <linux/kref.h>
0012 #include <linux/spinlock.h>
0013 #include <linux/mutex.h>
0014 #include <linux/iio/buffer_impl.h>
0015
0016 struct iio_dma_buffer_queue;
0017 struct iio_dma_buffer_ops;
0018 struct device;
0019
0020
0021
0022
0023
0024
0025
0026
0027
/**
 * enum iio_block_state - State of a struct iio_dma_buffer_block
 * @IIO_BLOCK_STATE_DEQUEUED: Block is not queued on any list
 * @IIO_BLOCK_STATE_QUEUED: Block is on the incoming queue
 * @IIO_BLOCK_STATE_ACTIVE: Block has been handed to the driver via
 *	iio_dma_buffer_ops::submit and the transfer is in flight
 * @IIO_BLOCK_STATE_DONE: Transfer finished, block is on the outgoing queue
 * @IIO_BLOCK_STATE_DEAD: Block has been marked to be freed once its last
 *	reference is dropped
 */
enum iio_block_state {
	IIO_BLOCK_STATE_DEQUEUED,
	IIO_BLOCK_STATE_QUEUED,
	IIO_BLOCK_STATE_ACTIVE,
	IIO_BLOCK_STATE_DONE,
	IIO_BLOCK_STATE_DEAD,
};
0035
0036
0037
0038
0039
0040
0041
0042
0043
0044
0045
0046
/**
 * struct iio_dma_buffer_block - IIO buffer block
 * @head: List head, used to link the block onto the queue's incoming or
 *	outgoing list
 * @bytes_used: Number of bytes in the block that contain valid data
 * @vaddr: Virtual (CPU-visible) address of the block's memory
 * @phys_addr: DMA address of the block's memory
 * @size: Total size of the block's memory in bytes
 * @queue: Parent DMA buffer queue the block belongs to
 * @kref: Reference count managing the lifetime of the block
 * @state: Current state of the block (see enum iio_block_state)
 */
struct iio_dma_buffer_block {
	/* May only be accessed by the owner of the block */
	struct list_head head;
	size_t bytes_used;

	/*
	 * Set during allocation, constant thereafter. May be accessed
	 * read-only by anybody holding a reference to the block.
	 */
	void *vaddr;
	dma_addr_t phys_addr;
	size_t size;
	struct iio_dma_buffer_queue *queue;

	/* Must not be accessed outside the core buffer management code */
	struct kref kref;
	/*
	 * Must not be accessed outside the core buffer management code.
	 * Protected by the queue's list_lock.
	 */
	enum iio_block_state state;
};
0069
0070
0071
0072
0073
0074
0075
0076
/**
 * struct iio_dma_buffer_queue_fileio - FileIO state for the DMA buffer
 * @blocks: The two buffer blocks used for double-buffered fileio transfers
 * @active_block: Block currently being drained by read()
 * @pos: Read offset into the active block
 * @block_size: Size of each fileio block in bytes
 */
struct iio_dma_buffer_queue_fileio {
	struct iio_dma_buffer_block *blocks[2];
	struct iio_dma_buffer_block *active_block;
	size_t pos;
	size_t block_size;
};
0083
0084
0085
0086
0087
0088
0089
0090
0091
0092
0093
0094
0095
0096
0097
0098
0099
/**
 * struct iio_dma_buffer_queue - DMA buffer base structure
 * @buffer: IIO buffer base structure (embedded; use container_of style
 *	access to get from &struct iio_buffer back to the queue)
 * @dev: Device the blocks are DMA-mapped for
 * @ops: DMA buffer callbacks supplied by the driver
 * @lock: Protects the fileio state and serializes buffer
 *	enable/disable/update operations
 * @list_lock: Spinlock protecting the @incoming and @outgoing lists and
 *	the block state field
 * @incoming: Blocks queued and waiting to be submitted to the driver
 * @outgoing: Blocks whose transfer has completed, waiting to be consumed
 * @active: Whether the queue is currently streaming (between enable and
 *	disable)
 * @fileio: FileIO (read()) state
 */
struct iio_dma_buffer_queue {
	struct iio_buffer buffer;
	struct device *dev;
	const struct iio_dma_buffer_ops *ops;

	struct mutex lock;
	spinlock_t list_lock;
	struct list_head incoming;
	struct list_head outgoing;

	bool active;

	struct iio_dma_buffer_queue_fileio fileio;
};
0114
0115
0116
0117
0118
0119
/**
 * struct iio_dma_buffer_ops - DMA buffer callback operations
 * @submit: Called when a block is submitted to the DMA controller; the
 *	driver starts the transfer and later reports completion via
 *	iio_dma_buffer_block_done(). A negative return aborts the submit.
 * @abort: Called to abort all pending (submitted but not yet completed)
 *	transfers
 */
struct iio_dma_buffer_ops {
	int (*submit)(struct iio_dma_buffer_queue *queue,
		struct iio_dma_buffer_block *block);
	void (*abort)(struct iio_dma_buffer_queue *queue);
};
0125
/*
 * Called by drivers from their transfer-completion path: mark a block as
 * done, or abort a whole list of blocks (e.g. on DMA terminate).
 */
void iio_dma_buffer_block_done(struct iio_dma_buffer_block *block);
void iio_dma_buffer_block_list_abort(struct iio_dma_buffer_queue *queue,
	struct list_head *list);

/*
 * Generic implementations of the &struct iio_buffer access functions;
 * drivers wire these into their iio_buffer_access_funcs.
 */
int iio_dma_buffer_enable(struct iio_buffer *buffer,
	struct iio_dev *indio_dev);
int iio_dma_buffer_disable(struct iio_buffer *buffer,
	struct iio_dev *indio_dev);
int iio_dma_buffer_read(struct iio_buffer *buffer, size_t n,
	char __user *user_buffer);
size_t iio_dma_buffer_data_available(struct iio_buffer *buffer);
int iio_dma_buffer_set_bytes_per_datum(struct iio_buffer *buffer, size_t bpd);
int iio_dma_buffer_set_length(struct iio_buffer *buffer, unsigned int length);
int iio_dma_buffer_request_update(struct iio_buffer *buffer);

/*
 * Queue lifetime: init/exit pair for setup/teardown, plus release to be
 * called when the last reference to the queue is dropped.
 */
int iio_dma_buffer_init(struct iio_dma_buffer_queue *queue,
	struct device *dma_dev, const struct iio_dma_buffer_ops *ops);
void iio_dma_buffer_exit(struct iio_dma_buffer_queue *queue);
void iio_dma_buffer_release(struct iio_dma_buffer_queue *queue);
0145
0146 #endif