0001
0002
0003
0004
0005 #include <linux/dma-mapping.h>
0006 #include <linux/mei.h>
0007
0008 #include "mei_dev.h"
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020
0021 static int mei_dmam_dscr_alloc(struct mei_device *dev,
0022 struct mei_dma_dscr *dscr)
0023 {
0024 if (!dscr->size)
0025 return 0;
0026
0027 if (WARN_ON(!is_power_of_2(dscr->size)))
0028 return -EINVAL;
0029
0030 if (dscr->vaddr)
0031 return 0;
0032
0033 dscr->vaddr = dmam_alloc_coherent(dev->dev, dscr->size, &dscr->daddr,
0034 GFP_KERNEL);
0035 if (!dscr->vaddr)
0036 return -ENOMEM;
0037
0038 return 0;
0039 }
0040
0041
0042
0043
0044
0045
0046
0047 static void mei_dmam_dscr_free(struct mei_device *dev,
0048 struct mei_dma_dscr *dscr)
0049 {
0050 if (!dscr->vaddr)
0051 return;
0052
0053 dmam_free_coherent(dev->dev, dscr->size, dscr->vaddr, dscr->daddr);
0054 dscr->vaddr = NULL;
0055 }
0056
0057
0058
0059
0060
0061 void mei_dmam_ring_free(struct mei_device *dev)
0062 {
0063 int i;
0064
0065 for (i = 0; i < DMA_DSCR_NUM; i++)
0066 mei_dmam_dscr_free(dev, &dev->dr_dscr[i]);
0067 }
0068
0069
0070
0071
0072
0073
0074
0075 int mei_dmam_ring_alloc(struct mei_device *dev)
0076 {
0077 int i;
0078
0079 for (i = 0; i < DMA_DSCR_NUM; i++)
0080 if (mei_dmam_dscr_alloc(dev, &dev->dr_dscr[i]))
0081 goto err;
0082
0083 return 0;
0084
0085 err:
0086 mei_dmam_ring_free(dev);
0087 return -ENOMEM;
0088 }
0089
0090
0091
0092
0093
0094
0095
0096 bool mei_dma_ring_is_allocated(struct mei_device *dev)
0097 {
0098 return !!dev->dr_dscr[DMA_DSCR_HOST].vaddr;
0099 }
0100
0101 static inline
0102 struct hbm_dma_ring_ctrl *mei_dma_ring_ctrl(struct mei_device *dev)
0103 {
0104 return (struct hbm_dma_ring_ctrl *)dev->dr_dscr[DMA_DSCR_CTRL].vaddr;
0105 }
0106
0107
0108
0109
0110
0111 void mei_dma_ring_reset(struct mei_device *dev)
0112 {
0113 struct hbm_dma_ring_ctrl *ctrl = mei_dma_ring_ctrl(dev);
0114
0115 if (!ctrl)
0116 return;
0117
0118 memset(ctrl, 0, sizeof(*ctrl));
0119 }
0120
0121
0122
0123
0124
0125
0126
0127
0128 static size_t mei_dma_copy_from(struct mei_device *dev, unsigned char *buf,
0129 u32 offset, u32 n)
0130 {
0131 unsigned char *dbuf = dev->dr_dscr[DMA_DSCR_DEVICE].vaddr;
0132
0133 size_t b_offset = offset << 2;
0134 size_t b_n = n << 2;
0135
0136 memcpy(buf, dbuf + b_offset, b_n);
0137
0138 return b_n;
0139 }
0140
0141
0142
0143
0144
0145
0146
0147
0148 static size_t mei_dma_copy_to(struct mei_device *dev, unsigned char *buf,
0149 u32 offset, u32 n)
0150 {
0151 unsigned char *hbuf = dev->dr_dscr[DMA_DSCR_HOST].vaddr;
0152
0153 size_t b_offset = offset << 2;
0154 size_t b_n = n << 2;
0155
0156 memcpy(hbuf + b_offset, buf, b_n);
0157
0158 return b_n;
0159 }
0160
0161
0162
0163
0164
0165
0166
/**
 * mei_dma_ring_read() - read data from the device dma ring buffer
 * @dev: mei device
 * @buf: destination buffer; may be NULL, in which case the data is
 *       dropped and only the read index is advanced
 * @len: number of bytes to read
 *
 * The read index is advanced by the consumed slot count even on the
 * NULL-buf (drop) path.
 */
void mei_dma_ring_read(struct mei_device *dev, unsigned char *buf, u32 len)
{
	struct hbm_dma_ring_ctrl *ctrl = mei_dma_ring_ctrl(dev);
	u32 dbuf_depth;
	u32 rd_idx, rem, slots;

	if (WARN_ON(!ctrl))
		return;

	dev_dbg(dev->dev, "reading from dma %u bytes\n", len);

	if (!len)
		return;

	/*
	 * Depth in 4-byte slots; sizes are power-of-two (enforced at
	 * allocation), so masking with (depth - 1) wraps the index.
	 */
	dbuf_depth = dev->dr_dscr[DMA_DSCR_DEVICE].size >> 2;
	rd_idx = READ_ONCE(ctrl->dbuf_rd_idx) & (dbuf_depth - 1);
	slots = mei_data2slots(len);

	/* NULL buf: skip the payload, still consume the slots below */
	if (!buf)
		goto out;

	if (rd_idx + slots > dbuf_depth) {
		/* wrap-around: copy the tail, then restart at slot 0 */
		buf += mei_dma_copy_from(dev, buf, rd_idx, dbuf_depth - rd_idx);
		rem = slots - (dbuf_depth - rd_idx);
		rd_idx = 0;
	} else {
		rem = slots;
	}

	mei_dma_copy_from(dev, buf, rd_idx, rem);
out:
	/* publish the new (unmasked, free-running) read index */
	WRITE_ONCE(ctrl->dbuf_rd_idx, ctrl->dbuf_rd_idx + slots);
}
0201
0202 static inline u32 mei_dma_ring_hbuf_depth(struct mei_device *dev)
0203 {
0204 return dev->dr_dscr[DMA_DSCR_HOST].size >> 2;
0205 }
0206
0207
0208
0209
0210
0211
0212
/**
 * mei_dma_ring_empty_slots() - number of empty 4-byte slots in the
 *     host dma ring buffer
 * @dev: mei device
 *
 * Lock-free reader of the shared ring indices: both indices are read
 * once with READ_ONCE and the result is computed from that snapshot.
 *
 * Return: number of empty slots, or 0 if the ring is not allocated.
 */
u32 mei_dma_ring_empty_slots(struct mei_device *dev)
{
	struct hbm_dma_ring_ctrl *ctrl = mei_dma_ring_ctrl(dev);
	u32 wr_idx, rd_idx, hbuf_depth, empty;

	if (!mei_dma_ring_is_allocated(dev))
		return 0;

	if (WARN_ON(!ctrl))
		return 0;

	/* indices are free-running; the difference gives the fill level */
	hbuf_depth = mei_dma_ring_hbuf_depth(dev);
	rd_idx = READ_ONCE(ctrl->hbuf_rd_idx);
	wr_idx = READ_ONCE(ctrl->hbuf_wr_idx);

	if (rd_idx > wr_idx)
		/* unmasked write index wrapped around the u32 range */
		empty = rd_idx - wr_idx;
	else
		empty = hbuf_depth - (wr_idx - rd_idx);

	return empty;
}
0236
0237
0238
0239
0240
0241
0242
0243
/**
 * mei_dma_ring_write() - write data to the host dma ring buffer
 * @dev: mei device
 * @buf: source buffer
 * @len: number of bytes to write
 *
 * The caller is expected to have checked available space via
 * mei_dma_ring_empty_slots(); no overflow check is performed here.
 */
void mei_dma_ring_write(struct mei_device *dev, unsigned char *buf, u32 len)
{
	struct hbm_dma_ring_ctrl *ctrl = mei_dma_ring_ctrl(dev);
	u32 hbuf_depth;
	u32 wr_idx, rem, slots;

	if (WARN_ON(!ctrl))
		return;

	dev_dbg(dev->dev, "writing to dma %u bytes\n", len);
	/*
	 * Depth is a power of two (enforced at allocation), so masking
	 * with (depth - 1) wraps the free-running write index.
	 */
	hbuf_depth = mei_dma_ring_hbuf_depth(dev);
	wr_idx = READ_ONCE(ctrl->hbuf_wr_idx) & (hbuf_depth - 1);
	slots = mei_data2slots(len);

	if (wr_idx + slots > hbuf_depth) {
		/* wrap-around: fill to the end, then restart at slot 0 */
		buf += mei_dma_copy_to(dev, buf, wr_idx, hbuf_depth - wr_idx);
		rem = slots - (hbuf_depth - wr_idx);
		wr_idx = 0;
	} else {
		rem = slots;
	}

	mei_dma_copy_to(dev, buf, wr_idx, rem);

	/* publish the new (unmasked, free-running) write index */
	WRITE_ONCE(ctrl->hbuf_wr_idx, ctrl->hbuf_wr_idx + slots);
}