0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
0024
0025
0026
0027
0028
0029
0030
0031
0032
0033
0034
0035
0036
0037
0038
0039
0040
0041
0042
0043
0044
0045
0046
0047
0048 #include <linux/platform_device.h>
0049
0050 #include <drm/drm_drv.h>
0051
0052 #include "vc4_drv.h"
0053 #include "vc4_regs.h"
0054 #include "vc4_trace.h"
0055
/* Interrupt sources owned by this driver's handler: binner out-of-memory,
 * binner flush done, and render frame done.
 */
#define V3D_DRIVER_IRQS (V3D_INT_OUTOMEM | \
			 V3D_INT_FLDONE | \
			 V3D_INT_FRDONE)

/* NOTE(review): this wait queue is declared here but never waited on in
 * this file — presumably shared with the GEM/job code elsewhere in the
 * driver; confirm before touching.
 */
DECLARE_WAIT_QUEUE_HEAD(render_wait);
0061
/*
 * Deferred handler for binner out-of-memory interrupts: hands the V3D
 * binner a fresh slot of overflow memory carved out of vc4->bin_bo,
 * then re-arms the OUTOMEM interrupt that vc4_irq() masked.
 */
static void
vc4_overflow_mem_work(struct work_struct *work)
{
	struct vc4_dev *vc4 =
		container_of(work, struct vc4_dev, overflow_mem_work);
	struct vc4_bo *bo;
	int bin_bo_slot;
	struct vc4_exec_info *exec;
	unsigned long irqflags;

	mutex_lock(&vc4->bin_bo_lock);

	/* Nothing to hand out if the bin BO hasn't been allocated. */
	if (!vc4->bin_bo)
		goto complete;

	bo = vc4->bin_bo;

	bin_bo_slot = vc4_v3d_get_bin_slot(vc4);
	if (bin_bo_slot < 0) {
		DRM_ERROR("Couldn't allocate binner overflow mem\n");
		goto complete;
	}

	spin_lock_irqsave(&vc4->job_lock, irqflags);

	if (vc4->bin_alloc_overflow) {
		/*
		 * A previous overflow slot is still outstanding.  Attach it
		 * to the job currently using it (the head bin job, or
		 * failing that the last render job) so it gets released
		 * when that job completes.
		 */
		exec = vc4_first_bin_job(vc4);
		if (!exec)
			exec = vc4_last_render_job(vc4);
		if (exec) {
			exec->bin_slots |= vc4->bin_alloc_overflow;
		} else {
			/*
			 * No job is using the old slot any more: release
			 * it back to the allocator immediately.
			 */
			vc4->bin_alloc_used &= ~vc4->bin_alloc_overflow;
		}
	}
	vc4->bin_alloc_overflow = BIT(bin_bo_slot);

	/* Point the binner at the new slot, ack the stale OUTOMEM status,
	 * and re-enable the OUTOMEM interrupt.
	 */
	V3D_WRITE(V3D_BPOA, bo->base.paddr + bin_bo_slot * vc4->bin_alloc_size);
	V3D_WRITE(V3D_BPOS, bo->base.base.size);
	V3D_WRITE(V3D_INTCTL, V3D_INT_OUTOMEM);
	V3D_WRITE(V3D_INTENA, V3D_INT_OUTOMEM);
	spin_unlock_irqrestore(&vc4->job_lock, irqflags);

complete:
	mutex_unlock(&vc4->bin_bo_lock);
}
0117
/*
 * Completes the bin job at the head of the bin list: moves it to the
 * render stage and, when safe, submits the next queued bin job.
 * Caller holds vc4->job_lock (see vc4_irq()).
 */
static void
vc4_irq_finish_bin_job(struct drm_device *dev)
{
	struct vc4_dev *vc4 = to_vc4_dev(dev);
	struct vc4_exec_info *next, *exec = vc4_first_bin_job(vc4);

	if (!exec)
		return;

	trace_vc4_bcl_end_irq(dev, exec->seqno);

	vc4_move_job_to_render(dev, exec);
	next = vc4_first_bin_job(vc4);

	/*
	 * Only submit the next bin job when it uses the same perfmon as the
	 * one that just finished (including both having none).  NOTE(review):
	 * a job with a different perfmon is presumably kicked later from the
	 * render-done path once the old perfmon can be stopped — confirm
	 * against vc4_irq_finish_render_job().
	 */
	if (next && next->perfmon == exec->perfmon)
		vc4_submit_next_bin_job(dev);
}
0139
0140 static void
0141 vc4_cancel_bin_job(struct drm_device *dev)
0142 {
0143 struct vc4_dev *vc4 = to_vc4_dev(dev);
0144 struct vc4_exec_info *exec = vc4_first_bin_job(vc4);
0145
0146 if (!exec)
0147 return;
0148
0149
0150 if (exec->perfmon)
0151 vc4_perfmon_stop(vc4, exec->perfmon, false);
0152
0153 list_move_tail(&exec->head, &vc4->bin_job_list);
0154 vc4_submit_next_bin_job(dev);
0155 }
0156
/*
 * Completes the render job at the head of the render list: bumps the
 * finished seqno, signals the job's fence, and schedules whatever can
 * run next.  Caller holds vc4->job_lock (see vc4_irq()).
 */
static void
vc4_irq_finish_render_job(struct drm_device *dev)
{
	struct vc4_dev *vc4 = to_vc4_dev(dev);
	struct vc4_exec_info *exec = vc4_first_render_job(vc4);
	struct vc4_exec_info *nextbin, *nextrender;

	if (!exec)
		return;

	trace_vc4_rcl_end_irq(dev, exec->seqno);

	vc4->finished_seqno++;
	list_move_tail(&exec->head, &vc4->job_done_list);

	nextbin = vc4_first_bin_job(vc4);
	nextrender = vc4_first_render_job(vc4);

	/*
	 * Stop the finished job's perfmon only when no queued job still
	 * uses it: no render job is waiting, and any waiting bin job has
	 * a different perfmon attached.
	 */
	if (exec->perfmon && !nextrender &&
	    (!nextbin || nextbin->perfmon != exec->perfmon))
		vc4_perfmon_stop(vc4, exec->perfmon, true);

	/*
	 * Kick the next render job if there is one.  Otherwise kick the
	 * next bin job, but only when its perfmon differs from the job
	 * that just finished.  NOTE(review): a bin job sharing this
	 * perfmon was presumably already submitted from the bin-done path
	 * (vc4_irq_finish_bin_job() submits on matching perfmons) —
	 * confirm.
	 */
	if (nextrender)
		vc4_submit_next_render_job(dev);
	else if (nextbin && nextbin->perfmon != exec->perfmon)
		vc4_submit_next_bin_job(dev);

	/* Signal completion to any fence waiters; job_lock is held. */
	if (exec->fence) {
		dma_fence_signal_locked(exec->fence);
		dma_fence_put(exec->fence);
		exec->fence = NULL;
	}

	wake_up_all(&vc4->job_wait_queue);
	schedule_work(&vc4->job_done_work);
}
0203
/*
 * Top-level V3D interrupt handler: acknowledges the status bits it
 * observed, then dispatches binner out-of-memory, bin-done (FLDONE),
 * and render-done (FRDONE) processing.
 */
static irqreturn_t
vc4_irq(int irq, void *arg)
{
	struct drm_device *dev = arg;
	struct vc4_dev *vc4 = to_vc4_dev(dev);
	uint32_t intctl;
	irqreturn_t status = IRQ_NONE;

	/*
	 * Compiler barrier before the status read.  NOTE(review):
	 * presumably keeps the V3D_INTCTL read from being reordered above
	 * handler entry — confirm original intent.
	 */
	barrier();
	intctl = V3D_READ(V3D_INTCTL);

	/*
	 * Acknowledge the interrupts we're handling here by writing the
	 * observed bits back to V3D_INTCTL before dispatching.
	 */
	V3D_WRITE(V3D_INTCTL, intctl);

	if (intctl & V3D_INT_OUTOMEM) {
		/* Allocation must happen in process context: mask OUTOMEM
		 * and defer to the overflow work, which re-enables it.
		 */
		V3D_WRITE(V3D_INTDIS, V3D_INT_OUTOMEM);
		schedule_work(&vc4->overflow_mem_work);
		status = IRQ_HANDLED;
	}

	if (intctl & V3D_INT_FLDONE) {
		spin_lock(&vc4->job_lock);
		vc4_irq_finish_bin_job(dev);
		spin_unlock(&vc4->job_lock);
		status = IRQ_HANDLED;
	}

	if (intctl & V3D_INT_FRDONE) {
		spin_lock(&vc4->job_lock);
		vc4_irq_finish_render_job(dev);
		spin_unlock(&vc4->job_lock);
		status = IRQ_HANDLED;
	}

	return status;
}
0245
0246 static void
0247 vc4_irq_prepare(struct drm_device *dev)
0248 {
0249 struct vc4_dev *vc4 = to_vc4_dev(dev);
0250
0251 if (!vc4->v3d)
0252 return;
0253
0254 init_waitqueue_head(&vc4->job_wait_queue);
0255 INIT_WORK(&vc4->overflow_mem_work, vc4_overflow_mem_work);
0256
0257
0258
0259
0260 V3D_WRITE(V3D_INTCTL, V3D_DRIVER_IRQS);
0261 }
0262
0263 void
0264 vc4_irq_enable(struct drm_device *dev)
0265 {
0266 struct vc4_dev *vc4 = to_vc4_dev(dev);
0267
0268 if (WARN_ON_ONCE(vc4->is_vc5))
0269 return;
0270
0271 if (!vc4->v3d)
0272 return;
0273
0274
0275
0276
0277 V3D_WRITE(V3D_INTENA, V3D_INT_FLDONE | V3D_INT_FRDONE);
0278 }
0279
/*
 * Quiesces the driver's V3D interrupts: masks them in hardware, clears
 * anything pending, waits out any handler in flight, and flushes the
 * deferred overflow-memory work the handler may have scheduled.
 */
void
vc4_irq_disable(struct drm_device *dev)
{
	struct vc4_dev *vc4 = to_vc4_dev(dev);

	if (WARN_ON_ONCE(vc4->is_vc5))
		return;

	if (!vc4->v3d)
		return;

	/* Disable sending interrupts for our driver's IRQs. */
	V3D_WRITE(V3D_INTDIS, V3D_DRIVER_IRQS);

	/* Clear any pending interrupts we might have left. */
	V3D_WRITE(V3D_INTCTL, V3D_DRIVER_IRQS);

	/* Finish any interrupt handler still in flight. */
	disable_irq(vc4->irq);

	/* Flush the OUTOMEM work vc4_irq() may have queued. */
	cancel_work_sync(&vc4->overflow_mem_work);
}
0302
0303 int vc4_irq_install(struct drm_device *dev, int irq)
0304 {
0305 struct vc4_dev *vc4 = to_vc4_dev(dev);
0306 int ret;
0307
0308 if (WARN_ON_ONCE(vc4->is_vc5))
0309 return -ENODEV;
0310
0311 if (irq == IRQ_NOTCONNECTED)
0312 return -ENOTCONN;
0313
0314 vc4_irq_prepare(dev);
0315
0316 ret = request_irq(irq, vc4_irq, 0, dev->driver->name, dev);
0317 if (ret)
0318 return ret;
0319
0320 vc4_irq_enable(dev);
0321
0322 return 0;
0323 }
0324
/*
 * Tears down the V3D interrupt: quiesces the hardware and pending work
 * (vc4_irq_disable()), then releases the IRQ line requested in
 * vc4_irq_install().
 */
void vc4_irq_uninstall(struct drm_device *dev)
{
	struct vc4_dev *vc4 = to_vc4_dev(dev);

	if (WARN_ON_ONCE(vc4->is_vc5))
		return;

	vc4_irq_disable(dev);
	free_irq(vc4->irq, dev);
}
0335
0336
/*
 * Restores interrupt state after a V3D reset: re-arms the hardware
 * interrupts, requeues the bin job that was interrupted, and retires
 * the pending render job.
 */
void vc4_irq_reset(struct drm_device *dev)
{
	struct vc4_dev *vc4 = to_vc4_dev(dev);
	unsigned long irqflags;

	if (WARN_ON_ONCE(vc4->is_vc5))
		return;

	/* Acknowledge any stale IRQs. */
	V3D_WRITE(V3D_INTCTL, V3D_DRIVER_IRQS);

	/*
	 * Turn all our interrupts on.  NOTE(review): unlike
	 * vc4_irq_enable(), this also enables V3D_INT_OUTOMEM —
	 * presumably because the freshly-reset binner has no overflow
	 * memory assigned yet; confirm.
	 */
	V3D_WRITE(V3D_INTENA, V3D_DRIVER_IRQS);

	/* Requeue the aborted bin job and finish the pending render job
	 * under the job lock.
	 */
	spin_lock_irqsave(&vc4->job_lock, irqflags);
	vc4_cancel_bin_job(dev);
	vc4_irq_finish_render_job(dev);
	spin_unlock_irqrestore(&vc4->job_lock, irqflags);
}