#include <drm/drm_drv.h>

#include "etnaviv_cmdbuf.h"
#include "etnaviv_gpu.h"
#include "etnaviv_gem.h"
#include "etnaviv_mmu.h"

#include "common.xml.h"
#include "state.xml.h"
#include "state_blt.xml.h"
#include "state_hi.xml.h"
#include "state_3d.xml.h"
#include "cmdstream.xml.h"
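/*
 * Command stream emission helpers: each helper appends one FE (front-end)
 * command to the kernel ring buffer. Commands are built from 32-bit words
 * and aligned to 64-bit boundaries before being written.
 */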
static inline void OUT(struct etnaviv_cmdbuf *buffer, u32 data)
{
	u32 *vaddr = (u32 *)buffer->vaddr;

	BUG_ON(buffer->user_size >= buffer->size);

	vaddr[buffer->user_size / 4] = data;
	buffer->user_size += 4;
}

static inline void CMD_LOAD_STATE(struct etnaviv_cmdbuf *buffer,
	u32 reg, u32 value)
{
	u32 index = reg >> VIV_FE_LOAD_STATE_HEADER_OFFSET__SHR;

	buffer->user_size = ALIGN(buffer->user_size, 8);

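	/* write a register via cmd stream */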
	OUT(buffer, VIV_FE_LOAD_STATE_HEADER_OP_LOAD_STATE |
		    VIV_FE_LOAD_STATE_HEADER_COUNT(1) |
		    VIV_FE_LOAD_STATE_HEADER_OFFSET(index));
	OUT(buffer, value);
}

static inline void CMD_END(struct etnaviv_cmdbuf *buffer)
{
	buffer->user_size = ALIGN(buffer->user_size, 8);

	OUT(buffer, VIV_FE_END_HEADER_OP_END);
}

static inline void CMD_WAIT(struct etnaviv_cmdbuf *buffer)
{
	buffer->user_size = ALIGN(buffer->user_size, 8);

	OUT(buffer, VIV_FE_WAIT_HEADER_OP_WAIT | 200);
}

static inline void CMD_LINK(struct etnaviv_cmdbuf *buffer,
	u16 prefetch, u32 address)
{
	buffer->user_size = ALIGN(buffer->user_size, 8);

	OUT(buffer, VIV_FE_LINK_HEADER_OP_LINK |
		    VIV_FE_LINK_HEADER_PREFETCH(prefetch));
	OUT(buffer, address);
}

static inline void CMD_STALL(struct etnaviv_cmdbuf *buffer,
	u32 from, u32 to)
{
	buffer->user_size = ALIGN(buffer->user_size, 8);

	OUT(buffer, VIV_FE_STALL_HEADER_OP_STALL);
	OUT(buffer, VIV_FE_STALL_TOKEN_FROM(from) | VIV_FE_STALL_TOKEN_TO(to));
}

static inline void CMD_SEM(struct etnaviv_cmdbuf *buffer, u32 from, u32 to)
{
	CMD_LOAD_STATE(buffer, VIVS_GL_SEMAPHORE_TOKEN,
		       VIVS_GL_SEMAPHORE_TOKEN_FROM(from) |
		       VIVS_GL_SEMAPHORE_TOKEN_TO(to));
}

static void etnaviv_cmd_select_pipe(struct etnaviv_gpu *gpu,
	struct etnaviv_cmdbuf *buffer, u8 pipe)
{
	u32 flush = 0;

	lockdep_assert_held(&gpu->lock);

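	/*
	 * Flush the caches of the currently active pipe before switching:
	 * the 2D pipe needs its PE2D cache flushed, the 3D pipe its depth
	 * and color caches. The flush is serialized against the FE with a
	 * semaphore/stall pair before the new pipe is selected.
	 */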
	if (gpu->exec_state == ETNA_PIPE_2D)
		flush = VIVS_GL_FLUSH_CACHE_PE2D;
	else if (gpu->exec_state == ETNA_PIPE_3D)
		flush = VIVS_GL_FLUSH_CACHE_DEPTH | VIVS_GL_FLUSH_CACHE_COLOR;

	CMD_LOAD_STATE(buffer, VIVS_GL_FLUSH_CACHE, flush);
	CMD_SEM(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_PE);
	CMD_STALL(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_PE);

	CMD_LOAD_STATE(buffer, VIVS_GL_PIPE_SELECT,
		       VIVS_GL_PIPE_SELECT_PIPE(pipe));
}

static void etnaviv_buffer_dump(struct etnaviv_gpu *gpu,
	struct etnaviv_cmdbuf *buf, u32 off, u32 len)
{
	u32 size = buf->size;
	u32 *ptr = buf->vaddr + off;

	dev_info(gpu->dev, "virt %p phys 0x%08x free 0x%08x\n",
			ptr, etnaviv_cmdbuf_get_va(buf,
			&gpu->mmu_context->cmdbuf_mapping) +
			off, size - len * 4 - off);

	print_hex_dump(KERN_INFO, "cmd ", DUMP_PREFIX_OFFSET, 16, 4,
			ptr, len * 4, 0);
}

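/*
 * Safely replace the WAIT of a waitlink with a new command and argument.
 * The GPU may be executing this WAIT while we are modifying it, so the
 * words must be written in a specific order: the argument first, then a
 * barrier, and only then the command word that arms it, followed by a
 * final barrier so the GPU sees the complete command.
 */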
static void etnaviv_buffer_replace_wait(struct etnaviv_cmdbuf *buffer,
	unsigned int wl_offset, u32 cmd, u32 arg)
{
	u32 *lw = buffer->vaddr + wl_offset;

	lw[1] = arg;
	mb();
	lw[0] = cmd;
	mb();
}

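/*
 * Ensure that there is space in the command buffer to contiguously write
 * 'cmd_dwords' 64-bit words, wrapping around to the start of the buffer
 * if necessary. Returns the GPU virtual address at which the commands
 * will be written.
 */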
static u32 etnaviv_buffer_reserve(struct etnaviv_gpu *gpu,
	struct etnaviv_cmdbuf *buffer, unsigned int cmd_dwords)
{
	if (buffer->user_size + cmd_dwords * sizeof(u64) > buffer->size)
		buffer->user_size = 0;

	return etnaviv_cmdbuf_get_va(buffer,
				     &gpu->mmu_context->cmdbuf_mapping) +
	       buffer->user_size;
}

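/*
 * Emit the initial WAIT/LINK self-loop into the kernel ring buffer and
 * return its size in 64-bit words, to be used as the FE prefetch value.
 */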
u16 etnaviv_buffer_init(struct etnaviv_gpu *gpu)
{
	struct etnaviv_cmdbuf *buffer = &gpu->buffer;

	lockdep_assert_held(&gpu->lock);

	buffer->user_size = 0;

	CMD_WAIT(buffer);
	CMD_LINK(buffer, 2,
		 etnaviv_cmdbuf_get_va(buffer, &gpu->mmu_context->cmdbuf_mapping)
		 + buffer->user_size - 4);

	return buffer->user_size / 8;
}

u16 etnaviv_buffer_config_mmuv2(struct etnaviv_gpu *gpu, u32 mtlb_addr, u32 safe_addr)
{
	struct etnaviv_cmdbuf *buffer = &gpu->buffer;

	lockdep_assert_held(&gpu->lock);

	buffer->user_size = 0;

	if (gpu->identity.features & chipFeatures_PIPE_3D) {
		CMD_LOAD_STATE(buffer, VIVS_GL_PIPE_SELECT,
			       VIVS_GL_PIPE_SELECT_PIPE(ETNA_PIPE_3D));
		CMD_LOAD_STATE(buffer, VIVS_MMUv2_CONFIGURATION,
			mtlb_addr | VIVS_MMUv2_CONFIGURATION_MODE_MODE4_K);
		CMD_LOAD_STATE(buffer, VIVS_MMUv2_SAFE_ADDRESS, safe_addr);
		CMD_SEM(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_PE);
		CMD_STALL(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_PE);
	}

	if (gpu->identity.features & chipFeatures_PIPE_2D) {
		CMD_LOAD_STATE(buffer, VIVS_GL_PIPE_SELECT,
			       VIVS_GL_PIPE_SELECT_PIPE(ETNA_PIPE_2D));
		CMD_LOAD_STATE(buffer, VIVS_MMUv2_CONFIGURATION,
			mtlb_addr | VIVS_MMUv2_CONFIGURATION_MODE_MODE4_K);
		CMD_LOAD_STATE(buffer, VIVS_MMUv2_SAFE_ADDRESS, safe_addr);
		CMD_SEM(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_PE);
		CMD_STALL(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_PE);
	}

	CMD_END(buffer);

	buffer->user_size = ALIGN(buffer->user_size, 8);

	return buffer->user_size / 8;
}

u16 etnaviv_buffer_config_pta(struct etnaviv_gpu *gpu, unsigned short id)
{
	struct etnaviv_cmdbuf *buffer = &gpu->buffer;

	lockdep_assert_held(&gpu->lock);

	buffer->user_size = 0;

	CMD_LOAD_STATE(buffer, VIVS_MMUv2_PTA_CONFIG,
		       VIVS_MMUv2_PTA_CONFIG_INDEX(id));

	CMD_END(buffer);

	buffer->user_size = ALIGN(buffer->user_size, 8);

	return buffer->user_size / 8;
}

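/*
 * Terminate the kernel ring buffer: append a cache flush (plus BLT engine
 * synchronization where present) followed by an END command, then patch
 * the previous WAIT/LINK to jump to it. If no pipe has been active, the
 * WAIT is simply replaced with an END.
 */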
void etnaviv_buffer_end(struct etnaviv_gpu *gpu)
{
	struct etnaviv_cmdbuf *buffer = &gpu->buffer;
	unsigned int waitlink_offset = buffer->user_size - 16;
	u32 link_target, flush = 0;
	bool has_blt = !!(gpu->identity.minor_features5 &
			  chipMinorFeatures5_BLT_ENGINE);

	lockdep_assert_held(&gpu->lock);

	if (gpu->exec_state == ETNA_PIPE_2D)
		flush = VIVS_GL_FLUSH_CACHE_PE2D;
	else if (gpu->exec_state == ETNA_PIPE_3D)
		flush = VIVS_GL_FLUSH_CACHE_DEPTH |
			VIVS_GL_FLUSH_CACHE_COLOR |
			VIVS_GL_FLUSH_CACHE_TEXTURE |
			VIVS_GL_FLUSH_CACHE_TEXTUREVS |
			VIVS_GL_FLUSH_CACHE_SHADER_L2;

	if (flush) {
		unsigned int dwords = 7;

		if (has_blt)
			dwords += 10;

		link_target = etnaviv_buffer_reserve(gpu, buffer, dwords);

		CMD_SEM(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_PE);
		CMD_STALL(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_PE);
		if (has_blt) {
			CMD_LOAD_STATE(buffer, VIVS_BLT_ENABLE, 0x1);
			CMD_SEM(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_BLT);
			CMD_STALL(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_BLT);
			CMD_LOAD_STATE(buffer, VIVS_BLT_ENABLE, 0x0);
		}
		CMD_LOAD_STATE(buffer, VIVS_GL_FLUSH_CACHE, flush);
		if (gpu->exec_state == ETNA_PIPE_3D) {
			if (has_blt) {
				CMD_LOAD_STATE(buffer, VIVS_BLT_ENABLE, 0x1);
				CMD_LOAD_STATE(buffer, VIVS_BLT_SET_COMMAND, 0x1);
				CMD_LOAD_STATE(buffer, VIVS_BLT_ENABLE, 0x0);
			} else {
				CMD_LOAD_STATE(buffer, VIVS_TS_FLUSH_CACHE,
					       VIVS_TS_FLUSH_CACHE_FLUSH);
			}
		}
		CMD_SEM(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_PE);
		CMD_STALL(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_PE);
		if (has_blt) {
			CMD_LOAD_STATE(buffer, VIVS_BLT_ENABLE, 0x1);
			CMD_SEM(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_BLT);
			CMD_STALL(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_BLT);
			CMD_LOAD_STATE(buffer, VIVS_BLT_ENABLE, 0x0);
		}
		CMD_END(buffer);

		etnaviv_buffer_replace_wait(buffer, waitlink_offset,
					    VIV_FE_LINK_HEADER_OP_LINK |
					    VIV_FE_LINK_HEADER_PREFETCH(dwords),
					    link_target);
	} else {
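		/* Replace the last WAIT with an END */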
		etnaviv_buffer_replace_wait(buffer, waitlink_offset,
					    VIV_FE_END_HEADER_OP_END, 0);
	}
}

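/* Append a 'sync point' to the ring buffer. */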
void etnaviv_sync_point_queue(struct etnaviv_gpu *gpu, unsigned int event)
{
	struct etnaviv_cmdbuf *buffer = &gpu->buffer;
	unsigned int waitlink_offset = buffer->user_size - 16;
	u32 dwords, target;

	lockdep_assert_held(&gpu->lock);

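	/*
	 * We need at most 4 x 64-bit words for the sync point:
	 * the EVENT, the END and the new WAIT/LINK pair.
	 */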
	dwords = 4;
	target = etnaviv_buffer_reserve(gpu, buffer, dwords);

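	/* Signal the sync point event from the pixel engine */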
	CMD_LOAD_STATE(buffer, VIVS_GL_EVENT, VIVS_GL_EVENT_EVENT_ID(event) |
		       VIVS_GL_EVENT_FROM_PE);

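	/* Stop the FE to 'pause' the GPU */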
	CMD_END(buffer);

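	/* Append a new WAIT/LINK loop to continue from once the FE is restarted */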
	CMD_WAIT(buffer);
	CMD_LINK(buffer, 2,
		 etnaviv_cmdbuf_get_va(buffer, &gpu->mmu_context->cmdbuf_mapping)
		 + buffer->user_size - 4);

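	/*
	 * Kick off the 'sync point' command by replacing the previous
	 * WAIT with a link to the address in the ring buffer.
	 */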
	etnaviv_buffer_replace_wait(buffer, waitlink_offset,
				    VIV_FE_LINK_HEADER_OP_LINK |
				    VIV_FE_LINK_HEADER_PREFETCH(dwords),
				    target);
}

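/* Append a command buffer to the ring buffer. */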
void etnaviv_buffer_queue(struct etnaviv_gpu *gpu, u32 exec_state,
	struct etnaviv_iommu_context *mmu_context, unsigned int event,
	struct etnaviv_cmdbuf *cmdbuf)
{
	struct etnaviv_cmdbuf *buffer = &gpu->buffer;
	unsigned int waitlink_offset = buffer->user_size - 16;
	u32 return_target, return_dwords;
	u32 link_target, link_dwords;
	bool switch_context = gpu->exec_state != exec_state;
	bool switch_mmu_context = gpu->mmu_context != mmu_context;
	unsigned int new_flush_seq = READ_ONCE(gpu->mmu_context->flush_seq);
	bool need_flush = switch_mmu_context || gpu->flush_seq != new_flush_seq;
	bool has_blt = !!(gpu->identity.minor_features5 &
			  chipMinorFeatures5_BLT_ENGINE);

	lockdep_assert_held(&gpu->lock);

	if (drm_debug_enabled(DRM_UT_DRIVER))
		etnaviv_buffer_dump(gpu, buffer, 0, 0x50);

	link_target = etnaviv_cmdbuf_get_va(cmdbuf,
					    &gpu->mmu_context->cmdbuf_mapping);
	link_dwords = cmdbuf->size / 8;

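	/*
	 * If we need maintenance prior to submitting this buffer, we will
	 * need to append an MMU flush and/or pipe switch prologue to the
	 * kernel ring, followed by a new link to the submitted buffer, and
	 * only then point the previous WAIT at that prologue.
	 */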
	if (need_flush || switch_context) {
		u32 target, extra_dwords;

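		/* link command */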
		extra_dwords = 1;

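		/* flush command */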
		if (need_flush) {
			if (gpu->mmu_context->global->version == ETNAVIV_IOMMU_V1)
				extra_dwords += 1;
			else
				extra_dwords += 3;
		}

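		/* pipe switch commands */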
		if (switch_context)
			extra_dwords += 4;

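		/* PTA load command */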
		if (switch_mmu_context && gpu->sec_mode == ETNA_SEC_KERNEL)
			extra_dwords += 1;

		target = etnaviv_buffer_reserve(gpu, buffer, extra_dwords);
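		/*
		 * Switch MMU context if necessary. Must be done after the
		 * link target has been calculated, as the jump forward in
		 * the kernel ring still uses the last active MMU context.
		 */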
		if (switch_mmu_context) {
			struct etnaviv_iommu_context *old_context = gpu->mmu_context;

			gpu->mmu_context = etnaviv_iommu_context_get(mmu_context);
			etnaviv_iommu_context_put(old_context);
		}

		if (need_flush) {
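			/* Add the MMU flush */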
			if (gpu->mmu_context->global->version == ETNAVIV_IOMMU_V1) {
				CMD_LOAD_STATE(buffer, VIVS_GL_FLUSH_MMU,
					       VIVS_GL_FLUSH_MMU_FLUSH_FEMMU |
					       VIVS_GL_FLUSH_MMU_FLUSH_UNK1 |
					       VIVS_GL_FLUSH_MMU_FLUSH_UNK2 |
					       VIVS_GL_FLUSH_MMU_FLUSH_PEMMU |
					       VIVS_GL_FLUSH_MMU_FLUSH_UNK4);
			} else {
				u32 flush = VIVS_MMUv2_CONFIGURATION_MODE_MASK |
					    VIVS_MMUv2_CONFIGURATION_FLUSH_FLUSH;

				if (switch_mmu_context &&
				    gpu->sec_mode == ETNA_SEC_KERNEL) {
					unsigned short id =
						etnaviv_iommuv2_get_pta_id(gpu->mmu_context);
					CMD_LOAD_STATE(buffer,
						VIVS_MMUv2_PTA_CONFIG,
						VIVS_MMUv2_PTA_CONFIG_INDEX(id));
				}

				if (gpu->sec_mode == ETNA_SEC_NONE)
					flush |= etnaviv_iommuv2_get_mtlb_addr(gpu->mmu_context);

				CMD_LOAD_STATE(buffer, VIVS_MMUv2_CONFIGURATION,
					       flush);
				CMD_SEM(buffer, SYNC_RECIPIENT_FE,
					SYNC_RECIPIENT_PE);
				CMD_STALL(buffer, SYNC_RECIPIENT_FE,
					  SYNC_RECIPIENT_PE);
			}

			gpu->flush_seq = new_flush_seq;
		}

		if (switch_context) {
			etnaviv_cmd_select_pipe(gpu, buffer, exec_state);
			gpu->exec_state = exec_state;
		}

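		/* And the link to the submitted buffer */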
		link_target = etnaviv_cmdbuf_get_va(cmdbuf,
					&gpu->mmu_context->cmdbuf_mapping);
		CMD_LINK(buffer, link_dwords, link_target);

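		/* Update the link target to point to the prologue above */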
		link_target = target;
		link_dwords = extra_dwords;
	}

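	/*
	 * Append a LINK to the submitted command buffer, taking into account
	 * the size of the return sequence at the end of the kernel ring:
	 * cache flush + TS flush + SEM + STALL + EVENT + WAIT + LINK,
	 * i.e. 7 x 64-bit words.
	 */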
	return_dwords = 7;

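	/*
	 * When the BLT engine is present we need 6 more 64-bit words in the
	 * return target: 3 for enable/flush/disable and 4 for the enable/
	 * semaphore stall/disable sequence, minus the TS flush state that is
	 * not needed in that case.
	 */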
	if (has_blt)
		return_dwords += 6;

	return_target = etnaviv_buffer_reserve(gpu, buffer, return_dwords);
	CMD_LINK(cmdbuf, return_dwords, return_target);

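	/*
	 * Append a cache flush, stall, event, wait and link pointing back to
	 * the wait command to the kernel ring buffer.
	 */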
	if (gpu->exec_state == ETNA_PIPE_2D) {
		CMD_LOAD_STATE(buffer, VIVS_GL_FLUSH_CACHE,
			       VIVS_GL_FLUSH_CACHE_PE2D);
	} else {
		CMD_LOAD_STATE(buffer, VIVS_GL_FLUSH_CACHE,
			       VIVS_GL_FLUSH_CACHE_DEPTH |
			       VIVS_GL_FLUSH_CACHE_COLOR);
		if (has_blt) {
			CMD_LOAD_STATE(buffer, VIVS_BLT_ENABLE, 0x1);
			CMD_LOAD_STATE(buffer, VIVS_BLT_SET_COMMAND, 0x1);
			CMD_LOAD_STATE(buffer, VIVS_BLT_ENABLE, 0x0);
		} else {
			CMD_LOAD_STATE(buffer, VIVS_TS_FLUSH_CACHE,
				       VIVS_TS_FLUSH_CACHE_FLUSH);
		}
	}
	CMD_SEM(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_PE);
	CMD_STALL(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_PE);

	if (has_blt) {
		CMD_LOAD_STATE(buffer, VIVS_BLT_ENABLE, 0x1);
		CMD_SEM(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_BLT);
		CMD_STALL(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_BLT);
		CMD_LOAD_STATE(buffer, VIVS_BLT_ENABLE, 0x0);
	}

	CMD_LOAD_STATE(buffer, VIVS_GL_EVENT, VIVS_GL_EVENT_EVENT_ID(event) |
		       VIVS_GL_EVENT_FROM_PE);
	CMD_WAIT(buffer);
	CMD_LINK(buffer, 2,
		 etnaviv_cmdbuf_get_va(buffer, &gpu->mmu_context->cmdbuf_mapping)
		 + buffer->user_size - 4);

	if (drm_debug_enabled(DRM_UT_DRIVER))
		pr_info("stream link to 0x%08x @ 0x%08x %p\n",
			return_target,
			etnaviv_cmdbuf_get_va(cmdbuf, &gpu->mmu_context->cmdbuf_mapping),
			cmdbuf->vaddr);

	if (drm_debug_enabled(DRM_UT_DRIVER)) {
		print_hex_dump(KERN_INFO, "cmd ", DUMP_PREFIX_OFFSET, 16, 4,
			       cmdbuf->vaddr, cmdbuf->size, 0);

		pr_info("link op: %p\n", buffer->vaddr + waitlink_offset);
		pr_info("addr: 0x%08x\n", link_target);
		pr_info("back: 0x%08x\n", return_target);
		pr_info("event: %d\n", event);
	}

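	/*
	 * Kick off the submitted command by replacing the previous
	 * WAIT with a link to the address in the ring buffer.
	 */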
	etnaviv_buffer_replace_wait(buffer, waitlink_offset,
				    VIV_FE_LINK_HEADER_OP_LINK |
				    VIV_FE_LINK_HEADER_PREFETCH(link_dwords),
				    link_target);

	if (drm_debug_enabled(DRM_UT_DRIVER))
		etnaviv_buffer_dump(gpu, buffer, 0, 0x50);
}