0001
0002
0003
0004 #include "a4xx_gpu.h"
0005
/*
 * Interrupt sources unmasked in RBBM_INT_0_MASK: the CP/RBBM/UCHE error
 * conditions, plus CACHE_FLUSH_TS which fires when a fence timestamp
 * written by a4xx_submit() lands (drives retire processing in a4xx_irq()).
 */
#define A4XX_INT0_MASK \
	(A4XX_INT0_RBBM_AHB_ERROR | \
	 A4XX_INT0_RBBM_ATB_BUS_OVERFLOW | \
	 A4XX_INT0_CP_T0_PACKET_IN_IB | \
	 A4XX_INT0_CP_OPCODE_ERROR | \
	 A4XX_INT0_CP_RESERVED_BIT_ERROR | \
	 A4XX_INT0_CP_HW_FAULT | \
	 A4XX_INT0_CP_IB1_INT | \
	 A4XX_INT0_CP_IB2_INT | \
	 A4XX_INT0_CP_RB_INT | \
	 A4XX_INT0_CP_REG_PROTECT_FAULT | \
	 A4XX_INT0_CP_AHB_ERROR_HALT | \
	 A4XX_INT0_CACHE_FLUSH_TS | \
	 A4XX_INT0_UCHE_OOB_ACCESS)
0020
0021 extern bool hang_debug;
0022 static void a4xx_dump(struct msm_gpu *gpu);
0023 static bool a4xx_idle(struct msm_gpu *gpu);
0024
/*
 * Write a submit's command streams into its ringbuffer, followed by a
 * fence value and a timestamped cache-flush event that raises the
 * CACHE_FLUSH_TS interrupt once the GPU passes it.
 */
static void a4xx_submit(struct msm_gpu *gpu, struct msm_gem_submit *submit)
{
	struct msm_ringbuffer *ring = submit->ring;
	unsigned int i;

	for (i = 0; i < submit->nr_cmds; i++) {
		switch (submit->cmd[i].type) {
		case MSM_SUBMIT_CMD_IB_TARGET_BUF:
			/* ignore IB-targets */
			break;
		case MSM_SUBMIT_CMD_CTX_RESTORE_BUF:
			/* ignore if there has not been a ctx switch: */
			if (gpu->cur_ctx_seqno == submit->queue->ctx->seqno)
				break;
			fallthrough;
		case MSM_SUBMIT_CMD_BUF:
			OUT_PKT3(ring, CP_INDIRECT_BUFFER_PFE, 2);
			OUT_RING(ring, lower_32_bits(submit->cmd[i].iova));
			OUT_RING(ring, submit->cmd[i].size);
			OUT_PKT2(ring);
			break;
		}
	}

	/* Record the submit's sequence number in a scratch register
	 * (useful when debugging a hang, see a4xx_recover()):
	 */
	OUT_PKT0(ring, REG_AXXX_CP_SCRATCH_REG2, 1);
	OUT_RING(ring, submit->seqno);

	/* Flush HLSQ lazy updates so nothing is left pending for
	 * indirect loads after the timestamp has passed:
	 */
	OUT_PKT3(ring, CP_EVENT_WRITE, 1);
	OUT_RING(ring, HLSQ_FLUSH);

	/* wait for the GPU to go idle before the cache-flush event: */
	OUT_PKT3(ring, CP_WAIT_FOR_IDLE, 1);
	OUT_RING(ring, 0x00000000);

	/* BIT(31) on the CACHE_FLUSH_TS event requests an IRQ; the fence
	 * value is written to rbmemptr(ring, fence) when it completes:
	 */
	OUT_PKT3(ring, CP_EVENT_WRITE, 3);
	OUT_RING(ring, CACHE_FLUSH_TS | BIT(31));
	OUT_RING(ring, rbmemptr(ring, fence));
	OUT_RING(ring, submit->seqno);

	adreno_flush(gpu, ring, REG_A4XX_CP_RB_WPTR);
}
0071
0072
0073
0074
0075
/*
 * Program the hardware clock-gating (HWCG) control/hysteresis/delay
 * registers for every GPU block.  The magic values are the vendor-tuned
 * defaults; per-revision differences are handled inline below.
 */
static void a4xx_enable_hwcg(struct msm_gpu *gpu)
{
	struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
	unsigned int i;

	/* TP, SP and RB clock gating is programmed per-pipe (4 instances): */
	for (i = 0; i < 4; i++)
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_TP(i), 0x02222202);
	for (i = 0; i < 4; i++)
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL2_TP(i), 0x00002222);
	for (i = 0; i < 4; i++)
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_TP(i), 0x0E739CE7);
	for (i = 0; i < 4; i++)
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_TP(i), 0x00111111);
	for (i = 0; i < 4; i++)
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_SP(i), 0x22222222);
	for (i = 0; i < 4; i++)
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL2_SP(i), 0x00222222);
	for (i = 0; i < 4; i++)
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_SP(i), 0x00000104);
	for (i = 0; i < 4; i++)
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_SP(i), 0x00000081);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_UCHE, 0x22222222);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL2_UCHE, 0x02222222);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL3_UCHE, 0x00000000);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL4_UCHE, 0x00000000);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_UCHE, 0x00004444);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_UCHE, 0x00001112);
	for (i = 0; i < 4; i++)
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_RB(i), 0x22222222);

	/* A420 takes a different RB CTL2 value than the other parts
	 * (presumably a silicon errata workaround - TODO confirm):
	 */
	for (i = 0; i < 4; i++) {
		if (adreno_is_a420(adreno_gpu)) {
			gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL2_RB(i),
					0x00002020);
		} else {
			gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL2_RB(i),
					0x00022020);
		}
	}

	/* A405 has no MARB/CCU clock-gating registers to program: */
	if (!adreno_is_a405(adreno_gpu)) {
		for (i = 0; i < 4; i++) {
			gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_MARB_CCU(i),
					0x00000922);
		}

		for (i = 0; i < 4; i++) {
			gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_RB_MARB_CCU(i),
					0x00000000);
		}

		for (i = 0; i < 4; i++) {
			gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_RB_MARB_CCU_L1(i),
					0x00000001);
		}
	}

	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_MODE_GPC, 0x02222222);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_GPC, 0x04100104);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_GPC, 0x00022222);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_COM_DCOM, 0x00000022);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_COM_DCOM, 0x0000010F);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_COM_DCOM, 0x00000022);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_TSE_RAS_RBBM, 0x00222222);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_TSE_RAS_RBBM, 0x00004104);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_TSE_RAS_RBBM, 0x00000222);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_HLSQ , 0x00000000);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_HLSQ, 0x00000000);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_HLSQ, 0x00220000);

	/* Early A430 revisions (patchid < 2) leave global HWCG disabled;
	 * NOTE(review): presumably due to a hardware issue - confirm against
	 * downstream kgsl sources:
	 */
	if (adreno_is_a430(adreno_gpu) && adreno_gpu->rev.patchid < 2)
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL, 0);
	else
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL, 0xAAAAAAAA);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL2, 0);
}
0154
0155
/*
 * Issue the CP_ME_INIT packet that initializes the CP micro-engine after
 * the PM4/PFP firmware has been loaded, then wait for the GPU to idle.
 * The 17-dword payload is the canonical a4xx init sequence; returns true
 * if the GPU went idle afterwards.
 */
static bool a4xx_me_init(struct msm_gpu *gpu)
{
	struct msm_ringbuffer *ring = gpu->rb[0];

	OUT_PKT3(ring, CP_ME_INIT, 17);
	OUT_RING(ring, 0x000003f7);
	OUT_RING(ring, 0x00000000);
	OUT_RING(ring, 0x00000000);
	OUT_RING(ring, 0x00000000);
	OUT_RING(ring, 0x00000080);
	OUT_RING(ring, 0x00000100);
	OUT_RING(ring, 0x00000180);
	OUT_RING(ring, 0x00006600);
	OUT_RING(ring, 0x00000150);
	OUT_RING(ring, 0x0000014e);
	OUT_RING(ring, 0x00000154);
	OUT_RING(ring, 0x00000001);
	OUT_RING(ring, 0x00000000);
	OUT_RING(ring, 0x00000000);
	OUT_RING(ring, 0x00000000);
	OUT_RING(ring, 0x00000000);
	OUT_RING(ring, 0x00000000);

	adreno_flush(gpu, ring, REG_A4XX_CP_RB_WPTR);
	return a4xx_idle(gpu);
}
0182
/*
 * Full hardware bring-up: VBIF/QoS setup, error reporting, perf counters,
 * clock gating, CP register protection, firmware load and ME init.  The
 * write ordering follows the vendor init sequence and should not be
 * rearranged.  Returns 0 on success or a negative errno.
 */
static int a4xx_hw_init(struct msm_gpu *gpu)
{
	struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
	struct a4xx_gpu *a4xx_gpu = to_a4xx_gpu(adreno_gpu);
	uint32_t *ptr, len;
	int i, ret;

	/* Per-revision VBIF (bus interface) arbitration/QoS setup: */
	if (adreno_is_a405(adreno_gpu)) {
		gpu_write(gpu, REG_A4XX_VBIF_ROUND_ROBIN_QOS_ARB, 0x00000003);
	} else if (adreno_is_a420(adreno_gpu)) {
		gpu_write(gpu, REG_A4XX_VBIF_ABIT_SORT, 0x0001001F);
		gpu_write(gpu, REG_A4XX_VBIF_ABIT_SORT_CONF, 0x000000A4);
		gpu_write(gpu, REG_A4XX_VBIF_GATE_OFF_WRREQ_EN, 0x00000001);
		gpu_write(gpu, REG_A4XX_VBIF_IN_RD_LIM_CONF0, 0x18181818);
		gpu_write(gpu, REG_A4XX_VBIF_IN_RD_LIM_CONF1, 0x00000018);
		gpu_write(gpu, REG_A4XX_VBIF_IN_WR_LIM_CONF0, 0x18181818);
		gpu_write(gpu, REG_A4XX_VBIF_IN_WR_LIM_CONF1, 0x00000018);
		gpu_write(gpu, REG_A4XX_VBIF_ROUND_ROBIN_QOS_ARB, 0x00000003);
	} else if (adreno_is_a430(adreno_gpu)) {
		gpu_write(gpu, REG_A4XX_VBIF_GATE_OFF_WRREQ_EN, 0x00000001);
		gpu_write(gpu, REG_A4XX_VBIF_IN_RD_LIM_CONF0, 0x18181818);
		gpu_write(gpu, REG_A4XX_VBIF_IN_RD_LIM_CONF1, 0x00000018);
		gpu_write(gpu, REG_A4XX_VBIF_IN_WR_LIM_CONF0, 0x18181818);
		gpu_write(gpu, REG_A4XX_VBIF_IN_WR_LIM_CONF1, 0x00000018);
		gpu_write(gpu, REG_A4XX_VBIF_ROUND_ROBIN_QOS_ARB, 0x00000003);
	} else {
		/* unsupported a4xx variant - should never get here */
		BUG();
	}

	/* Make all blocks contribute to the GPU BUSY perf counter: */
	gpu_write(gpu, REG_A4XX_RBBM_GPU_BUSY_MASKED, 0xffffffff);

	/* Tune the hysteresis counters for SP and CP idle detection: */
	gpu_write(gpu, REG_A4XX_RBBM_SP_HYST_CNT, 0x10);
	gpu_write(gpu, REG_A4XX_RBBM_WAIT_IDLE_CLOCKS_CTL, 0x10);

	if (adreno_is_a430(adreno_gpu)) {
		gpu_write(gpu, REG_A4XX_RBBM_WAIT_IDLE_CLOCKS_CTL2, 0x30);
	}

	/* Enable the RBBM error reporting bits: */
	gpu_write(gpu, REG_A4XX_RBBM_AHB_CTL0, 0x00000001);

	/* Enable AHB error reporting: */
	gpu_write(gpu, REG_A4XX_RBBM_AHB_CTL1, 0xa6ffffff);

	/* Enable power counters: */
	gpu_write(gpu, REG_A4XX_RBBM_RBBM_CTL, 0x00000030);

	/*
	 * Turn on hang detection - this spews a lot of useful information
	 * into the RBBM registers on a hang:
	 */
	gpu_write(gpu, REG_A4XX_RBBM_INTERFACE_HANG_INT_CTL,
			(1 << 30) | 0xFFFF);

	/* GMEM base in 16KB units (>> 14): */
	gpu_write(gpu, REG_A4XX_RB_GMEM_BASE_ADDR,
			(unsigned int)(a4xx_gpu->ocmem.base >> 14));

	/* Turn on performance counters: */
	gpu_write(gpu, REG_A4XX_RBBM_PERFCTR_CTL, 0x01);

	/* Use the first CP counter for timestamp queries (see
	 * a4xx_get_timestamp()) - always counting:
	 */
	gpu_write(gpu, REG_A4XX_CP_PERFCTR_CP_SEL_0, CP_ALWAYS_COUNT);

	if (adreno_is_a430(adreno_gpu))
		gpu_write(gpu, REG_A4XX_UCHE_CACHE_WAYS_VFD, 0x07);

	/* Set the UCHE trap base; NOTE(review): presumably this disables L2
	 * bypass to avoid UCHE out-of-bounds errors - confirm:
	 */
	gpu_write(gpu, REG_A4XX_UCHE_TRAP_BASE_LO, 0xffff0000);
	gpu_write(gpu, REG_A4XX_UCHE_TRAP_BASE_HI, 0xffff0000);

	gpu_write(gpu, REG_A4XX_CP_DEBUG, (1 << 25) |
			(adreno_is_a420(adreno_gpu) ? (1 << 29) : 0));

	/* SP register-file sleep control for the non-a420 parts: */
	if (!adreno_is_a420(adreno_gpu)) {
		gpu_write(gpu, REG_A4XX_RBBM_SP_REGFILE_SLEEP_CNTL_0,
				0x00000441);
		gpu_write(gpu, REG_A4XX_RBBM_SP_REGFILE_SLEEP_CNTL_1,
				0x00000441);
	}

	a4xx_enable_hwcg(gpu);

	/* a420 needs the HLSQ early-cycle field forced to 2 after HWCG is
	 * enabled (read-modify-write to preserve the other fields):
	 */
	if (adreno_is_a420(adreno_gpu)) {
		unsigned int val;

		val = gpu_read(gpu, REG_A4XX_RBBM_CLOCK_DELAY_HLSQ);
		val &= ~A4XX_CGC_HLSQ_EARLY_CYC__MASK;
		val |= 2 << A4XX_CGC_HLSQ_EARLY_CYC__SHIFT;
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_HLSQ, val);
	}

	/* Enable CP register protection: */
	gpu_write(gpu, REG_A4XX_CP_PROTECT_CTRL, 0x00000007);

	/* RBBM registers: */
	gpu_write(gpu, REG_A4XX_CP_PROTECT(0), 0x62000010);
	gpu_write(gpu, REG_A4XX_CP_PROTECT(1), 0x63000020);
	gpu_write(gpu, REG_A4XX_CP_PROTECT(2), 0x64000040);
	gpu_write(gpu, REG_A4XX_CP_PROTECT(3), 0x65000080);
	gpu_write(gpu, REG_A4XX_CP_PROTECT(4), 0x66000100);
	gpu_write(gpu, REG_A4XX_CP_PROTECT(5), 0x64000200);

	/* CP registers: */
	gpu_write(gpu, REG_A4XX_CP_PROTECT(6), 0x67000800);
	gpu_write(gpu, REG_A4XX_CP_PROTECT(7), 0x64001600);

	/* RB registers: */
	gpu_write(gpu, REG_A4XX_CP_PROTECT(8), 0x60003300);

	/* HLSQ registers: */
	gpu_write(gpu, REG_A4XX_CP_PROTECT(9), 0x60003800);

	/* VPC registers: */
	gpu_write(gpu, REG_A4XX_CP_PROTECT(10), 0x61003980);

	/* SMMU registers: */
	gpu_write(gpu, REG_A4XX_CP_PROTECT(11), 0x6e010000);

	gpu_write(gpu, REG_A4XX_RBBM_INT_0_MASK, A4XX_INT0_MASK);

	ret = adreno_hw_init(gpu);
	if (ret)
		return ret;

	/*
	 * Use the default ringbuffer size and block size but disable the RPTR
	 * shadow - updates are requested explicitly via get_rptr():
	 */
	gpu_write(gpu, REG_A4XX_CP_RB_CNTL,
		MSM_GPU_RB_CNTL_DEFAULT | AXXX_CP_RB_CNTL_NO_UPDATE);

	/* Set the ringbuffer address: */
	gpu_write(gpu, REG_A4XX_CP_RB_BASE, lower_32_bits(gpu->rb[0]->iova));

	/* Load PM4 firmware into the ME RAM (word 0 is the version): */
	ptr = (uint32_t *)(adreno_gpu->fw[ADRENO_FW_PM4]->data);
	len = adreno_gpu->fw[ADRENO_FW_PM4]->size / 4;
	DBG("loading PM4 ucode version: %u", ptr[0]);
	gpu_write(gpu, REG_A4XX_CP_ME_RAM_WADDR, 0);
	for (i = 1; i < len; i++)
		gpu_write(gpu, REG_A4XX_CP_ME_RAM_DATA, ptr[i]);

	/* Load PFP firmware: */
	ptr = (uint32_t *)(adreno_gpu->fw[ADRENO_FW_PFP]->data);
	len = adreno_gpu->fw[ADRENO_FW_PFP]->size / 4;
	DBG("loading PFP ucode version: %u", ptr[0]);

	gpu_write(gpu, REG_A4XX_CP_PFP_UCODE_ADDR, 0);
	for (i = 1; i < len; i++)
		gpu_write(gpu, REG_A4XX_CP_PFP_UCODE_DATA, ptr[i]);

	/* clear ME_HALT to start micro engine */
	gpu_write(gpu, REG_A4XX_CP_ME_CNTL, 0);

	return a4xx_me_init(gpu) ? 0 : -EINVAL;
}
0349
/*
 * Recover from a GPU hang: dump debug state, soft-reset the GPU via
 * RBBM_SW_RESET_CMD and hand off to the generic adreno recovery path
 * (which re-runs hw_init).
 */
static void a4xx_recover(struct msm_gpu *gpu)
{
	int i;

	adreno_dump_info(gpu);

	/* Scratch regs hold recent submit seqnos (see a4xx_submit()): */
	for (i = 0; i < 8; i++) {
		printk("CP_SCRATCH_REG%d: %u\n", i,
			gpu_read(gpu, REG_AXXX_CP_SCRATCH_REG0 + i));
	}

	/* dump registers before resetting gpu, if enabled: */
	if (hang_debug)
		a4xx_dump(gpu);

	gpu_write(gpu, REG_A4XX_RBBM_SW_RESET_CMD, 1);
	/* read back to ensure the reset write has posted before clearing: */
	gpu_read(gpu, REG_A4XX_RBBM_SW_RESET_CMD);
	gpu_write(gpu, REG_A4XX_RBBM_SW_RESET_CMD, 0);
	adreno_recover(gpu);
}
0370
0371 static void a4xx_destroy(struct msm_gpu *gpu)
0372 {
0373 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
0374 struct a4xx_gpu *a4xx_gpu = to_a4xx_gpu(adreno_gpu);
0375
0376 DBG("%s", gpu->name);
0377
0378 adreno_gpu_cleanup(adreno_gpu);
0379
0380 adreno_gpu_ocmem_cleanup(&a4xx_gpu->ocmem);
0381
0382 kfree(a4xx_gpu);
0383 }
0384
0385 static bool a4xx_idle(struct msm_gpu *gpu)
0386 {
0387
0388 if (!adreno_idle(gpu, gpu->rb[0]))
0389 return false;
0390
0391
0392 if (spin_until(!(gpu_read(gpu, REG_A4XX_RBBM_STATUS) &
0393 A4XX_RBBM_STATUS_GPU_BUSY))) {
0394 DRM_ERROR("%s: timeout waiting for GPU to idle!\n", gpu->name);
0395
0396 return false;
0397 }
0398
0399 return true;
0400 }
0401
/*
 * IRQ handler: decode CP protected-mode faults for logging, ack all
 * pending interrupt bits, and kick retire processing (CACHE_FLUSH_TS is
 * in A4XX_INT0_MASK, so fence completion lands here).
 */
static irqreturn_t a4xx_irq(struct msm_gpu *gpu)
{
	uint32_t status;

	status = gpu_read(gpu, REG_A4XX_RBBM_INT_0_STATUS);
	DBG("%s: Int status %08x", gpu->name, status);

	if (status & A4XX_INT0_CP_REG_PROTECT_FAULT) {
		/* bit 24 = write access, low 20 bits = faulting byte offset
		 * (>> 2 converts to a dword register address):
		 */
		uint32_t reg = gpu_read(gpu, REG_A4XX_CP_PROTECT_STATUS);

		printk("CP | Protected mode error| %s | addr=%x\n",
			reg & (1 << 24) ? "WRITE" : "READ",
			(reg & 0xFFFFF) >> 2);
	}

	/* ack everything we saw: */
	gpu_write(gpu, REG_A4XX_RBBM_INT_CLEAR_CMD, status);

	msm_gpu_retire(gpu);

	return IRQ_HANDLED;
}
0422
/*
 * Register ranges dumped on crash/debug for a420/a430: pairs of
 * (first, last) dword offsets, inclusive, terminated by ~0.
 */
static const unsigned int a4xx_registers[] = {
	/* RBBM */
	0x0000, 0x0002, 0x0004, 0x0021, 0x0023, 0x0024, 0x0026, 0x0026,
	0x0028, 0x002B, 0x002E, 0x0034, 0x0037, 0x0044, 0x0047, 0x0066,
	0x0068, 0x0095, 0x009C, 0x0170, 0x0174, 0x01AF,
	/* CP */
	0x0200, 0x0233, 0x0240, 0x0250, 0x04C0, 0x04DD, 0x0500, 0x050B,
	0x0578, 0x058F,
	/* VSC */
	0x0C00, 0x0C03, 0x0C08, 0x0C41, 0x0C50, 0x0C51,
	/* GRAS */
	0x0C80, 0x0C81, 0x0C88, 0x0C8F,
	/* RB */
	0x0CC0, 0x0CC0, 0x0CC4, 0x0CD2,
	/* PC */
	0x0D00, 0x0D0C, 0x0D10, 0x0D17, 0x0D20, 0x0D23,
	/* VFD */
	0x0E40, 0x0E4A,
	/* VPC */
	0x0E60, 0x0E61, 0x0E63, 0x0E68,
	/* UCHE */
	0x0E80, 0x0E84, 0x0E88, 0x0E95,
	/* VMIDMT */
	0x1000, 0x1000, 0x1002, 0x1002, 0x1004, 0x1004, 0x1008, 0x100A,
	0x100C, 0x100D, 0x100F, 0x1010, 0x1012, 0x1016, 0x1024, 0x1024,
	0x1027, 0x1027, 0x1100, 0x1100, 0x1102, 0x1102, 0x1104, 0x1104,
	0x1110, 0x1110, 0x1112, 0x1116, 0x1124, 0x1124, 0x1300, 0x1300,
	0x1380, 0x1380,
	/* GRAS CTX 0 */
	0x2000, 0x2004, 0x2008, 0x2067, 0x2070, 0x2078, 0x207B, 0x216E,
	/* PC CTX 0 */
	0x21C0, 0x21C6, 0x21D0, 0x21D0, 0x21D9, 0x21D9, 0x21E5, 0x21E7,
	/* VFD CTX 0 */
	0x2200, 0x2204, 0x2208, 0x22A9,
	/* GRAS CTX 1 */
	0x2400, 0x2404, 0x2408, 0x2467, 0x2470, 0x2478, 0x247B, 0x256E,
	/* PC CTX 1 */
	0x25C0, 0x25C6, 0x25D0, 0x25D0, 0x25D9, 0x25D9, 0x25E5, 0x25E7,
	/* VFD CTX 1 */
	0x2600, 0x2604, 0x2608, 0x26A9,
	/* XPU */
	0x2C00, 0x2C01, 0x2C10, 0x2C10, 0x2C12, 0x2C16, 0x2C1D, 0x2C20,
	0x2C28, 0x2C28, 0x2C30, 0x2C30, 0x2C32, 0x2C36, 0x2C40, 0x2C40,
	0x2C50, 0x2C50, 0x2C52, 0x2C56, 0x2C80, 0x2C80, 0x2C94, 0x2C95,
	/* VBIF and remaining blocks */
	0x3000, 0x3007, 0x300C, 0x3014, 0x3018, 0x301D, 0x3020, 0x3022,
	0x3024, 0x3026, 0x3028, 0x302A, 0x302C, 0x302D, 0x3030, 0x3031,
	0x3034, 0x3036, 0x3038, 0x3038, 0x303C, 0x303D, 0x3040, 0x3040,
	0x3049, 0x3049, 0x3058, 0x3058, 0x305B, 0x3061, 0x3064, 0x3068,
	0x306C, 0x306D, 0x3080, 0x3088, 0x308B, 0x308C, 0x3090, 0x3094,
	0x3098, 0x3098, 0x309C, 0x309C, 0x30C0, 0x30C0, 0x30C8, 0x30C8,
	0x30D0, 0x30D0, 0x30D8, 0x30D8, 0x30E0, 0x30E0, 0x3100, 0x3100,
	0x3108, 0x3108, 0x3110, 0x3110, 0x3118, 0x3118, 0x3120, 0x3120,
	0x3124, 0x3125, 0x3129, 0x3129, 0x3131, 0x3131, 0x330C, 0x330C,
	0x3310, 0x3310, 0x3400, 0x3401, 0x3410, 0x3410, 0x3412, 0x3416,
	0x341D, 0x3420, 0x3428, 0x3428, 0x3430, 0x3430, 0x3432, 0x3436,
	0x3440, 0x3440, 0x3450, 0x3450, 0x3452, 0x3456, 0x3480, 0x3480,
	0x3494, 0x3495, 0x4000, 0x4000, 0x4002, 0x4002, 0x4004, 0x4004,
	0x4008, 0x400A, 0x400C, 0x400D, 0x400F, 0x4012, 0x4014, 0x4016,
	0x401D, 0x401D, 0x4020, 0x4027, 0x4060, 0x4062, 0x4200, 0x4200,
	0x4300, 0x4300, 0x4400, 0x4400, 0x4500, 0x4500, 0x4800, 0x4802,
	0x480F, 0x480F, 0x4811, 0x4811, 0x4813, 0x4813, 0x4815, 0x4816,
	0x482B, 0x482B, 0x4857, 0x4857, 0x4883, 0x4883, 0x48AF, 0x48AF,
	0x48C5, 0x48C5, 0x48E5, 0x48E5, 0x4905, 0x4905, 0x4925, 0x4925,
	0x4945, 0x4945, 0x4950, 0x4950, 0x495B, 0x495B, 0x4980, 0x498E,
	0x4B00, 0x4B00, 0x4C00, 0x4C00, 0x4D00, 0x4D00, 0x4E00, 0x4E00,
	0x4E80, 0x4E80, 0x4F00, 0x4F00, 0x4F08, 0x4F08, 0x4F10, 0x4F10,
	0x4F18, 0x4F18, 0x4F20, 0x4F20, 0x4F30, 0x4F30, 0x4F60, 0x4F60,
	0x4F80, 0x4F81, 0x4F88, 0x4F89, 0x4FEE, 0x4FEE, 0x4FF3, 0x4FF3,
	0x6000, 0x6001, 0x6008, 0x600F, 0x6014, 0x6016, 0x6018, 0x601B,
	0x61FD, 0x61FD, 0x623C, 0x623C, 0x6380, 0x6380, 0x63A0, 0x63A0,
	0x63C0, 0x63C1, 0x63C8, 0x63C9, 0x63D0, 0x63D4, 0x63D6, 0x63D6,
	0x63EE, 0x63EE, 0x6400, 0x6401, 0x6408, 0x640F, 0x6414, 0x6416,
	0x6418, 0x641B, 0x65FD, 0x65FD, 0x663C, 0x663C, 0x6780, 0x6780,
	0x67A0, 0x67A0, 0x67C0, 0x67C1, 0x67C8, 0x67C9, 0x67D0, 0x67D4,
	0x67D6, 0x67D6, 0x67EE, 0x67EE, 0x6800, 0x6801, 0x6808, 0x680F,
	0x6814, 0x6816, 0x6818, 0x681B, 0x69FD, 0x69FD, 0x6A3C, 0x6A3C,
	0x6B80, 0x6B80, 0x6BA0, 0x6BA0, 0x6BC0, 0x6BC1, 0x6BC8, 0x6BC9,
	0x6BD0, 0x6BD4, 0x6BD6, 0x6BD6, 0x6BEE, 0x6BEE,
	~0 /* sentinel */
};
0504
/*
 * Register ranges dumped on crash/debug for a405 (a reduced set - it has
 * fewer blocks than a420/a430): pairs of (first, last) dword offsets,
 * inclusive, terminated by ~0.
 */
static const unsigned int a405_registers[] = {
	/* RBBM */
	0x0000, 0x0002, 0x0004, 0x0021, 0x0023, 0x0024, 0x0026, 0x0026,
	0x0028, 0x002B, 0x002E, 0x0034, 0x0037, 0x0044, 0x0047, 0x0066,
	0x0068, 0x0095, 0x009C, 0x0170, 0x0174, 0x01AF,
	/* CP */
	0x0200, 0x0233, 0x0240, 0x0250, 0x04C0, 0x04DD, 0x0500, 0x050B,
	0x0578, 0x058F,
	/* VSC */
	0x0C00, 0x0C03, 0x0C08, 0x0C41, 0x0C50, 0x0C51,
	/* GRAS */
	0x0C80, 0x0C81, 0x0C88, 0x0C8F,
	/* RB */
	0x0CC0, 0x0CC0, 0x0CC4, 0x0CD2,
	/* PC */
	0x0D00, 0x0D0C, 0x0D10, 0x0D17, 0x0D20, 0x0D23,
	/* VFD */
	0x0E40, 0x0E4A,
	/* VPC */
	0x0E60, 0x0E61, 0x0E63, 0x0E68,
	/* UCHE */
	0x0E80, 0x0E84, 0x0E88, 0x0E95,
	/* GRAS CTX 0 */
	0x2000, 0x2004, 0x2008, 0x2067, 0x2070, 0x2078, 0x207B, 0x216E,
	/* PC CTX 0 */
	0x21C0, 0x21C6, 0x21D0, 0x21D0, 0x21D9, 0x21D9, 0x21E5, 0x21E7,
	/* VFD CTX 0 */
	0x2200, 0x2204, 0x2208, 0x22A9,
	/* GRAS CTX 1 */
	0x2400, 0x2404, 0x2408, 0x2467, 0x2470, 0x2478, 0x247B, 0x256E,
	/* PC CTX 1 */
	0x25C0, 0x25C6, 0x25D0, 0x25D0, 0x25D9, 0x25D9, 0x25E5, 0x25E7,
	/* VFD CTX 1 */
	0x2600, 0x2604, 0x2608, 0x26A9,
	/* VBIF version 0x20050000 */
	0x3000, 0x3007, 0x302C, 0x302C, 0x3030, 0x3030, 0x3034, 0x3036,
	0x3038, 0x3038, 0x303C, 0x303D, 0x3040, 0x3040, 0x3049, 0x3049,
	0x3058, 0x3058, 0x305B, 0x3061, 0x3064, 0x3068, 0x306C, 0x306D,
	0x3080, 0x3088, 0x308B, 0x308C, 0x3090, 0x3094, 0x3098, 0x3098,
	0x309C, 0x309C, 0x30C0, 0x30C0, 0x30C8, 0x30C8, 0x30D0, 0x30D0,
	0x30D8, 0x30D8, 0x30E0, 0x30E0, 0x3100, 0x3100, 0x3108, 0x3108,
	0x3110, 0x3110, 0x3118, 0x3118, 0x3120, 0x3120, 0x3124, 0x3125,
	0x3129, 0x3129, 0x340C, 0x340C, 0x3410, 0x3410,
	~0 /* sentinel */
};
0550
0551 static struct msm_gpu_state *a4xx_gpu_state_get(struct msm_gpu *gpu)
0552 {
0553 struct msm_gpu_state *state = kzalloc(sizeof(*state), GFP_KERNEL);
0554
0555 if (!state)
0556 return ERR_PTR(-ENOMEM);
0557
0558 adreno_gpu_state_get(gpu, state);
0559
0560 state->rbbm_status = gpu_read(gpu, REG_A4XX_RBBM_STATUS);
0561
0562 return state;
0563 }
0564
0565 static void a4xx_dump(struct msm_gpu *gpu)
0566 {
0567 printk("status: %08x\n",
0568 gpu_read(gpu, REG_A4XX_RBBM_STATUS));
0569 adreno_dump(gpu);
0570 }
0571
0572 static int a4xx_pm_resume(struct msm_gpu *gpu) {
0573 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
0574 int ret;
0575
0576 ret = msm_gpu_pm_resume(gpu);
0577 if (ret)
0578 return ret;
0579
0580 if (adreno_is_a430(adreno_gpu)) {
0581 unsigned int reg;
0582
0583 gpu_write(gpu, REG_A4XX_RBBM_POWER_CNTL_IP, 0x778000);
0584 do {
0585 udelay(5);
0586 reg = gpu_read(gpu, REG_A4XX_RBBM_POWER_STATUS);
0587 } while (!(reg & A4XX_RBBM_POWER_CNTL_IP_SP_TP_PWR_ON));
0588 }
0589 return 0;
0590 }
0591
0592 static int a4xx_pm_suspend(struct msm_gpu *gpu) {
0593 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
0594 int ret;
0595
0596 ret = msm_gpu_pm_suspend(gpu);
0597 if (ret)
0598 return ret;
0599
0600 if (adreno_is_a430(adreno_gpu)) {
0601
0602 gpu_write(gpu, REG_A4XX_RBBM_POWER_CNTL_IP, 0x778001);
0603 }
0604 return 0;
0605 }
0606
0607 static int a4xx_get_timestamp(struct msm_gpu *gpu, uint64_t *value)
0608 {
0609 *value = gpu_read64(gpu, REG_A4XX_RBBM_PERFCTR_CP_0_LO,
0610 REG_A4XX_RBBM_PERFCTR_CP_0_HI);
0611
0612 return 0;
0613 }
0614
0615 static u32 a4xx_get_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
0616 {
0617 ring->memptrs->rptr = gpu_read(gpu, REG_A4XX_CP_RB_RPTR);
0618 return ring->memptrs->rptr;
0619 }
0620
/* a4xx function table - mostly generic adreno helpers plus the a4xx
 * specific hw_init/submit/irq/pm/state hooks defined above.
 */
static const struct adreno_gpu_funcs funcs = {
	.base = {
		.get_param = adreno_get_param,
		.set_param = adreno_set_param,
		.hw_init = a4xx_hw_init,
		.pm_suspend = a4xx_pm_suspend,
		.pm_resume = a4xx_pm_resume,
		.recover = a4xx_recover,
		.submit = a4xx_submit,
		.active_ring = adreno_active_ring,
		.irq = a4xx_irq,
		.destroy = a4xx_destroy,
#if defined(CONFIG_DEBUG_FS) || defined(CONFIG_DEV_COREDUMP)
		.show = adreno_show,
#endif
		.gpu_state_get = a4xx_gpu_state_get,
		.gpu_state_put = adreno_gpu_state_put,
		.create_address_space = adreno_iommu_create_address_space,
		.get_rptr = a4xx_get_rptr,
	},
	.get_timestamp = a4xx_get_timestamp,
};
0643
/*
 * Allocate and initialize an a4xx GPU instance.  On any failure the
 * partially-constructed object is torn down via a4xx_destroy() (safe to
 * call on a partially-initialized instance) and an ERR_PTR is returned.
 */
struct msm_gpu *a4xx_gpu_init(struct drm_device *dev)
{
	struct a4xx_gpu *a4xx_gpu = NULL;
	struct adreno_gpu *adreno_gpu;
	struct msm_gpu *gpu;
	struct msm_drm_private *priv = dev->dev_private;
	struct platform_device *pdev = priv->gpu_pdev;
	struct icc_path *ocmem_icc_path;
	struct icc_path *icc_path;
	int ret;

	if (!pdev) {
		DRM_DEV_ERROR(dev->dev, "no a4xx device\n");
		ret = -ENXIO;
		goto fail;
	}

	a4xx_gpu = kzalloc(sizeof(*a4xx_gpu), GFP_KERNEL);
	if (!a4xx_gpu) {
		ret = -ENOMEM;
		goto fail;
	}

	adreno_gpu = &a4xx_gpu->base;
	gpu = &adreno_gpu->base;

	/* a4xx uses no generic perf counters from the core: */
	gpu->perfcntrs = NULL;
	gpu->num_perfcntrs = 0;

	ret = adreno_gpu_init(dev, pdev, adreno_gpu, &funcs, 1);
	if (ret)
		goto fail;

	/* a405 has a smaller register dump list (see tables above): */
	adreno_gpu->registers = adreno_is_a405(adreno_gpu) ? a405_registers :
							     a4xx_registers;

	/* if needed, allocate gmem: */
	ret = adreno_gpu_ocmem_init(dev->dev, adreno_gpu,
				    &a4xx_gpu->ocmem);
	if (ret)
		goto fail;

	if (!gpu->aspace) {
		/* No IOMMU means userspace buffers are not isolated from
		 * each other or the kernel; only allow the insecure VRAM
		 * carveout fallback when explicitly opted in:
		 */
		DRM_DEV_ERROR(dev->dev, "No memory protection without IOMMU\n");
		if (!allow_vram_carveout) {
			ret = -ENXIO;
			goto fail;
		}
	}

	icc_path = devm_of_icc_get(&pdev->dev, "gfx-mem");
	if (IS_ERR(icc_path)) {
		ret = PTR_ERR(icc_path);
		goto fail;
	}

	ocmem_icc_path = devm_of_icc_get(&pdev->dev, "ocmem");
	if (IS_ERR(ocmem_icc_path)) {
		ret = PTR_ERR(ocmem_icc_path);
		/* allow -ENODATA, ocmem icc is optional */
		if (ret != -ENODATA)
			goto fail;
		ocmem_icc_path = NULL;
	}

	/*
	 * Set the ICC path to maximum speed for now by multiplying the
	 * fastest frequency by the bus width (8).  We'll want to scale this
	 * later on to improve battery life.
	 */
	icc_set_bw(icc_path, 0, Bps_to_icc(gpu->fast_rate) * 8);
	icc_set_bw(ocmem_icc_path, 0, Bps_to_icc(gpu->fast_rate) * 8);

	return gpu;

fail:
	if (a4xx_gpu)
		a4xx_destroy(&a4xx_gpu->base.base);

	return ERR_PTR(ret);
}