0001
0002
0003
0004 #include "a2xx_gpu.h"
0005 #include "msm_gem.h"
0006 #include "msm_mmu.h"
0007
0008 extern bool hang_debug;
0009
0010 static void a2xx_dump(struct msm_gpu *gpu);
0011 static bool a2xx_idle(struct msm_gpu *gpu);
0012
/*
 * Emit one userspace submit into the ringbuffer: indirect-buffer packets
 * for each cmd, the fence seqno, and a CP interrupt so retirement runs.
 */
static void a2xx_submit(struct msm_gpu *gpu, struct msm_gem_submit *submit)
{
	struct msm_ringbuffer *ring = submit->ring;
	unsigned int i;

	for (i = 0; i < submit->nr_cmds; i++) {
		switch (submit->cmd[i].type) {
		case MSM_SUBMIT_CMD_IB_TARGET_BUF:
			/* ignore IB-targets */
			break;
		case MSM_SUBMIT_CMD_CTX_RESTORE_BUF:
			/* ignore if there has not been a ctx switch: */
			if (gpu->cur_ctx_seqno == submit->queue->ctx->seqno)
				break;
			fallthrough;
		case MSM_SUBMIT_CMD_BUF:
			OUT_PKT3(ring, CP_INDIRECT_BUFFER_PFD, 2);
			OUT_RING(ring, lower_32_bits(submit->cmd[i].iova));
			OUT_RING(ring, submit->cmd[i].size);
			OUT_PKT2(ring);
			break;
		}
	}

	/* stash the seqno where a2xx_recover() can find it in a hang dump */
	OUT_PKT0(ring, REG_AXXX_CP_SCRATCH_REG2, 1);
	OUT_RING(ring, submit->seqno);

	/* wait for idle before cache flush/interrupt */
	OUT_PKT3(ring, CP_WAIT_FOR_IDLE, 1);
	OUT_RING(ring, 0x00000000);

	/* write the fence value to memory, then raise the RB interrupt
	 * (handled in a2xx_irq()) to kick retirement: */
	OUT_PKT3(ring, CP_EVENT_WRITE, 3);
	OUT_RING(ring, CACHE_FLUSH_TS);
	OUT_RING(ring, rbmemptr(ring, fence));
	OUT_RING(ring, submit->seqno);
	OUT_PKT3(ring, CP_INTERRUPT, 1);
	OUT_RING(ring, 0x80000000);

	adreno_flush(gpu, ring, REG_AXXX_CP_RB_WPTR);
}
0053
/*
 * Send the CP_ME_INIT packet (18 dwords) that configures the CP
 * micro-engine, then enable protected mode.  Returns true if the GPU
 * went idle afterwards.
 */
static bool a2xx_me_init(struct msm_gpu *gpu)
{
	struct msm_ringbuffer *ring = gpu->rb[0];

	OUT_PKT3(ring, CP_ME_INIT, 18);

	/* All fields present (bits 9:0) */
	OUT_RING(ring, 0x000003ff);
	/* Disable/Enable Real-Time Stream processing (present but ignored) */
	OUT_RING(ring, 0x00000000);
	/* Enable (2D <-> 3D) implicit synchronization (present but ignored) */
	OUT_RING(ring, 0x00000000);

	/* base offsets (register address - 0x2000) of the state groups the
	 * micro-engine tracks: */
	OUT_RING(ring, REG_A2XX_RB_SURFACE_INFO - 0x2000);
	OUT_RING(ring, REG_A2XX_PA_SC_WINDOW_OFFSET - 0x2000);
	OUT_RING(ring, REG_A2XX_VGT_MAX_VTX_INDX - 0x2000);
	OUT_RING(ring, REG_A2XX_SQ_PROGRAM_CNTL - 0x2000);
	OUT_RING(ring, REG_A2XX_RB_DEPTHCONTROL - 0x2000);
	OUT_RING(ring, REG_A2XX_PA_SU_POINT_SIZE - 0x2000);
	OUT_RING(ring, REG_A2XX_PA_SC_LINE_CNTL - 0x2000);
	OUT_RING(ring, REG_A2XX_PA_SU_POLY_OFFSET_FRONT_SCALE - 0x2000);

	/* vertex and pixel shader start addresses, in instructions
	 * (3 dwords per instruction) */
	OUT_RING(ring, 0x80000180);
	/* maximum contexts */
	OUT_RING(ring, 0x00000001);
	/* write-confirm interval and wait-interval polling */
	OUT_RING(ring, 0x00000000);
	/* NQ and external memory swap */
	OUT_RING(ring, 0x00000000);
	/* protected mode error checking (0x1f2 is REG_AXXX_CP_INT_CNTL) */
	OUT_RING(ring, 0x200001f2);
	/* disable header dumping and header dump address */
	OUT_RING(ring, 0x00000000);
	/* header dump size */
	OUT_RING(ring, 0x00000000);

	/* enable protected mode */
	OUT_PKT3(ring, CP_SET_PROTECTED_MODE, 1);
	OUT_RING(ring, 1);

	adreno_flush(gpu, ring, REG_AXXX_CP_RB_WPTR);
	return a2xx_idle(gpu);
}
0100
/*
 * Bring the GPU out of reset: program power overrides, MMU, arbiter and
 * interrupt state, load the PM4 and PFP microcode, start the micro-engine
 * and run ME_INIT.  Returns 0 on success or a negative errno.
 */
static int a2xx_hw_init(struct msm_gpu *gpu)
{
	struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
	dma_addr_t pt_base, tran_error;
	uint32_t *ptr, len;
	int i, ret;

	msm_gpummu_params(gpu->aspace->mmu, &pt_base, &tran_error);

	DBG("%s", gpu->name);

	/* halt the micro-engine before touching anything else */
	gpu_write(gpu, REG_AXXX_CP_ME_CNTL, AXXX_CP_ME_CNTL_HALT);

	gpu_write(gpu, REG_A2XX_RBBM_PM_OVERRIDE1, 0xfffffffe);
	gpu_write(gpu, REG_A2XX_RBBM_PM_OVERRIDE2, 0xffffffff);

	/* soft-reset everything, then release the reset */
	gpu_write(gpu, REG_A2XX_RBBM_SOFT_RESET, 0xffffffff);
	msleep(30);
	gpu_write(gpu, REG_A2XX_RBBM_SOFT_RESET, 0x00000000);

	if (adreno_is_a225(adreno_gpu))
		gpu_write(gpu, REG_A2XX_SQ_FLOW_CONTROL, 0x18000000);

	/* NOTE(review): 0x00004442 is a magic value inherited from the
	 * downstream driver -- meaning not documented here */
	gpu_write(gpu, REG_A2XX_RBBM_CNTL, 0x00004442);

	/* MPU (physical protection) covers the whole address range */
	gpu_write(gpu, REG_A2XX_MH_MMU_MPU_BASE, 0x00000000);
	gpu_write(gpu, REG_A2XX_MH_MMU_MPU_END, 0xfffff000);

	/* enable the MMU with every client using translated-range behavior */
	gpu_write(gpu, REG_A2XX_MH_MMU_CONFIG, A2XX_MH_MMU_CONFIG_MMU_ENABLE |
		A2XX_MH_MMU_CONFIG_RB_W_CLNT_BEHAVIOR(BEH_TRAN_RNG) |
		A2XX_MH_MMU_CONFIG_CP_W_CLNT_BEHAVIOR(BEH_TRAN_RNG) |
		A2XX_MH_MMU_CONFIG_CP_R0_CLNT_BEHAVIOR(BEH_TRAN_RNG) |
		A2XX_MH_MMU_CONFIG_CP_R1_CLNT_BEHAVIOR(BEH_TRAN_RNG) |
		A2XX_MH_MMU_CONFIG_CP_R2_CLNT_BEHAVIOR(BEH_TRAN_RNG) |
		A2XX_MH_MMU_CONFIG_CP_R3_CLNT_BEHAVIOR(BEH_TRAN_RNG) |
		A2XX_MH_MMU_CONFIG_CP_R4_CLNT_BEHAVIOR(BEH_TRAN_RNG) |
		A2XX_MH_MMU_CONFIG_VGT_R0_CLNT_BEHAVIOR(BEH_TRAN_RNG) |
		A2XX_MH_MMU_CONFIG_VGT_R1_CLNT_BEHAVIOR(BEH_TRAN_RNG) |
		A2XX_MH_MMU_CONFIG_TC_R_CLNT_BEHAVIOR(BEH_TRAN_RNG) |
		A2XX_MH_MMU_CONFIG_PA_W_CLNT_BEHAVIOR(BEH_TRAN_RNG));

	/* translated VA range: base SZ_16M, 0xfff 64K regions -- must match
	 * the aspace created in a2xx_create_address_space() */
	gpu_write(gpu, REG_A2XX_MH_MMU_VA_RANGE, SZ_16M |
		A2XX_MH_MMU_VA_RANGE_NUM_64KB_REGIONS(0xfff));

	gpu_write(gpu, REG_A2XX_MH_MMU_PT_BASE, pt_base);
	gpu_write(gpu, REG_A2XX_MH_MMU_TRAN_ERROR, tran_error);

	/* flush any stale translations */
	gpu_write(gpu, REG_A2XX_MH_MMU_INVALIDATE,
		A2XX_MH_MMU_INVALIDATE_INVALIDATE_ALL |
		A2XX_MH_MMU_INVALIDATE_INVALIDATE_TC);

	gpu_write(gpu, REG_A2XX_MH_ARBITER_CONFIG,
		A2XX_MH_ARBITER_CONFIG_SAME_PAGE_LIMIT(16) |
		A2XX_MH_ARBITER_CONFIG_L1_ARB_ENABLE |
		A2XX_MH_ARBITER_CONFIG_L1_ARB_HOLD_ENABLE |
		A2XX_MH_ARBITER_CONFIG_PAGE_SIZE(1) |
		A2XX_MH_ARBITER_CONFIG_TC_REORDER_ENABLE |
		A2XX_MH_ARBITER_CONFIG_TC_ARB_HOLD_ENABLE |
		A2XX_MH_ARBITER_CONFIG_IN_FLIGHT_LIMIT_ENABLE |
		A2XX_MH_ARBITER_CONFIG_IN_FLIGHT_LIMIT(8) |
		A2XX_MH_ARBITER_CONFIG_CP_CLNT_ENABLE |
		A2XX_MH_ARBITER_CONFIG_VGT_CLNT_ENABLE |
		A2XX_MH_ARBITER_CONFIG_TC_CLNT_ENABLE |
		A2XX_MH_ARBITER_CONFIG_RB_CLNT_ENABLE |
		A2XX_MH_ARBITER_CONFIG_PA_CLNT_ENABLE);
	if (!adreno_is_a20x(adreno_gpu))
		gpu_write(gpu, REG_A2XX_MH_CLNT_INTF_CTRL_CONFIG1, 0x00032f07);

	gpu_write(gpu, REG_A2XX_SQ_VS_PROGRAM, 0x00000000);
	gpu_write(gpu, REG_A2XX_SQ_PS_PROGRAM, 0x00000000);

	/* drop the power overrides set before the soft reset */
	gpu_write(gpu, REG_A2XX_RBBM_PM_OVERRIDE1, 0);
	gpu_write(gpu, REG_A2XX_RBBM_PM_OVERRIDE2, 0);

	gpu_write(gpu, REG_A2XX_RBBM_DEBUG, 0x00080000);

	/* unmask only the interrupts a2xx_irq() handles: error conditions
	 * plus the IB1/RB interrupts used for retirement */
	gpu_write(gpu, REG_A2XX_RBBM_INT_CNTL,
		A2XX_RBBM_INT_CNTL_RDERR_INT_MASK);
	gpu_write(gpu, REG_AXXX_CP_INT_CNTL,
		AXXX_CP_INT_CNTL_T0_PACKET_IN_IB_MASK |
		AXXX_CP_INT_CNTL_OPCODE_ERROR_MASK |
		AXXX_CP_INT_CNTL_PROTECTED_MODE_ERROR_MASK |
		AXXX_CP_INT_CNTL_RESERVED_BIT_ERROR_MASK |
		AXXX_CP_INT_CNTL_IB_ERROR_MASK |
		AXXX_CP_INT_CNTL_IB1_INT_MASK |
		AXXX_CP_INT_CNTL_RB_INT_MASK);
	gpu_write(gpu, REG_A2XX_SQ_INT_CNTL, 0);
	gpu_write(gpu, REG_A2XX_MH_INTERRUPT_MASK,
		A2XX_MH_INTERRUPT_MASK_AXI_READ_ERROR |
		A2XX_MH_INTERRUPT_MASK_AXI_WRITE_ERROR |
		A2XX_MH_INTERRUPT_MASK_MMU_PAGE_FAULT);

	/* encode GMEM size: SZ_16K << i matches 128K/256K/512K for i=3..5.
	 * NOTE(review): if gmem matches none of those, the loop leaves
	 * i == 6 -- confirm that is intended for other sizes */
	for (i = 3; i <= 5; i++)
		if ((SZ_16K << i) == adreno_gpu->gmem)
			break;
	gpu_write(gpu, REG_A2XX_RB_EDRAM_INFO, i);

	ret = adreno_hw_init(gpu);
	if (ret)
		return ret;

	gpu_write(gpu, REG_AXXX_CP_RB_CNTL,
		MSM_GPU_RB_CNTL_DEFAULT | AXXX_CP_RB_CNTL_NO_UPDATE);

	gpu_write(gpu, REG_AXXX_CP_RB_BASE, lower_32_bits(gpu->rb[0]->iova));

	/* load the PM4 (micro-engine) ucode; dword 0 of the image is a
	 * header and is skipped (version lives in dword 1) */
	ptr = (uint32_t *)(adreno_gpu->fw[ADRENO_FW_PM4]->data);
	len = adreno_gpu->fw[ADRENO_FW_PM4]->size / 4;
	DBG("loading PM4 ucode version: %x", ptr[1]);

	gpu_write(gpu, REG_AXXX_CP_DEBUG,
		AXXX_CP_DEBUG_MIU_128BIT_WRITE_ENABLE);
	gpu_write(gpu, REG_AXXX_CP_ME_RAM_WADDR, 0);
	for (i = 1; i < len; i++)
		gpu_write(gpu, REG_AXXX_CP_ME_RAM_DATA, ptr[i]);

	/* load the PFP (prefetch parser) ucode, same skip-header scheme */
	ptr = (uint32_t *)(adreno_gpu->fw[ADRENO_FW_PFP]->data);
	len = adreno_gpu->fw[ADRENO_FW_PFP]->size / 4;
	DBG("loading PFP ucode version: %x", ptr[5]);

	gpu_write(gpu, REG_A2XX_CP_PFP_UCODE_ADDR, 0);
	for (i = 1; i < len; i++)
		gpu_write(gpu, REG_A2XX_CP_PFP_UCODE_DATA, ptr[i]);

	gpu_write(gpu, REG_AXXX_CP_QUEUE_THRESHOLDS, 0x000C0804);

	/* clear ME_HALT to start the micro engine */
	gpu_write(gpu, REG_AXXX_CP_ME_CNTL, 0);

	return a2xx_me_init(gpu) ? 0 : -EINVAL;
}
0246
/*
 * Attempt to recover from a GPU hang: dump diagnostic state, pulse the
 * RBBM soft reset, then run the generic adreno recovery (re-init).
 */
static void a2xx_recover(struct msm_gpu *gpu)
{
	int i;

	adreno_dump_info(gpu);

	/* CP scratch regs; SCRATCH_REG2 holds the last submitted seqno
	 * (written in a2xx_submit()) */
	for (i = 0; i < 8; i++) {
		printk("CP_SCRATCH_REG%d: %u\n", i,
			gpu_read(gpu, REG_AXXX_CP_SCRATCH_REG0 + i));
	}

	/* dump registers before resetting gpu, if enabled: */
	if (hang_debug)
		a2xx_dump(gpu);

	/* pulse soft reset; the read back posts the write before clearing */
	gpu_write(gpu, REG_A2XX_RBBM_SOFT_RESET, 1);
	gpu_read(gpu, REG_A2XX_RBBM_SOFT_RESET);
	gpu_write(gpu, REG_A2XX_RBBM_SOFT_RESET, 0);
	adreno_recover(gpu);
}
0267
0268 static void a2xx_destroy(struct msm_gpu *gpu)
0269 {
0270 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
0271 struct a2xx_gpu *a2xx_gpu = to_a2xx_gpu(adreno_gpu);
0272
0273 DBG("%s", gpu->name);
0274
0275 adreno_gpu_cleanup(adreno_gpu);
0276
0277 kfree(a2xx_gpu);
0278 }
0279
/*
 * Wait for the GPU to go idle.  Returns true on success, false if the
 * ringbuffer does not drain or the core stays busy past the timeout.
 */
static bool a2xx_idle(struct msm_gpu *gpu)
{
	/* wait for ringbuffer to drain: */
	if (!adreno_idle(gpu, gpu->rb[0]))
		return false;

	/* then wait for GPU to finish: */
	if (spin_until(!(gpu_read(gpu, REG_A2XX_RBBM_STATUS) &
			A2XX_RBBM_STATUS_GUI_ACTIVE))) {
		DRM_ERROR("%s: timeout waiting for GPU to idle!\n", gpu->name);

		/* TODO(review): should we reset the GPU here to recover? */
		return false;
	}

	return true;
}
0297
/*
 * IRQ handler: demux MASTER_INT_SIGNAL into MH (MMU), CP and RBBM
 * sources, warn on anything unexpected, ack each source, then retire
 * completed submits.
 */
static irqreturn_t a2xx_irq(struct msm_gpu *gpu)
{
	uint32_t mstatus, status;

	mstatus = gpu_read(gpu, REG_A2XX_MASTER_INT_SIGNAL);

	if (mstatus & A2XX_MASTER_INT_SIGNAL_MH_INT_STAT) {
		status = gpu_read(gpu, REG_A2XX_MH_INTERRUPT_STATUS);

		dev_warn(gpu->dev->dev, "MH_INT: %08X\n", status);
		dev_warn(gpu->dev->dev, "MMU_PAGE_FAULT: %08X\n",
			gpu_read(gpu, REG_A2XX_MH_MMU_PAGE_FAULT));

		gpu_write(gpu, REG_A2XX_MH_INTERRUPT_CLEAR, status);
	}

	if (mstatus & A2XX_MASTER_INT_SIGNAL_CP_INT_STAT) {
		status = gpu_read(gpu, REG_AXXX_CP_INT_STATUS);

		/* RB_INT fires once per submit (CP_INTERRUPT packet in
		 * a2xx_submit()) and is expected; only warn on others */
		if (status & ~AXXX_CP_INT_CNTL_RB_INT_MASK)
			dev_warn(gpu->dev->dev, "CP_INT: %08X\n", status);

		gpu_write(gpu, REG_AXXX_CP_INT_ACK, status);
	}

	if (mstatus & A2XX_MASTER_INT_SIGNAL_RBBM_INT_STAT) {
		status = gpu_read(gpu, REG_A2XX_RBBM_INT_STATUS);

		dev_warn(gpu->dev->dev, "RBBM_INT: %08X\n", status);

		gpu_write(gpu, REG_A2XX_RBBM_INT_ACK, status);
	}

	msm_gpu_retire(gpu);

	return IRQ_HANDLED;
}
0336
/* a200 register ranges for debug dumps: inclusive [start, end] pairs of
 * register offsets, terminated by ~0 (consumed via adreno_gpu->registers). */
static const unsigned int a200_registers[] = {
	0x0000, 0x0002, 0x0004, 0x000B, 0x003B, 0x003D, 0x0040, 0x0044,
	0x0046, 0x0047, 0x01C0, 0x01C1, 0x01C3, 0x01C8, 0x01D5, 0x01D9,
	0x01DC, 0x01DD, 0x01EA, 0x01EA, 0x01EE, 0x01F3, 0x01F6, 0x01F7,
	0x01FC, 0x01FF, 0x0391, 0x0392, 0x039B, 0x039E, 0x03B2, 0x03B5,
	0x03B7, 0x03B7, 0x03F8, 0x03FB, 0x0440, 0x0440, 0x0443, 0x0444,
	0x044B, 0x044B, 0x044D, 0x044F, 0x0452, 0x0452, 0x0454, 0x045B,
	0x047F, 0x047F, 0x0578, 0x0587, 0x05C9, 0x05C9, 0x05D0, 0x05D0,
	0x0601, 0x0604, 0x0606, 0x0609, 0x060B, 0x060E, 0x0613, 0x0614,
	0x0A29, 0x0A2B, 0x0A2F, 0x0A31, 0x0A40, 0x0A43, 0x0A45, 0x0A45,
	0x0A4E, 0x0A4F, 0x0C2C, 0x0C2C, 0x0C30, 0x0C30, 0x0C38, 0x0C3C,
	0x0C40, 0x0C40, 0x0C44, 0x0C44, 0x0C80, 0x0C86, 0x0C88, 0x0C94,
	0x0C99, 0x0C9A, 0x0CA4, 0x0CA5, 0x0D00, 0x0D03, 0x0D06, 0x0D06,
	0x0D08, 0x0D0B, 0x0D34, 0x0D35, 0x0DAE, 0x0DC1, 0x0DC8, 0x0DD4,
	0x0DD8, 0x0DD9, 0x0E00, 0x0E00, 0x0E02, 0x0E04, 0x0E17, 0x0E1E,
	0x0EC0, 0x0EC9, 0x0ECB, 0x0ECC, 0x0ED0, 0x0ED0, 0x0ED4, 0x0ED7,
	0x0EE0, 0x0EE2, 0x0F01, 0x0F02, 0x0F0C, 0x0F0C, 0x0F0E, 0x0F12,
	0x0F26, 0x0F2A, 0x0F2C, 0x0F2C, 0x2000, 0x2002, 0x2006, 0x200F,
	0x2080, 0x2082, 0x2100, 0x2109, 0x210C, 0x2114, 0x2180, 0x2184,
	0x21F5, 0x21F7, 0x2200, 0x2208, 0x2280, 0x2283, 0x2293, 0x2294,
	0x2300, 0x2308, 0x2312, 0x2312, 0x2316, 0x231D, 0x2324, 0x2326,
	0x2380, 0x2383, 0x2400, 0x2402, 0x2406, 0x240F, 0x2480, 0x2482,
	0x2500, 0x2509, 0x250C, 0x2514, 0x2580, 0x2584, 0x25F5, 0x25F7,
	0x2600, 0x2608, 0x2680, 0x2683, 0x2693, 0x2694, 0x2700, 0x2708,
	0x2712, 0x2712, 0x2716, 0x271D, 0x2724, 0x2726, 0x2780, 0x2783,
	0x4000, 0x4003, 0x4800, 0x4805, 0x4900, 0x4900, 0x4908, 0x4908,
	~0   /* sentinel */
};
0365
/* a220 register ranges for debug dumps: inclusive [start, end] pairs of
 * register offsets, terminated by ~0 (consumed via adreno_gpu->registers). */
static const unsigned int a220_registers[] = {
	0x0000, 0x0002, 0x0004, 0x000B, 0x003B, 0x003D, 0x0040, 0x0044,
	0x0046, 0x0047, 0x01C0, 0x01C1, 0x01C3, 0x01C8, 0x01D5, 0x01D9,
	0x01DC, 0x01DD, 0x01EA, 0x01EA, 0x01EE, 0x01F3, 0x01F6, 0x01F7,
	0x01FC, 0x01FF, 0x0391, 0x0392, 0x039B, 0x039E, 0x03B2, 0x03B5,
	0x03B7, 0x03B7, 0x03F8, 0x03FB, 0x0440, 0x0440, 0x0443, 0x0444,
	0x044B, 0x044B, 0x044D, 0x044F, 0x0452, 0x0452, 0x0454, 0x045B,
	0x047F, 0x047F, 0x0578, 0x0587, 0x05C9, 0x05C9, 0x05D0, 0x05D0,
	0x0601, 0x0604, 0x0606, 0x0609, 0x060B, 0x060E, 0x0613, 0x0614,
	0x0A29, 0x0A2B, 0x0A2F, 0x0A31, 0x0A40, 0x0A40, 0x0A42, 0x0A43,
	0x0A45, 0x0A45, 0x0A4E, 0x0A4F, 0x0C30, 0x0C30, 0x0C38, 0x0C39,
	0x0C3C, 0x0C3C, 0x0C80, 0x0C81, 0x0C88, 0x0C93, 0x0D00, 0x0D03,
	0x0D05, 0x0D06, 0x0D08, 0x0D0B, 0x0D34, 0x0D35, 0x0DAE, 0x0DC1,
	0x0DC8, 0x0DD4, 0x0DD8, 0x0DD9, 0x0E00, 0x0E00, 0x0E02, 0x0E04,
	0x0E17, 0x0E1E, 0x0EC0, 0x0EC9, 0x0ECB, 0x0ECC, 0x0ED0, 0x0ED0,
	0x0ED4, 0x0ED7, 0x0EE0, 0x0EE2, 0x0F01, 0x0F02, 0x2000, 0x2002,
	0x2006, 0x200F, 0x2080, 0x2082, 0x2100, 0x2102, 0x2104, 0x2109,
	0x210C, 0x2114, 0x2180, 0x2184, 0x21F5, 0x21F7, 0x2200, 0x2202,
	0x2204, 0x2204, 0x2208, 0x2208, 0x2280, 0x2282, 0x2294, 0x2294,
	0x2300, 0x2308, 0x2309, 0x230A, 0x2312, 0x2312, 0x2316, 0x2316,
	0x2318, 0x231D, 0x2324, 0x2326, 0x2380, 0x2383, 0x2400, 0x2402,
	0x2406, 0x240F, 0x2480, 0x2482, 0x2500, 0x2502, 0x2504, 0x2509,
	0x250C, 0x2514, 0x2580, 0x2584, 0x25F5, 0x25F7, 0x2600, 0x2602,
	0x2604, 0x2606, 0x2608, 0x2608, 0x2680, 0x2682, 0x2694, 0x2694,
	0x2700, 0x2708, 0x2712, 0x2712, 0x2716, 0x2716, 0x2718, 0x271D,
	0x2724, 0x2726, 0x2780, 0x2783, 0x4000, 0x4003, 0x4800, 0x4805,
	0x4900, 0x4900, 0x4908, 0x4908,
	~0   /* sentinel */
};
0395
/* a225 register ranges for debug dumps: inclusive [start, end] pairs of
 * register offsets, terminated by ~0 (consumed via adreno_gpu->registers). */
static const unsigned int a225_registers[] = {
	0x0000, 0x0002, 0x0004, 0x000B, 0x003B, 0x003D, 0x0040, 0x0044,
	0x0046, 0x0047, 0x013C, 0x013C, 0x0140, 0x014F, 0x01C0, 0x01C1,
	0x01C3, 0x01C8, 0x01D5, 0x01D9, 0x01DC, 0x01DD, 0x01EA, 0x01EA,
	0x01EE, 0x01F3, 0x01F6, 0x01F7, 0x01FC, 0x01FF, 0x0391, 0x0392,
	0x039B, 0x039E, 0x03B2, 0x03B5, 0x03B7, 0x03B7, 0x03F8, 0x03FB,
	0x0440, 0x0440, 0x0443, 0x0444, 0x044B, 0x044B, 0x044D, 0x044F,
	0x0452, 0x0452, 0x0454, 0x045B, 0x047F, 0x047F, 0x0578, 0x0587,
	0x05C9, 0x05C9, 0x05D0, 0x05D0, 0x0601, 0x0604, 0x0606, 0x0609,
	0x060B, 0x060E, 0x0613, 0x0614, 0x0A29, 0x0A2B, 0x0A2F, 0x0A31,
	0x0A40, 0x0A40, 0x0A42, 0x0A43, 0x0A45, 0x0A45, 0x0A4E, 0x0A4F,
	0x0C01, 0x0C1D, 0x0C30, 0x0C30, 0x0C38, 0x0C39, 0x0C3C, 0x0C3C,
	0x0C80, 0x0C81, 0x0C88, 0x0C93, 0x0D00, 0x0D03, 0x0D05, 0x0D06,
	0x0D08, 0x0D0B, 0x0D34, 0x0D35, 0x0DAE, 0x0DC1, 0x0DC8, 0x0DD4,
	0x0DD8, 0x0DD9, 0x0E00, 0x0E00, 0x0E02, 0x0E04, 0x0E17, 0x0E1E,
	0x0EC0, 0x0EC9, 0x0ECB, 0x0ECC, 0x0ED0, 0x0ED0, 0x0ED4, 0x0ED7,
	0x0EE0, 0x0EE2, 0x0F01, 0x0F02, 0x2000, 0x200F, 0x2080, 0x2082,
	0x2100, 0x2109, 0x210C, 0x2114, 0x2180, 0x2184, 0x21F5, 0x21F7,
	0x2200, 0x2202, 0x2204, 0x2206, 0x2208, 0x2210, 0x2220, 0x2222,
	0x2280, 0x2282, 0x2294, 0x2294, 0x2297, 0x2297, 0x2300, 0x230A,
	0x2312, 0x2312, 0x2315, 0x2316, 0x2318, 0x231D, 0x2324, 0x2326,
	0x2340, 0x2357, 0x2360, 0x2360, 0x2380, 0x2383, 0x2400, 0x240F,
	0x2480, 0x2482, 0x2500, 0x2509, 0x250C, 0x2514, 0x2580, 0x2584,
	0x25F5, 0x25F7, 0x2600, 0x2602, 0x2604, 0x2606, 0x2608, 0x2610,
	0x2620, 0x2622, 0x2680, 0x2682, 0x2694, 0x2694, 0x2697, 0x2697,
	0x2700, 0x270A, 0x2712, 0x2712, 0x2715, 0x2716, 0x2718, 0x271D,
	0x2724, 0x2726, 0x2740, 0x2757, 0x2760, 0x2760, 0x2780, 0x2783,
	0x4000, 0x4003, 0x4800, 0x4806, 0x4808, 0x4808, 0x4900, 0x4900,
	0x4908, 0x4908,
	~0   /* sentinel */
};
0427
0428
0429 static void a2xx_dump(struct msm_gpu *gpu)
0430 {
0431 printk("status: %08x\n",
0432 gpu_read(gpu, REG_A2XX_RBBM_STATUS));
0433 adreno_dump(gpu);
0434 }
0435
0436 static struct msm_gpu_state *a2xx_gpu_state_get(struct msm_gpu *gpu)
0437 {
0438 struct msm_gpu_state *state = kzalloc(sizeof(*state), GFP_KERNEL);
0439
0440 if (!state)
0441 return ERR_PTR(-ENOMEM);
0442
0443 adreno_gpu_state_get(gpu, state);
0444
0445 state->rbbm_status = gpu_read(gpu, REG_A2XX_RBBM_STATUS);
0446
0447 return state;
0448 }
0449
0450 static struct msm_gem_address_space *
0451 a2xx_create_address_space(struct msm_gpu *gpu, struct platform_device *pdev)
0452 {
0453 struct msm_mmu *mmu = msm_gpummu_new(&pdev->dev, gpu);
0454 struct msm_gem_address_space *aspace;
0455
0456 aspace = msm_gem_address_space_create(mmu, "gpu", SZ_16M,
0457 0xfff * SZ_64K);
0458
0459 if (IS_ERR(aspace) && !IS_ERR(mmu))
0460 mmu->funcs->destroy(mmu);
0461
0462 return aspace;
0463 }
0464
0465 static u32 a2xx_get_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
0466 {
0467 ring->memptrs->rptr = gpu_read(gpu, REG_AXXX_CP_RB_RPTR);
0468 return ring->memptrs->rptr;
0469 }
0470
/* GPU function table registered via adreno_gpu_init(): a2xx-specific
 * init/submit/irq/recover/state hooks plus generic adreno helpers. */
static const struct adreno_gpu_funcs funcs = {
	.base = {
		.get_param = adreno_get_param,
		.set_param = adreno_set_param,
		.hw_init = a2xx_hw_init,
		.pm_suspend = msm_gpu_pm_suspend,
		.pm_resume = msm_gpu_pm_resume,
		.recover = a2xx_recover,
		.submit = a2xx_submit,
		.active_ring = adreno_active_ring,
		.irq = a2xx_irq,
		.destroy = a2xx_destroy,
#if defined(CONFIG_DEBUG_FS) || defined(CONFIG_DEV_COREDUMP)
		.show = adreno_show,
#endif
		.gpu_state_get = a2xx_gpu_state_get,
		.gpu_state_put = adreno_gpu_state_put,
		.create_address_space = a2xx_create_address_space,
		.get_rptr = a2xx_get_rptr,
	},
};
0492
/* No performance counters are exposed for a2xx; the empty table keeps
 * gpu->num_perfcntrs at 0 (see a2xx_gpu_init()). */
static const struct msm_gpu_perfcntr perfcntrs[] = {
};
0496
0497 struct msm_gpu *a2xx_gpu_init(struct drm_device *dev)
0498 {
0499 struct a2xx_gpu *a2xx_gpu = NULL;
0500 struct adreno_gpu *adreno_gpu;
0501 struct msm_gpu *gpu;
0502 struct msm_drm_private *priv = dev->dev_private;
0503 struct platform_device *pdev = priv->gpu_pdev;
0504 int ret;
0505
0506 if (!pdev) {
0507 dev_err(dev->dev, "no a2xx device\n");
0508 ret = -ENXIO;
0509 goto fail;
0510 }
0511
0512 a2xx_gpu = kzalloc(sizeof(*a2xx_gpu), GFP_KERNEL);
0513 if (!a2xx_gpu) {
0514 ret = -ENOMEM;
0515 goto fail;
0516 }
0517
0518 adreno_gpu = &a2xx_gpu->base;
0519 gpu = &adreno_gpu->base;
0520
0521 gpu->perfcntrs = perfcntrs;
0522 gpu->num_perfcntrs = ARRAY_SIZE(perfcntrs);
0523
0524 if (adreno_is_a20x(adreno_gpu))
0525 adreno_gpu->registers = a200_registers;
0526 else if (adreno_is_a225(adreno_gpu))
0527 adreno_gpu->registers = a225_registers;
0528 else
0529 adreno_gpu->registers = a220_registers;
0530
0531 ret = adreno_gpu_init(dev, pdev, adreno_gpu, &funcs, 1);
0532 if (ret)
0533 goto fail;
0534
0535 if (!gpu->aspace) {
0536 dev_err(dev->dev, "No memory protection without MMU\n");
0537 if (!allow_vram_carveout) {
0538 ret = -ENXIO;
0539 goto fail;
0540 }
0541 }
0542
0543 return gpu;
0544
0545 fail:
0546 if (a2xx_gpu)
0547 a2xx_destroy(&a2xx_gpu->base.base);
0548
0549 return ERR_PTR(ret);
0550 }