#include <linux/pm_opp.h>
#include "a5xx_gpu.h"
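/*
 * The GPMU data RAM doubles as a mailbox between the driver and the GPMU
 * firmware: the AGC_* offsets below are a convention shared with the
 * firmware rather than dedicated hardware registers.
 */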
#define AGC_INIT_BASE REG_A5XX_GPMU_DATA_RAM_BASE
#define AGC_INIT_MSG_MAGIC (AGC_INIT_BASE + 5)
#define AGC_MSG_BASE (AGC_INIT_BASE + 7)

#define AGC_MSG_STATE (AGC_MSG_BASE + 0)
#define AGC_MSG_COMMAND (AGC_MSG_BASE + 1)
#define AGC_MSG_PAYLOAD_SIZE (AGC_MSG_BASE + 3)
#define AGC_MSG_PAYLOAD(_o) ((AGC_MSG_BASE + 5) + (_o))

#define AGC_POWER_CONFIG_PRODUCTION_ID 1
#define AGC_INIT_MSG_VALUE 0xBABEFACE
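/* Dword offsets of the LM and level config words within the AGC payload */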
#define AGC_LM_CONFIG (136/4)
#define AGC_LM_CONFIG_GPU_VERSION_SHIFT 17
#define AGC_LM_CONFIG_ENABLE_GPMU_ADAPTIVE 1
#define AGC_LM_CONFIG_THROTTLE_DISABLE (2 << 8)
#define AGC_LM_CONFIG_ISENSE_ENABLE (1 << 4)
#define AGC_LM_CONFIG_ENABLE_ERROR (3 << 4)
#define AGC_LM_CONFIG_LLM_ENABLED (1 << 16)
#define AGC_LM_CONFIG_BCL_DISABLED (1 << 24)

#define AGC_LEVEL_CONFIG (140/4)
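/*
 * Hard-coded register/value pairs written to the GPMU block as part of the
 * A530 limits-management setup sequence
 */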
static struct {
	uint32_t reg;
	uint32_t value;
} a5xx_sequence_regs[] = {
	{ 0xB9A1, 0x00010303 },
	{ 0xB9A2, 0x13000000 },
	{ 0xB9A3, 0x00460020 },
	{ 0xB9A4, 0x10000000 },
	{ 0xB9A5, 0x040A1707 },
	{ 0xB9A6, 0x00010000 },
	{ 0xB9A7, 0x0E000904 },
	{ 0xB9A8, 0x10000000 },
	{ 0xB9A9, 0x01165000 },
	{ 0xB9AA, 0x000E0002 },
	{ 0xB9AB, 0x03884141 },
	{ 0xB9AC, 0x10000840 },
	{ 0xB9AD, 0x572A5000 },
	{ 0xB9AE, 0x00000003 },
	{ 0xB9AF, 0x00000000 },
	{ 0xB9B0, 0x10000000 },
	{ 0xB828, 0x6C204010 },
	{ 0xB829, 0x6C204011 },
	{ 0xB82A, 0x6C204012 },
	{ 0xB82B, 0x6C204013 },
	{ 0xB82C, 0x6C204014 },
	{ 0xB90F, 0x00000004 },
	{ 0xB910, 0x00000002 },
	{ 0xB911, 0x00000002 },
	{ 0xB912, 0x00000002 },
	{ 0xB913, 0x00000002 },
	{ 0xB92F, 0x00000004 },
	{ 0xB930, 0x00000005 },
	{ 0xB931, 0x00000005 },
	{ 0xB932, 0x00000005 },
	{ 0xB933, 0x00000005 },
	{ 0xB96F, 0x00000001 },
	{ 0xB970, 0x00000003 },
	{ 0xB94F, 0x00000004 },
	{ 0xB950, 0x0000000B },
	{ 0xB951, 0x0000000B },
	{ 0xB952, 0x0000000B },
	{ 0xB953, 0x0000000B },
	{ 0xB907, 0x00000019 },
	{ 0xB927, 0x00000019 },
	{ 0xB947, 0x00000019 },
	{ 0xB967, 0x00000019 },
	{ 0xB987, 0x00000019 },
	{ 0xB906, 0x00220001 },
	{ 0xB926, 0x00220001 },
	{ 0xB946, 0x00220001 },
	{ 0xB966, 0x00220001 },
	{ 0xB986, 0x00300000 },
	{ 0xAC40, 0x0340FF41 },
	{ 0xAC41, 0x03BEFED0 },
	{ 0xAC42, 0x00331FED },
	{ 0xAC43, 0x021FFDD3 },
	{ 0xAC44, 0x5555AAAA },
	{ 0xAC45, 0x5555AAAA },
	{ 0xB9BA, 0x00000008 },
};
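/*
 * Look up the voltage (in millivolts) of the OPP that matches the given
 * core clock frequency
 */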
static inline uint32_t _get_mvolts(struct msm_gpu *gpu, uint32_t freq)
{
	struct drm_device *dev = gpu->dev;
	struct msm_drm_private *priv = dev->dev_private;
	struct platform_device *pdev = priv->gpu_pdev;
	struct dev_pm_opp *opp;
	u32 ret = 0;

	opp = dev_pm_opp_find_freq_exact(&pdev->dev, freq, true);

	if (!IS_ERR(opp)) {
		ret = dev_pm_opp_get_voltage(opp) / 1000;
		dev_pm_opp_put(opp);
	}

	return ret;
}
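/* Set up thermal limits management for A530 */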
static void a530_lm_setup(struct msm_gpu *gpu)
{
	struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
	struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu);
	unsigned int i;
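	/* Write the block of sequence registers */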
	for (i = 0; i < ARRAY_SIZE(a5xx_sequence_regs); i++)
		gpu_write(gpu, a5xx_sequence_regs[i].reg,
			a5xx_sequence_regs[i].value);
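	/* Hard code the A530 GPU thermal sensor ID for the GPMU */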
	gpu_write(gpu, REG_A5XX_GPMU_TEMP_SENSOR_ID, 0x60007);
	gpu_write(gpu, REG_A5XX_GPMU_DELTA_TEMP_THRESHOLD, 0x01);
	gpu_write(gpu, REG_A5XX_GPMU_TEMP_SENSOR_CONFIG, 0x01);
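	/* Until we get clock scaling, 0 is always the active power level */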
	gpu_write(gpu, REG_A5XX_GPMU_GPMU_VOLTAGE, 0x80000000 | 0);

	gpu_write(gpu, REG_A5XX_GPMU_BASE_LEAKAGE, a5xx_gpu->lm_leakage);
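	/* The threshold is fixed at 6000 for A530 */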
	gpu_write(gpu, REG_A5XX_GPMU_GPMU_PWR_THRESHOLD, 0x80000000 | 6000);

	gpu_write(gpu, REG_A5XX_GPMU_BEC_ENABLE, 0x10001FFF);
	gpu_write(gpu, REG_A5XX_GDPM_CONFIG1, 0x00201FF1);
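	/* Write the voltage table */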
	gpu_write(gpu, REG_A5XX_GPMU_BEC_ENABLE, 0x10001FFF);
	gpu_write(gpu, REG_A5XX_GDPM_CONFIG1, 0x201FF1);

	gpu_write(gpu, AGC_MSG_STATE, 1);
	gpu_write(gpu, AGC_MSG_COMMAND, AGC_POWER_CONFIG_PRODUCTION_ID);
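	/* Write the max power - hard coded to 5448 for A530 */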
	gpu_write(gpu, AGC_MSG_PAYLOAD(0), 5448);
	gpu_write(gpu, AGC_MSG_PAYLOAD(1), 1);
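	/*
	 * For now just write the one voltage level - we will do more when we
	 * can do scaling
	 */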
	gpu_write(gpu, AGC_MSG_PAYLOAD(2), _get_mvolts(gpu, gpu->fast_rate));
	gpu_write(gpu, AGC_MSG_PAYLOAD(3), gpu->fast_rate / 1000000);

	gpu_write(gpu, AGC_MSG_PAYLOAD_SIZE, 4 * sizeof(uint32_t));
	gpu_write(gpu, AGC_INIT_MSG_MAGIC, AGC_INIT_MSG_VALUE);
}

#define PAYLOAD_SIZE(_size) ((_size) * sizeof(u32))
#define LM_DCVS_LIMIT 1
#define LEVEL_CONFIG ~(0x303)
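/* Set up limits management for A540 */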
static void a540_lm_setup(struct msm_gpu *gpu)
{
	struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
	u32 config;
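	/* The battery current limiter isn't enabled for A540 */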
	config = AGC_LM_CONFIG_BCL_DISABLED;
	config |= adreno_gpu->rev.patchid << AGC_LM_CONFIG_GPU_VERSION_SHIFT;
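	/* For now disable GPMU side throttling */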
	config |= AGC_LM_CONFIG_THROTTLE_DISABLE;
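	/* Until we get clock scaling, 0 is always the active power level */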
	gpu_write(gpu, REG_A5XX_GPMU_GPMU_VOLTAGE, 0x80000000 | 0);
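	/* The power threshold is fixed at 6000 for now */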
	gpu_write(gpu, REG_A5XX_GPMU_GPMU_PWR_THRESHOLD, 0x80000000 | 6000);

	gpu_write(gpu, AGC_MSG_STATE, 0x80000001);
	gpu_write(gpu, AGC_MSG_COMMAND, AGC_POWER_CONFIG_PRODUCTION_ID);

	gpu_write(gpu, AGC_MSG_PAYLOAD(0), 5448);
	gpu_write(gpu, AGC_MSG_PAYLOAD(1), 1);

	gpu_write(gpu, AGC_MSG_PAYLOAD(2), _get_mvolts(gpu, gpu->fast_rate));
	gpu_write(gpu, AGC_MSG_PAYLOAD(3), gpu->fast_rate / 1000000);

	gpu_write(gpu, AGC_MSG_PAYLOAD(AGC_LM_CONFIG), config);
	gpu_write(gpu, AGC_MSG_PAYLOAD(AGC_LEVEL_CONFIG), LEVEL_CONFIG);
	gpu_write(gpu, AGC_MSG_PAYLOAD_SIZE,
		PAYLOAD_SIZE(AGC_LEVEL_CONFIG + 1));

	gpu_write(gpu, AGC_INIT_MSG_MAGIC, AGC_INIT_MSG_VALUE);
}
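/* Enable SP/TP power collapse */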
static void a5xx_pc_init(struct msm_gpu *gpu)
{
	gpu_write(gpu, REG_A5XX_GPMU_PWR_COL_INTER_FRAME_CTRL, 0x7F);
	gpu_write(gpu, REG_A5XX_GPMU_PWR_COL_BINNING_CTRL, 0);
	gpu_write(gpu, REG_A5XX_GPMU_PWR_COL_INTER_FRAME_HYST, 0xA0080);
	gpu_write(gpu, REG_A5XX_GPMU_PWR_COL_STAGGER_DELAY, 0x600040);
}
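/* Enable the GPMU microcontroller */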
static int a5xx_gpmu_init(struct msm_gpu *gpu)
{
	struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
	struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu);
	struct msm_ringbuffer *ring = gpu->rb[0];

	if (!a5xx_gpu->gpmu_dwords)
		return 0;
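	/* Turn off protected mode for this operation */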
	OUT_PKT7(ring, CP_SET_PROTECTED_MODE, 1);
	OUT_RING(ring, 0);
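	/* Kick off the IB to load the GPMU microcode */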
	OUT_PKT7(ring, CP_INDIRECT_BUFFER_PFE, 3);
	OUT_RING(ring, lower_32_bits(a5xx_gpu->gpmu_iova));
	OUT_RING(ring, upper_32_bits(a5xx_gpu->gpmu_iova));
	OUT_RING(ring, a5xx_gpu->gpmu_dwords);
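	/* Turn back on protected mode */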
	OUT_PKT7(ring, CP_SET_PROTECTED_MODE, 1);
	OUT_RING(ring, 1);

	a5xx_flush(gpu, ring, true);

	if (!a5xx_idle(gpu, ring)) {
		DRM_ERROR("%s: Unable to load GPMU firmware. GPMU will not be active\n",
			gpu->name);
		return -EINVAL;
	}

	if (adreno_is_a530(adreno_gpu))
		gpu_write(gpu, REG_A5XX_GPMU_WFI_CONFIG, 0x4014);
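	/* Kick off the GPMU */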
	gpu_write(gpu, REG_A5XX_GPMU_CM3_SYSRESET, 0x0);
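	/*
	 * Wait for the GPMU to respond. It isn't fatal if it doesn't; we just
	 * won't have advanced power collapse.
	 */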
	if (spin_usecs(gpu, 25, REG_A5XX_GPMU_GENERAL_0, 0xFFFFFFFF,
		0xBABEFACE))
		DRM_ERROR("%s: GPMU firmware initialization timed out\n",
			gpu->name);

	if (!adreno_is_a530(adreno_gpu)) {
		u32 val = gpu_read(gpu, REG_A5XX_GPMU_GENERAL_1);

		if (val)
			DRM_ERROR("%s: GPMU firmware initialization failed: %d\n",
				gpu->name, val);
	}

	return 0;
}
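/* Enable limits management */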
static void a5xx_lm_enable(struct msm_gpu *gpu)
{
	struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
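	/* This init sequence only applies to A530 */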
	if (!adreno_is_a530(adreno_gpu))
		return;

	gpu_write(gpu, REG_A5XX_GDPM_INT_MASK, 0x0);
	gpu_write(gpu, REG_A5XX_GDPM_INT_EN, 0x0A);
	gpu_write(gpu, REG_A5XX_GPMU_GPMU_VOLTAGE_INTR_EN_MASK, 0x01);
	gpu_write(gpu, REG_A5XX_GPMU_TEMP_THRESHOLD_INTR_EN_MASK, 0x50000);
	gpu_write(gpu, REG_A5XX_GPMU_THROTTLE_UNMASK_FORCE_CTRL, 0x30000);

	gpu_write(gpu, REG_A5XX_GPMU_CLOCK_THROTTLE_CTRL, 0x011);
}

int a5xx_power_init(struct msm_gpu *gpu)
{
	struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
	int ret;
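	/* Not all A5xx chips have a GPMU */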
	if (!(adreno_is_a530(adreno_gpu) || adreno_is_a540(adreno_gpu)))
		return 0;
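	/* Set up the limits management */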
	if (adreno_is_a530(adreno_gpu))
		a530_lm_setup(gpu);
	else if (adreno_is_a540(adreno_gpu))
		a540_lm_setup(gpu);
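	/* Set up SP/TP power collapse */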
	a5xx_pc_init(gpu);
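	/* Start the GPMU */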
	ret = a5xx_gpmu_init(gpu);
	if (ret)
		return ret;
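	/* Start the limits management */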
	a5xx_lm_enable(gpu);

	return 0;
}
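/*
 * Build the command buffer used to load the GPMU firmware into the GPMU
 * instruction RAM
 */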
void a5xx_gpmu_ucode_init(struct msm_gpu *gpu)
{
	struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
	struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu);
	struct drm_device *drm = gpu->dev;
	uint32_t dwords = 0, offset = 0, bosize;
	unsigned int *data, *ptr, *cmds;
	unsigned int cmds_size;

	if (!(adreno_is_a530(adreno_gpu) || adreno_is_a540(adreno_gpu)))
		return;

	if (a5xx_gpu->gpmu_bo)
		return;

	data = (unsigned int *) adreno_gpu->fw[ADRENO_FW_GPMU]->data;
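	/*
	 * The first dword is the size of the remaining data in dwords. Use it
	 * as a checksum of sorts and make sure it matches the actual size of
	 * the firmware that we read.
	 */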
	if (adreno_gpu->fw[ADRENO_FW_GPMU]->size < 8 ||
		(data[0] < 2) || (data[0] >=
			(adreno_gpu->fw[ADRENO_FW_GPMU]->size >> 2)))
		return;
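	/* The second dword is an ID - look for 2 for the GPMU firmware */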
	if (data[1] != 2)
		return;

	cmds = data + data[2] + 3;
	cmds_size = data[0] - data[2] - 2;
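	/*
	 * A single type4 packet can only carry a limited payload, so budget
	 * one extra header dword for every TYPE4_MAX_PAYLOAD chunk of commands
	 */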
	bosize = (cmds_size + (cmds_size / TYPE4_MAX_PAYLOAD) + 1) << 2;

	ptr = msm_gem_kernel_new(drm, bosize,
		MSM_BO_WC | MSM_BO_GPU_READONLY, gpu->aspace,
		&a5xx_gpu->gpmu_bo, &a5xx_gpu->gpmu_iova);
	if (IS_ERR(ptr))
		return;

	msm_gem_object_set_name(a5xx_gpu->gpmu_bo, "gpmufw");
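	/* Split the microcode into type4 packets targeting GPMU instruction RAM */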
	while (cmds_size > 0) {
		int i;
		uint32_t _size = cmds_size > TYPE4_MAX_PAYLOAD ?
			TYPE4_MAX_PAYLOAD : cmds_size;

		ptr[dwords++] = PKT4(REG_A5XX_GPMU_INST_RAM_BASE + offset,
			_size);

		for (i = 0; i < _size; i++)
			ptr[dwords++] = *cmds++;

		offset += _size;
		cmds_size -= _size;
	}

	msm_gem_put_vaddr(a5xx_gpu->gpmu_bo);
	a5xx_gpu->gpmu_dwords = dwords;
}