0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023 #include "amdgpu.h"
0024 #include "amdgpu_atombios.h"
0025 #include "nbio_v2_3.h"
0026
0027 #include "nbio/nbio_2_3_default.h"
0028 #include "nbio/nbio_2_3_offset.h"
0029 #include "nbio/nbio_2_3_sh_mask.h"
0030 #include <uapi/linux/kfd_ioctl.h>
0031 #include <linux/pci.h>
0032
0033 #define smnPCIE_CONFIG_CNTL 0x11180044
0034 #define smnCPM_CONTROL 0x11180460
0035 #define smnPCIE_CNTL2 0x11180070
0036 #define smnPCIE_LC_CNTL 0x11140280
0037 #define smnPCIE_LC_CNTL3 0x111402d4
0038 #define smnPCIE_LC_CNTL6 0x111402ec
0039 #define smnPCIE_LC_CNTL7 0x111402f0
0040 #define smnBIF_CFG_DEV0_EPF0_DEVICE_CNTL2 0x1014008c
0041 #define smnRCC_EP_DEV0_0_EP_PCIE_TX_LTR_CNTL 0x10123538
0042 #define smnBIF_CFG_DEV0_EPF0_PCIE_LTR_CAP 0x10140324
0043 #define smnPSWUSP0_PCIE_LC_CNTL2 0x111402c4
0044 #define smnNBIF_MGCG_CTRL_LCLK 0x1013a21c
0045
0046 #define mmBIF_SDMA2_DOORBELL_RANGE 0x01d6
0047 #define mmBIF_SDMA2_DOORBELL_RANGE_BASE_IDX 2
0048 #define mmBIF_SDMA3_DOORBELL_RANGE 0x01d7
0049 #define mmBIF_SDMA3_DOORBELL_RANGE_BASE_IDX 2
0050
0051 #define mmBIF_MMSCH1_DOORBELL_RANGE 0x01d8
0052 #define mmBIF_MMSCH1_DOORBELL_RANGE_BASE_IDX 2
0053
0054 #define smnPCIE_LC_LINK_WIDTH_CNTL 0x11140288
0055
0056 #define GPU_HDP_FLUSH_DONE__RSVD_ENG0_MASK 0x00001000L
0057 #define GPU_HDP_FLUSH_DONE__RSVD_ENG1_MASK 0x00002000L
0058 #define GPU_HDP_FLUSH_DONE__RSVD_ENG2_MASK 0x00004000L
0059 #define GPU_HDP_FLUSH_DONE__RSVD_ENG3_MASK 0x00008000L
0060 #define GPU_HDP_FLUSH_DONE__RSVD_ENG4_MASK 0x00010000L
0061 #define GPU_HDP_FLUSH_DONE__RSVD_ENG5_MASK 0x00020000L
0062 #define GPU_HDP_FLUSH_DONE__RSVD_ENG6_MASK 0x00040000L
0063 #define GPU_HDP_FLUSH_DONE__RSVD_ENG7_MASK 0x00080000L
0064 #define GPU_HDP_FLUSH_DONE__RSVD_ENG8_MASK 0x00100000L
0065
0066 static void nbio_v2_3_remap_hdp_registers(struct amdgpu_device *adev)
0067 {
0068 WREG32_SOC15(NBIO, 0, mmREMAP_HDP_MEM_FLUSH_CNTL,
0069 adev->rmmio_remap.reg_offset + KFD_MMIO_REMAP_HDP_MEM_FLUSH_CNTL);
0070 WREG32_SOC15(NBIO, 0, mmREMAP_HDP_REG_FLUSH_CNTL,
0071 adev->rmmio_remap.reg_offset + KFD_MMIO_REMAP_HDP_REG_FLUSH_CNTL);
0072 }
0073
0074 static u32 nbio_v2_3_get_rev_id(struct amdgpu_device *adev)
0075 {
0076 u32 tmp;
0077
0078
0079
0080
0081
0082 if (amdgpu_sriov_vf(adev)) {
0083 return 0;
0084 }
0085
0086 tmp = RREG32_SOC15(NBIO, 0, mmRCC_DEV0_EPF0_STRAP0);
0087 tmp &= RCC_DEV0_EPF0_STRAP0__STRAP_ATI_REV_ID_DEV0_F0_MASK;
0088 tmp >>= RCC_DEV0_EPF0_STRAP0__STRAP_ATI_REV_ID_DEV0_F0__SHIFT;
0089
0090 return tmp;
0091 }
0092
0093 static void nbio_v2_3_mc_access_enable(struct amdgpu_device *adev, bool enable)
0094 {
0095 if (enable)
0096 WREG32_SOC15(NBIO, 0, mmBIF_FB_EN,
0097 BIF_FB_EN__FB_READ_EN_MASK |
0098 BIF_FB_EN__FB_WRITE_EN_MASK);
0099 else
0100 WREG32_SOC15(NBIO, 0, mmBIF_FB_EN, 0);
0101 }
0102
0103 static u32 nbio_v2_3_get_memsize(struct amdgpu_device *adev)
0104 {
0105 return RREG32_SOC15(NBIO, 0, mmRCC_DEV0_EPF0_RCC_CONFIG_MEMSIZE);
0106 }
0107
0108 static void nbio_v2_3_sdma_doorbell_range(struct amdgpu_device *adev, int instance,
0109 bool use_doorbell, int doorbell_index,
0110 int doorbell_size)
0111 {
0112 u32 reg = instance == 0 ? SOC15_REG_OFFSET(NBIO, 0, mmBIF_SDMA0_DOORBELL_RANGE) :
0113 instance == 1 ? SOC15_REG_OFFSET(NBIO, 0, mmBIF_SDMA1_DOORBELL_RANGE) :
0114 instance == 2 ? SOC15_REG_OFFSET(NBIO, 0, mmBIF_SDMA2_DOORBELL_RANGE) :
0115 SOC15_REG_OFFSET(NBIO, 0, mmBIF_SDMA3_DOORBELL_RANGE);
0116
0117 u32 doorbell_range = RREG32(reg);
0118
0119 if (use_doorbell) {
0120 doorbell_range = REG_SET_FIELD(doorbell_range,
0121 BIF_SDMA0_DOORBELL_RANGE, OFFSET,
0122 doorbell_index);
0123 doorbell_range = REG_SET_FIELD(doorbell_range,
0124 BIF_SDMA0_DOORBELL_RANGE, SIZE,
0125 doorbell_size);
0126 } else
0127 doorbell_range = REG_SET_FIELD(doorbell_range,
0128 BIF_SDMA0_DOORBELL_RANGE, SIZE,
0129 0);
0130
0131 WREG32(reg, doorbell_range);
0132 }
0133
0134 static void nbio_v2_3_vcn_doorbell_range(struct amdgpu_device *adev, bool use_doorbell,
0135 int doorbell_index, int instance)
0136 {
0137 u32 reg = instance ? SOC15_REG_OFFSET(NBIO, 0, mmBIF_MMSCH1_DOORBELL_RANGE) :
0138 SOC15_REG_OFFSET(NBIO, 0, mmBIF_MMSCH0_DOORBELL_RANGE);
0139
0140 u32 doorbell_range = RREG32(reg);
0141
0142 if (use_doorbell) {
0143 doorbell_range = REG_SET_FIELD(doorbell_range,
0144 BIF_MMSCH0_DOORBELL_RANGE, OFFSET,
0145 doorbell_index);
0146 doorbell_range = REG_SET_FIELD(doorbell_range,
0147 BIF_MMSCH0_DOORBELL_RANGE, SIZE, 8);
0148 } else
0149 doorbell_range = REG_SET_FIELD(doorbell_range,
0150 BIF_MMSCH0_DOORBELL_RANGE, SIZE, 0);
0151
0152 WREG32(reg, doorbell_range);
0153 }
0154
0155 static void nbio_v2_3_enable_doorbell_aperture(struct amdgpu_device *adev,
0156 bool enable)
0157 {
0158 WREG32_FIELD15(NBIO, 0, RCC_DEV0_EPF0_RCC_DOORBELL_APER_EN, BIF_DOORBELL_APER_EN,
0159 enable ? 1 : 0);
0160 }
0161
0162 static void nbio_v2_3_enable_doorbell_selfring_aperture(struct amdgpu_device *adev,
0163 bool enable)
0164 {
0165 u32 tmp = 0;
0166
0167 if (enable) {
0168 tmp = REG_SET_FIELD(tmp, BIF_BX_PF_DOORBELL_SELFRING_GPA_APER_CNTL,
0169 DOORBELL_SELFRING_GPA_APER_EN, 1) |
0170 REG_SET_FIELD(tmp, BIF_BX_PF_DOORBELL_SELFRING_GPA_APER_CNTL,
0171 DOORBELL_SELFRING_GPA_APER_MODE, 1) |
0172 REG_SET_FIELD(tmp, BIF_BX_PF_DOORBELL_SELFRING_GPA_APER_CNTL,
0173 DOORBELL_SELFRING_GPA_APER_SIZE, 0);
0174
0175 WREG32_SOC15(NBIO, 0, mmBIF_BX_PF_DOORBELL_SELFRING_GPA_APER_BASE_LOW,
0176 lower_32_bits(adev->doorbell.base));
0177 WREG32_SOC15(NBIO, 0, mmBIF_BX_PF_DOORBELL_SELFRING_GPA_APER_BASE_HIGH,
0178 upper_32_bits(adev->doorbell.base));
0179 }
0180
0181 WREG32_SOC15(NBIO, 0, mmBIF_BX_PF_DOORBELL_SELFRING_GPA_APER_CNTL,
0182 tmp);
0183 }
0184
0185
0186 static void nbio_v2_3_ih_doorbell_range(struct amdgpu_device *adev,
0187 bool use_doorbell, int doorbell_index)
0188 {
0189 u32 ih_doorbell_range = RREG32_SOC15(NBIO, 0, mmBIF_IH_DOORBELL_RANGE);
0190
0191 if (use_doorbell) {
0192 ih_doorbell_range = REG_SET_FIELD(ih_doorbell_range,
0193 BIF_IH_DOORBELL_RANGE, OFFSET,
0194 doorbell_index);
0195 ih_doorbell_range = REG_SET_FIELD(ih_doorbell_range,
0196 BIF_IH_DOORBELL_RANGE, SIZE,
0197 2);
0198 } else
0199 ih_doorbell_range = REG_SET_FIELD(ih_doorbell_range,
0200 BIF_IH_DOORBELL_RANGE, SIZE,
0201 0);
0202
0203 WREG32_SOC15(NBIO, 0, mmBIF_IH_DOORBELL_RANGE, ih_doorbell_range);
0204 }
0205
0206 static void nbio_v2_3_ih_control(struct amdgpu_device *adev)
0207 {
0208 u32 interrupt_cntl;
0209
0210
0211 WREG32_SOC15(NBIO, 0, mmINTERRUPT_CNTL2, adev->dummy_page_addr >> 8);
0212
0213 interrupt_cntl = RREG32_SOC15(NBIO, 0, mmINTERRUPT_CNTL);
0214
0215
0216
0217
0218 interrupt_cntl = REG_SET_FIELD(interrupt_cntl, INTERRUPT_CNTL,
0219 IH_DUMMY_RD_OVERRIDE, 0);
0220
0221
0222 interrupt_cntl = REG_SET_FIELD(interrupt_cntl, INTERRUPT_CNTL,
0223 IH_REQ_NONSNOOP_EN, 0);
0224
0225 WREG32_SOC15(NBIO, 0, mmINTERRUPT_CNTL, interrupt_cntl);
0226 }
0227
0228 static void nbio_v2_3_update_medium_grain_clock_gating(struct amdgpu_device *adev,
0229 bool enable)
0230 {
0231 uint32_t def, data;
0232
0233 if (!(adev->cg_flags & AMD_CG_SUPPORT_BIF_MGCG))
0234 return;
0235
0236 def = data = RREG32_PCIE(smnCPM_CONTROL);
0237 if (enable) {
0238 data |= (CPM_CONTROL__LCLK_DYN_GATE_ENABLE_MASK |
0239 CPM_CONTROL__TXCLK_DYN_GATE_ENABLE_MASK |
0240 CPM_CONTROL__TXCLK_LCNT_GATE_ENABLE_MASK |
0241 CPM_CONTROL__TXCLK_REGS_GATE_ENABLE_MASK |
0242 CPM_CONTROL__TXCLK_PRBS_GATE_ENABLE_MASK |
0243 CPM_CONTROL__REFCLK_REGS_GATE_ENABLE_MASK);
0244 } else {
0245 data &= ~(CPM_CONTROL__LCLK_DYN_GATE_ENABLE_MASK |
0246 CPM_CONTROL__TXCLK_DYN_GATE_ENABLE_MASK |
0247 CPM_CONTROL__TXCLK_LCNT_GATE_ENABLE_MASK |
0248 CPM_CONTROL__TXCLK_REGS_GATE_ENABLE_MASK |
0249 CPM_CONTROL__TXCLK_PRBS_GATE_ENABLE_MASK |
0250 CPM_CONTROL__REFCLK_REGS_GATE_ENABLE_MASK);
0251 }
0252
0253 if (def != data)
0254 WREG32_PCIE(smnCPM_CONTROL, data);
0255 }
0256
0257 static void nbio_v2_3_update_medium_grain_light_sleep(struct amdgpu_device *adev,
0258 bool enable)
0259 {
0260 uint32_t def, data;
0261
0262 if (!(adev->cg_flags & AMD_CG_SUPPORT_BIF_LS))
0263 return;
0264
0265 def = data = RREG32_PCIE(smnPCIE_CNTL2);
0266 if (enable) {
0267 data |= (PCIE_CNTL2__SLV_MEM_LS_EN_MASK |
0268 PCIE_CNTL2__MST_MEM_LS_EN_MASK |
0269 PCIE_CNTL2__REPLAY_MEM_LS_EN_MASK);
0270 } else {
0271 data &= ~(PCIE_CNTL2__SLV_MEM_LS_EN_MASK |
0272 PCIE_CNTL2__MST_MEM_LS_EN_MASK |
0273 PCIE_CNTL2__REPLAY_MEM_LS_EN_MASK);
0274 }
0275
0276 if (def != data)
0277 WREG32_PCIE(smnPCIE_CNTL2, data);
0278 }
0279
0280 static void nbio_v2_3_get_clockgating_state(struct amdgpu_device *adev,
0281 u64 *flags)
0282 {
0283 int data;
0284
0285
0286 data = RREG32_PCIE(smnCPM_CONTROL);
0287 if (data & CPM_CONTROL__LCLK_DYN_GATE_ENABLE_MASK)
0288 *flags |= AMD_CG_SUPPORT_BIF_MGCG;
0289
0290
0291 data = RREG32_PCIE(smnPCIE_CNTL2);
0292 if (data & PCIE_CNTL2__SLV_MEM_LS_EN_MASK)
0293 *flags |= AMD_CG_SUPPORT_BIF_LS;
0294 }
0295
0296 static u32 nbio_v2_3_get_hdp_flush_req_offset(struct amdgpu_device *adev)
0297 {
0298 return SOC15_REG_OFFSET(NBIO, 0, mmBIF_BX_PF_GPU_HDP_FLUSH_REQ);
0299 }
0300
0301 static u32 nbio_v2_3_get_hdp_flush_done_offset(struct amdgpu_device *adev)
0302 {
0303 return SOC15_REG_OFFSET(NBIO, 0, mmBIF_BX_PF_GPU_HDP_FLUSH_DONE);
0304 }
0305
0306 static u32 nbio_v2_3_get_pcie_index_offset(struct amdgpu_device *adev)
0307 {
0308 return SOC15_REG_OFFSET(NBIO, 0, mmPCIE_INDEX2);
0309 }
0310
0311 static u32 nbio_v2_3_get_pcie_data_offset(struct amdgpu_device *adev)
0312 {
0313 return SOC15_REG_OFFSET(NBIO, 0, mmPCIE_DATA2);
0314 }
0315
/* Per-engine reference/mask bits in GPU_HDP_FLUSH_DONE used when polling
 * for HDP flush completion (CP queues 0-9 and SDMA instances 0-1).
 */
const struct nbio_hdp_flush_reg nbio_v2_3_hdp_flush_reg = {
	.ref_and_mask_cp0 = BIF_BX_PF_GPU_HDP_FLUSH_DONE__CP0_MASK,
	.ref_and_mask_cp1 = BIF_BX_PF_GPU_HDP_FLUSH_DONE__CP1_MASK,
	.ref_and_mask_cp2 = BIF_BX_PF_GPU_HDP_FLUSH_DONE__CP2_MASK,
	.ref_and_mask_cp3 = BIF_BX_PF_GPU_HDP_FLUSH_DONE__CP3_MASK,
	.ref_and_mask_cp4 = BIF_BX_PF_GPU_HDP_FLUSH_DONE__CP4_MASK,
	.ref_and_mask_cp5 = BIF_BX_PF_GPU_HDP_FLUSH_DONE__CP5_MASK,
	.ref_and_mask_cp6 = BIF_BX_PF_GPU_HDP_FLUSH_DONE__CP6_MASK,
	.ref_and_mask_cp7 = BIF_BX_PF_GPU_HDP_FLUSH_DONE__CP7_MASK,
	.ref_and_mask_cp8 = BIF_BX_PF_GPU_HDP_FLUSH_DONE__CP8_MASK,
	.ref_and_mask_cp9 = BIF_BX_PF_GPU_HDP_FLUSH_DONE__CP9_MASK,
	.ref_and_mask_sdma0 = BIF_BX_PF_GPU_HDP_FLUSH_DONE__SDMA0_MASK,
	.ref_and_mask_sdma1 = BIF_BX_PF_GPU_HDP_FLUSH_DONE__SDMA1_MASK,
};
0330
0331 static void nbio_v2_3_init_registers(struct amdgpu_device *adev)
0332 {
0333 uint32_t def, data;
0334
0335 def = data = RREG32_PCIE(smnPCIE_CONFIG_CNTL);
0336 data = REG_SET_FIELD(data, PCIE_CONFIG_CNTL, CI_SWUS_MAX_READ_REQUEST_SIZE_MODE, 1);
0337 data = REG_SET_FIELD(data, PCIE_CONFIG_CNTL, CI_SWUS_MAX_READ_REQUEST_SIZE_PRIV, 1);
0338
0339 if (def != data)
0340 WREG32_PCIE(smnPCIE_CONFIG_CNTL, data);
0341
0342 if (amdgpu_sriov_vf(adev))
0343 adev->rmmio_remap.reg_offset = SOC15_REG_OFFSET(NBIO, 0,
0344 mmBIF_BX_DEV0_EPF0_VF0_HDP_MEM_COHERENCY_FLUSH_CNTL) << 2;
0345 }
0346
0347 #define NAVI10_PCIE__LC_L0S_INACTIVITY_DEFAULT 0x00000000
0348 #define NAVI10_PCIE__LC_L1_INACTIVITY_DEFAULT 0x00000009
0349 #define NAVI10_PCIE__LC_L1_INACTIVITY_TBT_DEFAULT 0x0000000E
0350
0351 static void nbio_v2_3_enable_aspm(struct amdgpu_device *adev,
0352 bool enable)
0353 {
0354 uint32_t def, data;
0355
0356 def = data = RREG32_PCIE(smnPCIE_LC_CNTL);
0357
0358 if (enable) {
0359
0360 data &= ~(PCIE_LC_CNTL__LC_L0S_INACTIVITY_MASK | PCIE_LC_CNTL__LC_L1_INACTIVITY_MASK);
0361
0362 data |= NAVI10_PCIE__LC_L0S_INACTIVITY_DEFAULT << PCIE_LC_CNTL__LC_L0S_INACTIVITY__SHIFT;
0363
0364 if (pci_is_thunderbolt_attached(adev->pdev))
0365 data |= NAVI10_PCIE__LC_L1_INACTIVITY_TBT_DEFAULT << PCIE_LC_CNTL__LC_L1_INACTIVITY__SHIFT;
0366 else
0367 data |= NAVI10_PCIE__LC_L1_INACTIVITY_DEFAULT << PCIE_LC_CNTL__LC_L1_INACTIVITY__SHIFT;
0368
0369 data &= ~PCIE_LC_CNTL__LC_PMI_TO_L1_DIS_MASK;
0370 } else {
0371
0372 data &= ~PCIE_LC_CNTL__LC_L1_INACTIVITY_MASK;
0373
0374 data &= ~PCIE_LC_CNTL__LC_L0S_INACTIVITY_MASK;
0375
0376 data |= PCIE_LC_CNTL__LC_PMI_TO_L1_DIS_MASK;
0377 }
0378
0379 if (def != data)
0380 WREG32_PCIE(smnPCIE_LC_CNTL, data);
0381 }
0382
0383 #ifdef CONFIG_PCIEASPM
0384 static void nbio_v2_3_program_ltr(struct amdgpu_device *adev)
0385 {
0386 uint32_t def, data;
0387
0388 WREG32_PCIE(smnRCC_EP_DEV0_0_EP_PCIE_TX_LTR_CNTL, 0x75EB);
0389
0390 def = data = RREG32_SOC15(NBIO, 0, mmRCC_BIF_STRAP2);
0391 data &= ~RCC_BIF_STRAP2__STRAP_LTR_IN_ASPML1_DIS_MASK;
0392 if (def != data)
0393 WREG32_SOC15(NBIO, 0, mmRCC_BIF_STRAP2, data);
0394
0395 def = data = RREG32_PCIE(smnRCC_EP_DEV0_0_EP_PCIE_TX_LTR_CNTL);
0396 data &= ~EP_PCIE_TX_LTR_CNTL__LTR_PRIV_MSG_DIS_IN_PM_NON_D0_MASK;
0397 if (def != data)
0398 WREG32_PCIE(smnRCC_EP_DEV0_0_EP_PCIE_TX_LTR_CNTL, data);
0399
0400 def = data = RREG32_PCIE(smnBIF_CFG_DEV0_EPF0_DEVICE_CNTL2);
0401 data |= BIF_CFG_DEV0_EPF0_DEVICE_CNTL2__LTR_EN_MASK;
0402 if (def != data)
0403 WREG32_PCIE(smnBIF_CFG_DEV0_EPF0_DEVICE_CNTL2, data);
0404 }
0405 #endif
0406
/* Full ASPM bring-up sequence for NBIO v2.3.  Compiled away unless
 * CONFIG_PCIEASPM is set.  The register sequence is order-dependent:
 * L0s/L1 are first quiesced, link/clock controls are configured, LTR is
 * (re)programmed, and only at the end are the L1 inactivity timer and
 * PMI-to-L1 disable re-applied.  Do not reorder without HW guidance.
 */
static void nbio_v2_3_program_aspm(struct amdgpu_device *adev)
{
#ifdef CONFIG_PCIEASPM
	uint32_t def, data;

	/* quiesce: clear both inactivity timers, block PMI-initiated L1 */
	def = data = RREG32_PCIE(smnPCIE_LC_CNTL);
	data &= ~PCIE_LC_CNTL__LC_L1_INACTIVITY_MASK;
	data &= ~PCIE_LC_CNTL__LC_L0S_INACTIVITY_MASK;
	data |= PCIE_LC_CNTL__LC_PMI_TO_L1_DIS_MASK;
	if (def != data)
		WREG32_PCIE(smnPCIE_LC_CNTL, data);

	/* let the NBIF drive ASPM input */
	def = data = RREG32_PCIE(smnPCIE_LC_CNTL7);
	data |= PCIE_LC_CNTL7__LC_NBIF_ASPM_INPUT_EN_MASK;
	if (def != data)
		WREG32_PCIE(smnPCIE_LC_CNTL7, data);

	/* disable NBIF MGCG register clock gating during reprogramming */
	def = data = RREG32_PCIE(smnNBIF_MGCG_CTRL_LCLK);
	data |= NBIF_MGCG_CTRL_LCLK__NBIF_MGCG_REG_DIS_LCLK_MASK;
	if (def != data)
		WREG32_PCIE(smnNBIF_MGCG_CTRL_LCLK, data);

	/* keep the link out of L2/L3 after a PME ack while configuring */
	def = data = RREG32_PCIE(smnPCIE_LC_CNTL3);
	data |= PCIE_LC_CNTL3__LC_DSC_DONT_ENTER_L23_AFTER_PME_ACK_MASK;
	if (def != data)
		WREG32_PCIE(smnPCIE_LC_CNTL3, data);

	/* zero the vlink ASPM idle / PM L1 entry timers (set again below) */
	def = data = RREG32_SOC15(NBIO, 0, mmRCC_BIF_STRAP3);
	data &= ~RCC_BIF_STRAP3__STRAP_VLINK_ASPM_IDLE_TIMER_MASK;
	data &= ~RCC_BIF_STRAP3__STRAP_VLINK_PM_L1_ENTRY_TIMER_MASK;
	if (def != data)
		WREG32_SOC15(NBIO, 0, mmRCC_BIF_STRAP3, data);

	/* zero the vlink link-down entry timer (set again below) */
	def = data = RREG32_SOC15(NBIO, 0, mmRCC_BIF_STRAP5);
	data &= ~RCC_BIF_STRAP5__STRAP_VLINK_LDN_ENTRY_TIMER_MASK;
	if (def != data)
		WREG32_SOC15(NBIO, 0, mmRCC_BIF_STRAP5, data);

	/* disable LTR while the LTR capability is being rewritten */
	def = data = RREG32_PCIE(smnBIF_CFG_DEV0_EPF0_DEVICE_CNTL2);
	data &= ~BIF_CFG_DEV0_EPF0_DEVICE_CNTL2__LTR_EN_MASK;
	if (def != data)
		WREG32_PCIE(smnBIF_CFG_DEV0_EPF0_DEVICE_CNTL2, data);

	/* advertised LTR capability value (snoop/no-snoop latency fields) */
	WREG32_PCIE(smnBIF_CFG_DEV0_EPF0_PCIE_LTR_CAP, 0x10011001);

	/* allow power-down in L1/L2-L3, keep L0->L0s receive path enabled */
	def = data = RREG32_PCIE(smnPSWUSP0_PCIE_LC_CNTL2);
	data |= PSWUSP0_PCIE_LC_CNTL2__LC_ALLOW_PDWN_IN_L1_MASK |
		PSWUSP0_PCIE_LC_CNTL2__LC_ALLOW_PDWN_IN_L23_MASK;
	data &= ~PSWUSP0_PCIE_LC_CNTL2__LC_RCV_L0_TO_RCV_L0S_DIS_MASK;
	if (def != data)
		WREG32_PCIE(smnPSWUSP0_PCIE_LC_CNTL2, data);

	/* L1 powerdown and RX L0s standby */
	def = data = RREG32_PCIE(smnPCIE_LC_CNTL6);
	data |= PCIE_LC_CNTL6__LC_L1_POWERDOWN_MASK |
		PCIE_LC_CNTL6__LC_RX_L0S_STANDBY_EN_MASK;
	if (def != data)
		WREG32_PCIE(smnPCIE_LC_CNTL6, data);

	/* program LTR only when the upstream path supports it */
	if (adev->pdev->ltr_path)
		nbio_v2_3_program_ltr(adev);

	/* restore vlink timers with the production values */
	def = data = RREG32_SOC15(NBIO, 0, mmRCC_BIF_STRAP3);
	data |= 0x5DE0 << RCC_BIF_STRAP3__STRAP_VLINK_ASPM_IDLE_TIMER__SHIFT;
	data |= 0x0010 << RCC_BIF_STRAP3__STRAP_VLINK_PM_L1_ENTRY_TIMER__SHIFT;
	if (def != data)
		WREG32_SOC15(NBIO, 0, mmRCC_BIF_STRAP3, data);

	def = data = RREG32_SOC15(NBIO, 0, mmRCC_BIF_STRAP5);
	data |= 0x0010 << RCC_BIF_STRAP5__STRAP_VLINK_LDN_ENTRY_TIMER__SHIFT;
	if (def != data)
		WREG32_SOC15(NBIO, 0, mmRCC_BIF_STRAP5, data);

	/* final LC_CNTL state: L0s timer cleared, L1 inactivity = 0x9,
	 * PMI-initiated L1 entry kept disabled */
	def = data = RREG32_PCIE(smnPCIE_LC_CNTL);
	data &= ~PCIE_LC_CNTL__LC_L0S_INACTIVITY_MASK;
	data |= 0x9 << PCIE_LC_CNTL__LC_L1_INACTIVITY__SHIFT;
	data |= 0x1 << PCIE_LC_CNTL__LC_PMI_TO_L1_DIS__SHIFT;
	if (def != data)
		WREG32_PCIE(smnPCIE_LC_CNTL, data);

	/* re-allow L2/L3 entry after PME ack now that setup is complete */
	def = data = RREG32_PCIE(smnPCIE_LC_CNTL3);
	data &= ~PCIE_LC_CNTL3__LC_DSC_DONT_ENTER_L23_AFTER_PME_ACK_MASK;
	if (def != data)
		WREG32_PCIE(smnPCIE_LC_CNTL3, data);
#endif
}
0494
0495 static void nbio_v2_3_apply_lc_spc_mode_wa(struct amdgpu_device *adev)
0496 {
0497 uint32_t reg_data = 0;
0498 uint32_t link_width = 0;
0499
0500 if (!((adev->asic_type >= CHIP_NAVI10) &&
0501 (adev->asic_type <= CHIP_NAVI12)))
0502 return;
0503
0504 reg_data = RREG32_PCIE(smnPCIE_LC_LINK_WIDTH_CNTL);
0505 link_width = (reg_data & PCIE_LC_LINK_WIDTH_CNTL__LC_LINK_WIDTH_RD_MASK)
0506 >> PCIE_LC_LINK_WIDTH_CNTL__LC_LINK_WIDTH_RD__SHIFT;
0507
0508
0509
0510
0511
0512 if (0x3 == link_width) {
0513 reg_data = RREG32_PCIE(smnPCIE_LC_CNTL6);
0514 reg_data &= ~PCIE_LC_CNTL6__LC_SPC_MODE_8GT_MASK;
0515 reg_data |= (0x2 << PCIE_LC_CNTL6__LC_SPC_MODE_8GT__SHIFT);
0516 WREG32_PCIE(smnPCIE_LC_CNTL6, reg_data);
0517 }
0518 }
0519
0520 static void nbio_v2_3_apply_l1_link_width_reconfig_wa(struct amdgpu_device *adev)
0521 {
0522 uint32_t reg_data = 0;
0523
0524 if (adev->asic_type != CHIP_NAVI10)
0525 return;
0526
0527 reg_data = RREG32_PCIE(smnPCIE_LC_LINK_WIDTH_CNTL);
0528 reg_data |= PCIE_LC_LINK_WIDTH_CNTL__LC_L1_RECONFIG_EN_MASK;
0529 WREG32_PCIE(smnPCIE_LC_LINK_WIDTH_CNTL, reg_data);
0530 }
0531
0532 static void nbio_v2_3_clear_doorbell_interrupt(struct amdgpu_device *adev)
0533 {
0534 uint32_t reg, reg_data;
0535
0536 if (adev->ip_versions[NBIO_HWIP][0] != IP_VERSION(3, 3, 0))
0537 return;
0538
0539 reg = RREG32_SOC15(NBIO, 0, mmBIF_RB_CNTL);
0540
0541
0542
0543 if ((reg & BIF_RB_CNTL__RB_ENABLE_MASK) == 0) {
0544 reg = RREG32_SOC15(NBIO, 0, mmBIF_DOORBELL_INT_CNTL);
0545 if (reg & BIF_DOORBELL_INT_CNTL__DOORBELL_INTERRUPT_STATUS_MASK) {
0546 reg_data = 1 << BIF_DOORBELL_INT_CNTL__DOORBELL_INTERRUPT_CLEAR__SHIFT;
0547 WREG32_SOC15(NBIO, 0, mmBIF_DOORBELL_INT_CNTL, reg_data);
0548 }
0549 }
0550 }
0551
/* NBIO v2.3 callback table exported to the core amdgpu NBIO layer. */
const struct amdgpu_nbio_funcs nbio_v2_3_funcs = {
	.get_hdp_flush_req_offset = nbio_v2_3_get_hdp_flush_req_offset,
	.get_hdp_flush_done_offset = nbio_v2_3_get_hdp_flush_done_offset,
	.get_pcie_index_offset = nbio_v2_3_get_pcie_index_offset,
	.get_pcie_data_offset = nbio_v2_3_get_pcie_data_offset,
	.get_rev_id = nbio_v2_3_get_rev_id,
	.mc_access_enable = nbio_v2_3_mc_access_enable,
	.get_memsize = nbio_v2_3_get_memsize,
	.sdma_doorbell_range = nbio_v2_3_sdma_doorbell_range,
	.vcn_doorbell_range = nbio_v2_3_vcn_doorbell_range,
	.enable_doorbell_aperture = nbio_v2_3_enable_doorbell_aperture,
	.enable_doorbell_selfring_aperture = nbio_v2_3_enable_doorbell_selfring_aperture,
	.ih_doorbell_range = nbio_v2_3_ih_doorbell_range,
	.update_medium_grain_clock_gating = nbio_v2_3_update_medium_grain_clock_gating,
	.update_medium_grain_light_sleep = nbio_v2_3_update_medium_grain_light_sleep,
	.get_clockgating_state = nbio_v2_3_get_clockgating_state,
	.ih_control = nbio_v2_3_ih_control,
	.init_registers = nbio_v2_3_init_registers,
	.remap_hdp_registers = nbio_v2_3_remap_hdp_registers,
	.enable_aspm = nbio_v2_3_enable_aspm,
	.program_aspm = nbio_v2_3_program_aspm,
	.apply_lc_spc_mode_wa = nbio_v2_3_apply_lc_spc_mode_wa,
	.apply_l1_link_width_reconfig_wa = nbio_v2_3_apply_l1_link_width_reconfig_wa,
	.clear_doorbell_interrupt = nbio_v2_3_clear_doorbell_interrupt,
};