/*
 * APM X-Gene Multi-Purpose PHY driver
 *
 * Author: Loc Ho <lho@apm.com>
 *
 * The PHY consists of PLL clock macro units (CMUs) and per-lane RX/TX
 * blocks that are programmed through an indirect register interface.
 * Only the SATA customer pin mode is supported by this driver.
 */

0041 #include <linux/module.h>
0042 #include <linux/platform_device.h>
0043 #include <linux/io.h>
0044 #include <linux/delay.h>
0045 #include <linux/phy/phy.h>
0046 #include <linux/clk.h>
0047
0048
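/* Maximum number of SerDes lanes handled per PHY instance */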
0049 #define MAX_LANE 2
0050
0051
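/* Base offsets of the register banks reached through the indirect command interface */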
0052 #define SERDES_PLL_INDIRECT_OFFSET 0x0000
0053 #define SERDES_PLL_REF_INDIRECT_OFFSET 0x2000
0054 #define SERDES_INDIRECT_OFFSET 0x0400
0055 #define SERDES_LANE_STRIDE 0x0200
0056
0057
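/*
 * Default SATA TX tuning parameters; each array holds one value per
 * SATA generation (Gen1/Gen2/Gen3).
 */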
0058 #define DEFAULT_SATA_TXBOOST_GAIN { 0x1e, 0x1e, 0x1e }
0059 #define DEFAULT_SATA_TXEYEDIRECTION { 0x0, 0x0, 0x0 }
0060 #define DEFAULT_SATA_TXEYETUNING { 0xa, 0xa, 0xa }
0061 #define DEFAULT_SATA_SPD_SEL { 0x1, 0x3, 0x7 }
0062 #define DEFAULT_SATA_TXAMP { 0x8, 0x8, 0x8 }
0063 #define DEFAULT_SATA_TXCN1 { 0x2, 0x2, 0x2 }
0064 #define DEFAULT_SATA_TXCN2 { 0x0, 0x0, 0x0 }
0065 #define DEFAULT_SATA_TXCP1 { 0xa, 0xa, 0xa }
0066
0067 #define SATA_SPD_SEL_GEN3 0x7
0068 #define SATA_SPD_SEL_GEN2 0x3
0069 #define SATA_SPD_SEL_GEN1 0x1
0070
0071 #define SSC_DISABLE 0
0072 #define SSC_ENABLE 1
0073
0074 #define FBDIV_VAL_50M 0x77
0075 #define REFDIV_VAL_50M 0x1
0076 #define FBDIV_VAL_100M 0x3B
0077 #define REFDIV_VAL_100M 0x0
0078
0079
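/* SATA clock enable and reset CSR registers */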
0080 #define SATACLKENREG 0x00000000
0081 #define SATA0_CORE_CLKEN 0x00000002
0082 #define SATA1_CORE_CLKEN 0x00000004
0083 #define SATASRESETREG 0x00000004
0084 #define SATA_MEM_RESET_MASK 0x00000020
0085 #define SATA_MEM_RESET_RD(src) (((src) & 0x00000020) >> 5)
0086 #define SATA_SDS_RESET_MASK 0x00000004
0087 #define SATA_CSR_RESET_MASK 0x00000001
0088 #define SATA_CORE_RESET_MASK 0x00000002
0089 #define SATA_PMCLK_RESET_MASK 0x00000010
0090 #define SATA_PCLK_RESET_MASK 0x00000008
0091
0092
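/* SerDes CSR registers, including the indirect command/data registers */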
0093 #define SATA_ENET_SDS_PCS_CTL0 0x00000000
0094 #define REGSPEC_CFG_I_TX_WORDMODE0_SET(dst, src) \
0095 (((dst) & ~0x00070000) | (((u32) (src) << 16) & 0x00070000))
0096 #define REGSPEC_CFG_I_RX_WORDMODE0_SET(dst, src) \
0097 (((dst) & ~0x00e00000) | (((u32) (src) << 21) & 0x00e00000))
0098 #define SATA_ENET_SDS_CTL0 0x0000000c
0099 #define REGSPEC_CFG_I_CUSTOMER_PIN_MODE0_SET(dst, src) \
0100 (((dst) & ~0x00007fff) | (((u32) (src)) & 0x00007fff))
0101 #define SATA_ENET_SDS_CTL1 0x00000010
0102 #define CFG_I_SPD_SEL_CDR_OVR1_SET(dst, src) \
0103 (((dst) & ~0x0000000f) | (((u32) (src)) & 0x0000000f))
0104 #define SATA_ENET_SDS_RST_CTL 0x00000024
0105 #define SATA_ENET_SDS_IND_CMD_REG 0x0000003c
0106 #define CFG_IND_WR_CMD_MASK 0x00000001
0107 #define CFG_IND_RD_CMD_MASK 0x00000002
0108 #define CFG_IND_CMD_DONE_MASK 0x00000004
0109 #define CFG_IND_ADDR_SET(dst, src) \
0110 (((dst) & ~0x003ffff0) | (((u32) (src) << 4) & 0x003ffff0))
0111 #define SATA_ENET_SDS_IND_RDATA_REG 0x00000040
0112 #define SATA_ENET_SDS_IND_WDATA_REG 0x00000044
0113 #define SATA_ENET_CLK_MACRO_REG 0x0000004c
0114 #define I_RESET_B_SET(dst, src) \
0115 (((dst) & ~0x00000001) | (((u32) (src)) & 0x00000001))
0116 #define I_PLL_FBDIV_SET(dst, src) \
0117 (((dst) & ~0x001ff000) | (((u32) (src) << 12) & 0x001ff000))
0118 #define I_CUSTOMEROV_SET(dst, src) \
0119 (((dst) & ~0x00000f80) | (((u32) (src) << 7) & 0x00000f80))
0120 #define O_PLL_LOCK_RD(src) (((src) & 0x40000000) >> 30)
0121 #define O_PLL_READY_RD(src) (((src) & 0x80000000) >> 31)
0122
0123
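/*
 * Clock macro unit (CMU) registers, accessed through the indirect
 * interface. The *_SET(dst, src) helpers insert a field: they clear the
 * field mask in @dst and OR in @src shifted to the field position, e.g.
 * CMU_REG2_PLL_REFDIV_SET(val, REFDIV_VAL_100M).
 */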
0124 #define CMU_REG0 0x00000
0125 #define CMU_REG0_PLL_REF_SEL_MASK 0x00002000
0126 #define CMU_REG0_PLL_REF_SEL_SET(dst, src) \
0127 (((dst) & ~0x00002000) | (((u32) (src) << 13) & 0x00002000))
0128 #define CMU_REG0_PDOWN_MASK 0x00004000
0129 #define CMU_REG0_CAL_COUNT_RESOL_SET(dst, src) \
0130 (((dst) & ~0x000000e0) | (((u32) (src) << 5) & 0x000000e0))
0131 #define CMU_REG1 0x00002
0132 #define CMU_REG1_PLL_CP_SET(dst, src) \
0133 (((dst) & ~0x00003c00) | (((u32) (src) << 10) & 0x00003c00))
0134 #define CMU_REG1_PLL_MANUALCAL_SET(dst, src) \
0135 (((dst) & ~0x00000008) | (((u32) (src) << 3) & 0x00000008))
0136 #define CMU_REG1_PLL_CP_SEL_SET(dst, src) \
0137 (((dst) & ~0x000003e0) | (((u32) (src) << 5) & 0x000003e0))
0138 #define CMU_REG1_REFCLK_CMOS_SEL_MASK 0x00000001
0139 #define CMU_REG1_REFCLK_CMOS_SEL_SET(dst, src) \
0140 (((dst) & ~0x00000001) | (((u32) (src) << 0) & 0x00000001))
0141 #define CMU_REG2 0x00004
0142 #define CMU_REG2_PLL_REFDIV_SET(dst, src) \
0143 (((dst) & ~0x0000c000) | (((u32) (src) << 14) & 0x0000c000))
0144 #define CMU_REG2_PLL_LFRES_SET(dst, src) \
0145 (((dst) & ~0x0000001e) | (((u32) (src) << 1) & 0x0000001e))
0146 #define CMU_REG2_PLL_FBDIV_SET(dst, src) \
0147 (((dst) & ~0x00003fe0) | (((u32) (src) << 5) & 0x00003fe0))
0148 #define CMU_REG3 0x00006
0149 #define CMU_REG3_VCOVARSEL_SET(dst, src) \
0150 (((dst) & ~0x0000000f) | (((u32) (src) << 0) & 0x0000000f))
0151 #define CMU_REG3_VCO_MOMSEL_INIT_SET(dst, src) \
0152 (((dst) & ~0x000003f0) | (((u32) (src) << 4) & 0x000003f0))
0153 #define CMU_REG3_VCO_MANMOMSEL_SET(dst, src) \
0154 (((dst) & ~0x0000fc00) | (((u32) (src) << 10) & 0x0000fc00))
0155 #define CMU_REG4 0x00008
0156 #define CMU_REG5 0x0000a
0157 #define CMU_REG5_PLL_LFSMCAP_SET(dst, src) \
0158 (((dst) & ~0x0000c000) | (((u32) (src) << 14) & 0x0000c000))
0159 #define CMU_REG5_PLL_LOCK_RESOLUTION_SET(dst, src) \
0160 (((dst) & ~0x0000000e) | (((u32) (src) << 1) & 0x0000000e))
0161 #define CMU_REG5_PLL_LFCAP_SET(dst, src) \
0162 (((dst) & ~0x00003000) | (((u32) (src) << 12) & 0x00003000))
0163 #define CMU_REG5_PLL_RESETB_MASK 0x00000001
0164 #define CMU_REG6 0x0000c
0165 #define CMU_REG6_PLL_VREGTRIM_SET(dst, src) \
0166 (((dst) & ~0x00000600) | (((u32) (src) << 9) & 0x00000600))
0167 #define CMU_REG6_MAN_PVT_CAL_SET(dst, src) \
0168 (((dst) & ~0x00000004) | (((u32) (src) << 2) & 0x00000004))
0169 #define CMU_REG7 0x0000e
0170 #define CMU_REG7_PLL_CALIB_DONE_RD(src) ((0x00004000 & (u32) (src)) >> 14)
0171 #define CMU_REG7_VCO_CAL_FAIL_RD(src) ((0x00000c00 & (u32) (src)) >> 10)
0172 #define CMU_REG8 0x00010
0173 #define CMU_REG9 0x00012
0174 #define CMU_REG9_WORD_LEN_8BIT 0x000
0175 #define CMU_REG9_WORD_LEN_10BIT 0x001
0176 #define CMU_REG9_WORD_LEN_16BIT 0x002
0177 #define CMU_REG9_WORD_LEN_20BIT 0x003
0178 #define CMU_REG9_WORD_LEN_32BIT 0x004
0179 #define CMU_REG9_WORD_LEN_40BIT 0x005
0180 #define CMU_REG9_WORD_LEN_64BIT 0x006
0181 #define CMU_REG9_WORD_LEN_66BIT 0x007
0182 #define CMU_REG9_TX_WORD_MODE_CH1_SET(dst, src) \
0183 (((dst) & ~0x00000380) | (((u32) (src) << 7) & 0x00000380))
0184 #define CMU_REG9_TX_WORD_MODE_CH0_SET(dst, src) \
0185 (((dst) & ~0x00000070) | (((u32) (src) << 4) & 0x00000070))
0186 #define CMU_REG9_PLL_POST_DIVBY2_SET(dst, src) \
0187 (((dst) & ~0x00000008) | (((u32) (src) << 3) & 0x00000008))
0188 #define CMU_REG9_VBG_BYPASSB_SET(dst, src) \
0189 (((dst) & ~0x00000004) | (((u32) (src) << 2) & 0x00000004))
0190 #define CMU_REG9_IGEN_BYPASS_SET(dst, src) \
0191 (((dst) & ~0x00000002) | (((u32) (src) << 1) & 0x00000002))
0192 #define CMU_REG10 0x00014
0193 #define CMU_REG10_VREG_REFSEL_SET(dst, src) \
0194 (((dst) & ~0x00000001) | (((u32) (src) << 0) & 0x00000001))
0195 #define CMU_REG11 0x00016
0196 #define CMU_REG12 0x00018
0197 #define CMU_REG12_STATE_DELAY9_SET(dst, src) \
0198 (((dst) & ~0x000000f0) | (((u32) (src) << 4) & 0x000000f0))
0199 #define CMU_REG13 0x0001a
0200 #define CMU_REG14 0x0001c
0201 #define CMU_REG15 0x0001e
0202 #define CMU_REG16 0x00020
0203 #define CMU_REG16_PVT_DN_MAN_ENA_MASK 0x00000001
0204 #define CMU_REG16_PVT_UP_MAN_ENA_MASK 0x00000002
0205 #define CMU_REG16_VCOCAL_WAIT_BTW_CODE_SET(dst, src) \
0206 (((dst) & ~0x0000001c) | (((u32) (src) << 2) & 0x0000001c))
0207 #define CMU_REG16_CALIBRATION_DONE_OVERRIDE_SET(dst, src) \
0208 (((dst) & ~0x00000040) | (((u32) (src) << 6) & 0x00000040))
0209 #define CMU_REG16_BYPASS_PLL_LOCK_SET(dst, src) \
0210 (((dst) & ~0x00000020) | (((u32) (src) << 5) & 0x00000020))
0211 #define CMU_REG17 0x00022
0212 #define CMU_REG17_PVT_CODE_R2A_SET(dst, src) \
0213 (((dst) & ~0x00007f00) | (((u32) (src) << 8) & 0x00007f00))
0214 #define CMU_REG17_RESERVED_7_SET(dst, src) \
0215 (((dst) & ~0x000000e0) | (((u32) (src) << 5) & 0x000000e0))
0216 #define CMU_REG17_PVT_TERM_MAN_ENA_MASK 0x00008000
0217 #define CMU_REG18 0x00024
0218 #define CMU_REG19 0x00026
0219 #define CMU_REG20 0x00028
0220 #define CMU_REG21 0x0002a
0221 #define CMU_REG22 0x0002c
0222 #define CMU_REG23 0x0002e
0223 #define CMU_REG24 0x00030
0224 #define CMU_REG25 0x00032
0225 #define CMU_REG26 0x00034
0226 #define CMU_REG26_FORCE_PLL_LOCK_SET(dst, src) \
0227 (((dst) & ~0x00000001) | (((u32) (src) << 0) & 0x00000001))
0228 #define CMU_REG27 0x00036
0229 #define CMU_REG28 0x00038
0230 #define CMU_REG29 0x0003a
0231 #define CMU_REG30 0x0003c
0232 #define CMU_REG30_LOCK_COUNT_SET(dst, src) \
0233 (((dst) & ~0x00000006) | (((u32) (src) << 1) & 0x00000006))
0234 #define CMU_REG30_PCIE_MODE_SET(dst, src) \
0235 (((dst) & ~0x00000008) | (((u32) (src) << 3) & 0x00000008))
0236 #define CMU_REG31 0x0003e
0237 #define CMU_REG32 0x00040
0238 #define CMU_REG32_FORCE_VCOCAL_START_MASK 0x00004000
0239 #define CMU_REG32_PVT_CAL_WAIT_SEL_SET(dst, src) \
0240 (((dst) & ~0x00000006) | (((u32) (src) << 1) & 0x00000006))
0241 #define CMU_REG32_IREF_ADJ_SET(dst, src) \
0242 (((dst) & ~0x00000180) | (((u32) (src) << 7) & 0x00000180))
0243 #define CMU_REG33 0x00042
0244 #define CMU_REG34 0x00044
0245 #define CMU_REG34_VCO_CAL_VTH_LO_MAX_SET(dst, src) \
0246 (((dst) & ~0x0000000f) | (((u32) (src) << 0) & 0x0000000f))
0247 #define CMU_REG34_VCO_CAL_VTH_HI_MAX_SET(dst, src) \
0248 (((dst) & ~0x00000f00) | (((u32) (src) << 8) & 0x00000f00))
0249 #define CMU_REG34_VCO_CAL_VTH_LO_MIN_SET(dst, src) \
0250 (((dst) & ~0x000000f0) | (((u32) (src) << 4) & 0x000000f0))
0251 #define CMU_REG34_VCO_CAL_VTH_HI_MIN_SET(dst, src) \
0252 (((dst) & ~0x0000f000) | (((u32) (src) << 12) & 0x0000f000))
0253 #define CMU_REG35 0x00046
0254 #define CMU_REG35_PLL_SSC_MOD_SET(dst, src) \
0255 (((dst) & ~0x0000fe00) | (((u32) (src) << 9) & 0x0000fe00))
0256 #define CMU_REG36 0x00048
0257 #define CMU_REG36_PLL_SSC_EN_SET(dst, src) \
0258 (((dst) & ~0x00000010) | (((u32) (src) << 4) & 0x00000010))
0259 #define CMU_REG36_PLL_SSC_VSTEP_SET(dst, src) \
0260 (((dst) & ~0x0000ffc0) | (((u32) (src) << 6) & 0x0000ffc0))
0261 #define CMU_REG36_PLL_SSC_DSMSEL_SET(dst, src) \
0262 (((dst) & ~0x00000020) | (((u32) (src) << 5) & 0x00000020))
0263 #define CMU_REG37 0x0004a
0264 #define CMU_REG38 0x0004c
0265 #define CMU_REG39 0x0004e
0266
0267
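/* Per-lane RX/TX registers, also accessed through the indirect interface */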
0268 #define RXTX_REG0 0x000
0269 #define RXTX_REG0_CTLE_EQ_HR_SET(dst, src) \
0270 (((dst) & ~0x0000f800) | (((u32) (src) << 11) & 0x0000f800))
0271 #define RXTX_REG0_CTLE_EQ_QR_SET(dst, src) \
0272 (((dst) & ~0x000007c0) | (((u32) (src) << 6) & 0x000007c0))
0273 #define RXTX_REG0_CTLE_EQ_FR_SET(dst, src) \
0274 (((dst) & ~0x0000003e) | (((u32) (src) << 1) & 0x0000003e))
0275 #define RXTX_REG1 0x002
0276 #define RXTX_REG1_RXACVCM_SET(dst, src) \
0277 (((dst) & ~0x0000f000) | (((u32) (src) << 12) & 0x0000f000))
0278 #define RXTX_REG1_CTLE_EQ_SET(dst, src) \
0279 (((dst) & ~0x00000f80) | (((u32) (src) << 7) & 0x00000f80))
0280 #define RXTX_REG1_RXVREG1_SET(dst, src) \
0281 (((dst) & ~0x00000060) | (((u32) (src) << 5) & 0x00000060))
0282 #define RXTX_REG1_RXIREF_ADJ_SET(dst, src) \
0283 (((dst) & ~0x00000006) | (((u32) (src) << 1) & 0x00000006))
0284 #define RXTX_REG2 0x004
0285 #define RXTX_REG2_VTT_ENA_SET(dst, src) \
0286 (((dst) & ~0x00000100) | (((u32) (src) << 8) & 0x00000100))
0287 #define RXTX_REG2_TX_FIFO_ENA_SET(dst, src) \
0288 (((dst) & ~0x00000020) | (((u32) (src) << 5) & 0x00000020))
0289 #define RXTX_REG2_VTT_SEL_SET(dst, src) \
0290 (((dst) & ~0x000000c0) | (((u32) (src) << 6) & 0x000000c0))
0291 #define RXTX_REG4 0x008
0292 #define RXTX_REG4_TX_LOOPBACK_BUF_EN_MASK 0x00000040
0293 #define RXTX_REG4_TX_DATA_RATE_SET(dst, src) \
0294 (((dst) & ~0x0000c000) | (((u32) (src) << 14) & 0x0000c000))
0295 #define RXTX_REG4_TX_WORD_MODE_SET(dst, src) \
0296 (((dst) & ~0x00003800) | (((u32) (src) << 11) & 0x00003800))
0297 #define RXTX_REG5 0x00a
0298 #define RXTX_REG5_TX_CN1_SET(dst, src) \
0299 (((dst) & ~0x0000f800) | (((u32) (src) << 11) & 0x0000f800))
0300 #define RXTX_REG5_TX_CP1_SET(dst, src) \
0301 (((dst) & ~0x000007e0) | (((u32) (src) << 5) & 0x000007e0))
0302 #define RXTX_REG5_TX_CN2_SET(dst, src) \
0303 (((dst) & ~0x0000001f) | (((u32) (src) << 0) & 0x0000001f))
0304 #define RXTX_REG6 0x00c
0305 #define RXTX_REG6_TXAMP_CNTL_SET(dst, src) \
0306 (((dst) & ~0x00000780) | (((u32) (src) << 7) & 0x00000780))
0307 #define RXTX_REG6_TXAMP_ENA_SET(dst, src) \
0308 (((dst) & ~0x00000040) | (((u32) (src) << 6) & 0x00000040))
0309 #define RXTX_REG6_RX_BIST_ERRCNT_RD_SET(dst, src) \
0310 (((dst) & ~0x00000001) | (((u32) (src) << 0) & 0x00000001))
0311 #define RXTX_REG6_TX_IDLE_SET(dst, src) \
0312 (((dst) & ~0x00000008) | (((u32) (src) << 3) & 0x00000008))
0313 #define RXTX_REG6_RX_BIST_RESYNC_SET(dst, src) \
0314 (((dst) & ~0x00000002) | (((u32) (src) << 1) & 0x00000002))
0315 #define RXTX_REG7 0x00e
0316 #define RXTX_REG7_RESETB_RXD_MASK 0x00000100
0317 #define RXTX_REG7_RESETB_RXA_MASK 0x00000080
0318 #define RXTX_REG7_BIST_ENA_RX_SET(dst, src) \
0319 (((dst) & ~0x00000040) | (((u32) (src) << 6) & 0x00000040))
0320 #define RXTX_REG7_RX_WORD_MODE_SET(dst, src) \
0321 (((dst) & ~0x00003800) | (((u32) (src) << 11) & 0x00003800))
0322 #define RXTX_REG8 0x010
0323 #define RXTX_REG8_CDR_LOOP_ENA_SET(dst, src) \
0324 (((dst) & ~0x00004000) | (((u32) (src) << 14) & 0x00004000))
0325 #define RXTX_REG8_CDR_BYPASS_RXLOS_SET(dst, src) \
0326 (((dst) & ~0x00000800) | (((u32) (src) << 11) & 0x00000800))
0327 #define RXTX_REG8_SSC_ENABLE_SET(dst, src) \
0328 (((dst) & ~0x00000200) | (((u32) (src) << 9) & 0x00000200))
0329 #define RXTX_REG8_SD_VREF_SET(dst, src) \
0330 (((dst) & ~0x000000f0) | (((u32) (src) << 4) & 0x000000f0))
0331 #define RXTX_REG8_SD_DISABLE_SET(dst, src) \
0332 (((dst) & ~0x00000100) | (((u32) (src) << 8) & 0x00000100))
0334 #define RXTX_REG7_RESETB_RXD_SET(dst, src) \
0335 (((dst) & ~0x00000100) | (((u32) (src) << 8) & 0x00000100))
0336 #define RXTX_REG7_RESETB_RXA_SET(dst, src) \
0337 (((dst) & ~0x00000080) | (((u32) (src) << 7) & 0x00000080))
0338 #define RXTX_REG7_LOOP_BACK_ENA_CTLE_MASK 0x00004000
0339 #define RXTX_REG7_LOOP_BACK_ENA_CTLE_SET(dst, src) \
0340 (((dst) & ~0x00004000) | (((u32) (src) << 14) & 0x00004000))
0341 #define RXTX_REG11 0x016
0342 #define RXTX_REG11_PHASE_ADJUST_LIMIT_SET(dst, src) \
0343 (((dst) & ~0x0000f800) | (((u32) (src) << 11) & 0x0000f800))
0344 #define RXTX_REG12 0x018
0345 #define RXTX_REG12_LATCH_OFF_ENA_SET(dst, src) \
0346 (((dst) & ~0x00002000) | (((u32) (src) << 13) & 0x00002000))
0347 #define RXTX_REG12_SUMOS_ENABLE_SET(dst, src) \
0348 (((dst) & ~0x00000004) | (((u32) (src) << 2) & 0x00000004))
0349 #define RXTX_REG12_RX_DET_TERM_ENABLE_MASK 0x00000002
0350 #define RXTX_REG12_RX_DET_TERM_ENABLE_SET(dst, src) \
0351 (((dst) & ~0x00000002) | (((u32) (src) << 1) & 0x00000002))
0352 #define RXTX_REG13 0x01a
0353 #define RXTX_REG14 0x01c
0354 #define RXTX_REG14_CLTE_LATCAL_MAN_PROG_SET(dst, src) \
0355 (((dst) & ~0x0000003f) | (((u32) (src) << 0) & 0x0000003f))
0356 #define RXTX_REG14_CTLE_LATCAL_MAN_ENA_SET(dst, src) \
0357 (((dst) & ~0x00000040) | (((u32) (src) << 6) & 0x00000040))
0358 #define RXTX_REG26 0x034
0359 #define RXTX_REG26_PERIOD_ERROR_LATCH_SET(dst, src) \
0360 (((dst) & ~0x00003800) | (((u32) (src) << 11) & 0x00003800))
0361 #define RXTX_REG26_BLWC_ENA_SET(dst, src) \
0362 (((dst) & ~0x00000008) | (((u32) (src) << 3) & 0x00000008))
0363 #define RXTX_REG21 0x02a
0364 #define RXTX_REG21_DO_LATCH_CALOUT_RD(src) ((0x0000fc00 & (u32) (src)) >> 10)
0365 #define RXTX_REG21_XO_LATCH_CALOUT_RD(src) ((0x000003f0 & (u32) (src)) >> 4)
0366 #define RXTX_REG21_LATCH_CAL_FAIL_ODD_RD(src) ((0x0000000f & (u32)(src)))
0367 #define RXTX_REG22 0x02c
0368 #define RXTX_REG22_SO_LATCH_CALOUT_RD(src) ((0x000003f0 & (u32) (src)) >> 4)
0369 #define RXTX_REG22_EO_LATCH_CALOUT_RD(src) ((0x0000fc00 & (u32) (src)) >> 10)
0370 #define RXTX_REG22_LATCH_CAL_FAIL_EVEN_RD(src) ((0x0000000f & (u32)(src)))
0371 #define RXTX_REG23 0x02e
0372 #define RXTX_REG23_DE_LATCH_CALOUT_RD(src) ((0x0000fc00 & (u32) (src)) >> 10)
0373 #define RXTX_REG23_XE_LATCH_CALOUT_RD(src) ((0x000003f0 & (u32) (src)) >> 4)
0374 #define RXTX_REG24 0x030
0375 #define RXTX_REG24_EE_LATCH_CALOUT_RD(src) ((0x0000fc00 & (u32) (src)) >> 10)
0376 #define RXTX_REG24_SE_LATCH_CALOUT_RD(src) ((0x000003f0 & (u32) (src)) >> 4)
0377 #define RXTX_REG27 0x036
0378 #define RXTX_REG28 0x038
0379 #define RXTX_REG31 0x03e
0380 #define RXTX_REG38 0x04c
0381 #define RXTX_REG38_CUSTOMER_PINMODE_INV_SET(dst, src) \
0382 (((dst) & 0x0000fffe) | (((u32) (src) << 1) & 0x0000fffe))
0383 #define RXTX_REG39 0x04e
0384 #define RXTX_REG40 0x050
0385 #define RXTX_REG41 0x052
0386 #define RXTX_REG42 0x054
0387 #define RXTX_REG43 0x056
0388 #define RXTX_REG44 0x058
0389 #define RXTX_REG45 0x05a
0390 #define RXTX_REG46 0x05c
0391 #define RXTX_REG47 0x05e
0392 #define RXTX_REG48 0x060
0393 #define RXTX_REG49 0x062
0394 #define RXTX_REG50 0x064
0395 #define RXTX_REG51 0x066
0396 #define RXTX_REG52 0x068
0397 #define RXTX_REG53 0x06a
0398 #define RXTX_REG54 0x06c
0399 #define RXTX_REG55 0x06e
0400 #define RXTX_REG61 0x07a
0401 #define RXTX_REG61_ISCAN_INBERT_SET(dst, src) \
0402 (((dst) & ~0x00000010) | (((u32) (src) << 4) & 0x00000010))
0403 #define RXTX_REG61_LOADFREQ_SHIFT_SET(dst, src) \
0404 (((dst) & ~0x00000008) | (((u32) (src) << 3) & 0x00000008))
0405 #define RXTX_REG61_EYE_COUNT_WIDTH_SEL_SET(dst, src) \
0406 (((dst) & ~0x000000c0) | (((u32) (src) << 6) & 0x000000c0))
0407 #define RXTX_REG61_SPD_SEL_CDR_SET(dst, src) \
0408 (((dst) & ~0x00003c00) | (((u32) (src) << 10) & 0x00003c00))
0409 #define RXTX_REG62 0x07c
0410 #define RXTX_REG62_PERIOD_H1_QLATCH_SET(dst, src) \
0411 (((dst) & ~0x00003800) | (((u32) (src) << 11) & 0x00003800))
0412 #define RXTX_REG81 0x0a2
0413 #define RXTX_REG89_MU_TH7_SET(dst, src) \
0414 (((dst) & ~0x0000f800) | (((u32) (src) << 11) & 0x0000f800))
0415 #define RXTX_REG89_MU_TH8_SET(dst, src) \
0416 (((dst) & ~0x000007c0) | (((u32) (src) << 6) & 0x000007c0))
0417 #define RXTX_REG89_MU_TH9_SET(dst, src) \
0418 (((dst) & ~0x0000003e) | (((u32) (src) << 1) & 0x0000003e))
0419 #define RXTX_REG96 0x0c0
0420 #define RXTX_REG96_MU_FREQ1_SET(dst, src) \
0421 (((dst) & ~0x0000f800) | (((u32) (src) << 11) & 0x0000f800))
0422 #define RXTX_REG96_MU_FREQ2_SET(dst, src) \
0423 (((dst) & ~0x000007c0) | (((u32) (src) << 6) & 0x000007c0))
0424 #define RXTX_REG96_MU_FREQ3_SET(dst, src) \
0425 (((dst) & ~0x0000003e) | (((u32) (src) << 1) & 0x0000003e))
0426 #define RXTX_REG99 0x0c6
0427 #define RXTX_REG99_MU_PHASE1_SET(dst, src) \
0428 (((dst) & ~0x0000f800) | (((u32) (src) << 11) & 0x0000f800))
0429 #define RXTX_REG99_MU_PHASE2_SET(dst, src) \
0430 (((dst) & ~0x000007c0) | (((u32) (src) << 6) & 0x000007c0))
0431 #define RXTX_REG99_MU_PHASE3_SET(dst, src) \
0432 (((dst) & ~0x0000003e) | (((u32) (src) << 1) & 0x0000003e))
0433 #define RXTX_REG102 0x0cc
0434 #define RXTX_REG102_FREQLOOP_LIMIT_SET(dst, src) \
0435 (((dst) & ~0x00000060) | (((u32) (src) << 5) & 0x00000060))
0436 #define RXTX_REG114 0x0e4
0437 #define RXTX_REG121 0x0f2
0438 #define RXTX_REG121_SUMOS_CAL_CODE_RD(src) ((0x0000003e & (u32)(src)) >> 0x1)
0439 #define RXTX_REG125 0x0fa
0440 #define RXTX_REG125_PQ_REG_SET(dst, src) \
0441 (((dst) & ~0x0000fe00) | (((u32) (src) << 9) & 0x0000fe00))
0442 #define RXTX_REG125_SIGN_PQ_SET(dst, src) \
0443 (((dst) & ~0x00000100) | (((u32) (src) << 8) & 0x00000100))
0444 #define RXTX_REG125_SIGN_PQ_2C_SET(dst, src) \
0445 (((dst) & ~0x00000080) | (((u32) (src) << 7) & 0x00000080))
0446 #define RXTX_REG125_PHZ_MANUALCODE_SET(dst, src) \
0447 (((dst) & ~0x0000007c) | (((u32) (src) << 2) & 0x0000007c))
0448 #define RXTX_REG125_PHZ_MANUAL_SET(dst, src) \
0449 (((dst) & ~0x00000002) | (((u32) (src) << 1) & 0x00000002))
0450 #define RXTX_REG127 0x0fe
0451 #define RXTX_REG127_FORCE_SUM_CAL_START_MASK 0x00000002
0452 #define RXTX_REG127_FORCE_LAT_CAL_START_MASK 0x00000004
0453 #define RXTX_REG127_FORCE_SUM_CAL_START_SET(dst, src) \
0454 (((dst) & ~0x00000002) | (((u32) (src) << 1) & 0x00000002))
0455 #define RXTX_REG127_FORCE_LAT_CAL_START_SET(dst, src) \
0456 (((dst) & ~0x00000004) | (((u32) (src) << 2) & 0x00000004))
0457 #define RXTX_REG127_LATCH_MAN_CAL_ENA_SET(dst, src) \
0458 (((dst) & ~0x00000008) | (((u32) (src) << 3) & 0x00000008))
0459 #define RXTX_REG127_DO_LATCH_MANCAL_SET(dst, src) \
0460 (((dst) & ~0x0000fc00) | (((u32) (src) << 10) & 0x0000fc00))
0461 #define RXTX_REG127_XO_LATCH_MANCAL_SET(dst, src) \
0462 (((dst) & ~0x000003f0) | (((u32) (src) << 4) & 0x000003f0))
0463 #define RXTX_REG128 0x100
0464 #define RXTX_REG128_LATCH_CAL_WAIT_SEL_SET(dst, src) \
0465 (((dst) & ~0x0000000c) | (((u32) (src) << 2) & 0x0000000c))
0466 #define RXTX_REG128_EO_LATCH_MANCAL_SET(dst, src) \
0467 (((dst) & ~0x0000fc00) | (((u32) (src) << 10) & 0x0000fc00))
0468 #define RXTX_REG128_SO_LATCH_MANCAL_SET(dst, src) \
0469 (((dst) & ~0x000003f0) | (((u32) (src) << 4) & 0x000003f0))
0470 #define RXTX_REG129 0x102
0471 #define RXTX_REG129_DE_LATCH_MANCAL_SET(dst, src) \
0472 (((dst) & ~0x0000fc00) | (((u32) (src) << 10) & 0x0000fc00))
0473 #define RXTX_REG129_XE_LATCH_MANCAL_SET(dst, src) \
0474 (((dst) & ~0x000003f0) | (((u32) (src) << 4) & 0x000003f0))
0475 #define RXTX_REG130 0x104
0476 #define RXTX_REG130_EE_LATCH_MANCAL_SET(dst, src) \
0477 (((dst) & ~0x0000fc00) | (((u32) (src) << 10) & 0x0000fc00))
0478 #define RXTX_REG130_SE_LATCH_MANCAL_SET(dst, src) \
0479 (((dst) & ~0x000003f0) | (((u32) (src) << 4) & 0x000003f0))
0480 #define RXTX_REG145 0x122
0481 #define RXTX_REG145_TX_IDLE_SATA_SET(dst, src) \
0482 (((dst) & ~0x00000001) | (((u32) (src) << 0) & 0x00000001))
0483 #define RXTX_REG145_RXES_ENA_SET(dst, src) \
0484 (((dst) & ~0x00000002) | (((u32) (src) << 1) & 0x00000002))
0485 #define RXTX_REG145_RXDFE_CONFIG_SET(dst, src) \
0486 (((dst) & ~0x0000c000) | (((u32) (src) << 14) & 0x0000c000))
0487 #define RXTX_REG145_RXVWES_LATENA_SET(dst, src) \
0488 (((dst) & ~0x00000004) | (((u32) (src) << 2) & 0x00000004))
0489 #define RXTX_REG147 0x126
0490 #define RXTX_REG148 0x128
0491
0492
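/* The SerDes contains a reference CMU and a PHY (core) CMU */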
0493 enum cmu_type_t {
0494 REF_CMU = 0,
0495 PHY_CMU = 1,
0496 };
0497
0498 enum mux_type_t {
0499 MUX_SELECT_ATA = 0,
0500 MUX_SELECT_SGMMII = 0,
0501 };
0502
0503 enum clk_type_t {
0504 CLK_EXT_DIFF = 0,
0505 CLK_INT_DIFF = 1,
0506 CLK_INT_SING = 2,
0507 };
0508
0509 enum xgene_phy_mode {
0510 MODE_SATA = 0,
0511 MODE_SGMII = 1,
0512 MODE_PCIE = 2,
0513 MODE_USB = 3,
0514 MODE_XFI = 4,
0515 MODE_MAX
0516 };
0517
0518 struct xgene_sata_override_param {
0519 u32 speed[MAX_LANE];
0520 u32 txspeed[3];
0521 u32 txboostgain[MAX_LANE*3];
0522 u32 txeyetuning[MAX_LANE*3];
0523 u32 txeyedirection[MAX_LANE*3];
0524 u32 txamplitude[MAX_LANE*3];
0525 u32 txprecursor_cn1[MAX_LANE*3];
0526 u32 txprecursor_cn2[MAX_LANE*3];
0527 u32 txpostcursor_cp1[MAX_LANE*3];
0528 };
0529
0530 struct xgene_phy_ctx {
0531 struct device *dev;
0532 struct phy *phy;
0533 enum xgene_phy_mode mode;
0534 enum clk_type_t clk_type;
0535 void __iomem *sds_base;
0536 struct clk *clk;
0537
0538
0539 struct xgene_sata_override_param sata_param;
0540 };
0541
0542
0543
0544
0545
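/* Optional module parameter: apply the register settings required by pre-A3 chip revisions */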
0546 static int preA3Chip;
0547 MODULE_PARM_DESC(preA3Chip, "Enable pre-A3 chip support (1=enable 0=disable)");
0548 module_param_named(preA3Chip, preA3Chip, int, 0444);
0549
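/*
 * Write a SerDes register through the indirect command interface and
 * poll (for up to one jiffy) for the command-done bit.
 */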
0550 static void sds_wr(void __iomem *csr_base, u32 indirect_cmd_reg,
0551 u32 indirect_data_reg, u32 addr, u32 data)
0552 {
0553 unsigned long deadline = jiffies + HZ;
0554 u32 val;
0555 u32 cmd;
0556
0557 cmd = CFG_IND_WR_CMD_MASK | CFG_IND_CMD_DONE_MASK;
0558 cmd = CFG_IND_ADDR_SET(cmd, addr);
0559 writel(data, csr_base + indirect_data_reg);
0560 readl(csr_base + indirect_data_reg);
0561 writel(cmd, csr_base + indirect_cmd_reg);
0562 readl(csr_base + indirect_cmd_reg);
0563 do {
0564 val = readl(csr_base + indirect_cmd_reg);
0565 } while (!(val & CFG_IND_CMD_DONE_MASK) &&
0566 time_before(jiffies, deadline));
0567 if (!(val & CFG_IND_CMD_DONE_MASK))
0568 pr_err("SDS WR timeout at 0x%p offset 0x%08X value 0x%08X\n",
0569 csr_base + indirect_cmd_reg, addr, data);
0570 }
0571
0572 static void sds_rd(void __iomem *csr_base, u32 indirect_cmd_reg,
0573 u32 indirect_data_reg, u32 addr, u32 *data)
0574 {
0575 unsigned long deadline = jiffies + HZ;
0576 u32 val;
0577 u32 cmd;
0578
0579 cmd = CFG_IND_RD_CMD_MASK | CFG_IND_CMD_DONE_MASK;
0580 cmd = CFG_IND_ADDR_SET(cmd, addr);
0581 writel(cmd, csr_base + indirect_cmd_reg);
0582 readl(csr_base + indirect_cmd_reg);
0583 do {
0584 val = readl(csr_base + indirect_cmd_reg);
0585 } while (!(val & CFG_IND_CMD_DONE_MASK) &&
0586 time_before(jiffies, deadline));
0587 *data = readl(csr_base + indirect_data_reg);
0588 if (!(val & CFG_IND_CMD_DONE_MASK))
0589 pr_err("SDS WR timeout at 0x%p offset 0x%08X value 0x%08X\n",
0590 csr_base + indirect_cmd_reg, addr, *data);
0591 }
0592
0593 static void cmu_wr(struct xgene_phy_ctx *ctx, enum cmu_type_t cmu_type,
0594 u32 reg, u32 data)
0595 {
0596 void __iomem *sds_base = ctx->sds_base;
0597 u32 val;
0598
0599 if (cmu_type == REF_CMU)
0600 reg += SERDES_PLL_REF_INDIRECT_OFFSET;
0601 else
0602 reg += SERDES_PLL_INDIRECT_OFFSET;
0603 sds_wr(sds_base, SATA_ENET_SDS_IND_CMD_REG,
0604 SATA_ENET_SDS_IND_WDATA_REG, reg, data);
0605 sds_rd(sds_base, SATA_ENET_SDS_IND_CMD_REG,
0606 SATA_ENET_SDS_IND_RDATA_REG, reg, &val);
0607 pr_debug("CMU WR addr 0x%X value 0x%08X <-> 0x%08X\n", reg, data, val);
0608 }
0609
0610 static void cmu_rd(struct xgene_phy_ctx *ctx, enum cmu_type_t cmu_type,
0611 u32 reg, u32 *data)
0612 {
0613 void __iomem *sds_base = ctx->sds_base;
0614
0615 if (cmu_type == REF_CMU)
0616 reg += SERDES_PLL_REF_INDIRECT_OFFSET;
0617 else
0618 reg += SERDES_PLL_INDIRECT_OFFSET;
0619 sds_rd(sds_base, SATA_ENET_SDS_IND_CMD_REG,
0620 SATA_ENET_SDS_IND_RDATA_REG, reg, data);
0621 pr_debug("CMU RD addr 0x%X value 0x%08X\n", reg, *data);
0622 }
0623
0624 static void cmu_toggle1to0(struct xgene_phy_ctx *ctx, enum cmu_type_t cmu_type,
0625 u32 reg, u32 bits)
0626 {
0627 u32 val;
0628
0629 cmu_rd(ctx, cmu_type, reg, &val);
0630 val |= bits;
0631 cmu_wr(ctx, cmu_type, reg, val);
0632 cmu_rd(ctx, cmu_type, reg, &val);
0633 val &= ~bits;
0634 cmu_wr(ctx, cmu_type, reg, val);
0635 }
0636
0637 static void cmu_clrbits(struct xgene_phy_ctx *ctx, enum cmu_type_t cmu_type,
0638 u32 reg, u32 bits)
0639 {
0640 u32 val;
0641
0642 cmu_rd(ctx, cmu_type, reg, &val);
0643 val &= ~bits;
0644 cmu_wr(ctx, cmu_type, reg, val);
0645 }
0646
0647 static void cmu_setbits(struct xgene_phy_ctx *ctx, enum cmu_type_t cmu_type,
0648 u32 reg, u32 bits)
0649 {
0650 u32 val;
0651
0652 cmu_rd(ctx, cmu_type, reg, &val);
0653 val |= bits;
0654 cmu_wr(ctx, cmu_type, reg, val);
0655 }
0656
0657 static void serdes_wr(struct xgene_phy_ctx *ctx, int lane, u32 reg, u32 data)
0658 {
0659 void __iomem *sds_base = ctx->sds_base;
0660 u32 val;
0661
0662 reg += SERDES_INDIRECT_OFFSET;
0663 reg += lane * SERDES_LANE_STRIDE;
0664 sds_wr(sds_base, SATA_ENET_SDS_IND_CMD_REG,
0665 SATA_ENET_SDS_IND_WDATA_REG, reg, data);
0666 sds_rd(sds_base, SATA_ENET_SDS_IND_CMD_REG,
0667 SATA_ENET_SDS_IND_RDATA_REG, reg, &val);
0668 pr_debug("SERDES WR addr 0x%X value 0x%08X <-> 0x%08X\n", reg, data,
0669 val);
0670 }
0671
0672 static void serdes_rd(struct xgene_phy_ctx *ctx, int lane, u32 reg, u32 *data)
0673 {
0674 void __iomem *sds_base = ctx->sds_base;
0675
0676 reg += SERDES_INDIRECT_OFFSET;
0677 reg += lane * SERDES_LANE_STRIDE;
0678 sds_rd(sds_base, SATA_ENET_SDS_IND_CMD_REG,
0679 SATA_ENET_SDS_IND_RDATA_REG, reg, data);
0680 pr_debug("SERDES RD addr 0x%X value 0x%08X\n", reg, *data);
0681 }
0682
0683 static void serdes_clrbits(struct xgene_phy_ctx *ctx, int lane, u32 reg,
0684 u32 bits)
0685 {
0686 u32 val;
0687
0688 serdes_rd(ctx, lane, reg, &val);
0689 val &= ~bits;
0690 serdes_wr(ctx, lane, reg, val);
0691 }
0692
0693 static void serdes_setbits(struct xgene_phy_ctx *ctx, int lane, u32 reg,
0694 u32 bits)
0695 {
0696 u32 val;
0697
0698 serdes_rd(ctx, lane, reg, &val);
0699 val |= bits;
0700 serdes_wr(ctx, lane, reg, val);
0701 }
0702
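/* Select the CMU reference clock source: external differential, internal differential, or internal single-ended */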
0703 static void xgene_phy_cfg_cmu_clk_type(struct xgene_phy_ctx *ctx,
0704 enum cmu_type_t cmu_type,
0705 enum clk_type_t clk_type)
0706 {
0707 u32 val;
0708
0709
0710 cmu_rd(ctx, cmu_type, CMU_REG12, &val);
0711 val = CMU_REG12_STATE_DELAY9_SET(val, 0x1);
0712 cmu_wr(ctx, cmu_type, CMU_REG12, val);
0713
0714 cmu_wr(ctx, cmu_type, CMU_REG13, 0x0222);
0715 cmu_wr(ctx, cmu_type, CMU_REG14, 0x2225);
0716
0717
0718 if (clk_type == CLK_EXT_DIFF) {
0719
0720 cmu_rd(ctx, cmu_type, CMU_REG0, &val);
0721 val = CMU_REG0_PLL_REF_SEL_SET(val, 0x0);
0722 cmu_wr(ctx, cmu_type, CMU_REG0, val);
0723
0724 cmu_rd(ctx, cmu_type, CMU_REG1, &val);
0725 val = CMU_REG1_REFCLK_CMOS_SEL_SET(val, 0x0);
0726 cmu_wr(ctx, cmu_type, CMU_REG1, val);
0727 dev_dbg(ctx->dev, "Set external reference clock\n");
0728 } else if (clk_type == CLK_INT_DIFF) {
0729
0730 cmu_rd(ctx, cmu_type, CMU_REG0, &val);
0731 val = CMU_REG0_PLL_REF_SEL_SET(val, 0x1);
0732 cmu_wr(ctx, cmu_type, CMU_REG0, val);
0733
0734 cmu_rd(ctx, cmu_type, CMU_REG1, &val);
0735 val = CMU_REG1_REFCLK_CMOS_SEL_SET(val, 0x1);
0736 cmu_wr(ctx, cmu_type, CMU_REG1, val);
0737 dev_dbg(ctx->dev, "Set internal reference clock\n");
0738 } else if (clk_type == CLK_INT_SING) {
0739
0740
0741
0742
0743
0744
0745 cmu_rd(ctx, cmu_type, CMU_REG1, &val);
0746 val = CMU_REG1_REFCLK_CMOS_SEL_SET(val, 0x1);
0747 cmu_wr(ctx, cmu_type, CMU_REG1, val);
0748
0749 cmu_rd(ctx, cmu_type, CMU_REG1, &val);
0750 val = CMU_REG1_REFCLK_CMOS_SEL_SET(val, 0x0);
0751 cmu_wr(ctx, cmu_type, CMU_REG1, val);
0752 dev_dbg(ctx->dev,
0753 "Set internal single ended reference clock\n");
0754 }
0755 }
0756
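/* Configure the CMU core for SATA: calibration thresholds, charge pump, PLL dividers, loop filter and TX word mode */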
0757 static void xgene_phy_sata_cfg_cmu_core(struct xgene_phy_ctx *ctx,
0758 enum cmu_type_t cmu_type,
0759 enum clk_type_t clk_type)
0760 {
0761 u32 val;
0762 int ref_100MHz;
0763
0764 if (cmu_type == REF_CMU) {
0765
0766 cmu_rd(ctx, cmu_type, CMU_REG34, &val);
0767 val = CMU_REG34_VCO_CAL_VTH_LO_MAX_SET(val, 0x7);
0768 val = CMU_REG34_VCO_CAL_VTH_HI_MAX_SET(val, 0xc);
0769 val = CMU_REG34_VCO_CAL_VTH_LO_MIN_SET(val, 0x3);
0770 val = CMU_REG34_VCO_CAL_VTH_HI_MIN_SET(val, 0x8);
0771 cmu_wr(ctx, cmu_type, CMU_REG34, val);
0772 }
0773
0774
0775 cmu_rd(ctx, cmu_type, CMU_REG0, &val);
0776 if (cmu_type == REF_CMU || preA3Chip)
0777 val = CMU_REG0_CAL_COUNT_RESOL_SET(val, 0x4);
0778 else
0779 val = CMU_REG0_CAL_COUNT_RESOL_SET(val, 0x7);
0780 cmu_wr(ctx, cmu_type, CMU_REG0, val);
0781
0782
0783 cmu_rd(ctx, cmu_type, CMU_REG1, &val);
0784 val = CMU_REG1_PLL_CP_SET(val, 0x1);
0785 if (cmu_type == REF_CMU || preA3Chip)
0786 val = CMU_REG1_PLL_CP_SEL_SET(val, 0x5);
0787 else
0788 val = CMU_REG1_PLL_CP_SEL_SET(val, 0x3);
0789 if (cmu_type == REF_CMU)
0790 val = CMU_REG1_PLL_MANUALCAL_SET(val, 0x0);
0791 else
0792 val = CMU_REG1_PLL_MANUALCAL_SET(val, 0x1);
0793 cmu_wr(ctx, cmu_type, CMU_REG1, val);
0794
0795 if (cmu_type != REF_CMU)
0796 cmu_clrbits(ctx, cmu_type, CMU_REG5, CMU_REG5_PLL_RESETB_MASK);
0797
0798
0799 cmu_rd(ctx, cmu_type, CMU_REG2, &val);
0800 if (cmu_type == REF_CMU) {
0801 val = CMU_REG2_PLL_LFRES_SET(val, 0xa);
0802 ref_100MHz = 1;
0803 } else {
0804 val = CMU_REG2_PLL_LFRES_SET(val, 0x3);
0805 if (clk_type == CLK_EXT_DIFF)
0806 ref_100MHz = 0;
0807 else
0808 ref_100MHz = 1;
0809 }
0810 if (ref_100MHz) {
0811 val = CMU_REG2_PLL_FBDIV_SET(val, FBDIV_VAL_100M);
0812 val = CMU_REG2_PLL_REFDIV_SET(val, REFDIV_VAL_100M);
0813 } else {
0814 val = CMU_REG2_PLL_FBDIV_SET(val, FBDIV_VAL_50M);
0815 val = CMU_REG2_PLL_REFDIV_SET(val, REFDIV_VAL_50M);
0816 }
0817 cmu_wr(ctx, cmu_type, CMU_REG2, val);
0818
0819
0820 cmu_rd(ctx, cmu_type, CMU_REG3, &val);
0821 if (cmu_type == REF_CMU) {
0822 val = CMU_REG3_VCOVARSEL_SET(val, 0x3);
0823 val = CMU_REG3_VCO_MOMSEL_INIT_SET(val, 0x10);
0824 } else {
0825 val = CMU_REG3_VCOVARSEL_SET(val, 0xF);
0826 if (preA3Chip)
0827 val = CMU_REG3_VCO_MOMSEL_INIT_SET(val, 0x15);
0828 else
0829 val = CMU_REG3_VCO_MOMSEL_INIT_SET(val, 0x1a);
0830 val = CMU_REG3_VCO_MANMOMSEL_SET(val, 0x15);
0831 }
0832 cmu_wr(ctx, cmu_type, CMU_REG3, val);
0833
0834
0835 cmu_rd(ctx, cmu_type, CMU_REG26, &val);
0836 val = CMU_REG26_FORCE_PLL_LOCK_SET(val, 0x0);
0837 cmu_wr(ctx, cmu_type, CMU_REG26, val);
0838
0839
0840 cmu_rd(ctx, cmu_type, CMU_REG5, &val);
0841 val = CMU_REG5_PLL_LFSMCAP_SET(val, 0x3);
0842 val = CMU_REG5_PLL_LFCAP_SET(val, 0x3);
0843 if (cmu_type == REF_CMU || !preA3Chip)
0844 val = CMU_REG5_PLL_LOCK_RESOLUTION_SET(val, 0x7);
0845 else
0846 val = CMU_REG5_PLL_LOCK_RESOLUTION_SET(val, 0x4);
0847 cmu_wr(ctx, cmu_type, CMU_REG5, val);
0848
0849
0850 cmu_rd(ctx, cmu_type, CMU_REG6, &val);
0851 val = CMU_REG6_PLL_VREGTRIM_SET(val, preA3Chip ? 0x0 : 0x2);
0852 val = CMU_REG6_MAN_PVT_CAL_SET(val, preA3Chip ? 0x1 : 0x0);
0853 cmu_wr(ctx, cmu_type, CMU_REG6, val);
0854
0855
0856 if (cmu_type == PHY_CMU) {
0857 cmu_rd(ctx, cmu_type, CMU_REG9, &val);
0858 val = CMU_REG9_TX_WORD_MODE_CH1_SET(val,
0859 CMU_REG9_WORD_LEN_20BIT);
0860 val = CMU_REG9_TX_WORD_MODE_CH0_SET(val,
0861 CMU_REG9_WORD_LEN_20BIT);
0862 val = CMU_REG9_PLL_POST_DIVBY2_SET(val, 0x1);
0863 if (!preA3Chip) {
0864 val = CMU_REG9_VBG_BYPASSB_SET(val, 0x0);
val = CMU_REG9_IGEN_BYPASS_SET(val, 0x0);
0866 }
0867 cmu_wr(ctx, cmu_type, CMU_REG9, val);
0868
0869 if (!preA3Chip) {
0870 cmu_rd(ctx, cmu_type, CMU_REG10, &val);
0871 val = CMU_REG10_VREG_REFSEL_SET(val, 0x1);
0872 cmu_wr(ctx, cmu_type, CMU_REG10, val);
0873 }
0874 }
0875
0876 cmu_rd(ctx, cmu_type, CMU_REG16, &val);
0877 val = CMU_REG16_CALIBRATION_DONE_OVERRIDE_SET(val, 0x1);
0878 val = CMU_REG16_BYPASS_PLL_LOCK_SET(val, 0x1);
0879 if (cmu_type == REF_CMU || preA3Chip)
0880 val = CMU_REG16_VCOCAL_WAIT_BTW_CODE_SET(val, 0x4);
0881 else
0882 val = CMU_REG16_VCOCAL_WAIT_BTW_CODE_SET(val, 0x7);
0883 cmu_wr(ctx, cmu_type, CMU_REG16, val);
0884
0885
0886 cmu_rd(ctx, cmu_type, CMU_REG30, &val);
0887 val = CMU_REG30_PCIE_MODE_SET(val, 0x0);
0888 val = CMU_REG30_LOCK_COUNT_SET(val, 0x3);
0889 cmu_wr(ctx, cmu_type, CMU_REG30, val);
0890
0891
0892 cmu_wr(ctx, cmu_type, CMU_REG31, 0xF);
0893
0894 cmu_rd(ctx, cmu_type, CMU_REG32, &val);
0895 val = CMU_REG32_PVT_CAL_WAIT_SEL_SET(val, 0x3);
0896 if (cmu_type == REF_CMU || preA3Chip)
0897 val = CMU_REG32_IREF_ADJ_SET(val, 0x3);
0898 else
0899 val = CMU_REG32_IREF_ADJ_SET(val, 0x1);
0900 cmu_wr(ctx, cmu_type, CMU_REG32, val);
0901
0902
0903 if (cmu_type != REF_CMU && preA3Chip)
0904 cmu_wr(ctx, cmu_type, CMU_REG34, 0x8d27);
0905 else
0906 cmu_wr(ctx, cmu_type, CMU_REG34, 0x873c);
0907
0908
0909 cmu_wr(ctx, cmu_type, CMU_REG37, 0xF00F);
0910 }
0911
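/* Enable spread-spectrum clocking on the CMU PLL, then reset the PLL and restart VCO calibration */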
0912 static void xgene_phy_ssc_enable(struct xgene_phy_ctx *ctx,
0913 enum cmu_type_t cmu_type)
0914 {
0915 u32 val;
0916
0917
0918 cmu_rd(ctx, cmu_type, CMU_REG35, &val);
0919 val = CMU_REG35_PLL_SSC_MOD_SET(val, 98);
0920 cmu_wr(ctx, cmu_type, CMU_REG35, val);
0921
0922
0923 cmu_rd(ctx, cmu_type, CMU_REG36, &val);
0924 val = CMU_REG36_PLL_SSC_VSTEP_SET(val, 30);
0925 val = CMU_REG36_PLL_SSC_EN_SET(val, 1);
0926 val = CMU_REG36_PLL_SSC_DSMSEL_SET(val, 1);
0927 cmu_wr(ctx, cmu_type, CMU_REG36, val);
0928
0929
0930 cmu_clrbits(ctx, cmu_type, CMU_REG5, CMU_REG5_PLL_RESETB_MASK);
0931 cmu_setbits(ctx, cmu_type, CMU_REG5, CMU_REG5_PLL_RESETB_MASK);
0932
0933
0934 cmu_toggle1to0(ctx, cmu_type, CMU_REG32,
0935 CMU_REG32_FORCE_VCOCAL_START_MASK);
0936 }
0937
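/* Program the per-lane RX/TX registers: CTLE equalization, TX amplitude and cursors, CDR and latch/summer calibration defaults */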
0938 static void xgene_phy_sata_cfg_lanes(struct xgene_phy_ctx *ctx)
0939 {
0940 u32 val;
0941 u32 reg;
0942 int i;
0943 int lane;
0944
0945 for (lane = 0; lane < MAX_LANE; lane++) {
0946 serdes_wr(ctx, lane, RXTX_REG147, 0x6);
0947
0948
0949 serdes_rd(ctx, lane, RXTX_REG0, &val);
0950 val = RXTX_REG0_CTLE_EQ_HR_SET(val, 0x10);
0951 val = RXTX_REG0_CTLE_EQ_QR_SET(val, 0x10);
0952 val = RXTX_REG0_CTLE_EQ_FR_SET(val, 0x10);
0953 serdes_wr(ctx, lane, RXTX_REG0, val);
0954
0955
0956 serdes_rd(ctx, lane, RXTX_REG1, &val);
0957 val = RXTX_REG1_RXACVCM_SET(val, 0x7);
0958 val = RXTX_REG1_CTLE_EQ_SET(val,
0959 ctx->sata_param.txboostgain[lane * 3 +
0960 ctx->sata_param.speed[lane]]);
0961 serdes_wr(ctx, lane, RXTX_REG1, val);
0962
0963
0964
0965
0966 serdes_rd(ctx, lane, RXTX_REG2, &val);
0967 val = RXTX_REG2_VTT_ENA_SET(val, 0x1);
0968 val = RXTX_REG2_VTT_SEL_SET(val, 0x1);
0969 val = RXTX_REG2_TX_FIFO_ENA_SET(val, 0x1);
0970 serdes_wr(ctx, lane, RXTX_REG2, val);
0971
0972
0973 serdes_rd(ctx, lane, RXTX_REG4, &val);
0974 val = RXTX_REG4_TX_WORD_MODE_SET(val, CMU_REG9_WORD_LEN_20BIT);
0975 serdes_wr(ctx, lane, RXTX_REG4, val);
0976
0977 if (!preA3Chip) {
0978 serdes_rd(ctx, lane, RXTX_REG1, &val);
0979 val = RXTX_REG1_RXVREG1_SET(val, 0x2);
0980 val = RXTX_REG1_RXIREF_ADJ_SET(val, 0x2);
0981 serdes_wr(ctx, lane, RXTX_REG1, val);
0982 }
0983
0984
0985 serdes_rd(ctx, lane, RXTX_REG5, &val);
0986 val = RXTX_REG5_TX_CN1_SET(val,
0987 ctx->sata_param.txprecursor_cn1[lane * 3 +
0988 ctx->sata_param.speed[lane]]);
0989 val = RXTX_REG5_TX_CP1_SET(val,
0990 ctx->sata_param.txpostcursor_cp1[lane * 3 +
0991 ctx->sata_param.speed[lane]]);
0992 val = RXTX_REG5_TX_CN2_SET(val,
0993 ctx->sata_param.txprecursor_cn2[lane * 3 +
0994 ctx->sata_param.speed[lane]]);
0995 serdes_wr(ctx, lane, RXTX_REG5, val);
0996
0997
0998 serdes_rd(ctx, lane, RXTX_REG6, &val);
0999 val = RXTX_REG6_TXAMP_CNTL_SET(val,
1000 ctx->sata_param.txamplitude[lane * 3 +
1001 ctx->sata_param.speed[lane]]);
1002 val = RXTX_REG6_TXAMP_ENA_SET(val, 0x1);
1003 val = RXTX_REG6_TX_IDLE_SET(val, 0x0);
1004 val = RXTX_REG6_RX_BIST_RESYNC_SET(val, 0x0);
1005 val = RXTX_REG6_RX_BIST_ERRCNT_RD_SET(val, 0x0);
1006 serdes_wr(ctx, lane, RXTX_REG6, val);
1007
1008
1009 serdes_rd(ctx, lane, RXTX_REG7, &val);
1010 val = RXTX_REG7_BIST_ENA_RX_SET(val, 0x0);
1011 val = RXTX_REG7_RX_WORD_MODE_SET(val, CMU_REG9_WORD_LEN_20BIT);
1012 serdes_wr(ctx, lane, RXTX_REG7, val);
1013
1014
1015 serdes_rd(ctx, lane, RXTX_REG8, &val);
1016 val = RXTX_REG8_CDR_LOOP_ENA_SET(val, 0x1);
1017 val = RXTX_REG8_CDR_BYPASS_RXLOS_SET(val, 0x0);
1018 val = RXTX_REG8_SSC_ENABLE_SET(val, 0x1);
1019 val = RXTX_REG8_SD_DISABLE_SET(val, 0x0);
1020 val = RXTX_REG8_SD_VREF_SET(val, 0x4);
1021 serdes_wr(ctx, lane, RXTX_REG8, val);
1022
1023
1024 serdes_rd(ctx, lane, RXTX_REG11, &val);
1025 val = RXTX_REG11_PHASE_ADJUST_LIMIT_SET(val, 0x0);
1026 serdes_wr(ctx, lane, RXTX_REG11, val);
1027
1028
1029 serdes_rd(ctx, lane, RXTX_REG12, &val);
1030 val = RXTX_REG12_LATCH_OFF_ENA_SET(val, 0x1);
1031 val = RXTX_REG12_SUMOS_ENABLE_SET(val, 0x0);
1032 val = RXTX_REG12_RX_DET_TERM_ENABLE_SET(val, 0x0);
1033 serdes_wr(ctx, lane, RXTX_REG12, val);
1034
1035
1036 serdes_rd(ctx, lane, RXTX_REG26, &val);
1037 val = RXTX_REG26_PERIOD_ERROR_LATCH_SET(val, 0x0);
1038 val = RXTX_REG26_BLWC_ENA_SET(val, 0x1);
1039 serdes_wr(ctx, lane, RXTX_REG26, val);
1040
1041 serdes_wr(ctx, lane, RXTX_REG28, 0x0);
1042
1043
1044 serdes_wr(ctx, lane, RXTX_REG31, 0x0);
1045
1046
1047 serdes_rd(ctx, lane, RXTX_REG61, &val);
1048 val = RXTX_REG61_ISCAN_INBERT_SET(val, 0x1);
1049 val = RXTX_REG61_LOADFREQ_SHIFT_SET(val, 0x0);
1050 val = RXTX_REG61_EYE_COUNT_WIDTH_SEL_SET(val, 0x0);
1051 serdes_wr(ctx, lane, RXTX_REG61, val);
1052
1053 serdes_rd(ctx, lane, RXTX_REG62, &val);
1054 val = RXTX_REG62_PERIOD_H1_QLATCH_SET(val, 0x0);
1055 serdes_wr(ctx, lane, RXTX_REG62, val);
1056
1057
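/* Write the same MU threshold values to RXTX_REG81 through RXTX_REG89 (the REG89 field macros apply to the whole range) */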
1058 for (i = 0; i < 9; i++) {
1059 reg = RXTX_REG81 + i * 2;
1060 serdes_rd(ctx, lane, reg, &val);
1061 val = RXTX_REG89_MU_TH7_SET(val, 0xe);
1062 val = RXTX_REG89_MU_TH8_SET(val, 0xe);
1063 val = RXTX_REG89_MU_TH9_SET(val, 0xe);
1064 serdes_wr(ctx, lane, reg, val);
1065 }
1066
1067
1068 for (i = 0; i < 3; i++) {
1069 reg = RXTX_REG96 + i * 2;
1070 serdes_rd(ctx, lane, reg, &val);
1071 val = RXTX_REG96_MU_FREQ1_SET(val, 0x10);
1072 val = RXTX_REG96_MU_FREQ2_SET(val, 0x10);
1073 val = RXTX_REG96_MU_FREQ3_SET(val, 0x10);
1074 serdes_wr(ctx, lane, reg, val);
1075 }
1076
1077
1078 for (i = 0; i < 3; i++) {
1079 reg = RXTX_REG99 + i * 2;
1080 serdes_rd(ctx, lane, reg, &val);
1081 val = RXTX_REG99_MU_PHASE1_SET(val, 0x7);
1082 val = RXTX_REG99_MU_PHASE2_SET(val, 0x7);
1083 val = RXTX_REG99_MU_PHASE3_SET(val, 0x7);
1084 serdes_wr(ctx, lane, reg, val);
1085 }
1086
1087 serdes_rd(ctx, lane, RXTX_REG102, &val);
1088 val = RXTX_REG102_FREQLOOP_LIMIT_SET(val, 0x0);
1089 serdes_wr(ctx, lane, RXTX_REG102, val);
1090
1091 serdes_wr(ctx, lane, RXTX_REG114, 0xffe0);
1092
1093 serdes_rd(ctx, lane, RXTX_REG125, &val);
1094 val = RXTX_REG125_SIGN_PQ_SET(val,
1095 ctx->sata_param.txeyedirection[lane * 3 +
1096 ctx->sata_param.speed[lane]]);
1097 val = RXTX_REG125_PQ_REG_SET(val,
1098 ctx->sata_param.txeyetuning[lane * 3 +
1099 ctx->sata_param.speed[lane]]);
1100 val = RXTX_REG125_PHZ_MANUAL_SET(val, 0x1);
1101 serdes_wr(ctx, lane, RXTX_REG125, val);
1102
1103 serdes_rd(ctx, lane, RXTX_REG127, &val);
1104 val = RXTX_REG127_LATCH_MAN_CAL_ENA_SET(val, 0x0);
1105 serdes_wr(ctx, lane, RXTX_REG127, val);
1106
1107 serdes_rd(ctx, lane, RXTX_REG128, &val);
1108 val = RXTX_REG128_LATCH_CAL_WAIT_SEL_SET(val, 0x3);
1109 serdes_wr(ctx, lane, RXTX_REG128, val);
1110
1111 serdes_rd(ctx, lane, RXTX_REG145, &val);
1112 val = RXTX_REG145_RXDFE_CONFIG_SET(val, 0x3);
1113 val = RXTX_REG145_TX_IDLE_SATA_SET(val, 0x0);
1114 if (preA3Chip) {
1115 val = RXTX_REG145_RXES_ENA_SET(val, 0x1);
1116 val = RXTX_REG145_RXVWES_LATENA_SET(val, 0x1);
1117 } else {
1118 val = RXTX_REG145_RXES_ENA_SET(val, 0x0);
1119 val = RXTX_REG145_RXVWES_LATENA_SET(val, 0x0);
1120 }
1121 serdes_wr(ctx, lane, RXTX_REG145, val);
1122
1123
1124
1125
1126
1127 for (i = 0; i < 4; i++) {
1128 reg = RXTX_REG148 + i * 2;
1129 serdes_wr(ctx, lane, reg, 0xFFFF);
1130 }
1131 }
1132 }
1133
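/* Release the PHY from reset, restart VCO calibration and poll for PLL calibration to complete (with manual PVT calibration on pre-A3 chips) */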
1134 static int xgene_phy_cal_rdy_chk(struct xgene_phy_ctx *ctx,
1135 enum cmu_type_t cmu_type,
1136 enum clk_type_t clk_type)
1137 {
1138 void __iomem *csr_serdes = ctx->sds_base;
1139 int loop;
1140 u32 val;
1141
1142
1143 writel(0xdf, csr_serdes + SATA_ENET_SDS_RST_CTL);
1144 readl(csr_serdes + SATA_ENET_SDS_RST_CTL);
1145
1146 if (cmu_type != REF_CMU) {
1147 cmu_setbits(ctx, cmu_type, CMU_REG5, CMU_REG5_PLL_RESETB_MASK);
1148
1149
1150
1151
1152 usleep_range(800, 1000);
1153
1154 cmu_rd(ctx, cmu_type, CMU_REG1, &val);
1155 val = CMU_REG1_PLL_MANUALCAL_SET(val, 0x0);
1156 cmu_wr(ctx, cmu_type, CMU_REG1, val);
1157
1158
1159
1160
1161 usleep_range(800, 1000);
1162
1163 cmu_toggle1to0(ctx, cmu_type, CMU_REG32,
1164 CMU_REG32_FORCE_VCOCAL_START_MASK);
1165
1166
1167
1168
1169 usleep_range(800, 1000);
1170 }
1171
1172 if (!preA3Chip)
1173 goto skip_manual_cal;
1174
1175
1176
1177
1178
1179
1180 cmu_rd(ctx, cmu_type, CMU_REG17, &val);
1181 val = CMU_REG17_PVT_CODE_R2A_SET(val, 0x12);
1182 val = CMU_REG17_RESERVED_7_SET(val, 0x0);
1183 cmu_wr(ctx, cmu_type, CMU_REG17, val);
1184 cmu_toggle1to0(ctx, cmu_type, CMU_REG17,
1185 CMU_REG17_PVT_TERM_MAN_ENA_MASK);
1186
1187
1188
1189
1190
1191 cmu_rd(ctx, cmu_type, CMU_REG17, &val);
1192 val = CMU_REG17_PVT_CODE_R2A_SET(val, 0x29);
1193 val = CMU_REG17_RESERVED_7_SET(val, 0x0);
1194 cmu_wr(ctx, cmu_type, CMU_REG17, val);
1195 cmu_toggle1to0(ctx, cmu_type, CMU_REG16,
1196 CMU_REG16_PVT_DN_MAN_ENA_MASK);
1197
1198 cmu_rd(ctx, cmu_type, CMU_REG17, &val);
1199 val = CMU_REG17_PVT_CODE_R2A_SET(val, 0x28);
1200 val = CMU_REG17_RESERVED_7_SET(val, 0x0);
1201 cmu_wr(ctx, cmu_type, CMU_REG17, val);
1202 cmu_toggle1to0(ctx, cmu_type, CMU_REG16,
1203 CMU_REG16_PVT_UP_MAN_ENA_MASK);
1204
1205 skip_manual_cal:
1206
1207 loop = 100;
1208 do {
1209 cmu_rd(ctx, cmu_type, CMU_REG7, &val);
1210 if (CMU_REG7_PLL_CALIB_DONE_RD(val))
1211 break;
1212
1213
1214
1215
1216 usleep_range(10, 100);
1217 } while (--loop > 0);
1218
1219 cmu_rd(ctx, cmu_type, CMU_REG7, &val);
1220 dev_dbg(ctx->dev, "PLL calibration %s\n",
1221 CMU_REG7_PLL_CALIB_DONE_RD(val) ? "done" : "failed");
1222 if (CMU_REG7_VCO_CAL_FAIL_RD(val)) {
1223 dev_err(ctx->dev,
1224 "PLL calibration failed due to VCO failure\n");
1225 return -1;
1226 }
1227 dev_dbg(ctx->dev, "PLL calibration successful\n");
1228
1229 cmu_rd(ctx, cmu_type, CMU_REG15, &val);
1230 dev_dbg(ctx->dev, "PHY Tx is %sready\n", val & 0x300 ? "" : "not ");
1231 return 0;
1232 }
1233
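/* Power-cycle the VCO and force a new VCO calibration after a failed calibration attempt */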
1234 static void xgene_phy_pdwn_force_vco(struct xgene_phy_ctx *ctx,
1235 enum cmu_type_t cmu_type,
1236 enum clk_type_t clk_type)
1237 {
1238 u32 val;
1239
1240 dev_dbg(ctx->dev, "Reset VCO and re-start again\n");
1241 if (cmu_type == PHY_CMU) {
1242 cmu_rd(ctx, cmu_type, CMU_REG16, &val);
1243 val = CMU_REG16_VCOCAL_WAIT_BTW_CODE_SET(val, 0x7);
1244 cmu_wr(ctx, cmu_type, CMU_REG16, val);
1245 }
1246
1247 cmu_toggle1to0(ctx, cmu_type, CMU_REG0, CMU_REG0_PDOWN_MASK);
1248 cmu_toggle1to0(ctx, cmu_type, CMU_REG32,
1249 CMU_REG32_FORCE_VCOCAL_START_MASK);
1250 }
1251
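/*
 * SATA initialization sequence: reset the SDS block, select the SATA
 * customer pin mode, configure the CMU and lanes, then retry PLL
 * calibration up to ten times, power-cycling the VCO between attempts.
 */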
1252 static int xgene_phy_hw_init_sata(struct xgene_phy_ctx *ctx,
1253 enum clk_type_t clk_type, int ssc_enable)
1254 {
1255 void __iomem *sds_base = ctx->sds_base;
1256 u32 val;
1257 int i;
1258
1259
1260 dev_dbg(ctx->dev, "Reset PHY\n");
1261
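/* Place the whole SDS block in reset, then release the macros step by step; the main reset is released later in xgene_phy_cal_rdy_chk() */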
1262 writel(0x0, sds_base + SATA_ENET_SDS_RST_CTL);
1263 val = readl(sds_base + SATA_ENET_SDS_RST_CTL);
1264
1265 writel(0x20, sds_base + SATA_ENET_SDS_RST_CTL);
1266 readl(sds_base + SATA_ENET_SDS_RST_CTL);
1267
1268 writel(0xde, sds_base + SATA_ENET_SDS_RST_CTL);
1269 readl(sds_base + SATA_ENET_SDS_RST_CTL);
1270
1271
1272 val = readl(sds_base + SATA_ENET_SDS_CTL1);
1273 val = CFG_I_SPD_SEL_CDR_OVR1_SET(val,
1274 ctx->sata_param.txspeed[ctx->sata_param.speed[0]]);
1275 writel(val, sds_base + SATA_ENET_SDS_CTL1);
1276
1277 dev_dbg(ctx->dev, "Set the customer pin mode to SATA\n");
1278 val = readl(sds_base + SATA_ENET_SDS_CTL0);
1279 val = REGSPEC_CFG_I_CUSTOMER_PIN_MODE0_SET(val, 0x4421);
1280 writel(val, sds_base + SATA_ENET_SDS_CTL0);
1281
1282
1283 xgene_phy_cfg_cmu_clk_type(ctx, PHY_CMU, clk_type);
1284
1285
1286 xgene_phy_sata_cfg_cmu_core(ctx, PHY_CMU, clk_type);
1287
1288
1289 if (ssc_enable)
1290 xgene_phy_ssc_enable(ctx, PHY_CMU);
1291
1292
1293 xgene_phy_sata_cfg_lanes(ctx);
1294
1295
1296 val = readl(sds_base + SATA_ENET_SDS_PCS_CTL0);
1297 val = REGSPEC_CFG_I_RX_WORDMODE0_SET(val, 0x3);
1298 val = REGSPEC_CFG_I_TX_WORDMODE0_SET(val, 0x3);
1299 writel(val, sds_base + SATA_ENET_SDS_PCS_CTL0);
1300
1301
1302 i = 10;
1303 do {
1304 if (!xgene_phy_cal_rdy_chk(ctx, PHY_CMU, clk_type))
1305 break;
1306
1307 xgene_phy_pdwn_force_vco(ctx, PHY_CMU, clk_type);
1308 } while (--i > 0);
1309
1310 if (i <= 0)
1311 dev_err(ctx->dev, "PLL calibration failed\n");
1312
1313 return 0;
1314 }
1315
1316 static int xgene_phy_hw_initialize(struct xgene_phy_ctx *ctx,
1317 enum clk_type_t clk_type,
1318 int ssc_enable)
1319 {
1320 int rc;
1321
1322 dev_dbg(ctx->dev, "PHY init clk type %d\n", clk_type);
1323
1324 if (ctx->mode == MODE_SATA) {
1325 rc = xgene_phy_hw_init_sata(ctx, clk_type, ssc_enable);
1326 if (rc)
1327 return rc;
1328 } else {
1329 dev_err(ctx->dev, "Un-supported customer pin mode %d\n",
1330 ctx->mode);
1331 return -ENODEV;
1332 }
1333
1334 return 0;
1335 }
1336
1337
1338
1339
1340
1341
1342
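/* Force a one-shot summer calibration followed by a latch calibration, then re-program the RX/TX registers used during calibration */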
1343 static void xgene_phy_force_lat_summer_cal(struct xgene_phy_ctx *ctx, int lane)
1344 {
1345 int i;
1346 static const struct {
1347 u32 reg;
1348 u32 val;
1349 } serdes_reg[] = {
1350 {RXTX_REG38, 0x0},
1351 {RXTX_REG39, 0xff00},
1352 {RXTX_REG40, 0xffff},
1353 {RXTX_REG41, 0xffff},
1354 {RXTX_REG42, 0xffff},
1355 {RXTX_REG43, 0xffff},
1356 {RXTX_REG44, 0xffff},
1357 {RXTX_REG45, 0xffff},
1358 {RXTX_REG46, 0xffff},
1359 {RXTX_REG47, 0xfffc},
1360 {RXTX_REG48, 0x0},
1361 {RXTX_REG49, 0x0},
1362 {RXTX_REG50, 0x0},
1363 {RXTX_REG51, 0x0},
1364 {RXTX_REG52, 0x0},
1365 {RXTX_REG53, 0x0},
1366 {RXTX_REG54, 0x0},
1367 {RXTX_REG55, 0x0},
1368 };
1369
1370
1371 serdes_setbits(ctx, lane, RXTX_REG127,
1372 RXTX_REG127_FORCE_SUM_CAL_START_MASK);
1373
1374
1375
1376
1377 usleep_range(100, 500);
1378 serdes_clrbits(ctx, lane, RXTX_REG127,
1379 RXTX_REG127_FORCE_SUM_CAL_START_MASK);
1380
1381
1382
1383
1384 usleep_range(100, 500);
1385
1386
1387 serdes_setbits(ctx, lane, RXTX_REG127,
1388 RXTX_REG127_FORCE_LAT_CAL_START_MASK);
1389
1390
1391
1392
1393 usleep_range(100, 500);
1394 serdes_clrbits(ctx, lane, RXTX_REG127,
1395 RXTX_REG127_FORCE_LAT_CAL_START_MASK);
1396
1397
1398 serdes_wr(ctx, lane, RXTX_REG28, 0x7);
1399 serdes_wr(ctx, lane, RXTX_REG31, 0x7e00);
1400 serdes_clrbits(ctx, lane, RXTX_REG4,
1401 RXTX_REG4_TX_LOOPBACK_BUF_EN_MASK);
1402 serdes_clrbits(ctx, lane, RXTX_REG7,
1403 RXTX_REG7_LOOP_BACK_ENA_CTLE_MASK);
1404 for (i = 0; i < ARRAY_SIZE(serdes_reg); i++)
1405 serdes_wr(ctx, lane, serdes_reg[i].reg,
1406 serdes_reg[i].val);
1407 }
1408
1409 static void xgene_phy_reset_rxd(struct xgene_phy_ctx *ctx, int lane)
1410 {
1411
1412 serdes_clrbits(ctx, lane, RXTX_REG7, RXTX_REG7_RESETB_RXD_MASK);
1413
1414 usleep_range(100, 150);
1415 serdes_setbits(ctx, lane, RXTX_REG7, RXTX_REG7_RESETB_RXD_MASK);
1416 }
1417
1418 static int xgene_phy_get_avg(int accum, int samples)
1419 {
1420 return (accum + (samples / 2)) / samples;
1421 }
1422
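/* Average the latch and summer calibration codes over several forced calibration runs and program them as manual calibration values */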
1423 static void xgene_phy_gen_avg_val(struct xgene_phy_ctx *ctx, int lane)
1424 {
1425 int max_loop = 10;
1426 int avg_loop = 0;
1427 int lat_do = 0, lat_xo = 0, lat_eo = 0, lat_so = 0;
1428 int lat_de = 0, lat_xe = 0, lat_ee = 0, lat_se = 0;
1429 int sum_cal = 0;
1430 int lat_do_itr, lat_xo_itr, lat_eo_itr, lat_so_itr;
1431 int lat_de_itr, lat_xe_itr, lat_ee_itr, lat_se_itr;
1432 int sum_cal_itr;
1433 int fail_even;
1434 int fail_odd;
1435 u32 val;
1436
1437 dev_dbg(ctx->dev, "Generating avg calibration value for lane %d\n",
1438 lane);
1439
1440
1441 serdes_setbits(ctx, lane, RXTX_REG12,
1442 RXTX_REG12_RX_DET_TERM_ENABLE_MASK);
1443
1444 serdes_wr(ctx, lane, RXTX_REG28, 0x0000);
1445
1446 serdes_wr(ctx, lane, RXTX_REG31, 0x0000);
1447
1448
1449
1450
1451
1452
1453
1454
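/* Repeat the forced calibration until max_loop good samples are collected; iterations with latch calibration failure codes above 1 are discarded */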
1455 while (avg_loop < max_loop) {
1456
1457 xgene_phy_force_lat_summer_cal(ctx, lane);
1458
1459 serdes_rd(ctx, lane, RXTX_REG21, &val);
1460 lat_do_itr = RXTX_REG21_DO_LATCH_CALOUT_RD(val);
1461 lat_xo_itr = RXTX_REG21_XO_LATCH_CALOUT_RD(val);
1462 fail_odd = RXTX_REG21_LATCH_CAL_FAIL_ODD_RD(val);
1463
1464 serdes_rd(ctx, lane, RXTX_REG22, &val);
1465 lat_eo_itr = RXTX_REG22_EO_LATCH_CALOUT_RD(val);
1466 lat_so_itr = RXTX_REG22_SO_LATCH_CALOUT_RD(val);
1467 fail_even = RXTX_REG22_LATCH_CAL_FAIL_EVEN_RD(val);
1468
1469 serdes_rd(ctx, lane, RXTX_REG23, &val);
1470 lat_de_itr = RXTX_REG23_DE_LATCH_CALOUT_RD(val);
1471 lat_xe_itr = RXTX_REG23_XE_LATCH_CALOUT_RD(val);
1472
1473 serdes_rd(ctx, lane, RXTX_REG24, &val);
1474 lat_ee_itr = RXTX_REG24_EE_LATCH_CALOUT_RD(val);
1475 lat_se_itr = RXTX_REG24_SE_LATCH_CALOUT_RD(val);
1476
1477 serdes_rd(ctx, lane, RXTX_REG121, &val);
1478 sum_cal_itr = RXTX_REG121_SUMOS_CAL_CODE_RD(val);
1479
1480
1481 if ((fail_even == 0 || fail_even == 1) &&
1482 (fail_odd == 0 || fail_odd == 1)) {
1483 lat_do += lat_do_itr;
1484 lat_xo += lat_xo_itr;
1485 lat_eo += lat_eo_itr;
1486 lat_so += lat_so_itr;
1487 lat_de += lat_de_itr;
1488 lat_xe += lat_xe_itr;
1489 lat_ee += lat_ee_itr;
1490 lat_se += lat_se_itr;
1491 sum_cal += sum_cal_itr;
1492
1493 dev_dbg(ctx->dev, "Iteration %d:\n", avg_loop);
1494 dev_dbg(ctx->dev, "DO 0x%x XO 0x%x EO 0x%x SO 0x%x\n",
1495 lat_do_itr, lat_xo_itr, lat_eo_itr,
1496 lat_so_itr);
1497 dev_dbg(ctx->dev, "DE 0x%x XE 0x%x EE 0x%x SE 0x%x\n",
1498 lat_de_itr, lat_xe_itr, lat_ee_itr,
1499 lat_se_itr);
1500 dev_dbg(ctx->dev, "SUM 0x%x\n", sum_cal_itr);
1501 ++avg_loop;
1502 } else {
1503 dev_err(ctx->dev,
1504 "Receiver calibration failed at %d loop\n",
1505 avg_loop);
1506 }
1507 xgene_phy_reset_rxd(ctx, lane);
1508 }
1509
1510
1511 serdes_rd(ctx, lane, RXTX_REG127, &val);
1512 val = RXTX_REG127_DO_LATCH_MANCAL_SET(val,
1513 xgene_phy_get_avg(lat_do, max_loop));
1514 val = RXTX_REG127_XO_LATCH_MANCAL_SET(val,
1515 xgene_phy_get_avg(lat_xo, max_loop));
1516 serdes_wr(ctx, lane, RXTX_REG127, val);
1517
1518 serdes_rd(ctx, lane, RXTX_REG128, &val);
1519 val = RXTX_REG128_EO_LATCH_MANCAL_SET(val,
1520 xgene_phy_get_avg(lat_eo, max_loop));
1521 val = RXTX_REG128_SO_LATCH_MANCAL_SET(val,
1522 xgene_phy_get_avg(lat_so, max_loop));
1523 serdes_wr(ctx, lane, RXTX_REG128, val);
1524
1525 serdes_rd(ctx, lane, RXTX_REG129, &val);
1526 val = RXTX_REG129_DE_LATCH_MANCAL_SET(val,
1527 xgene_phy_get_avg(lat_de, max_loop));
1528 val = RXTX_REG129_XE_LATCH_MANCAL_SET(val,
1529 xgene_phy_get_avg(lat_xe, max_loop));
1530 serdes_wr(ctx, lane, RXTX_REG129, val);
1531
1532 serdes_rd(ctx, lane, RXTX_REG130, &val);
1533 val = RXTX_REG130_EE_LATCH_MANCAL_SET(val,
1534 xgene_phy_get_avg(lat_ee, max_loop));
1535 val = RXTX_REG130_SE_LATCH_MANCAL_SET(val,
1536 xgene_phy_get_avg(lat_se, max_loop));
1537 serdes_wr(ctx, lane, RXTX_REG130, val);
1538
1539
1540 serdes_rd(ctx, lane, RXTX_REG14, &val);
1541 val = RXTX_REG14_CLTE_LATCAL_MAN_PROG_SET(val,
1542 xgene_phy_get_avg(sum_cal, max_loop));
1543 serdes_wr(ctx, lane, RXTX_REG14, val);
1544
1545 dev_dbg(ctx->dev, "Average Value:\n");
1546 dev_dbg(ctx->dev, "DO 0x%x XO 0x%x EO 0x%x SO 0x%x\n",
1547 xgene_phy_get_avg(lat_do, max_loop),
1548 xgene_phy_get_avg(lat_xo, max_loop),
1549 xgene_phy_get_avg(lat_eo, max_loop),
1550 xgene_phy_get_avg(lat_so, max_loop));
1551 dev_dbg(ctx->dev, "DE 0x%x XE 0x%x EE 0x%x SE 0x%x\n",
1552 xgene_phy_get_avg(lat_de, max_loop),
1553 xgene_phy_get_avg(lat_xe, max_loop),
1554 xgene_phy_get_avg(lat_ee, max_loop),
1555 xgene_phy_get_avg(lat_se, max_loop));
1556 dev_dbg(ctx->dev, "SUM 0x%x\n",
1557 xgene_phy_get_avg(sum_cal, max_loop));
1558
1559 serdes_rd(ctx, lane, RXTX_REG14, &val);
1560 val = RXTX_REG14_CTLE_LATCAL_MAN_ENA_SET(val, 0x1);
1561 serdes_wr(ctx, lane, RXTX_REG14, val);
1562 dev_dbg(ctx->dev, "Enable Manual Summer calibration\n");
1563
1564 serdes_rd(ctx, lane, RXTX_REG127, &val);
1565 val = RXTX_REG127_LATCH_MAN_CAL_ENA_SET(val, 0x1);
1566 dev_dbg(ctx->dev, "Enable Manual Latch calibration\n");
1567 serdes_wr(ctx, lane, RXTX_REG127, val);
1568
1569
1570 serdes_rd(ctx, lane, RXTX_REG12, &val);
1571 val = RXTX_REG12_RX_DET_TERM_ENABLE_SET(val, 0);
1572 serdes_wr(ctx, lane, RXTX_REG12, val);
1573
1574 serdes_wr(ctx, lane, RXTX_REG28, 0x0007);
1575
1576 serdes_wr(ctx, lane, RXTX_REG31, 0x7e00);
1577 }
1578
1579 static int xgene_phy_hw_init(struct phy *phy)
1580 {
1581 struct xgene_phy_ctx *ctx = phy_get_drvdata(phy);
1582 int rc;
1583 int i;
1584
1585 rc = xgene_phy_hw_initialize(ctx, CLK_EXT_DIFF, SSC_DISABLE);
1586 if (rc) {
1587 dev_err(ctx->dev, "PHY initialize failed %d\n", rc);
1588 return rc;
1589 }
1590
1591
1592 if (!IS_ERR(ctx->clk)) {
1593
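/* The hardware requires the SerDes clock to be toggled after PHY configuration: enable, disable, then re-enable it */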
1594 clk_prepare_enable(ctx->clk);
1595 clk_disable_unprepare(ctx->clk);
1596 clk_prepare_enable(ctx->clk);
1597 }
1598
1599
1600 for (i = 0; i < MAX_LANE; i++)
1601 xgene_phy_gen_avg_val(ctx, i);
1602
1603 dev_dbg(ctx->dev, "PHY initialized\n");
1604 return 0;
1605 }
1606
1607 static const struct phy_ops xgene_phy_ops = {
1608 .init = xgene_phy_hw_init,
1609 .owner = THIS_MODULE,
1610 };
1611
1612 static struct phy *xgene_phy_xlate(struct device *dev,
1613 struct of_phandle_args *args)
1614 {
1615 struct xgene_phy_ctx *ctx = dev_get_drvdata(dev);
1616
1617 if (args->args_count <= 0)
1618 return ERR_PTR(-EINVAL);
1619 if (args->args[0] >= MODE_MAX)
1620 return ERR_PTR(-EINVAL);
1621
1622 ctx->mode = args->args[0];
1623 return ctx->phy;
1624 }
1625
1626 static void xgene_phy_get_param(struct platform_device *pdev,
1627 const char *name, u32 *buffer,
1628 int count, u32 *default_val,
1629 u32 conv_factor)
1630 {
1631 int i;
1632
1633 if (!of_property_read_u32_array(pdev->dev.of_node, name, buffer,
1634 count)) {
1635 for (i = 0; i < count; i++)
1636 buffer[i] /= conv_factor;
1637 return;
1638 }
1639
1640 for (i = 0; i < count; i++)
1641 buffer[i] = default_val[i % 3];
1642 }
1643
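/*
 * Hypothetical example of a device tree node consumed by this driver.
 * The compatible string, #phy-cells count and property names match
 * xgene_phy_probe() below; the address and tuning values are
 * placeholders only:
 *
 *	phy: phy@<addr> {
 *		compatible = "apm,xgene-phy";
 *		reg = <...>;
 *		#phy-cells = <1>;
 *		apm,tx-boost-gain = <30 30 30 30 30 30>;
 *		apm,tx-eye-tuning = <10 10 10 10 10 10>;
 *	};
 */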
1644 static int xgene_phy_probe(struct platform_device *pdev)
1645 {
1646 struct phy_provider *phy_provider;
1647 struct xgene_phy_ctx *ctx;
1648 u32 default_spd[] = DEFAULT_SATA_SPD_SEL;
1649 u32 default_txboost_gain[] = DEFAULT_SATA_TXBOOST_GAIN;
1650 u32 default_txeye_direction[] = DEFAULT_SATA_TXEYEDIRECTION;
1651 u32 default_txeye_tuning[] = DEFAULT_SATA_TXEYETUNING;
1652 u32 default_txamp[] = DEFAULT_SATA_TXAMP;
1653 u32 default_txcn1[] = DEFAULT_SATA_TXCN1;
1654 u32 default_txcn2[] = DEFAULT_SATA_TXCN2;
1655 u32 default_txcp1[] = DEFAULT_SATA_TXCP1;
1656 int i;
1657
1658 ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
1659 if (!ctx)
1660 return -ENOMEM;
1661
1662 ctx->dev = &pdev->dev;
1663
1664 ctx->sds_base = devm_platform_ioremap_resource(pdev, 0);
1665 if (IS_ERR(ctx->sds_base))
1666 return PTR_ERR(ctx->sds_base);
1667
1668
1669 ctx->clk = clk_get(&pdev->dev, NULL);
1670
1671
1672 xgene_phy_get_param(pdev, "apm,tx-eye-tuning",
1673 ctx->sata_param.txeyetuning, 6, default_txeye_tuning, 1);
1674 xgene_phy_get_param(pdev, "apm,tx-eye-direction",
1675 ctx->sata_param.txeyedirection, 6, default_txeye_direction, 1);
1676 xgene_phy_get_param(pdev, "apm,tx-boost-gain",
1677 ctx->sata_param.txboostgain, 6, default_txboost_gain, 1);
1678 xgene_phy_get_param(pdev, "apm,tx-amplitude",
1679 ctx->sata_param.txamplitude, 6, default_txamp, 13300);
1680 xgene_phy_get_param(pdev, "apm,tx-pre-cursor1",
1681 ctx->sata_param.txprecursor_cn1, 6, default_txcn1, 18200);
1682 xgene_phy_get_param(pdev, "apm,tx-pre-cursor2",
1683 ctx->sata_param.txprecursor_cn2, 6, default_txcn2, 18200);
1684 xgene_phy_get_param(pdev, "apm,tx-post-cursor",
1685 ctx->sata_param.txpostcursor_cp1, 6, default_txcp1, 18200);
1686 xgene_phy_get_param(pdev, "apm,tx-speed",
1687 ctx->sata_param.txspeed, 3, default_spd, 1);
1688 for (i = 0; i < MAX_LANE; i++)
1689 ctx->sata_param.speed[i] = 2;
1690
1691 platform_set_drvdata(pdev, ctx);
1692
1693 ctx->phy = devm_phy_create(ctx->dev, NULL, &xgene_phy_ops);
1694 if (IS_ERR(ctx->phy)) {
1695 dev_dbg(&pdev->dev, "Failed to create PHY\n");
1696 return PTR_ERR(ctx->phy);
1697 }
1698 phy_set_drvdata(ctx->phy, ctx);
1699
1700 phy_provider = devm_of_phy_provider_register(ctx->dev, xgene_phy_xlate);
1701 return PTR_ERR_OR_ZERO(phy_provider);
1702 }
1703
1704 static const struct of_device_id xgene_phy_of_match[] = {
1705 {.compatible = "apm,xgene-phy",},
1706 {},
1707 };
1708 MODULE_DEVICE_TABLE(of, xgene_phy_of_match);
1709
1710 static struct platform_driver xgene_phy_driver = {
1711 .probe = xgene_phy_probe,
1712 .driver = {
1713 .name = "xgene-phy",
1714 .of_match_table = xgene_phy_of_match,
1715 },
1716 };
1717 module_platform_driver(xgene_phy_driver);
1718
1719 MODULE_DESCRIPTION("APM X-Gene Multi-Purpose PHY driver");
1720 MODULE_AUTHOR("Loc Ho <lho@apm.com>");
1721 MODULE_LICENSE("GPL v2");
1722 MODULE_VERSION("0.1");