#define nv50_ram(p) container_of((p), struct nv50_ram, base)
#include "ram.h"
#include "ramseq.h"
#include "nv50.h"

#include <core/option.h>
#include <subdev/bios.h>
#include <subdev/bios/perf.h>
#include <subdev/bios/pll.h>
#include <subdev/bios/rammap.h>
#include <subdev/bios/timing.h>
#include <subdev/clk/pll.h>
#include <subdev/gpio.h>

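/* Registers shadowed by the hwsq (hardware sequencer) script built for a
 * memory reclock; writes to them are recorded by nv50_ram_calc() and only
 * executed later by nv50_ram_prog().
 */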
struct nv50_ramseq {
	struct hwsq base;
	struct hwsq_reg r_0x002504;
	struct hwsq_reg r_0x004008;
	struct hwsq_reg r_0x00400c;
	struct hwsq_reg r_0x00c040;
	struct hwsq_reg r_0x100200;
	struct hwsq_reg r_0x100210;
	struct hwsq_reg r_0x10021c;
	struct hwsq_reg r_0x1002d0;
	struct hwsq_reg r_0x1002d4;
	struct hwsq_reg r_0x1002dc;
	struct hwsq_reg r_0x10053c;
	struct hwsq_reg r_0x1005a0;
	struct hwsq_reg r_0x1005a4;
	struct hwsq_reg r_0x100710;
	struct hwsq_reg r_0x100714;
	struct hwsq_reg r_0x100718;
	struct hwsq_reg r_0x10071c;
	struct hwsq_reg r_0x100da0;
	struct hwsq_reg r_0x100e20;
	struct hwsq_reg r_0x100e24;
	struct hwsq_reg r_0x611200;
	struct hwsq_reg r_timing[9];
	struct hwsq_reg r_mr[4];
	struct hwsq_reg r_gpio[4];
};

struct nv50_ram {
	struct nvkm_ram base;
	struct nv50_ramseq hwsq;
};

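/* Shorthand for the version-1.0 memory timing fields decoded from the VBIOS
 * into struct nvbios_ramcfg.
 */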
#define T(t) cfg->timing_10_##t
static int
nv50_ram_timing_calc(struct nv50_ram *ram, u32 *timing)
{
	struct nvbios_ramcfg *cfg = &ram->base.target.bios;
	struct nvkm_subdev *subdev = &ram->base.fb->subdev;
	struct nvkm_device *device = subdev->device;
	u32 cur2, cur4, cur7, cur8;
	u8 unkt3b;

	cur2 = nvkm_rd32(device, 0x100228);
	cur4 = nvkm_rd32(device, 0x100230);
	cur7 = nvkm_rd32(device, 0x10023c);
	cur8 = nvkm_rd32(device, 0x100240);

	switch ((!T(CWL)) * ram->base.type) {
	case NVKM_RAM_TYPE_DDR2:
		T(CWL) = T(CL) - 1;
		break;
	case NVKM_RAM_TYPE_GDDR3:
		T(CWL) = ((cur2 & 0xff000000) >> 24) + 1;
		break;
	}

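	/* XXX: the constants below differ between NVA0 and earlier chipsets;
	 * their exact meaning is undocumented.
	 */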
	if (device->chipset == 0xa0) {
		unkt3b = 0x19 + ram->base.next->bios.rammap_00_16_40;
		timing[6] = (0x2d + T(CL) - T(CWL) +
			     ram->base.next->bios.rammap_00_16_40) << 16 |
			    T(CWL) << 8 |
			    (0x2f + T(CL) - T(CWL));
	} else {
		unkt3b = 0x16;
		timing[6] = (0x2b + T(CL) - T(CWL)) << 16 |
			    max_t(s8, T(CWL) - 2, 1) << 8 |
			    (0x2e + T(CL) - T(CWL));
	}

	timing[0] = (T(RP) << 24 | T(RAS) << 16 | T(RFC) << 8 | T(RC));
	timing[1] = (T(WR) + 1 + T(CWL)) << 24 |
		    max_t(u8, T(18), 1) << 16 |
		    (T(WTR) + 1 + T(CWL)) << 8 |
		    (3 + T(CL) - T(CWL));
	timing[2] = (T(CWL) - 1) << 24 |
		    (T(RRD) << 16) |
		    (T(RCDWR) << 8) |
		    T(RCDRD);
	timing[3] = (unkt3b - 2 + T(CL)) << 24 |
		    unkt3b << 16 |
		    (T(CL) - 1) << 8 |
		    (T(CL) - 1);
	timing[4] = (cur4 & 0xffff0000) |
		    T(13) << 8 |
		    T(13);
	timing[5] = T(RFC) << 24 |
		    max_t(u8, T(RCDRD), T(RCDWR)) << 16 |
		    T(RP);

	timing[7] = (cur7 & 0xff00ffff) | (T(CL) - 1) << 16;
	timing[8] = (cur8 & 0xffffff00);

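	/* Type-specific CL-derived adjustments; only DDR2 and GDDR3 are
	 * handled here.
	 */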
	if (ram->base.type == NVKM_RAM_TYPE_DDR2) {
		timing[5] |= (T(CL) + 3) << 8;
		timing[8] |= (T(CL) - 4);
	} else
	if (ram->base.type == NVKM_RAM_TYPE_GDDR3) {
		timing[5] |= (T(CL) + 2) << 8;
		timing[8] |= (T(CL) - 2);
	}

	nvkm_debug(subdev, " 220: %08x %08x %08x %08x\n",
		   timing[0], timing[1], timing[2], timing[3]);
	nvkm_debug(subdev, " 230: %08x %08x %08x %08x\n",
		   timing[4], timing[5], timing[6], timing[7]);
	nvkm_debug(subdev, " 240: %08x\n", timing[8]);
	return 0;
}

static int
nv50_ram_timing_read(struct nv50_ram *ram, u32 *timing)
{
	unsigned int i;
	struct nvbios_ramcfg *cfg = &ram->base.target.bios;
	struct nvkm_subdev *subdev = &ram->base.fb->subdev;
	struct nvkm_device *device = subdev->device;

	for (i = 0; i <= 8; i++)
		timing[i] = nvkm_rd32(device, 0x100220 + (i * 4));

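	/* Derive just enough timing information from the current registers
	 * for the MR calculation to succeed.
	 */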
	cfg->timing_ver = 0x10;
	T(CL) = (timing[3] & 0xff) + 1;

	switch (ram->base.type) {
	case NVKM_RAM_TYPE_DDR2:
		T(CWL) = T(CL) - 1;
		break;
	case NVKM_RAM_TYPE_GDDR3:
		T(CWL) = ((timing[2] & 0xff000000) >> 24) + 1;
		break;
	default:
		return -ENOSYS;
	}

	T(WR) = ((timing[1] >> 24) & 0xff) - 1 - T(CWL);

	return 0;
}
#undef T

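/* Pulse the DLL-reset bit (MR0 bit 8) and give the DLL time to relock. */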
static void
nvkm_sddr2_dll_reset(struct nv50_ramseq *hwsq)
{
	ram_mask(hwsq, mr[0], 0x100, 0x100);
	ram_mask(hwsq, mr[0], 0x100, 0x000);
	ram_nsec(hwsq, 24000);
}

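/* Toggle a memory-related GPIO (ODT, memory voltage select, ...) from the
 * hwsq script, taking the DCB polarity into account, but only if its current
 * state differs from the requested one.
 */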
static void
nv50_ram_gpio(struct nv50_ramseq *hwsq, u8 tag, u32 val)
{
	struct nvkm_gpio *gpio = hwsq->base.subdev->device->gpio;
	struct dcb_gpio_func func;
	u32 reg, sh, gpio_val;
	int ret;

	if (nvkm_gpio_get(gpio, 0, tag, DCB_GPIO_UNUSED) != val) {
		ret = nvkm_gpio_find(gpio, 0, tag, DCB_GPIO_UNUSED, &func);
		if (ret)
			return;

		reg = func.line >> 3;
		sh = (func.line & 0x7) << 2;
		gpio_val = ram_rd32(hwsq, gpio[reg]);

		if (gpio_val & (8 << sh))
			val = !val;
		if (!(func.log[1] & 1))
			val = !val;

		ram_mask(hwsq, gpio[reg], (0x3 << sh), ((val | 0x2) << sh));
		ram_nsec(hwsq, 20000);
	}
}

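/* Build the hwsq script that reclocks memory to "freq": look up the VBIOS
 * perf/rammap/timing data for the target clock, derive the MR and MPLL
 * settings, then record the register sequence below.  The script is only
 * executed later, by nv50_ram_prog().
 */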
static int
nv50_ram_calc(struct nvkm_ram *base, u32 freq)
{
	struct nv50_ram *ram = nv50_ram(base);
	struct nv50_ramseq *hwsq = &ram->hwsq;
	struct nvkm_subdev *subdev = &ram->base.fb->subdev;
	struct nvkm_bios *bios = subdev->device->bios;
	struct nvbios_perfE perfE;
	struct nvbios_pll mpll;
	struct nvkm_ram_data *next;
	u8 ver, hdr, cnt, len, strap, size;
	u32 data;
	u32 r100da0, r004008, unk710, unk714, unk718, unk71c;
	int N1, M1, N2, M2, P;
	int ret, i;
	u32 timing[9];

	next = &ram->base.target;
	next->freq = freq;
	ram->base.next = next;

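	/* Look up the closest matching performance table entry for the
	 * requested frequency.
	 */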
	i = 0;
	do {
		data = nvbios_perfEp(bios, i++, &ver, &hdr, &cnt,
				     &size, &perfE);
		if (!data || (ver < 0x25 || ver >= 0x40) ||
		    (size < 2)) {
			nvkm_error(subdev, "invalid/missing perftab entry\n");
			return -EINVAL;
		}
	} while (perfE.memory < freq);

	nvbios_rammapEp_from_perf(bios, data, hdr, &next->bios);

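	/* Locate the strap-specific data segment for the fitted memory. */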
	strap = nvbios_ramcfg_index(subdev);
	if (strap >= cnt) {
		nvkm_error(subdev, "invalid ramcfg strap\n");
		return -EINVAL;
	}

	data = nvbios_rammapSp_from_perf(bios, data + hdr, size, strap,
					 &next->bios);
	if (!data) {
		nvkm_error(subdev, "invalid/missing rammap entry\n");
		return -EINVAL;
	}

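	/* Look up memory timings, if the BIOS says they're present. */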
	if (next->bios.ramcfg_timing != 0xff) {
		data = nvbios_timingEp(bios, next->bios.ramcfg_timing,
				       &ver, &hdr, &cnt, &len, &next->bios);
		if (!data || ver != 0x10 || hdr < 0x12) {
			nvkm_error(subdev, "invalid/missing timing entry "
				   "%02x %04x %02x %02x\n",
				   strap, data, ver, hdr);
			return -EINVAL;
		}
		nv50_ram_timing_calc(ram, timing);
	} else {
		nv50_ram_timing_read(ram, timing);
	}

	ret = ram_init(hwsq, subdev);
	if (ret)
		return ret;

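	/* Read the current mode registers and calculate the new values. */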
	ram->base.mr[0] = ram_rd32(hwsq, mr[0]);
	ram->base.mr[1] = ram_rd32(hwsq, mr[1]);
	ram->base.mr[2] = ram_rd32(hwsq, mr[2]);

	switch (ram->base.type) {
	case NVKM_RAM_TYPE_GDDR3:
		ret = nvkm_gddr3_calc(&ram->base);
		break;
	default:
		ret = -ENOSYS;
		break;
	}

	if (ret) {
		nvkm_error(subdev, "Could not calculate MR\n");
		return ret;
	}

	if (subdev->device->chipset <= 0x96 && !next->bios.ramcfg_00_03_02)
		ram_mask(hwsq, 0x100710, 0x00000200, 0x00000000);

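	/* Always clear this bit for the duration of the reclock; it is
	 * conditionally re-enabled at the end of the script.
	 */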
	ram_mask(hwsq, 0x100200, 0x00000800, 0x00000000);

	ram_wait_vblank(hwsq);
	ram_wr32(hwsq, 0x611200, 0x00003300);
	ram_wr32(hwsq, 0x002504, 0x00000001); /* block fifo */
	ram_nsec(hwsq, 8000);
	ram_setf(hwsq, 0x10, 0x00); /* disable fb */
	ram_wait(hwsq, 0x00, 0x01); /* wait for fb disabled */
	ram_nsec(hwsq, 2000);

	if (next->bios.timing_10_ODT)
		nv50_ram_gpio(hwsq, 0x2e, 1);

	ram_wr32(hwsq, 0x1002d4, 0x00000001); /* precharge */
	ram_wr32(hwsq, 0x1002d0, 0x00000001); /* refresh */
	ram_wr32(hwsq, 0x1002d0, 0x00000001); /* refresh */
	ram_wr32(hwsq, 0x100210, 0x00000000); /* disable auto-refresh */
	ram_wr32(hwsq, 0x1002dc, 0x00000001); /* enable self-refresh */

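	/* Parse the MPLL limits from the VBIOS and compute coefficients for
	 * the target frequency.
	 */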
	ret = nvbios_pll_parse(bios, 0x004008, &mpll);
	mpll.vco2.max_freq = 0;
	if (ret >= 0) {
		ret = nv04_pll_calc(subdev, &mpll, freq,
				    &N1, &M1, &N2, &M2, &P);
		if (ret <= 0)
			ret = -EINVAL;
	}

	if (ret < 0)
		return ret;

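	/* XXX: the 750MHz threshold seems rather arbitrary */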
	if (freq <= 750000) {
		r100da0 = 0x00000010;
		r004008 = 0x90000000;
	} else {
		r100da0 = 0x00000000;
		r004008 = 0x80000000;
	}

	r004008 |= (mpll.bias_p << 19) | (P << 22) | (P << 16);

	ram_mask(hwsq, 0x00c040, 0xc000c000, 0x0000c000);

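	/* Program the new MPLL coefficients. */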
	ram_mask(hwsq, 0x004008, 0x00004200, 0x00000200 |
		 next->bios.rammap_00_16_40 << 14);
	ram_mask(hwsq, 0x00400c, 0x0000ffff, (N1 << 8) | M1);
	ram_mask(hwsq, 0x004008, 0x91ff0000, r004008);

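	/* XXX: per-partition register, only touched on G92 and newer? */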
	if (subdev->device->chipset >= 0x92)
		ram_wr32(hwsq, 0x100da0, r100da0);

	nv50_ram_gpio(hwsq, 0x18, !next->bios.ramcfg_FBVDDQ);
	ram_nsec(hwsq, 64000);
	ram_nsec(hwsq, 32000);

	ram_mask(hwsq, 0x004008, 0x00002200, 0x00002000);

	ram_wr32(hwsq, 0x1002dc, 0x00000000); /* disable self-refresh */
	ram_wr32(hwsq, 0x1002d4, 0x00000001); /* precharge */
	ram_wr32(hwsq, 0x100210, 0x80000000); /* enable auto-refresh */

	ram_nsec(hwsq, 12000);

	switch (ram->base.type) {
	case NVKM_RAM_TYPE_DDR2:
		ram_nuke(hwsq, mr[0]); /* force update */
		ram_mask(hwsq, mr[0], 0x000, 0x000);
		break;
	case NVKM_RAM_TYPE_GDDR3:
		ram_nuke(hwsq, mr[1]); /* force update */
		ram_wr32(hwsq, mr[1], ram->base.mr[1]);
		ram_nuke(hwsq, mr[0]); /* force update */
		ram_wr32(hwsq, mr[0], ram->base.mr[0]);
		break;
	default:
		break;
	}

	ram_mask(hwsq, timing[3], 0xffffffff, timing[3]);
	ram_mask(hwsq, timing[1], 0xffffffff, timing[1]);
	ram_mask(hwsq, timing[6], 0xffffffff, timing[6]);
	ram_mask(hwsq, timing[7], 0xffffffff, timing[7]);
	ram_mask(hwsq, timing[8], 0xffffffff, timing[8]);
	ram_mask(hwsq, timing[0], 0xffffffff, timing[0]);
	ram_mask(hwsq, timing[2], 0xffffffff, timing[2]);
	ram_mask(hwsq, timing[4], 0xffffffff, timing[4]);
	ram_mask(hwsq, timing[5], 0xffffffff, timing[5]);

	if (!next->bios.ramcfg_00_03_02)
		ram_mask(hwsq, 0x10021c, 0x00010000, 0x00000000);
	ram_mask(hwsq, 0x100200, 0x00001000, !next->bios.ramcfg_00_04_02 << 12);

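	/* XXX: mostly unknown 0x1007xx bits; the values depend on the ramcfg
	 * flags tested below.
	 */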
	unk710  = ram_rd32(hwsq, 0x100710) & ~0x00000100;
	unk714  = ram_rd32(hwsq, 0x100714) & ~0xf0000020;
	unk718  = ram_rd32(hwsq, 0x100718) & ~0x00000100;
	unk71c  = ram_rd32(hwsq, 0x10071c) & ~0x00000100;
	if (subdev->device->chipset <= 0x96) {
		unk710 &= ~0x0000006e;
		unk714 &= ~0x00000100;

		if (!next->bios.ramcfg_00_03_08)
			unk710 |= 0x00000060;
		if (!next->bios.ramcfg_FBVDDQ)
			unk714 |= 0x00000100;
		if ( next->bios.ramcfg_00_04_04)
			unk710 |= 0x0000000e;
	} else {
		unk710 &= ~0x00000001;

		if (!next->bios.ramcfg_00_03_08)
			unk710 |= 0x00000001;
	}

	if ( next->bios.ramcfg_00_03_01)
		unk71c |= 0x00000100;
	if ( next->bios.ramcfg_00_03_02)
		unk710 |= 0x00000100;
	if (!next->bios.ramcfg_00_03_08)
		unk714 |= 0x00000020;
	if ( next->bios.ramcfg_00_04_04)
		unk714 |= 0x70000000;
	if ( next->bios.ramcfg_00_04_20)
		unk718 |= 0x00000100;

	ram_mask(hwsq, 0x100714, 0xffffffff, unk714);
	ram_mask(hwsq, 0x10071c, 0xffffffff, unk71c);
	ram_mask(hwsq, 0x100718, 0xffffffff, unk718);
	ram_mask(hwsq, 0x100710, 0xffffffff, unk710);

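	/* Program the 0x1005a0/0x1005a4 ramcfg bytes when the rammap entry
	 * asks for it, otherwise force the fallback bit in 0x10053c.
	 */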
	if (next->bios.rammap_00_16_20) {
		ram_wr32(hwsq, 0x1005a0, next->bios.ramcfg_00_07 << 16 |
					 next->bios.ramcfg_00_06 << 8 |
					 next->bios.ramcfg_00_05);
		ram_wr32(hwsq, 0x1005a4, next->bios.ramcfg_00_09 << 8 |
					 next->bios.ramcfg_00_08);
		ram_mask(hwsq, 0x10053c, 0x00001000, 0x00000000);
	} else {
		ram_mask(hwsq, 0x10053c, 0x00001000, 0x00001000);
	}
	ram_mask(hwsq, mr[1], 0xffffffff, ram->base.mr[1]);

	if (!next->bios.timing_10_ODT)
		nv50_ram_gpio(hwsq, 0x2e, 0);

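	/* Reset the DLL unless the BIOS asks for it to stay disabled. */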
	if (!next->bios.ramcfg_DLLoff)
		nvkm_sddr2_dll_reset(hwsq);

	ram_setf(hwsq, 0x10, 0x01); /* enable fb */
	ram_wait(hwsq, 0x00, 0x00); /* wait for fb enabled */
	ram_wr32(hwsq, 0x611200, 0x00003330);
	ram_wr32(hwsq, 0x002504, 0x00000000); /* un-block fifo */

	if (next->bios.rammap_00_17_02)
		ram_mask(hwsq, 0x100200, 0x00000800, 0x00000800);
	if (!next->bios.rammap_00_16_40)
		ram_mask(hwsq, 0x004008, 0x00004000, 0x00000000);
	if (next->bios.ramcfg_00_03_02)
		ram_mask(hwsq, 0x10021c, 0x00010000, 0x00010000);
	if (subdev->device->chipset <= 0x96 && next->bios.ramcfg_00_03_02)
		ram_mask(hwsq, 0x100710, 0x00000200, 0x00000200);

	return 0;
}

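/* Execute the hwsq script prepared by nv50_ram_calc().  The "NvMemExec"
 * config option (default true) allows the script to be built but not run.
 */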
static int
nv50_ram_prog(struct nvkm_ram *base)
{
	struct nv50_ram *ram = nv50_ram(base);
	struct nvkm_device *device = ram->base.fb->subdev.device;
	ram_exec(&ram->hwsq, nvkm_boolopt(device->cfgopt, "NvMemExec", true));
	return 0;
}

static void
nv50_ram_tidy(struct nvkm_ram *base)
{
	struct nv50_ram *ram = nv50_ram(base);
	ram_exec(&ram->hwsq, false);
}

static const struct nvkm_ram_func
nv50_ram_func = {
	.calc = nv50_ram_calc,
	.prog = nv50_ram_prog,
	.tidy = nv50_ram_tidy,
};

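/* Work out the row-block size from the memory controller configuration;
 * this becomes the block granularity of the VRAM allocator set up in
 * nv50_ram_ctor().
 */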
static u32
nv50_fb_vram_rblock(struct nvkm_ram *ram)
{
	struct nvkm_subdev *subdev = &ram->fb->subdev;
	struct nvkm_device *device = subdev->device;
	int colbits, rowbitsa, rowbitsb, banks;
	u64 rowsize, predicted;
	u32 r0, r4, rt, rblock_size;

	r0 = nvkm_rd32(device, 0x100200);
	r4 = nvkm_rd32(device, 0x100204);
	rt = nvkm_rd32(device, 0x100250);
	nvkm_debug(subdev, "memcfg %08x %08x %08x %08x\n",
		   r0, r4, rt, nvkm_rd32(device, 0x001540));

	colbits = (r4 & 0x0000f000) >> 12;
	rowbitsa = ((r4 & 0x000f0000) >> 16) + 8;
	rowbitsb = ((r4 & 0x00f00000) >> 20) + 8;
	banks = 1 << (((r4 & 0x03000000) >> 24) + 2);

	rowsize = ram->parts * banks * (1 << colbits) * 8;
	predicted = rowsize << rowbitsa;
	if (r0 & 0x00000004)
		predicted += rowsize << rowbitsb;

	if (predicted != ram->size) {
		nvkm_warn(subdev, "memory controller reports %d MiB VRAM\n",
			  (u32)(ram->size >> 20));
	}

	rblock_size = rowsize;
	if (rt & 1)
		rblock_size *= 3;

	nvkm_debug(subdev, "rblock %d bytes\n", rblock_size);
	return rblock_size;
}

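/* Common constructor for NV50-family boards: detect the memory type from
 * 0x100714, read the VRAM size from 0x10020c, then register the VRAM
 * allocator with head/tail reservations.
 */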
int
nv50_ram_ctor(const struct nvkm_ram_func *func,
	      struct nvkm_fb *fb, struct nvkm_ram *ram)
{
	struct nvkm_device *device = fb->subdev.device;
	struct nvkm_bios *bios = device->bios;
	const u32 rsvd_head = ( 256 * 1024); /* vga memory */
	const u32 rsvd_tail = (1024 * 1024); /* vbios etc */
	u64 size = nvkm_rd32(device, 0x10020c);
	enum nvkm_ram_type type = NVKM_RAM_TYPE_UNKNOWN;
	int ret;

	switch (nvkm_rd32(device, 0x100714) & 0x00000007) {
	case 0: type = NVKM_RAM_TYPE_DDR1; break;
	case 1:
		if (nvkm_fb_bios_memtype(bios) == NVKM_RAM_TYPE_DDR3)
			type = NVKM_RAM_TYPE_DDR3;
		else
			type = NVKM_RAM_TYPE_DDR2;
		break;
	case 2: type = NVKM_RAM_TYPE_GDDR3; break;
	case 3: type = NVKM_RAM_TYPE_GDDR4; break;
	case 4: type = NVKM_RAM_TYPE_GDDR5; break;
	default:
		break;
	}

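	/* The low byte of 0x10020c holds bits 39:32 of the VRAM size in
	 * bytes; e.g. a register value of 0x40000001 decodes to 0x140000000
	 * (5 GiB).
	 */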
	size = (size & 0x000000ff) << 32 | (size & 0xffffff00);

	ret = nvkm_ram_ctor(func, fb, type, size, ram);
	if (ret)
		return ret;

	ram->part_mask = (nvkm_rd32(device, 0x001540) & 0x00ff0000) >> 16;
	ram->parts = hweight8(ram->part_mask);
	ram->ranks = (nvkm_rd32(device, 0x100200) & 0x4) ? 2 : 1;
	nvkm_mm_fini(&ram->vram);

	return nvkm_mm_init(&ram->vram, NVKM_RAM_MM_NORMAL,
			    rsvd_head >> NVKM_RAM_MM_SHIFT,
			    (size - rsvd_head - rsvd_tail) >> NVKM_RAM_MM_SHIFT,
			    nv50_fb_vram_rblock(ram) >> NVKM_RAM_MM_SHIFT);
}

int
nv50_ram_new(struct nvkm_fb *fb, struct nvkm_ram **pram)
{
	struct nv50_ram *ram;
	int ret, i;

	if (!(ram = kzalloc(sizeof(*ram), GFP_KERNEL)))
		return -ENOMEM;
	*pram = &ram->base;

	ret = nv50_ram_ctor(&nv50_ram_func, fb, &ram->base);
	if (ret)
		return ret;

	ram->hwsq.r_0x002504 = hwsq_reg(0x002504);
	ram->hwsq.r_0x00c040 = hwsq_reg(0x00c040);
	ram->hwsq.r_0x004008 = hwsq_reg(0x004008);
	ram->hwsq.r_0x00400c = hwsq_reg(0x00400c);
	ram->hwsq.r_0x100200 = hwsq_reg(0x100200);
	ram->hwsq.r_0x100210 = hwsq_reg(0x100210);
	ram->hwsq.r_0x10021c = hwsq_reg(0x10021c);
	ram->hwsq.r_0x1002d0 = hwsq_reg(0x1002d0);
	ram->hwsq.r_0x1002d4 = hwsq_reg(0x1002d4);
	ram->hwsq.r_0x1002dc = hwsq_reg(0x1002dc);
	ram->hwsq.r_0x10053c = hwsq_reg(0x10053c);
	ram->hwsq.r_0x1005a0 = hwsq_reg(0x1005a0);
	ram->hwsq.r_0x1005a4 = hwsq_reg(0x1005a4);
	ram->hwsq.r_0x100710 = hwsq_reg(0x100710);
	ram->hwsq.r_0x100714 = hwsq_reg(0x100714);
	ram->hwsq.r_0x100718 = hwsq_reg(0x100718);
	ram->hwsq.r_0x10071c = hwsq_reg(0x10071c);
	ram->hwsq.r_0x100da0 = hwsq_stride(0x100da0, 4, ram->base.part_mask);
	ram->hwsq.r_0x100e20 = hwsq_reg(0x100e20);
	ram->hwsq.r_0x100e24 = hwsq_reg(0x100e24);
	ram->hwsq.r_0x611200 = hwsq_reg(0x611200);

	for (i = 0; i < 9; i++)
		ram->hwsq.r_timing[i] = hwsq_reg(0x100220 + (i * 0x04));

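	/* Dual-rank boards mirror the mode registers for the second rank, so
	 * address both copies from the script.
	 */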
	if (ram->base.ranks > 1) {
		ram->hwsq.r_mr[0] = hwsq_reg2(0x1002c0, 0x1002c8);
		ram->hwsq.r_mr[1] = hwsq_reg2(0x1002c4, 0x1002cc);
		ram->hwsq.r_mr[2] = hwsq_reg2(0x1002e0, 0x1002e8);
		ram->hwsq.r_mr[3] = hwsq_reg2(0x1002e4, 0x1002ec);
	} else {
		ram->hwsq.r_mr[0] = hwsq_reg(0x1002c0);
		ram->hwsq.r_mr[1] = hwsq_reg(0x1002c4);
		ram->hwsq.r_mr[2] = hwsq_reg(0x1002e0);
		ram->hwsq.r_mr[3] = hwsq_reg(0x1002e4);
	}

	ram->hwsq.r_gpio[0] = hwsq_reg(0x00e104);
	ram->hwsq.r_gpio[1] = hwsq_reg(0x00e108);
	ram->hwsq.r_gpio[2] = hwsq_reg(0x00e120);
	ram->hwsq.r_gpio[3] = hwsq_reg(0x00e124);

	return 0;
}