0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
0024 #include "disp.h"
0025 #include "atom.h"
0026 #include "core.h"
0027 #include "head.h"
0028 #include "wndw.h"
0029 #include "handles.h"
0030
0031 #include <linux/dma-mapping.h>
0032 #include <linux/hdmi.h>
0033 #include <linux/component.h>
0034 #include <linux/iopoll.h>
0035
0036 #include <drm/display/drm_dp_helper.h>
0037 #include <drm/display/drm_scdc_helper.h>
0038 #include <drm/drm_atomic.h>
0039 #include <drm/drm_atomic_helper.h>
0040 #include <drm/drm_edid.h>
0041 #include <drm/drm_fb_helper.h>
0042 #include <drm/drm_plane_helper.h>
0043 #include <drm/drm_probe_helper.h>
0044 #include <drm/drm_vblank.h>
0045
0046 #include <nvif/push507c.h>
0047
0048 #include <nvif/class.h>
0049 #include <nvif/cl0002.h>
0050 #include <nvif/cl5070.h>
0051 #include <nvif/event.h>
0052 #include <nvif/if0014.h>
0053 #include <nvif/timer.h>
0054
0055 #include <nvhw/class/cl507c.h>
0056 #include <nvhw/class/cl507d.h>
0057 #include <nvhw/class/cl837d.h>
0058 #include <nvhw/class/cl887d.h>
0059 #include <nvhw/class/cl907d.h>
0060 #include <nvhw/class/cl917d.h>
0061
0062 #include "nouveau_drv.h"
0063 #include "nouveau_dma.h"
0064 #include "nouveau_gem.h"
0065 #include "nouveau_connector.h"
0066 #include "nouveau_encoder.h"
0067 #include "nouveau_fence.h"
0068 #include "nouveau_fbcon.h"
0069
0070 #include <subdev/bios/dp.h>
0071
0072
0073
0074
0075
0076 static int
0077 nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
0078 const s32 *oclass, u8 head, void *data, u32 size,
0079 struct nv50_chan *chan)
0080 {
0081 struct nvif_sclass *sclass;
0082 int ret, i, n;
0083
0084 chan->device = device;
0085
0086 ret = n = nvif_object_sclass_get(disp, &sclass);
0087 if (ret < 0)
0088 return ret;
0089
0090 while (oclass[0]) {
0091 for (i = 0; i < n; i++) {
0092 if (sclass[i].oclass == oclass[0]) {
0093 ret = nvif_object_ctor(disp, "kmsChan", 0,
0094 oclass[0], data, size,
0095 &chan->user);
0096 if (ret == 0)
0097 nvif_object_map(&chan->user, NULL, 0);
0098 nvif_object_sclass_put(&sclass);
0099 return ret;
0100 }
0101 }
0102 oclass++;
0103 }
0104
0105 nvif_object_sclass_put(&sclass);
0106 return -ENOSYS;
0107 }
0108
/* Destroy the nvif object backing a display channel. */
static void
nv50_chan_destroy(struct nv50_chan *chan)
{
	nvif_object_dtor(&chan->user);
}
0114
0115
0116
0117
0118
/* Tear down a DMA display channel: context DMA objects first, then the
 * channel itself, and finally the push buffer memory backing it.
 */
void
nv50_dmac_destroy(struct nv50_dmac *dmac)
{
	nvif_object_dtor(&dmac->vram);
	nvif_object_dtor(&dmac->sync);

	nv50_chan_destroy(&dmac->base);

	/* Push buffer must outlive the channel that fetches from it. */
	nvif_mem_dtor(&dmac->_push.mem);
}
0129
/* Submit everything written to the push buffer since the last kick by
 * advancing the channel's PUT pointer, then reset the begin pointer.
 */
static void
nv50_dmac_kick(struct nvif_push *push)
{
	struct nv50_dmac *dmac = container_of(push, typeof(*dmac), _push);

	/* Dword offset of the current write position within the buffer. */
	dmac->cur = push->cur - (u32 *)dmac->_push.mem.object.map.ptr;
	if (dmac->put != dmac->cur) {
		/* For a VRAM-backed push buffer, write the 0x070000 register
		 * and poll it before updating PUT.
		 * NOTE(review): the exact semantics of 0x070000 aren't
		 * visible here — presumably it flushes pending CPU writes so
		 * the display engine sees a coherent buffer; confirm against
		 * the hardware documentation.
		 */
		if (dmac->push->mem.type & NVIF_MEM_VRAM) {
			struct nvif_device *device = dmac->base.device;
			nvif_wr32(&device->object, 0x070000, 0x00000001);
			nvif_msec(device, 2000,
				if (!(nvif_rd32(&device->object, 0x070000) & 0x00000002))
					break;
			);
		}

		NVIF_WV32(&dmac->base.user, NV507C, PUT, PTR, dmac->cur);
		dmac->put = dmac->cur;
	}

	push->bgn = push->cur;
}
0155
0156 static int
0157 nv50_dmac_free(struct nv50_dmac *dmac)
0158 {
0159 u32 get = NVIF_RV32(&dmac->base.user, NV507C, GET, PTR);
0160 if (get > dmac->cur)
0161 return get - dmac->cur - 5;
0162 return dmac->max - dmac->cur;
0163 }
0164
/* Wrap the push buffer back to offset zero by emitting a jump method.
 * If the hardware's GET pointer is still at zero we would overwrite
 * data it hasn't fetched yet, so wait for it to advance first.
 */
static int
nv50_dmac_wind(struct nv50_dmac *dmac)
{
	u32 get = NVIF_RV32(&dmac->base.user, NV507C, GET, PTR);
	if (get == 0) {
		/* Nothing submitted yet (PUT still 0): kick pending data
		 * so GET has a reason to move.
		 */
		if (dmac->put == 0)
			nv50_dmac_kick(dmac->push);

		if (nvif_msec(dmac->base.device, 2000,
			if (NVIF_TV32(&dmac->base.user, NV507C, GET, PTR, >, 0))
				break;
		) < 0)
			return -ETIMEDOUT;
	}

	PUSH_RSVD(dmac->push, PUSH_JUMP(dmac->push, 0));
	dmac->cur = 0;
	return 0;
}
0188
/* Make room for at least @size dwords in the push buffer, wrapping to
 * the start and/or waiting for the hardware to consume data as needed.
 * On success, push->bgn/cur/end are updated to describe the free span.
 */
static int
nv50_dmac_wait(struct nvif_push *push, u32 size)
{
	struct nv50_dmac *dmac = container_of(push, typeof(*dmac), _push);
	int free;

	/* A request larger than the whole buffer can never be satisfied. */
	if (WARN_ON(size > dmac->max))
		return -EINVAL;

	dmac->cur = push->cur - (u32 *)dmac->_push.mem.object.map.ptr;
	if (dmac->cur + size >= dmac->max) {
		/* Not enough room before the end of the buffer: wrap and
		 * kick so the jump is visible to the hardware.
		 */
		int ret = nv50_dmac_wind(dmac);
		if (ret)
			return ret;

		push->cur = dmac->_push.mem.object.map.ptr;
		push->cur = push->cur + dmac->cur;
		nv50_dmac_kick(push);
	}

	/* Poll until the hardware has freed enough space. */
	if (nvif_msec(dmac->base.device, 2000,
		if ((free = nv50_dmac_free(dmac)) >= size)
			break;
	) < 0) {
		WARN_ON(1);
		return -ETIMEDOUT;
	}

	push->bgn = dmac->_push.mem.object.map.ptr;
	push->bgn = push->bgn + dmac->cur;
	push->cur = push->bgn;
	push->end = push->cur + free;
	return 0;
}
0223
/* Module option: place EVO/NVD push buffers in VRAM (1), in system
 * memory (0), or decide automatically per-chipset (-1, the default —
 * see nv50_dmac_create()).
 */
MODULE_PARM_DESC(kms_vram_pushbuf, "Place EVO/NVD push buffers in VRAM (default: auto)");
static int nv50_dmac_vram_pushbuf = -1;
module_param_named(kms_vram_pushbuf, nv50_dmac_vram_pushbuf, int, 0400);
0227
/* Create a DMA display channel: allocate and map the 4KiB push buffer,
 * create the channel object itself, and (unless @syncbuf is negative)
 * the sync and VRAM context DMA objects the channel methods reference.
 *
 * Returns 0 on success or a negative error; on failure the caller is
 * expected to invoke nv50_dmac_destroy() (partially-constructed state
 * is not unwound here).
 */
int
nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size, s64 syncbuf,
		 struct nv50_dmac *dmac)
{
	struct nouveau_cli *cli = (void *)device->object.client;
	struct nvif_disp_chan_v0 *args = data;
	u8 type = NVIF_MEM_COHERENT;
	int ret;

	mutex_init(&dmac->lock);

	/* Decide where the push buffer lives: honour the module option if
	 * set, otherwise use VRAM on Pascal and coherent sysmem elsewhere.
	 * NOTE(review): the original rationale comment was stripped here —
	 * presumably a Pascal-specific issue with sysmem fetches; confirm
	 * before changing this heuristic.
	 */
	if ((nv50_dmac_vram_pushbuf > 0) ||
	    (nv50_dmac_vram_pushbuf < 0 && device->info.family == NV_DEVICE_INFO_V0_PASCAL))
		type |= NVIF_MEM_VRAM;

	ret = nvif_mem_ctor_map(&cli->mmu, "kmsChanPush", type, 0x1000,
				&dmac->_push.mem);
	if (ret)
		return ret;

	/* Wire up the push-buffer helper callbacks. */
	dmac->ptr = dmac->_push.mem.object.map.ptr;
	dmac->_push.wait = nv50_dmac_wait;
	dmac->_push.kick = nv50_dmac_kick;
	dmac->push = &dmac->_push;
	dmac->push->bgn = dmac->_push.mem.object.map.ptr;
	dmac->push->cur = dmac->push->bgn;
	dmac->push->end = dmac->push->bgn;
	dmac->max = 0x1000/4 - 1;

	/* Pre-GV100 display reserves some dwords at the end of the
	 * buffer (hence the smaller usable maximum).
	 */
	if (disp->oclass < GV100_DISP)
		dmac->max -= 12;

	args->pushbuf = nvif_handle(&dmac->_push.mem.object);

	ret = nv50_chan_create(device, disp, oclass, head, data, size,
			       &dmac->base);
	if (ret)
		return ret;

	/* Negative syncbuf means the caller doesn't need the ctxdmas. */
	if (syncbuf < 0)
		return 0;

	ret = nvif_object_ctor(&dmac->base.user, "kmsSyncCtxDma", NV50_DISP_HANDLE_SYNCBUF,
			       NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = syncbuf + 0x0000,
					.limit = syncbuf + 0x0fff,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->sync);
	if (ret)
		return ret;

	ret = nvif_object_ctor(&dmac->base.user, "kmsVramCtxDma", NV50_DISP_HANDLE_VRAM,
			       NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = 0,
					.limit = device->info.ram_user - 1,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->vram);
	if (ret)
		return ret;

	return ret;
}
0309
0310
0311
0312
/* Log the capabilities of an output path at debug level. */
static void
nv50_outp_dump_caps(struct nouveau_drm *drm,
		    struct nouveau_encoder *outp)
{
	NV_DEBUG(drm, "%s caps: dp_interlace=%d\n",
		 outp->base.base.name, outp->caps.dp_interlace);
}
0320
/* Release an acquired output path back to the display core and clear
 * the cached OR/link assignment.
 */
static void
nv50_outp_release(struct nouveau_encoder *nv_encoder)
{
	struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
	struct {
		struct nv50_disp_mthd_v1 base;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_RELEASE,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};

	nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
	nv_encoder->or = -1;
	nv_encoder->link = 0;
}
0338
/* Acquire an output resource (OR) for this encoder from the display
 * core, optionally requesting HDA (audio) capability.  On success the
 * assigned OR index and link mask are cached on the encoder.
 */
static int
nv50_outp_acquire(struct nouveau_encoder *nv_encoder, bool hda)
{
	struct nouveau_drm *drm = nouveau_drm(nv_encoder->base.base.dev);
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_acquire_v0 info;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_ACQUIRE,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.info.hda = hda,
	};
	int ret;

	ret = nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
	if (ret) {
		NV_ERROR(drm, "error acquiring output path: %d\n", ret);
		return ret;
	}

	nv_encoder->or = args.info.or;
	nv_encoder->link = args.info.link;
	return 0;
}
0366
/* Common atomic_check helper: decide whether the requested mode must be
 * replaced by the panel's native mode (i.e. whether the GPU scaler is
 * needed), and update the adjusted mode accordingly.
 */
static int
nv50_outp_atomic_check_view(struct drm_encoder *encoder,
			    struct drm_crtc_state *crtc_state,
			    struct drm_connector_state *conn_state,
			    struct drm_display_mode *native_mode)
{
	struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
	struct drm_display_mode *mode = &crtc_state->mode;
	struct drm_connector *connector = conn_state->connector;
	struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);

	NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
	asyc->scaler.full = false;
	/* No fixed-panel native mode known: nothing to override. */
	if (!native_mode)
		return 0;

	if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
		switch (connector->connector_type) {
		case DRM_MODE_CONNECTOR_LVDS:
		case DRM_MODE_CONNECTOR_eDP:
			/* Digital fixed panels: only a driver-provided mode
			 * matching the native resolution can be driven
			 * unscaled; anything else is forced to the native
			 * mode with full-screen scaling.
			 */
			if (mode->hdisplay == native_mode->hdisplay &&
			    mode->vdisplay == native_mode->vdisplay &&
			    mode->type & DRM_MODE_TYPE_DRIVER)
				break;
			mode = native_mode;
			asyc->scaler.full = true;
			break;
		default:
			break;
		}
	} else {
		mode = native_mode;
	}

	/* Propagate the (possibly substituted) mode into the CRTC state. */
	if (!drm_mode_equal(adjusted_mode, mode)) {
		drm_mode_copy(adjusted_mode, mode);
		crtc_state->mode_changed = true;
	}

	return 0;
}
0413
0414 static int
0415 nv50_outp_atomic_check(struct drm_encoder *encoder,
0416 struct drm_crtc_state *crtc_state,
0417 struct drm_connector_state *conn_state)
0418 {
0419 struct drm_connector *connector = conn_state->connector;
0420 struct nouveau_connector *nv_connector = nouveau_connector(connector);
0421 struct nv50_head_atom *asyh = nv50_head_atom(crtc_state);
0422 int ret;
0423
0424 ret = nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
0425 nv_connector->native_mode);
0426 if (ret)
0427 return ret;
0428
0429 if (crtc_state->mode_changed || crtc_state->connectors_changed)
0430 asyh->or.bpc = connector->display_info.bpc;
0431
0432 return 0;
0433 }
0434
0435 struct nouveau_connector *
0436 nv50_outp_get_new_connector(struct drm_atomic_state *state, struct nouveau_encoder *outp)
0437 {
0438 struct drm_connector *connector;
0439 struct drm_connector_state *connector_state;
0440 struct drm_encoder *encoder = to_drm_encoder(outp);
0441 int i;
0442
0443 for_each_new_connector_in_state(state, connector, connector_state, i) {
0444 if (connector_state->best_encoder == encoder)
0445 return nouveau_connector(connector);
0446 }
0447
0448 return NULL;
0449 }
0450
0451 struct nouveau_connector *
0452 nv50_outp_get_old_connector(struct drm_atomic_state *state, struct nouveau_encoder *outp)
0453 {
0454 struct drm_connector *connector;
0455 struct drm_connector_state *connector_state;
0456 struct drm_encoder *encoder = to_drm_encoder(outp);
0457 int i;
0458
0459 for_each_old_connector_in_state(state, connector, connector_state, i) {
0460 if (connector_state->best_encoder == encoder)
0461 return nouveau_connector(connector);
0462 }
0463
0464 return NULL;
0465 }
0466
0467 static struct nouveau_crtc *
0468 nv50_outp_get_new_crtc(const struct drm_atomic_state *state, const struct nouveau_encoder *outp)
0469 {
0470 struct drm_crtc *crtc;
0471 struct drm_crtc_state *crtc_state;
0472 const u32 mask = drm_encoder_mask(&outp->base.base);
0473 int i;
0474
0475 for_each_new_crtc_in_state(state, crtc, crtc_state, i) {
0476 if (crtc_state->encoder_mask & mask)
0477 return nouveau_crtc(crtc);
0478 }
0479
0480 return NULL;
0481 }
0482
0483
0484
0485
/* Disable a DAC: detach it from any head via the core channel and
 * release its output resource.
 */
static void
nv50_dac_atomic_disable(struct drm_encoder *encoder, struct drm_atomic_state *state)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_core *core = nv50_disp(encoder->dev)->core;
	const u32 ctrl = NVDEF(NV507D, DAC_SET_CONTROL, OWNER, NONE);

	core->func->dac->ctrl(core, nv_encoder->or, ctrl, NULL);
	nv_encoder->crtc = NULL;
	nv50_outp_release(nv_encoder);
}
0497
/* Enable a DAC on the head selected by the atomic state, programming
 * its owner/protocol via the core channel.
 */
static void
nv50_dac_atomic_enable(struct drm_encoder *encoder, struct drm_atomic_state *state)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nv50_outp_get_new_crtc(state, nv_encoder);
	struct nv50_head_atom *asyh =
		nv50_head_atom(drm_atomic_get_new_crtc_state(state, &nv_crtc->base));
	struct nv50_core *core = nv50_disp(encoder->dev)->core;
	u32 ctrl = 0;

	/* Heads 2/3 only exist with the NV907D-style control layout. */
	switch (nv_crtc->index) {
	case 0: ctrl |= NVDEF(NV507D, DAC_SET_CONTROL, OWNER, HEAD0); break;
	case 1: ctrl |= NVDEF(NV507D, DAC_SET_CONTROL, OWNER, HEAD1); break;
	case 2: ctrl |= NVDEF(NV907D, DAC_SET_CONTROL, OWNER_MASK, HEAD2); break;
	case 3: ctrl |= NVDEF(NV907D, DAC_SET_CONTROL, OWNER_MASK, HEAD3); break;
	default:
		WARN_ON(1);
		break;
	}

	ctrl |= NVDEF(NV507D, DAC_SET_CONTROL, PROTOCOL, RGB_CRT);

	/* Analog output: no HDA support requested. */
	nv50_outp_acquire(nv_encoder, false);

	core->func->dac->ctrl(core, nv_encoder->or, ctrl, asyh);
	asyh->or.depth = 0;

	nv_encoder->crtc = &nv_crtc->base;
}
0527
0528 static enum drm_connector_status
0529 nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
0530 {
0531 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
0532 u32 loadval;
0533 int ret;
0534
0535 loadval = nouveau_drm(encoder->dev)->vbios.dactestval;
0536 if (loadval == 0)
0537 loadval = 340;
0538
0539 ret = nvif_outp_load_detect(&nv_encoder->outp, loadval);
0540 if (ret <= 0)
0541 return connector_status_disconnected;
0542
0543 return connector_status_connected;
0544 }
0545
/* Helper vtable for analog (DAC) encoders. */
static const struct drm_encoder_helper_funcs
nv50_dac_help = {
	.atomic_check = nv50_outp_atomic_check,
	.atomic_enable = nv50_dac_atomic_enable,
	.atomic_disable = nv50_dac_atomic_disable,
	.detect = nv50_dac_detect
};
0553
/* Destroy a DAC encoder: drop the outp object, unregister from DRM,
 * and free the embedding nouveau_encoder allocation.
 */
static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);

	nvif_outp_dtor(&nv_encoder->outp);

	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
0564
/* Base vtable for analog (DAC) encoders. */
static const struct drm_encoder_funcs
nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};
0569
0570 static int
0571 nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
0572 {
0573 struct nouveau_drm *drm = nouveau_drm(connector->dev);
0574 struct nv50_disp *disp = nv50_disp(connector->dev);
0575 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
0576 struct nvkm_i2c_bus *bus;
0577 struct nouveau_encoder *nv_encoder;
0578 struct drm_encoder *encoder;
0579 int type = DRM_MODE_ENCODER_DAC;
0580
0581 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
0582 if (!nv_encoder)
0583 return -ENOMEM;
0584 nv_encoder->dcb = dcbe;
0585
0586 bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
0587 if (bus)
0588 nv_encoder->i2c = &bus->i2c;
0589
0590 encoder = to_drm_encoder(nv_encoder);
0591 encoder->possible_crtcs = dcbe->heads;
0592 encoder->possible_clones = 0;
0593 drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
0594 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
0595 drm_encoder_helper_add(encoder, &nv50_dac_help);
0596
0597 drm_connector_attach_encoder(connector, encoder);
0598 return nvif_outp_ctor(disp->disp, nv_encoder->base.base.name, dcbe->id, &nv_encoder->outp);
0599 }
0600
0601
0602
0603
0604 static void
0605 nv50_audio_component_eld_notify(struct drm_audio_component *acomp, int port,
0606 int dev_id)
0607 {
0608 if (acomp && acomp->audio_ops && acomp->audio_ops->pin_eld_notify)
0609 acomp->audio_ops->pin_eld_notify(acomp->audio_ops->audio_ptr,
0610 port, dev_id);
0611 }
0612
/* Audio component hook: copy the ELD for the encoder driving (port,
 * dev_id) into @buf.  Returns the ELD size (possibly larger than what
 * was copied) or 0; *enabled reports whether audio is active there.
 */
static int
nv50_audio_component_get_eld(struct device *kdev, int port, int dev_id,
			     bool *enabled, unsigned char *buf, int max_bytes)
{
	struct drm_device *drm_dev = dev_get_drvdata(kdev);
	struct nouveau_drm *drm = nouveau_drm(drm_dev);
	struct drm_encoder *encoder;
	struct nouveau_encoder *nv_encoder;
	struct nouveau_crtc *nv_crtc;
	int ret = 0;

	*enabled = false;

	/* Serialise against nv50_audio_enable()/disable(). */
	mutex_lock(&drm->audio.lock);

	drm_for_each_encoder(encoder, drm->dev) {
		struct nouveau_connector *nv_connector = NULL;

		/* MST encoders are not real output paths; skip them. */
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST)
			continue;

		nv_encoder = nouveau_encoder(encoder);
		nv_connector = nouveau_connector(nv_encoder->audio.connector);
		nv_crtc = nouveau_crtc(nv_encoder->crtc);

		/* Match the requested OR and head. */
		if (!nv_crtc || nv_encoder->or != port || nv_crtc->index != dev_id)
			continue;

		*enabled = nv_encoder->audio.enabled;
		if (*enabled) {
			ret = drm_eld_size(nv_connector->base.eld);
			memcpy(buf, nv_connector->base.eld,
			       min(max_bytes, ret));
		}
		break;
	}

	mutex_unlock(&drm->audio.lock);

	return ret;
}
0654
/* Ops exposed to the HDA driver through the audio component. */
static const struct drm_audio_component_ops nv50_audio_component_ops = {
	.get_eld = nv50_audio_component_get_eld,
};
0658
/* Component bind callback: link the HDA device to us and publish our
 * audio component ops under the modeset locks.
 */
static int
nv50_audio_component_bind(struct device *kdev, struct device *hda_kdev,
			  void *data)
{
	struct drm_device *drm_dev = dev_get_drvdata(kdev);
	struct nouveau_drm *drm = nouveau_drm(drm_dev);
	struct drm_audio_component *acomp = data;

	/* Device link keeps the GPU powered while HDA is in use. */
	if (WARN_ON(!device_link_add(hda_kdev, kdev, DL_FLAG_STATELESS)))
		return -ENOMEM;

	drm_modeset_lock_all(drm_dev);
	acomp->ops = &nv50_audio_component_ops;
	acomp->dev = kdev;
	drm->audio.component = acomp;
	drm_modeset_unlock_all(drm_dev);
	return 0;
}
0677
/* Component unbind callback: tear down the published ops, mirroring
 * nv50_audio_component_bind().
 */
static void
nv50_audio_component_unbind(struct device *kdev, struct device *hda_kdev,
			    void *data)
{
	struct drm_device *drm_dev = dev_get_drvdata(kdev);
	struct nouveau_drm *drm = nouveau_drm(drm_dev);
	struct drm_audio_component *acomp = data;

	drm_modeset_lock_all(drm_dev);
	drm->audio.component = NULL;
	acomp->ops = NULL;
	acomp->dev = NULL;
	drm_modeset_unlock_all(drm_dev);
}
0692
/* Component framework callbacks for the HDA audio component. */
static const struct component_ops nv50_audio_component_bind_ops = {
	.bind   = nv50_audio_component_bind,
	.unbind = nv50_audio_component_unbind,
};
0697
0698 static void
0699 nv50_audio_component_init(struct nouveau_drm *drm)
0700 {
0701 if (component_add(drm->dev->dev, &nv50_audio_component_bind_ops))
0702 return;
0703
0704 drm->audio.component_registered = true;
0705 mutex_init(&drm->audio.lock);
0706 }
0707
/* Unregister the HDA audio component, if it was registered. */
static void
nv50_audio_component_fini(struct nouveau_drm *drm)
{
	if (!drm->audio.component_registered)
		return;

	component_del(drm->dev->dev, &nv50_audio_component_bind_ops);
	drm->audio.component_registered = false;
	mutex_destroy(&drm->audio.lock);
}
0718
0719
0720
0721
/* Disable HDMI/DP audio on the given encoder/head pair by sending an
 * empty HDA ELD method, then notify the audio component.
 */
static void
nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hda_eld_v0 eld;
	} args = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.hasht   = nv_encoder->dcb->hasht,
		/* Head index is encoded into the hashm field. */
		.base.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				(0x0100 << nv_crtc->index),
	};

	mutex_lock(&drm->audio.lock);
	if (nv_encoder->audio.enabled) {
		nv_encoder->audio.enabled = false;
		nv_encoder->audio.connector = NULL;
		nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
	}
	mutex_unlock(&drm->audio.lock);

	/* Notify outside the lock; get_eld() takes the same mutex. */
	nv50_audio_component_eld_notify(drm->audio.component, nv_encoder->or,
					nv_crtc->index);
}
0750
/* Enable audio on the given encoder/head pair if the monitor supports
 * it: push the connector's ELD to the display core and notify the
 * audio component.
 */
static void
nv50_audio_enable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc,
		  struct nouveau_connector *nv_connector, struct drm_atomic_state *state,
		  struct drm_display_mode *mode)
{
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	/* Method header followed directly by the raw ELD bytes. */
	struct __packed {
		struct {
			struct nv50_disp_mthd_v1 mthd;
			struct nv50_disp_sor_hda_eld_v0 eld;
		} base;
		u8 data[sizeof(nv_connector->base.eld)];
	} args = {
		.base.mthd.version = 1,
		.base.mthd.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.mthd.hasht   = nv_encoder->dcb->hasht,
		/* Head index is encoded into the hashm field. */
		.base.mthd.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				     (0x0100 << nv_crtc->index),
	};

	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	mutex_lock(&drm->audio.lock);

	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));

	/* Only send header + the actually-used portion of the ELD. */
	nvif_mthd(&disp->disp->object, 0, &args,
		  sizeof(args.base) + drm_eld_size(args.data));
	nv_encoder->audio.enabled = true;
	nv_encoder->audio.connector = &nv_connector->base;

	mutex_unlock(&drm->audio.lock);

	nv50_audio_component_eld_notify(drm->audio.component, nv_encoder->or,
					nv_crtc->index);
}
0790
0791
0792
0793
/* Power down HDMI on the given encoder/head pair (pwr.state left 0). */
static void
nv50_hdmi_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		/* Head index is encoded into the hashm field. */
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
	};

	nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
}
0812
/* Power up HDMI on the given encoder/head pair: build AVI and vendor
 * infoframes, program HDMI power/rekey/max_ac_packet and SCDC flags via
 * the display core, enable audio, and finally configure scrambling on
 * the sink over DDC when the sink supports SCDC.
 */
static void
nv50_hdmi_enable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc,
		 struct nouveau_connector *nv_connector, struct drm_atomic_state *state,
		 struct drm_display_mode *mode)
{
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
		/* Room for two packed infoframes (AVI + vendor), 17 bytes
		 * each (header + payload).
		 */
		u8 infoframes[2 * 17];
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		/* Head index is encoded into the hashm field. */
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
		.pwr.state = 1,
		.pwr.rekey = 56,
	};
	struct drm_hdmi_info *hdmi;
	u32 max_ac_packet;
	union hdmi_infoframe avi_frame;
	union hdmi_infoframe vendor_frame;
	bool high_tmds_clock_ratio = false, scrambling = false;
	u8 config;
	int ret;
	int size;

	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	hdmi = &nv_connector->base.display_info.hdmi;

	/* AVI infoframe: packed immediately at the start of args.infoframes;
	 * on failure the length stays 0 and no frame is sent.
	 */
	ret = drm_hdmi_avi_infoframe_from_display_mode(&avi_frame.avi,
						       &nv_connector->base, mode);
	if (!ret) {
		drm_hdmi_avi_infoframe_quant_range(&avi_frame.avi,
						   &nv_connector->base, mode,
						   HDMI_QUANTIZATION_RANGE_FULL);

		args.pwr.avi_infoframe_length
			= hdmi_infoframe_pack(&avi_frame, args.infoframes, 17);
	}

	/* Vendor (HDMI) infoframe, packed directly after the AVI one. */
	ret = drm_hdmi_vendor_infoframe_from_display_mode(&vendor_frame.vendor.hdmi,
							  &nv_connector->base, mode);
	if (!ret) {
		args.pwr.vendor_infoframe_length
			= hdmi_infoframe_pack(&vendor_frame,
					      args.infoframes
					      + args.pwr.avi_infoframe_length,
					      17);
	}

	/* Audio/aux packets fit in the horizontal blanking, less the rekey
	 * period and an 18-pixel margin, in units of 32 pixels.
	 * NOTE(review): the 18-pixel constant's derivation isn't visible
	 * here — presumably guard band/preamble overhead.
	 */
	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= args.pwr.rekey;
	max_ac_packet -= 18;
	args.pwr.max_ac_packet = max_ac_packet / 32;

	if (hdmi->scdc.scrambling.supported) {
		/* HDMI 2.0: >340MHz requires 1/40 clock ratio + scrambling;
		 * lower rates scramble only if the sink asks for it.
		 */
		high_tmds_clock_ratio = mode->clock > 340000;
		scrambling = high_tmds_clock_ratio ||
			hdmi->scdc.scrambling.low_rates;
	}

	args.pwr.scdc =
		NV50_DISP_SOR_HDMI_PWR_V0_SCDC_SCRAMBLE * scrambling |
		NV50_DISP_SOR_HDMI_PWR_V0_SCDC_DIV_BY_4 * high_tmds_clock_ratio;

	/* Only send the bytes actually occupied by the packed frames. */
	size = sizeof(args.base)
		+ sizeof(args.pwr)
		+ args.pwr.avi_infoframe_length
		+ args.pwr.vendor_infoframe_length;
	nvif_mthd(&disp->disp->object, 0, &args, size);

	nv50_audio_enable(encoder, nv_crtc, nv_connector, state, mode);

	/* Mirror the source-side scrambling configuration into the sink's
	 * SCDC registers over DDC.
	 */
	if (!hdmi->scdc.scrambling.supported)
		return;

	ret = drm_scdc_readb(nv_encoder->i2c, SCDC_TMDS_CONFIG, &config);
	if (ret < 0) {
		NV_ERROR(drm, "Failure to read SCDC_TMDS_CONFIG: %d\n", ret);
		return;
	}
	config &= ~(SCDC_TMDS_BIT_CLOCK_RATIO_BY_40 | SCDC_SCRAMBLING_ENABLE);
	config |= SCDC_TMDS_BIT_CLOCK_RATIO_BY_40 * high_tmds_clock_ratio;
	config |= SCDC_SCRAMBLING_ENABLE * scrambling;
	ret = drm_scdc_writeb(nv_encoder->i2c, SCDC_TMDS_CONFIG, config);
	if (ret < 0)
		NV_ERROR(drm, "Failure to write SCDC_TMDS_CONFIG = 0x%02x: %d\n",
			 config, ret);
}
0912
0913
0914
0915
/* container_of() helpers mapping embedded DRM MST objects back to the
 * nouveau wrapper structures (topology manager, connector, encoder).
 */
#define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
#define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
#define nv50_msto(p) container_of((p), struct nv50_msto, encoder)
0919
/* DP MST connector: one per branch-device port. */
struct nv50_mstc {
	struct nv50_mstm *mstm;		/* owning topology manager wrapper */
	struct drm_dp_mst_port *port;	/* MST port this connector represents */
	struct drm_connector connector;

	struct drm_display_mode *native;	/* cached preferred mode */
	struct edid *edid;
};
0928
/* DP MST "output" encoder: one per head, routed to an mstc at modeset
 * time.
 */
struct nv50_msto {
	struct drm_encoder encoder;

	/* head is fixed at creation; mstc is assigned on enable and
	 * cleared again in nv50_msto_cleanup().
	 */
	struct nv50_head *head;
	struct nv50_mstc *mstc;
	bool disabled;			/* pending cleanup after disable */
};
0937
0938 struct nouveau_encoder *nv50_real_outp(struct drm_encoder *encoder)
0939 {
0940 struct nv50_msto *msto;
0941
0942 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST)
0943 return nouveau_encoder(encoder);
0944
0945 msto = nv50_msto(encoder);
0946 if (!msto->mstc)
0947 return NULL;
0948 return msto->mstc->mstm->outp;
0949 }
0950
/* Look up the MST payload entry matching this msto's allocated VCPI.
 * Caller must hold the topology manager's payload_lock.  Returns NULL
 * if no payload with that VCPI exists.
 */
static struct drm_dp_payload *
nv50_msto_payload(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	int vcpi = mstc->port->vcpi.vcpi, i;

	WARN_ON(!mutex_is_locked(&mstm->mgr.payload_lock));

	/* Dump the full payload table for debugging before searching. */
	NV_ATOMIC(drm, "%s: vcpi %d\n", msto->encoder.name, vcpi);
	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		NV_ATOMIC(drm, "%s: %d: vcpi %d start 0x%02x slots 0x%02x\n",
			  mstm->outp->base.base.name, i, payload->vcpi,
			  payload->start_slot, payload->num_slots);
	}

	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		if (payload->vcpi == vcpi)
			return payload;
	}

	return NULL;
}
0977
/* Post-disable cleanup for an MST stream: release the VCPI allocation
 * and detach the msto from its connector.  No-op unless the msto was
 * flagged disabled by nv50_msto_atomic_disable().
 */
static void
nv50_msto_cleanup(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	if (!msto->disabled)
		return;

	NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);

	drm_dp_mst_deallocate_vcpi(&mstm->mgr, mstc->port);

	msto->mstc = NULL;
	msto->disabled = false;
}
0995
/* Program the hardware VCPI (timeslot) configuration for this stream's
 * head.  If no VCPI is allocated, the zero-initialised args disable the
 * slot assignment.
 */
static void
nv50_msto_prepare(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI,
		.base.hasht  = mstm->outp->dcb->hasht,
		/* Head index is encoded into the hashm field. */
		.base.hashm  = (0xf0ff & mstm->outp->dcb->hashm) |
			       (0x0100 << msto->head->base.index),
	};

	/* Protects the payload table read by nv50_msto_payload(). */
	mutex_lock(&mstm->mgr.payload_lock);

	NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
	if (mstc->port->vcpi.vcpi > 0) {
		struct drm_dp_payload *payload = nv50_msto_payload(msto);
		if (payload) {
			args.vcpi.start_slot = payload->start_slot;
			args.vcpi.num_slots = payload->num_slots;
			args.vcpi.pbn = mstc->port->vcpi.pbn;
			args.vcpi.aligned_pbn = mstc->port->vcpi.aligned_pbn;
		}
	}

	NV_ATOMIC(drm, "%s: %s: %02x %02x %04x %04x\n",
		  msto->encoder.name, msto->head->base.base.name,
		  args.vcpi.start_slot, args.vcpi.num_slots,
		  args.vcpi.pbn, args.vcpi.aligned_pbn);

	nvif_mthd(&drm->display->disp.object, 0, &args, sizeof(args));
	mutex_unlock(&mstm->mgr.payload_lock);
}
1034
/* Atomic check for an MST stream: apply the common view check, then
 * (re)compute the stream's PBN and reserve VCPI timeslots for it.
 */
static int
nv50_msto_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct drm_atomic_state *state = crtc_state->state;
	struct drm_connector *connector = conn_state->connector;
	struct nv50_mstc *mstc = nv50_mstc(connector);
	struct nv50_mstm *mstm = mstc->mstm;
	struct nv50_head_atom *asyh = nv50_head_atom(crtc_state);
	int slots;
	int ret;

	ret = nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					  mstc->native);
	if (ret)
		return ret;

	if (!crtc_state->mode_changed && !crtc_state->connectors_changed)
		return 0;

	/* Duplicated states (e.g. suspend/resume restore) must keep the
	 * bpc/pbn computed for the original commit rather than
	 * recalculating from current connector info.
	 */
	if (!state->duplicated) {
		const int clock = crtc_state->adjusted_mode.clock;

		asyh->or.bpc = connector->display_info.bpc;
		asyh->dp.pbn = drm_dp_calc_pbn_mode(clock, asyh->or.bpc * 3,
						    false);
	}

	slots = drm_dp_atomic_find_vcpi_slots(state, &mstm->mgr, mstc->port,
					      asyh->dp.pbn, 0);
	if (slots < 0)
		return slots;

	asyh->dp.tu = slots;

	return 0;
}
1078
1079 static u8
1080 nv50_dp_bpc_to_depth(unsigned int bpc)
1081 {
1082 switch (bpc) {
1083 case 6: return NV837D_SOR_SET_CONTROL_PIXEL_DEPTH_BPP_18_444;
1084 case 8: return NV837D_SOR_SET_CONTROL_PIXEL_DEPTH_BPP_24_444;
1085 case 10:
1086 default: return NV837D_SOR_SET_CONTROL_PIXEL_DEPTH_BPP_30_444;
1087 }
1088 }
1089
/* Enable an MST stream: find the connector routed to this msto,
 * allocate its VCPI, acquire the parent SOR on first use, and program
 * the head's output control.
 */
static void
nv50_msto_atomic_enable(struct drm_encoder *encoder, struct drm_atomic_state *state)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_head *head = msto->head;
	struct nv50_head_atom *asyh =
		nv50_head_atom(drm_atomic_get_new_crtc_state(state, &head->base.base));
	struct nv50_mstc *mstc = NULL;
	struct nv50_mstm *mstm = NULL;
	struct drm_connector *connector;
	struct drm_connector_list_iter conn_iter;
	u8 proto;
	bool r;

	/* Locate the connector currently routed to this encoder. */
	drm_connector_list_iter_begin(encoder->dev, &conn_iter);
	drm_for_each_connector_iter(connector, &conn_iter) {
		if (connector->state->best_encoder == &msto->encoder) {
			mstc = nv50_mstc(connector);
			mstm = mstc->mstm;
			break;
		}
	}
	drm_connector_list_iter_end(&conn_iter);

	if (WARN_ON(!mstc))
		return;

	r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, asyh->dp.pbn, asyh->dp.tu);
	if (!r)
		DRM_DEBUG_KMS("Failed to allocate VCPI\n");

	/* First active stream on this topology acquires the SOR. */
	if (!mstm->links++)
		nv50_outp_acquire(mstm->outp, false /*XXX: MST audio.*/);

	/* Pick the DP protocol variant for the link the SOR is on. */
	if (mstm->outp->link & 1)
		proto = NV917D_SOR_SET_CONTROL_PROTOCOL_DP_A;
	else
		proto = NV917D_SOR_SET_CONTROL_PROTOCOL_DP_B;

	mstm->outp->update(mstm->outp, head->base.index, asyh, proto,
			   nv50_dp_bpc_to_depth(asyh->or.bpc));

	msto->mstc = mstc;
	mstm->modified = true;
}
1135
/*
 * Disable an MST stream output: return its VCPI slots, clear the head's
 * OR routing, and flag the topology (and output, if this was the last
 * active link) for teardown during the commit's prepare/cleanup phases.
 */
static void
nv50_msto_atomic_disable(struct drm_encoder *encoder, struct drm_atomic_state *state)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	drm_dp_mst_reset_vcpi_slots(&mstm->mgr, mstc->port);

	/* NULL asyh tells ->update() to detach the head from the OR. */
	mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0);
	mstm->modified = true;
	if (!--mstm->links)
		mstm->disabled = true;
	msto->disabled = true;
}
1151
/* Atomic helper vtable for MST stream encoders. */
static const struct drm_encoder_helper_funcs
nv50_msto_help = {
	.atomic_disable = nv50_msto_atomic_disable,
	.atomic_enable = nv50_msto_atomic_enable,
	.atomic_check = nv50_msto_atomic_check,
};
1158
/* Tear down and free an MST stream encoder. */
static void
nv50_msto_destroy(struct drm_encoder *encoder)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	drm_encoder_cleanup(&msto->encoder);
	kfree(msto);
}
1166
/* Base encoder vtable for MST stream encoders. */
static const struct drm_encoder_funcs
nv50_msto = {
	.destroy = nv50_msto_destroy,
};
1171
1172 static struct nv50_msto *
1173 nv50_msto_new(struct drm_device *dev, struct nv50_head *head, int id)
1174 {
1175 struct nv50_msto *msto;
1176 int ret;
1177
1178 msto = kzalloc(sizeof(*msto), GFP_KERNEL);
1179 if (!msto)
1180 return ERR_PTR(-ENOMEM);
1181
1182 ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto,
1183 DRM_MODE_ENCODER_DPMST, "mst-%d", id);
1184 if (ret) {
1185 kfree(msto);
1186 return ERR_PTR(ret);
1187 }
1188
1189 drm_encoder_helper_add(&msto->encoder, &nv50_msto_help);
1190 msto->encoder.possible_crtcs = drm_crtc_mask(&head->base.base);
1191 msto->head = head;
1192 return msto;
1193 }
1194
/*
 * Pick the encoder for an MST connector: the per-head MST stream encoder
 * of the CRTC chosen in the new connector state, provided the parent
 * output can actually drive that head.
 */
static struct drm_encoder *
nv50_mstc_atomic_best_encoder(struct drm_connector *connector,
			      struct drm_atomic_state *state)
{
	struct drm_connector_state *connector_state = drm_atomic_get_new_connector_state(state,
											 connector);
	struct nv50_mstc *mstc = nv50_mstc(connector);
	struct drm_crtc *crtc = connector_state->crtc;

	/* Reject heads the parent SOR cannot be routed to. */
	if (!(mstc->mstm->outp->dcb->heads & drm_crtc_mask(crtc)))
		return NULL;

	return &nv50_head(crtc)->msto->encoder;
}
1209
/* Validate a mode against the MST connector's parent DP output. */
static enum drm_mode_status
nv50_mstc_mode_valid(struct drm_connector *connector,
		     struct drm_display_mode *mode)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	struct nouveau_encoder *outp = mstc->mstm->outp;

	/*
	 * Defer to the common DP mode validation against the parent
	 * output's capabilities (per-port bandwidth is checked later,
	 * at atomic_check time — presumably; confirm against
	 * nv50_msto_atomic_check()).
	 */
	return nv50_dp_mode_valid(connector, outp, mode, NULL);
}
1223
/*
 * Probe the MST port's EDID, publish it on the connector, and refresh the
 * cached native mode.  Returns the number of modes added.
 */
static int
nv50_mstc_get_modes(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	int ret = 0;

	/* NOTE(review): any previously cached EDID is overwritten here
	 * without being freed — looks like a potential leak; confirm
	 * ownership semantics of mstc->edid before changing. */
	mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
	drm_connector_update_edid_property(&mstc->connector, mstc->edid);
	if (mstc->edid)
		ret = drm_add_edid_modes(&mstc->connector, mstc->edid);

	/*
	 * Constrain bpc to the 6..8 range (default 8 when the EDID gave
	 * nothing) — presumably to bound per-stream MST bandwidth;
	 * confirm against the PBN calculation in atomic_check.
	 */
	if (connector->display_info.bpc)
		connector->display_info.bpc =
			clamp(connector->display_info.bpc, 6U, 8U);
	else
		connector->display_info.bpc = 8;

	/* Replace the cached native mode with one from the fresh EDID. */
	if (mstc->native)
		drm_mode_destroy(mstc->connector.dev, mstc->native);
	mstc->native = nouveau_conn_native_mode(&mstc->connector);
	return ret;
}
1252
/*
 * Connector-level atomic check for MST: release this port's VCPI slots
 * when the connector is being torn away from its old CRTC.
 */
static int
nv50_mstc_atomic_check(struct drm_connector *connector,
		       struct drm_atomic_state *state)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	struct drm_dp_mst_topology_mgr *mgr = &mstc->mstm->mgr;
	struct drm_connector_state *new_conn_state =
		drm_atomic_get_new_connector_state(state, connector);
	struct drm_connector_state *old_conn_state =
		drm_atomic_get_old_connector_state(state, connector);
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *new_crtc = new_conn_state->crtc;

	/* Nothing to release if the connector had no CRTC before. */
	if (!old_conn_state->crtc)
		return 0;

	/*
	 * If the connector stays on a CRTC that is not being fully
	 * disabled by this commit, its allocation must be kept.
	 */
	if (new_crtc) {
		crtc_state = drm_atomic_get_new_crtc_state(state, new_crtc);

		if (!crtc_state ||
		    !drm_atomic_crtc_needs_modeset(crtc_state) ||
		    crtc_state->enable)
			return 0;
	}

	return drm_dp_atomic_release_vcpi_slots(state, mgr, mstc->port);
}
1283
1284 static int
1285 nv50_mstc_detect(struct drm_connector *connector,
1286 struct drm_modeset_acquire_ctx *ctx, bool force)
1287 {
1288 struct nv50_mstc *mstc = nv50_mstc(connector);
1289 int ret;
1290
1291 if (drm_connector_is_unregistered(connector))
1292 return connector_status_disconnected;
1293
1294 ret = pm_runtime_get_sync(connector->dev->dev);
1295 if (ret < 0 && ret != -EACCES) {
1296 pm_runtime_put_autosuspend(connector->dev->dev);
1297 return connector_status_disconnected;
1298 }
1299
1300 ret = drm_dp_mst_detect_port(connector, ctx, mstc->port->mgr,
1301 mstc->port);
1302 if (ret != connector_status_connected)
1303 goto out;
1304
1305 out:
1306 pm_runtime_mark_last_busy(connector->dev->dev);
1307 pm_runtime_put_autosuspend(connector->dev->dev);
1308 return ret;
1309 }
1310
/* Connector helper vtable for MST connectors. */
static const struct drm_connector_helper_funcs
nv50_mstc_help = {
	.get_modes = nv50_mstc_get_modes,
	.mode_valid = nv50_mstc_mode_valid,
	.atomic_best_encoder = nv50_mstc_atomic_best_encoder,
	.atomic_check = nv50_mstc_atomic_check,
	.detect_ctx = nv50_mstc_detect,
};
1319
/*
 * Destroy an MST connector: release DRM state and drop the malloc
 * reference on the topology port taken in nv50_mstc_new().
 */
static void
nv50_mstc_destroy(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);

	drm_connector_cleanup(&mstc->connector);
	drm_dp_mst_put_port_malloc(mstc->port);

	kfree(mstc);
}
1330
/* Base connector vtable for MST connectors. */
static const struct drm_connector_funcs
nv50_mstc = {
	.reset = nouveau_conn_reset,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.destroy = nv50_mstc_destroy,
	.atomic_duplicate_state = nouveau_conn_atomic_duplicate_state,
	.atomic_destroy_state = nouveau_conn_atomic_destroy_state,
	.atomic_set_property = nouveau_conn_atomic_set_property,
	.atomic_get_property = nouveau_conn_atomic_get_property,
};
1341
/*
 * Create a connector for a newly-discovered MST port.  On success *pmstc
 * holds the new connector (owning a malloc reference on @port); on failure
 * *pmstc is NULL and a negative errno is returned.
 */
static int
nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port,
	      const char *path, struct nv50_mstc **pmstc)
{
	struct drm_device *dev = mstm->outp->base.base.dev;
	struct drm_crtc *crtc;
	struct nv50_mstc *mstc;
	int ret;

	if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL)))
		return -ENOMEM;
	mstc->mstm = mstm;
	mstc->port = port;

	ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc,
				 DRM_MODE_CONNECTOR_DisplayPort);
	if (ret) {
		kfree(*pmstc);
		*pmstc = NULL;
		return ret;
	}

	drm_connector_helper_add(&mstc->connector, &nv50_mstc_help);

	mstc->connector.funcs->reset(&mstc->connector);
	nouveau_conn_attach_properties(&mstc->connector);

	/* Attach only the per-head MST encoders this output can drive. */
	drm_for_each_crtc(crtc, dev) {
		if (!(mstm->outp->dcb->heads & drm_crtc_mask(crtc)))
			continue;

		drm_connector_attach_encoder(&mstc->connector,
					     &nv50_head(crtc)->msto->encoder);
	}

	drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0);
	drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0);
	drm_connector_set_path_property(&mstc->connector, path);
	/* Paired with drm_dp_mst_put_port_malloc() in nv50_mstc_destroy(). */
	drm_dp_mst_get_port_malloc(port);
	return 0;
}
1383
/*
 * Post-commit MST cleanup: wait for the sink's allocation-change ACT,
 * finish the payload update, and run per-stream cleanup for every MST
 * encoder belonging to this topology.
 */
static void
nv50_mstm_cleanup(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;

	NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name);
	drm_dp_check_act_status(&mstm->mgr);

	drm_dp_update_payload_part2(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			/* Only streams that belong to this topology. */
			if (mstc && mstc->mstm == mstm)
				nv50_msto_cleanup(msto);
		}
	}

	mstm->modified = false;
}
1406
/*
 * Pre-commit MST preparation: push the first half of the payload table
 * update, prepare every stream on this topology, and release the physical
 * output if the whole topology was disabled by this commit.
 */
static void
nv50_mstm_prepare(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;

	NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name);
	drm_dp_update_payload_part1(&mstm->mgr, 1);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			/* Only streams that belong to this topology. */
			if (mstc && mstc->mstm == mstm)
				nv50_msto_prepare(msto);
		}
	}

	/* Last link gone: hand the OR back for other outputs to use. */
	if (mstm->disabled) {
		if (!mstm->links)
			nv50_outp_release(mstm->outp);
		mstm->disabled = false;
	}
}
1431
1432 static struct drm_connector *
1433 nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr,
1434 struct drm_dp_mst_port *port, const char *path)
1435 {
1436 struct nv50_mstm *mstm = nv50_mstm(mgr);
1437 struct nv50_mstc *mstc;
1438 int ret;
1439
1440 ret = nv50_mstc_new(mstm, port, path, &mstc);
1441 if (ret)
1442 return NULL;
1443
1444 return &mstc->connector;
1445 }
1446
/* Callbacks handed to the DP MST topology manager. */
static const struct drm_dp_mst_topology_cbs
nv50_mstm = {
	.add_connector = nv50_mstm_add_connector,
};
1451
/*
 * Service MST sink events: repeatedly read the ESI (event status
 * indicator) block, let the topology manager handle it, and ack the
 * handled bits back to the sink until no more events are reported.
 * Returns false if a DPCD transfer itself failed, true otherwise.
 */
bool
nv50_mstm_service(struct nouveau_drm *drm,
		  struct nouveau_connector *nv_connector,
		  struct nv50_mstm *mstm)
{
	struct drm_dp_aux *aux = &nv_connector->aux;
	bool handled = true, ret = true;
	int rc;
	u8 esi[8] = {};

	while (handled) {
		rc = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
		if (rc != 8) {
			ret = false;
			break;
		}

		drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled);
		if (!handled)
			break;

		/* Write back the serviced event bytes to ack them. */
		rc = drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1],
				       3);
		if (rc != 3) {
			ret = false;
			break;
		}
	}

	if (!ret)
		NV_DEBUG(drm, "Failed to handle ESI on %s: %d\n",
			 nv_connector->base.name, rc);

	return ret;
}
1487
/* Drop out of MST mode and tear down the topology state. */
void
nv50_mstm_remove(struct nv50_mstm *mstm)
{
	mstm->is_mst = false;
	drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
}
1494
/*
 * Ask firmware to enable/disable MST link handling on the output's SOR,
 * via the NV50_DISP_MTHD_V1_SOR_DP_MST_LINK display method.
 */
static int
nv50_mstm_enable(struct nv50_mstm *mstm, int state)
{
	struct nouveau_encoder *outp = mstm->outp;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_link_v0 mst;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
		/* hasht/hashm identify the DCB output entry to firmware. */
		.base.hasht = outp->dcb->hasht,
		.base.hashm = outp->dcb->hashm,
		.mst.state = state,
	};
	struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
	struct nvif_object *disp = &drm->display->disp.object;

	return nvif_mthd(disp, 0, &args, sizeof(args));
}
1514
/*
 * Try to bring the output up in MST mode.  Returns 1 if MST was enabled,
 * 0 if the output has no MST support, or a negative errno.
 */
int
nv50_mstm_detect(struct nouveau_encoder *outp)
{
	struct nv50_mstm *mstm = outp->dp.mstm;
	struct drm_dp_aux *aux;
	int ret;

	if (!mstm || !mstm->can_mst)
		return 0;

	aux = mstm->mgr.aux;

	/*
	 * Clear MSTM_CTRL first — presumably to reset any stale sink-side
	 * MST state before (re)enabling; confirm against the DP spec's
	 * MSTM_CTRL semantics.
	 */
	ret = drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0);
	if (ret < 0)
		return ret;

	/* Enable hardware-side MST handling before the topology. */
	ret = nv50_mstm_enable(mstm, true);
	if (ret)
		return ret;

	ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, true);
	if (ret) {
		/* Roll back the hardware side on topology failure. */
		nv50_mstm_enable(mstm, false);
		return ret;
	}

	mstm->is_mst = true;
	return 1;
}
1548
/*
 * Quiesce MST state ahead of suspend: mark the topology suspended (under
 * the HPD IRQ lock, so in-flight IRQ work observes it) and suspend the
 * topology manager if MST is active.
 */
static void
nv50_mstm_fini(struct nouveau_encoder *outp)
{
	struct nv50_mstm *mstm = outp->dp.mstm;

	if (!mstm)
		return;

	/*
	 * Taken/released around the flag write so the HPD IRQ path sees a
	 * consistent suspended state.
	 */
	mutex_lock(&outp->dp.hpd_irq_lock);
	mstm->suspended = true;
	mutex_unlock(&outp->dp.hpd_irq_lock);

	if (mstm->is_mst)
		drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
}
1568
/*
 * Resume MST state after suspend.  If the topology fails to resume
 * (manager returns -1), MST is torn down and a hotplug event is sent so
 * userspace reprobes the connector.
 */
static void
nv50_mstm_init(struct nouveau_encoder *outp, bool runtime)
{
	struct nv50_mstm *mstm = outp->dp.mstm;
	int ret = 0;

	if (!mstm)
		return;

	if (mstm->is_mst) {
		/* sync probe only on full (non-runtime) resume */
		ret = drm_dp_mst_topology_mgr_resume(&mstm->mgr, !runtime);
		if (ret == -1)
			nv50_mstm_remove(mstm);
	}

	mutex_lock(&outp->dp.hpd_irq_lock);
	mstm->suspended = false;
	mutex_unlock(&outp->dp.hpd_irq_lock);

	/* Notify userspace after dropping the lock. */
	if (ret == -1)
		drm_kms_helper_hotplug_event(mstm->mgr.dev);
}
1591
1592 static void
1593 nv50_mstm_del(struct nv50_mstm **pmstm)
1594 {
1595 struct nv50_mstm *mstm = *pmstm;
1596 if (mstm) {
1597 drm_dp_mst_topology_mgr_destroy(&mstm->mgr);
1598 kfree(*pmstm);
1599 *pmstm = NULL;
1600 }
1601 }
1602
1603 static int
1604 nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
1605 int conn_base_id, struct nv50_mstm **pmstm)
1606 {
1607 const int max_payloads = hweight8(outp->dcb->heads);
1608 struct drm_device *dev = outp->base.base.dev;
1609 struct nv50_mstm *mstm;
1610 int ret;
1611
1612 if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
1613 return -ENOMEM;
1614 mstm->outp = outp;
1615 mstm->mgr.cbs = &nv50_mstm;
1616
1617 ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max,
1618 max_payloads, outp->dcb->dpconf.link_nr,
1619 drm_dp_bw_code_to_link_rate(outp->dcb->dpconf.link_bw),
1620 conn_base_id);
1621 if (ret)
1622 return ret;
1623
1624 return 0;
1625 }
1626
1627
1628
1629
/*
 * Update a SOR's head-routing control word and push it to the core
 * channel.  A NULL @asyh detaches @head; when no heads remain owned, the
 * whole control word is cleared.
 */
static void
nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head,
		struct nv50_head_atom *asyh, u8 proto, u8 depth)
{
	struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
	struct nv50_core *core = disp->core;

	if (!asyh) {
		nv_encoder->ctrl &= ~BIT(head);
		/* No owner bits left: reset the control word entirely. */
		if (NVDEF_TEST(nv_encoder->ctrl, NV507D, SOR_SET_CONTROL, OWNER, ==, NONE))
			nv_encoder->ctrl = 0;
	} else {
		nv_encoder->ctrl |= NVVAL(NV507D, SOR_SET_CONTROL, PROTOCOL, proto);
		nv_encoder->ctrl |= BIT(head);
		asyh->or.depth = depth;
	}

	core->func->sor->ctrl(core, nv_encoder->or, nv_encoder->ctrl, asyh);
}
1649
1650
1651
1652
1653
1654
/*
 * Disable a SOR output: turn off any DPCD backlight, put a DP sink into
 * D3, detach the head, stop audio/HDMI infoframes, and release the OR.
 */
static void
nv50_sor_atomic_disable(struct drm_encoder *encoder, struct drm_atomic_state *state)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
	struct nouveau_connector *nv_connector = nv50_outp_get_old_connector(state, nv_encoder);
#ifdef CONFIG_DRM_NOUVEAU_BACKLIGHT
	struct nouveau_drm *drm = nouveau_drm(nv_encoder->base.base.dev);
	struct nouveau_backlight *backlight = nv_connector->backlight;
#endif
	struct drm_dp_aux *aux = &nv_connector->aux;
	int ret;
	u8 pwr;

#ifdef CONFIG_DRM_NOUVEAU_BACKLIGHT
	/* Backlights driven over DPCD must be disabled before link-down. */
	if (backlight && backlight->uses_dpcd) {
		ret = drm_edp_backlight_disable(aux, &backlight->edp_info);
		if (ret < 0)
			NV_ERROR(drm, "Failed to disable backlight on [CONNECTOR:%d:%s]: %d\n",
				 nv_connector->base.base.id, nv_connector->base.name, ret);
	}
#endif

	if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
		ret = drm_dp_dpcd_readb(aux, DP_SET_POWER, &pwr);

		/* Best-effort: only touch SET_POWER if the read worked. */
		if (ret == 0) {
			pwr &= ~DP_SET_POWER_MASK;
			pwr |= DP_SET_POWER_D3;
			drm_dp_dpcd_writeb(aux, DP_SET_POWER, pwr);
		}
	}

	/* NULL asyh detaches the head from this OR. */
	nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0);
	nv50_audio_disable(encoder, nv_crtc);
	nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc);
	nv50_outp_release(nv_encoder);
	nv_encoder->crtc = NULL;
}
1694
/*
 * Enable a SOR output: acquire the OR (with HDA when the display supports
 * audio and the hardware generation allows it), pick the protocol/depth
 * for TMDS, LVDS or DP, run any type-specific setup, and program the OR.
 */
static void
nv50_sor_atomic_enable(struct drm_encoder *encoder, struct drm_atomic_state *state)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nv50_outp_get_new_crtc(state, nv_encoder);
	struct nv50_head_atom *asyh =
		nv50_head_atom(drm_atomic_get_new_crtc_state(state, &nv_crtc->base));
	struct drm_display_mode *mode = &asyh->state.adjusted_mode;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_lvds_script_v0 lvds;
	} lvds = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
	};
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_connector *nv_connector;
#ifdef CONFIG_DRM_NOUVEAU_BACKLIGHT
	struct nouveau_backlight *backlight;
#endif
	struct nvbios *bios = &drm->vbios;
	bool hda = false;
	u8 proto = NV507D_SOR_SET_CONTROL_PROTOCOL_CUSTOM;
	u8 depth = NV837D_SOR_SET_CONTROL_PIXEL_DEPTH_DEFAULT;

	nv_connector = nv50_outp_get_new_connector(state, nv_encoder);
	nv_encoder->crtc = &nv_crtc->base;

	/* HDA is requested only on GT214 / GF110+ with an audio-capable sink. */
	if ((disp->disp->object.oclass == GT214_DISP ||
	     disp->disp->object.oclass >= GF110_DISP) &&
	    drm_detect_monitor_audio(nv_connector->edid))
		hda = true;
	nv50_outp_acquire(nv_encoder, hda);

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		if (nv_encoder->link & 1) {
			proto = NV507D_SOR_SET_CONTROL_PROTOCOL_SINGLE_TMDS_A;

			/*
			 * Use dual-link TMDS for >=165MHz modes when the
			 * DCB allows it and the sink is not HDMI (HDMI
			 * sinks are excluded here — presumably they are
			 * served single-link; confirm).
			 */
			if (mode->clock >= 165000 &&
			    nv_encoder->dcb->duallink_possible &&
			    !drm_detect_hdmi_monitor(nv_connector->edid))
				proto = NV507D_SOR_SET_CONTROL_PROTOCOL_DUAL_TMDS;
		} else {
			proto = NV507D_SOR_SET_CONTROL_PROTOCOL_SINGLE_TMDS_B;
		}

		nv50_hdmi_enable(&nv_encoder->base.base, nv_crtc, nv_connector, state, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = NV507D_SOR_SET_CONTROL_PROTOCOL_LVDS_CUSTOM;

		/* Script flags: 0x0100 = dual link, 0x0200 = 24-bit panel. */
		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds.lvds.script |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds.lvds.script |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				/* SPWG panels encode link count in EDID
				 * byte 121 — confirm against SPWG spec. */
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds.lvds.script |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds.lvds.script |= 0x0100;
			}

			if (lvds.lvds.script & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds.lvds.script |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds.lvds.script |= 0x0200;
			}

			if (asyh->or.bpc == 8)
				lvds.lvds.script |= 0x0200;
		}

		nvif_mthd(&disp->disp->object, 0, &lvds, sizeof(lvds));
		break;
	case DCB_OUTPUT_DP:
		depth = nv50_dp_bpc_to_depth(asyh->or.bpc);

		if (nv_encoder->link & 1)
			proto = NV887D_SOR_SET_CONTROL_PROTOCOL_DP_A;
		else
			proto = NV887D_SOR_SET_CONTROL_PROTOCOL_DP_B;

		nv50_audio_enable(encoder, nv_crtc, nv_connector, state, mode);

#ifdef CONFIG_DRM_NOUVEAU_BACKLIGHT
		/* Restore the DPCD backlight to its last brightness. */
		backlight = nv_connector->backlight;
		if (backlight && backlight->uses_dpcd)
			drm_edp_backlight_enable(&nv_connector->aux, &backlight->edp_info,
						 (u16)backlight->dev->props.brightness);
#endif

		break;
	default:
		BUG();
		break;
	}

	nv_encoder->update(nv_encoder, nv_crtc->index, asyh, proto, depth);
}
1809
/* Atomic helper vtable for SOR encoders. */
static const struct drm_encoder_helper_funcs
nv50_sor_help = {
	.atomic_check = nv50_outp_atomic_check,
	.atomic_enable = nv50_sor_atomic_enable,
	.atomic_disable = nv50_sor_atomic_disable,
};
1816
/*
 * Destroy a SOR encoder: drop the nvif output, tear down MST state, and
 * free DP-only resources before releasing the encoder itself.
 */
static void
nv50_sor_destroy(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);

	nvif_outp_dtor(&nv_encoder->outp);

	nv50_mstm_del(&nv_encoder->dp.mstm);
	drm_encoder_cleanup(encoder);

	/* Lock only exists for DP outputs (see nv50_sor_create()). */
	if (nv_encoder->dcb->type == DCB_OUTPUT_DP)
		mutex_destroy(&nv_encoder->dp.hpd_irq_lock);

	kfree(encoder);
}
1832
/* Base encoder vtable for SOR encoders. */
static const struct drm_encoder_funcs
nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};
1837
1838 static bool nv50_has_mst(struct nouveau_drm *drm)
1839 {
1840 struct nvkm_bios *bios = nvxx_bios(&drm->client.device);
1841 u32 data;
1842 u8 ver, hdr, cnt, len;
1843
1844 data = nvbios_dp_table(bios, &ver, &hdr, &cnt, &len);
1845 return data && ver >= 0x40 && (nvbios_rd08(bios, data + 0x08) & 0x04);
1846 }
1847
/*
 * Create a SOR encoder for a DCB output entry and attach it to
 * @connector.  DP outputs additionally get an AUX channel, an HPD IRQ
 * lock, and (when the VBIOS allows and the connector is not eDP) an MST
 * manager; other outputs get a plain i2c bus.
 */
static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_connector *nv_connector = nouveau_connector(connector);
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	struct nv50_disp *disp = nv50_disp(connector->dev);
	int type, ret;

	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
	default:
		type = DRM_MODE_ENCODER_TMDS;
		break;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->update = nv50_sor_update;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	/* NOTE(review): drm_encoder_init() return value is not checked. */
	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
			 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_sor_help);

	drm_connector_attach_encoder(connector, encoder);

	disp->core->func->sor->get_caps(disp, nv_encoder, ffs(dcbe->or) - 1);
	nv50_outp_dump_caps(drm, nv_encoder);

	if (dcbe->type == DCB_OUTPUT_DP) {
		struct nvkm_i2c_aux *aux =
			nvkm_i2c_aux_find(i2c, dcbe->i2c_index);

		mutex_init(&nv_encoder->dp.hpd_irq_lock);

		if (aux) {
			/*
			 * Pre-GF110 uses the raw aux i2c; newer hardware
			 * goes through the connector's DRM DP AUX DDC
			 * adapter instead.
			 */
			if (disp->disp->object.oclass < GF110_DISP) {
				nv_encoder->i2c = &aux->i2c;
			} else {
				nv_encoder->i2c = &nv_connector->aux.ddc;
			}
			nv_encoder->aux = aux;
		}

		/* eDP panels never run MST. */
		if (nv_connector->type != DCB_CONNECTOR_eDP &&
		    nv50_has_mst(drm)) {
			ret = nv50_mstm_new(nv_encoder, &nv_connector->aux,
					    16, nv_connector->base.base.id,
					    &nv_encoder->dp.mstm);
			if (ret)
				return ret;
		}
	} else {
		struct nvkm_i2c_bus *bus =
			nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
		if (bus)
			nv_encoder->i2c = &bus->i2c;
	}

	return nvif_outp_ctor(disp->disp, nv_encoder->base.base.name, dcbe->id, &nv_encoder->outp);
}
1922
1923
1924
1925
1926 static int
1927 nv50_pior_atomic_check(struct drm_encoder *encoder,
1928 struct drm_crtc_state *crtc_state,
1929 struct drm_connector_state *conn_state)
1930 {
1931 int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state);
1932 if (ret)
1933 return ret;
1934 crtc_state->adjusted_mode.clock *= 2;
1935 return 0;
1936 }
1937
/*
 * Disable a PIOR output: set its control owner to NONE in the core
 * channel and release the OR.
 */
static void
nv50_pior_atomic_disable(struct drm_encoder *encoder, struct drm_atomic_state *state)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_core *core = nv50_disp(encoder->dev)->core;
	const u32 ctrl = NVDEF(NV507D, PIOR_SET_CONTROL, OWNER, NONE);

	core->func->pior->ctrl(core, nv_encoder->or, ctrl, NULL);
	nv_encoder->crtc = NULL;
	nv50_outp_release(nv_encoder);
}
1949
/*
 * Enable a PIOR output: bind it to the new CRTC (only heads 0/1 are
 * supported), acquire the OR, set the pixel depth from bpc, and program
 * the external-TMDS protocol into the core channel.
 */
static void
nv50_pior_atomic_enable(struct drm_encoder *encoder, struct drm_atomic_state *state)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nv50_outp_get_new_crtc(state, nv_encoder);
	struct nv50_head_atom *asyh =
		nv50_head_atom(drm_atomic_get_new_crtc_state(state, &nv_crtc->base));
	struct nv50_core *core = nv50_disp(encoder->dev)->core;
	u32 ctrl = 0;

	switch (nv_crtc->index) {
	case 0: ctrl |= NVDEF(NV507D, PIOR_SET_CONTROL, OWNER, HEAD0); break;
	case 1: ctrl |= NVDEF(NV507D, PIOR_SET_CONTROL, OWNER, HEAD1); break;
	default:
		/* PIORs only support heads 0 and 1. */
		WARN_ON(1);
		break;
	}

	nv50_outp_acquire(nv_encoder, false);

	switch (asyh->or.bpc) {
	case 10: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_BPP_30_444; break;
	case 8: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_BPP_24_444; break;
	case 6: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_BPP_18_444; break;
	default: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_DEFAULT; break;
	}

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
		/* Both are driven through the external TMDS encoder. */
		ctrl |= NVDEF(NV507D, PIOR_SET_CONTROL, PROTOCOL, EXT_TMDS_ENC);
		break;
	default:
		BUG();
		break;
	}

	core->func->pior->ctrl(core, nv_encoder->or, ctrl, asyh);
	nv_encoder->crtc = &nv_crtc->base;
}
1990
/* Atomic helper vtable for PIOR encoders. */
static const struct drm_encoder_helper_funcs
nv50_pior_help = {
	.atomic_check = nv50_pior_atomic_check,
	.atomic_enable = nv50_pior_atomic_enable,
	.atomic_disable = nv50_pior_atomic_disable,
};
1997
/* Destroy a PIOR encoder: drop the nvif output and free the encoder. */
static void
nv50_pior_destroy(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);

	nvif_outp_dtor(&nv_encoder->outp);

	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
2008
/* Base encoder vtable for PIOR encoders. */
static const struct drm_encoder_funcs
nv50_pior_func = {
	.destroy = nv50_pior_destroy,
};
2013
/*
 * Create a PIOR encoder for a DCB output entry and attach it to
 * @connector.  Picks the external-device i2c bus (TMDS) or aux channel
 * (DP); any other DCB type is unsupported.
 */
static int
nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nvkm_i2c_bus *bus = NULL;
	struct nvkm_i2c_aux *aux = NULL;
	struct i2c_adapter *ddc;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_TMDS:
		bus = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
		ddc = bus ? &bus->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	case DCB_OUTPUT_DP:
		aux = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
		ddc = aux ? &aux->i2c : NULL;
		/* External DP encoders still present as TMDS to DRM. */
		type = DRM_MODE_ENCODER_TMDS;
		break;
	default:
		return -ENODEV;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->i2c = ddc;
	nv_encoder->aux = aux;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	/* NOTE(review): drm_encoder_init() return value is not checked. */
	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
			 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_pior_help);

	drm_connector_attach_encoder(connector, encoder);

	disp->core->func->pior->get_caps(disp, nv_encoder, ffs(dcbe->or) - 1);
	nv50_outp_dump_caps(drm, nv_encoder);

	return nvif_outp_ctor(disp->disp, nv_encoder->base.base.name, dcbe->id, &nv_encoder->outp);
}
2064
2065
2066
2067
2068
/*
 * Flush a commit through the core channel: run MST prepare on every
 * modified topology, kick the core update and wait for its notifier,
 * then run MST cleanup.
 */
static void
nv50_disp_atomic_commit_core(struct drm_atomic_state *state, u32 *interlock)
{
	struct nouveau_drm *drm = nouveau_drm(state->dev);
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_core *core = disp->core;
	struct nv50_mstm *mstm;
	struct drm_encoder *encoder;

	NV_ATOMIC(drm, "commit core %08x\n", interlock[NV50_DISP_INTERLOCK_BASE]);

	/* MST managers hang off the physical (non-DPMST) encoders. */
	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_prepare(mstm);
		}
	}

	core->func->ntfy_init(disp->sync, NV50_DISP_CORE_NTFY);
	core->func->update(core, interlock, true);
	if (core->func->ntfy_wait_done(disp->sync, NV50_DISP_CORE_NTFY,
				       disp->core->chan.base.device))
		NV_ERROR(drm, "core notifier timeout\n");

	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_cleanup(mstm);
		}
	}
}
2102
2103 static void
2104 nv50_disp_atomic_commit_wndw(struct drm_atomic_state *state, u32 *interlock)
2105 {
2106 struct drm_plane_state *new_plane_state;
2107 struct drm_plane *plane;
2108 int i;
2109
2110 for_each_new_plane_in_state(state, plane, new_plane_state, i) {
2111 struct nv50_wndw *wndw = nv50_wndw(plane);
2112 if (interlock[wndw->interlock.type] & wndw->interlock.data) {
2113 if (wndw->func->update)
2114 wndw->func->update(wndw, interlock);
2115 }
2116 }
2117 }
2118
2119 static void
2120 nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
2121 {
2122 struct drm_device *dev = state->dev;
2123 struct drm_crtc_state *new_crtc_state, *old_crtc_state;
2124 struct drm_crtc *crtc;
2125 struct drm_plane_state *new_plane_state;
2126 struct drm_plane *plane;
2127 struct nouveau_drm *drm = nouveau_drm(dev);
2128 struct nv50_disp *disp = nv50_disp(dev);
2129 struct nv50_atom *atom = nv50_atom(state);
2130 struct nv50_core *core = disp->core;
2131 struct nv50_outp_atom *outp, *outt;
2132 u32 interlock[NV50_DISP_INTERLOCK__SIZE] = {};
2133 int i;
2134 bool flushed = false;
2135
2136 NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable);
2137 nv50_crc_atomic_stop_reporting(state);
2138 drm_atomic_helper_wait_for_fences(dev, state, false);
2139 drm_atomic_helper_wait_for_dependencies(state);
2140 drm_atomic_helper_update_legacy_modeset_state(dev, state);
2141 drm_atomic_helper_calc_timestamping_constants(state);
2142
2143 if (atom->lock_core)
2144 mutex_lock(&disp->mutex);
2145
2146
2147 for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
2148 struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
2149 struct nv50_head *head = nv50_head(crtc);
2150
2151 NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name,
2152 asyh->clr.mask, asyh->set.mask);
2153
2154 if (old_crtc_state->active && !new_crtc_state->active) {
2155 pm_runtime_put_noidle(dev->dev);
2156 drm_crtc_vblank_off(crtc);
2157 }
2158
2159 if (asyh->clr.mask) {
2160 nv50_head_flush_clr(head, asyh, atom->flush_disable);
2161 interlock[NV50_DISP_INTERLOCK_CORE] |= 1;
2162 }
2163 }
2164
2165
2166 for_each_new_plane_in_state(state, plane, new_plane_state, i) {
2167 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
2168 struct nv50_wndw *wndw = nv50_wndw(plane);
2169
2170 NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name,
2171 asyw->clr.mask, asyw->set.mask);
2172 if (!asyw->clr.mask)
2173 continue;
2174
2175 nv50_wndw_flush_clr(wndw, interlock, atom->flush_disable, asyw);
2176 }
2177
2178
2179 list_for_each_entry(outp, &atom->outp, head) {
2180 const struct drm_encoder_helper_funcs *help;
2181 struct drm_encoder *encoder;
2182
2183 encoder = outp->encoder;
2184 help = encoder->helper_private;
2185
2186 NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name,
2187 outp->clr.mask, outp->set.mask);
2188
2189 if (outp->clr.mask) {
2190 help->atomic_disable(encoder, state);
2191 interlock[NV50_DISP_INTERLOCK_CORE] |= 1;
2192 if (outp->flush_disable) {
2193 nv50_disp_atomic_commit_wndw(state, interlock);
2194 nv50_disp_atomic_commit_core(state, interlock);
2195 memset(interlock, 0x00, sizeof(interlock));
2196
2197 flushed = true;
2198 }
2199 }
2200 }
2201
2202
2203 if (interlock[NV50_DISP_INTERLOCK_CORE]) {
2204 if (atom->flush_disable) {
2205 nv50_disp_atomic_commit_wndw(state, interlock);
2206 nv50_disp_atomic_commit_core(state, interlock);
2207 memset(interlock, 0x00, sizeof(interlock));
2208
2209 flushed = true;
2210 }
2211 }
2212
2213 if (flushed)
2214 nv50_crc_atomic_release_notifier_contexts(state);
2215 nv50_crc_atomic_init_notifier_contexts(state);
2216
2217
2218 list_for_each_entry_safe(outp, outt, &atom->outp, head) {
2219 const struct drm_encoder_helper_funcs *help;
2220 struct drm_encoder *encoder;
2221
2222 encoder = outp->encoder;
2223 help = encoder->helper_private;
2224
2225 NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name,
2226 outp->set.mask, outp->clr.mask);
2227
2228 if (outp->set.mask) {
2229 help->atomic_enable(encoder, state);
2230 interlock[NV50_DISP_INTERLOCK_CORE] = 1;
2231 }
2232
2233 list_del(&outp->head);
2234 kfree(outp);
2235 }
2236
2237
2238 for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
2239 struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
2240 struct nv50_head *head = nv50_head(crtc);
2241
2242 NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
2243 asyh->set.mask, asyh->clr.mask);
2244
2245 if (asyh->set.mask) {
2246 nv50_head_flush_set(head, asyh);
2247 interlock[NV50_DISP_INTERLOCK_CORE] = 1;
2248 }
2249
2250 if (new_crtc_state->active) {
2251 if (!old_crtc_state->active) {
2252 drm_crtc_vblank_on(crtc);
2253 pm_runtime_get_noresume(dev->dev);
2254 }
2255 if (new_crtc_state->event)
2256 drm_crtc_vblank_get(crtc);
2257 }
2258 }
2259
2260
2261
2262
2263
2264
2265
2266
2267
2268 if (core->assign_windows) {
2269 core->func->wndw.owner(core);
2270 nv50_disp_atomic_commit_core(state, interlock);
2271 core->assign_windows = false;
2272 interlock[NV50_DISP_INTERLOCK_CORE] = 0;
2273 }
2274
2275
2276
2277
2278
2279
2280
2281
2282
2283
2284
2285
2286
2287
2288
2289 for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
2290 struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
2291 struct nv50_head *head = nv50_head(crtc);
2292
2293 NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
2294 asyh->set.mask, asyh->clr.mask);
2295
2296 if (asyh->set.mask) {
2297 nv50_head_flush_set_wndw(head, asyh);
2298 interlock[NV50_DISP_INTERLOCK_CORE] = 1;
2299 }
2300 }
2301
2302
2303 for_each_new_plane_in_state(state, plane, new_plane_state, i) {
2304 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
2305 struct nv50_wndw *wndw = nv50_wndw(plane);
2306
2307 NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name,
2308 asyw->set.mask, asyw->clr.mask);
2309 if ( !asyw->set.mask &&
2310 (!asyw->clr.mask || atom->flush_disable))
2311 continue;
2312
2313 nv50_wndw_flush_set(wndw, interlock, asyw);
2314 }
2315
2316
2317 nv50_disp_atomic_commit_wndw(state, interlock);
2318
2319 if (interlock[NV50_DISP_INTERLOCK_CORE]) {
2320 if (interlock[NV50_DISP_INTERLOCK_BASE] ||
2321 interlock[NV50_DISP_INTERLOCK_OVLY] ||
2322 interlock[NV50_DISP_INTERLOCK_WNDW] ||
2323 !atom->state.legacy_cursor_update)
2324 nv50_disp_atomic_commit_core(state, interlock);
2325 else
2326 disp->core->func->update(disp->core, interlock, false);
2327 }
2328
2329 if (atom->lock_core)
2330 mutex_unlock(&disp->mutex);
2331
2332
2333 for_each_new_plane_in_state(state, plane, new_plane_state, i) {
2334 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
2335 struct nv50_wndw *wndw = nv50_wndw(plane);
2336 int ret = nv50_wndw_wait_armed(wndw, asyw);
2337 if (ret)
2338 NV_ERROR(drm, "%s: timeout\n", plane->name);
2339 }
2340
2341 for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
2342 if (new_crtc_state->event) {
2343 unsigned long flags;
2344
2345 if (new_crtc_state->active)
2346 drm_crtc_accurate_vblank_count(crtc);
2347 spin_lock_irqsave(&crtc->dev->event_lock, flags);
2348 drm_crtc_send_vblank_event(crtc, new_crtc_state->event);
2349 spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
2350
2351 new_crtc_state->event = NULL;
2352 if (new_crtc_state->active)
2353 drm_crtc_vblank_put(crtc);
2354 }
2355 }
2356
2357 nv50_crc_atomic_start_reporting(state);
2358 if (!flushed)
2359 nv50_crc_atomic_release_notifier_contexts(state);
2360
2361 drm_atomic_helper_commit_hw_done(state);
2362 drm_atomic_helper_cleanup_planes(dev, state);
2363 drm_atomic_helper_commit_cleanup_done(state);
2364 drm_atomic_state_put(state);
2365
2366
2367 pm_runtime_mark_last_busy(dev->dev);
2368 pm_runtime_put_autosuspend(dev->dev);
2369 }
2370
2371 static void
2372 nv50_disp_atomic_commit_work(struct work_struct *work)
2373 {
2374 struct drm_atomic_state *state =
2375 container_of(work, typeof(*state), commit_work);
2376 nv50_disp_atomic_commit_tail(state);
2377 }
2378
/* drm_mode_config_funcs.atomic_commit implementation.
 *
 * Validates nothing itself (that happened in ->atomic_check); this routine
 * prepares the commit, swaps in the new state, and either runs the commit
 * tail synchronously or queues it on a workqueue for nonblocking commits.
 *
 * Returns 0 on success or a negative errno; on failure before swap_state
 * the prepared planes are cleaned up and the runtime-PM reference dropped.
 */
static int
nv50_disp_atomic_commit(struct drm_device *dev,
			struct drm_atomic_state *state, bool nonblock)
{
	struct drm_plane_state *new_plane_state;
	struct drm_plane *plane;
	int ret, i;

	/* Wake the device for the duration of the commit.  -EACCES means
	 * runtime PM is not in use for this device, which is not an error
	 * here; any other failure aborts the commit.
	 */
	ret = pm_runtime_get_sync(dev->dev);
	if (ret < 0 && ret != -EACCES) {
		pm_runtime_put_autosuspend(dev->dev);
		return ret;
	}

	ret = drm_atomic_helper_setup_commit(state, nonblock);
	if (ret)
		goto done;

	INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work);

	ret = drm_atomic_helper_prepare_planes(dev, state);
	if (ret)
		goto done;

	/* Blocking commits wait for implicit fences up front; nonblocking
	 * commits rely on the helper machinery to wait in the worker.
	 */
	if (!nonblock) {
		ret = drm_atomic_helper_wait_for_fences(dev, state, true);
		if (ret)
			goto err_cleanup;
	}

	ret = drm_atomic_helper_swap_state(state, true);
	if (ret)
		goto err_cleanup;

	/* Enable completion notifiers on every window that will flip to a
	 * new image, so the tail can wait for the hardware to latch it.
	 */
	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		if (asyw->set.image)
			nv50_wndw_ntfy_enable(wndw, asyw);
	}

	/* The commit tail owns a state reference and a no-resume PM
	 * reference; both are released at the end of the tail.
	 */
	drm_atomic_state_get(state);

	pm_runtime_get_noresume(dev->dev);

	if (nonblock)
		queue_work(system_unbound_wq, &state->commit_work);
	else
		nv50_disp_atomic_commit_tail(state);

err_cleanup:
	/* Only reached with ret != 0 via the gotos above; the fallthrough
	 * from the success path has ret == 0, so no cleanup happens then.
	 */
	if (ret)
		drm_atomic_helper_cleanup_planes(dev, state);
done:
	pm_runtime_put_autosuspend(dev->dev);
	return ret;
}
2441
2442 static struct nv50_outp_atom *
2443 nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
2444 {
2445 struct nv50_outp_atom *outp;
2446
2447 list_for_each_entry(outp, &atom->outp, head) {
2448 if (outp->encoder == encoder)
2449 return outp;
2450 }
2451
2452 outp = kzalloc(sizeof(*outp), GFP_KERNEL);
2453 if (!outp)
2454 return ERR_PTR(-ENOMEM);
2455
2456 list_add(&outp->head, &atom->outp);
2457 outp->encoder = encoder;
2458 return outp;
2459 }
2460
2461 static int
2462 nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
2463 struct drm_connector_state *old_connector_state)
2464 {
2465 struct drm_encoder *encoder = old_connector_state->best_encoder;
2466 struct drm_crtc_state *old_crtc_state, *new_crtc_state;
2467 struct drm_crtc *crtc;
2468 struct nv50_outp_atom *outp;
2469
2470 if (!(crtc = old_connector_state->crtc))
2471 return 0;
2472
2473 old_crtc_state = drm_atomic_get_old_crtc_state(&atom->state, crtc);
2474 new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
2475 if (old_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
2476 outp = nv50_disp_outp_atomic_add(atom, encoder);
2477 if (IS_ERR(outp))
2478 return PTR_ERR(outp);
2479
2480 if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
2481 outp->flush_disable = true;
2482 atom->flush_disable = true;
2483 }
2484 outp->clr.ctrl = true;
2485 atom->lock_core = true;
2486 }
2487
2488 return 0;
2489 }
2490
2491 static int
2492 nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
2493 struct drm_connector_state *connector_state)
2494 {
2495 struct drm_encoder *encoder = connector_state->best_encoder;
2496 struct drm_crtc_state *new_crtc_state;
2497 struct drm_crtc *crtc;
2498 struct nv50_outp_atom *outp;
2499
2500 if (!(crtc = connector_state->crtc))
2501 return 0;
2502
2503 new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
2504 if (new_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
2505 outp = nv50_disp_outp_atomic_add(atom, encoder);
2506 if (IS_ERR(outp))
2507 return PTR_ERR(outp);
2508
2509 outp->set.ctrl = true;
2510 atom->lock_core = true;
2511 }
2512
2513 return 0;
2514 }
2515
2516 static int
2517 nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
2518 {
2519 struct nv50_atom *atom = nv50_atom(state);
2520 struct nv50_core *core = nv50_disp(dev)->core;
2521 struct drm_connector_state *old_connector_state, *new_connector_state;
2522 struct drm_connector *connector;
2523 struct drm_crtc_state *new_crtc_state;
2524 struct drm_crtc *crtc;
2525 struct nv50_head *head;
2526 struct nv50_head_atom *asyh;
2527 int ret, i;
2528
2529 if (core->assign_windows && core->func->head->static_wndw_map) {
2530 drm_for_each_crtc(crtc, dev) {
2531 new_crtc_state = drm_atomic_get_crtc_state(state,
2532 crtc);
2533 if (IS_ERR(new_crtc_state))
2534 return PTR_ERR(new_crtc_state);
2535
2536 head = nv50_head(crtc);
2537 asyh = nv50_head_atom(new_crtc_state);
2538 core->func->head->static_wndw_map(head, asyh);
2539 }
2540 }
2541
2542
2543 for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
2544 if (new_crtc_state->color_mgmt_changed) {
2545 ret = drm_atomic_add_affected_planes(state, crtc);
2546 if (ret)
2547 return ret;
2548 }
2549 }
2550
2551 ret = drm_atomic_helper_check(dev, state);
2552 if (ret)
2553 return ret;
2554
2555 for_each_oldnew_connector_in_state(state, connector, old_connector_state, new_connector_state, i) {
2556 ret = nv50_disp_outp_atomic_check_clr(atom, old_connector_state);
2557 if (ret)
2558 return ret;
2559
2560 ret = nv50_disp_outp_atomic_check_set(atom, new_connector_state);
2561 if (ret)
2562 return ret;
2563 }
2564
2565 ret = drm_dp_mst_atomic_check(state);
2566 if (ret)
2567 return ret;
2568
2569 nv50_crc_atomic_check_outp(atom);
2570
2571 return 0;
2572 }
2573
2574 static void
2575 nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
2576 {
2577 struct nv50_atom *atom = nv50_atom(state);
2578 struct nv50_outp_atom *outp, *outt;
2579
2580 list_for_each_entry_safe(outp, outt, &atom->outp, head) {
2581 list_del(&outp->head);
2582 kfree(outp);
2583 }
2584
2585 drm_atomic_state_default_clear(state);
2586 }
2587
2588 static void
2589 nv50_disp_atomic_state_free(struct drm_atomic_state *state)
2590 {
2591 struct nv50_atom *atom = nv50_atom(state);
2592 drm_atomic_state_default_release(&atom->state);
2593 kfree(atom);
2594 }
2595
2596 static struct drm_atomic_state *
2597 nv50_disp_atomic_state_alloc(struct drm_device *dev)
2598 {
2599 struct nv50_atom *atom;
2600 if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
2601 drm_atomic_state_init(dev, &atom->state) < 0) {
2602 kfree(atom);
2603 return NULL;
2604 }
2605 INIT_LIST_HEAD(&atom->outp);
2606 return &atom->state;
2607 }
2608
/* Mode-config vtable wiring the nv50 atomic implementation (and the
 * driver's framebuffer/fbcon hooks) into the DRM core.
 */
static const struct drm_mode_config_funcs
nv50_disp_func = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
	.atomic_check = nv50_disp_atomic_check,
	.atomic_commit = nv50_disp_atomic_commit,
	.atomic_state_alloc = nv50_disp_atomic_state_alloc,
	.atomic_state_clear = nv50_disp_atomic_state_clear,
	.atomic_state_free = nv50_disp_atomic_state_free,
};
2619
2620
2621
2622
2623
2624 static void
2625 nv50_display_fini(struct drm_device *dev, bool runtime, bool suspend)
2626 {
2627 struct nouveau_drm *drm = nouveau_drm(dev);
2628 struct drm_encoder *encoder;
2629
2630 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
2631 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST)
2632 nv50_mstm_fini(nouveau_encoder(encoder));
2633 }
2634
2635 if (!runtime)
2636 cancel_work_sync(&drm->hpd_work);
2637 }
2638
2639 static int
2640 nv50_display_init(struct drm_device *dev, bool resume, bool runtime)
2641 {
2642 struct nv50_core *core = nv50_disp(dev)->core;
2643 struct drm_encoder *encoder;
2644
2645 if (resume || runtime)
2646 core->func->init(core);
2647
2648 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
2649 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
2650 struct nouveau_encoder *nv_encoder =
2651 nouveau_encoder(encoder);
2652 nv50_mstm_init(nv_encoder, runtime);
2653 }
2654 }
2655
2656 return 0;
2657 }
2658
2659 static void
2660 nv50_display_destroy(struct drm_device *dev)
2661 {
2662 struct nv50_disp *disp = nv50_disp(dev);
2663
2664 nv50_audio_component_fini(nouveau_drm(dev));
2665
2666 nvif_object_unmap(&disp->caps);
2667 nvif_object_dtor(&disp->caps);
2668 nv50_core_del(&disp->core);
2669
2670 nouveau_bo_unmap(disp->sync);
2671 if (disp->sync)
2672 nouveau_bo_unpin(disp->sync);
2673 nouveau_bo_ref(NULL, &disp->sync);
2674
2675 nouveau_display(dev)->priv = NULL;
2676 kfree(disp);
2677 }
2678
/* Construct the nv50+ display: allocate the nv50_disp, create the sync
 * buffer and core channel, create heads (CRTCs), MST encoders, and one
 * encoder per usable VBIOS DCB entry.  On any failure after the disp is
 * registered as nouveau_display()->priv, cleanup goes through
 * nv50_display_destroy() via the single "out" label.
 * Returns 0 on success or a negative errno.
 */
int
nv50_display_create(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->client.device;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;
	bool has_mst = nv50_has_mst(drm);

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	mutex_init(&disp->mutex);

	/* Hook the disp into nouveau_display before anything can fail, so
	 * the destructor reached via "out" can find and free it.
	 */
	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;
	disp->disp = &nouveau_display(dev)->disp;
	dev->mode_config.funcs = &nv50_disp_func;
	dev->mode_config.quirk_addfb_prefer_xbgr_30bpp = true;
	dev->mode_config.normalize_zpos = true;

	/* Small VRAM buffer used for synchronisation (pinned and mapped
	 * for CPU access); freed by nv50_display_destroy().
	 */
	ret = nouveau_bo_new(&drm->client, 4096, 0x1000,
			     NOUVEAU_GEM_DOMAIN_VRAM,
			     0, 0x0000, NULL, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, NOUVEAU_GEM_DOMAIN_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(disp->sync);
			if (ret)
				nouveau_bo_unpin(disp->sync);
		}
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* Allocate and initialise the core (master) display channel. */
	ret = nv50_core_new(drm, &disp->core);
	if (ret)
		goto out;

	disp->core->func->init(disp->core);
	if (disp->core->func->caps_init) {
		ret = disp->core->func->caps_init(drm, disp);
		if (ret)
			goto out;
	}

	/* Advertise the framebuffer modifiers supported by this family. */
	if (disp->disp->object.oclass >= TU102_DISP)
		nouveau_display(dev)->format_modifiers = wndwc57e_modifiers;
	else
	if (drm->client.device.info.family >= NV_DEVICE_INFO_V0_FERMI)
		nouveau_display(dev)->format_modifiers = disp90xx_modifiers;
	else
		nouveau_display(dev)->format_modifiers = disp50xx_modifiers;

	/* Maximum hardware cursor size per display class generation. */
	if (disp->disp->object.oclass >= GM107_DISP) {
		dev->mode_config.cursor_width = 256;
		dev->mode_config.cursor_height = 256;
	} else if (disp->disp->object.oclass >= GK104_DISP) {
		dev->mode_config.cursor_width = 128;
		dev->mode_config.cursor_height = 128;
	} else {
		dev->mode_config.cursor_width = 64;
		dev->mode_config.cursor_height = 64;
	}

	/* Read the head-present bitmask from the display class' registers;
	 * pre-GF110 chips always expose two heads.
	 */
	if (disp->disp->object.oclass >= GV100_DISP)
		crtcs = nvif_rd32(&device->object, 0x610060) & 0xff;
	else
	if (disp->disp->object.oclass >= GF110_DISP)
		crtcs = nvif_rd32(&device->object, 0x612004) & 0xf;
	else
		crtcs = 0x3;

	/* Create a head (CRTC) for each bit set in the mask, plus an MST
	 * encoder per head when MST is supported.
	 */
	for (i = 0; i < fls(crtcs); i++) {
		struct nv50_head *head;

		if (!(crtcs & (1 << i)))
			continue;

		head = nv50_head_create(dev, i);
		if (IS_ERR(head)) {
			ret = PTR_ERR(head);
			goto out;
		}

		if (has_mst) {
			head->msto = nv50_msto_new(dev, head, i);
			if (IS_ERR(head->msto)) {
				ret = PTR_ERR(head->msto);
				head->msto = NULL;
				goto out;
			}

			/* MST streams may be routed to any head, so allow
			 * every CRTC in the mask.
			 */
			head->msto->encoder.possible_crtcs = crtcs;
		}
	}

	/* Create connectors and encoders from the VBIOS DCB table; entries
	 * we fail to handle are warned about and skipped, not fatal.
	 */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location == DCB_LOC_ON_CHIP) {
			switch (dcbe->type) {
			case DCB_OUTPUT_TMDS:
			case DCB_OUTPUT_LVDS:
			case DCB_OUTPUT_DP:
				ret = nv50_sor_create(connector, dcbe);
				break;
			case DCB_OUTPUT_ANALOG:
				ret = nv50_dac_create(connector, dcbe);
				break;
			default:
				ret = -ENODEV;
				break;
			}
		} else {
			ret = nv50_pior_create(connector, dcbe);
		}

		if (ret) {
			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
				dcbe->location, dcbe->type,
				ffs(dcbe->or) - 1, ret);
			ret = 0;
		}
	}

	/* Remove any connector that ended up with no usable encoder. */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->possible_encoders)
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			connector->name);
		connector->funcs->destroy(connector);
	}

	/* Hardware vblank counting allows immediate vblank disabling. */
	dev->vblank_disable_immediate = true;

	nv50_audio_component_init(drm);

out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}
2859
2860
2861
2862
2863
2864
2865
2866
2867
2868
2869
/* Framebuffer modifiers advertised on pre-Fermi (NV50-family) display:
 * NVIDIA 2D block-linear layouts plus plain linear, terminated by
 * DRM_FORMAT_MOD_INVALID as DRM requires.
 */
const u64 disp50xx_modifiers[] = {
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 0),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 1),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 2),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 3),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 4),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 5),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 0),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 1),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 2),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 3),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 4),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 5),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 0),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 1),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 2),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 3),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 4),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 5),
	DRM_FORMAT_MOD_LINEAR,
	DRM_FORMAT_MOD_INVALID
};
2892
2893
2894
2895
2896
2897
2898
/* Framebuffer modifiers advertised on Fermi-and-newer (pre-Turing)
 * display: NVIDIA 2D block-linear layouts plus plain linear, terminated
 * by DRM_FORMAT_MOD_INVALID as DRM requires.
 */
const u64 disp90xx_modifiers[] = {
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 0),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 1),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 2),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 3),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 4),
	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 5),
	DRM_FORMAT_MOD_LINEAR,
	DRM_FORMAT_MOD_INVALID
};