0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
0024
0025
0026
0027
0028
0029
0030
0031
0032
0033
0034 #include "i915_drv.h"
0035 #include "gvt.h"
0036
/* Indices into vgpu->cfg_space.bar[] for the virtual PCI BARs we track. */
enum {
	INTEL_GVT_PCI_BAR_GTTMMIO = 0,	/* BAR 0/1: GTT + MMIO (64-bit) */
	INTEL_GVT_PCI_BAR_APERTURE,	/* BAR 2/3: graphics aperture (64-bit) */
	INTEL_GVT_PCI_BAR_PIO,		/* BAR 4: legacy port I/O */
	INTEL_GVT_PCI_BAR_MAX,
};
0043
0044
0045
0046
0047
/*
 * Per-byte guest-writable-bit map for the emulated PCI config space,
 * covering offsets 0 .. PCI_INTERRUPT_LINE + 3.  A set bit means the
 * guest may change that bit; clear bits are read-only and preserved.
 * Bytes at offsets beyond this array are treated as fully writable
 * (see vgpu_pci_cfg_mem_write()).
 */
static const u8 pci_cfg_space_rw_bmp[PCI_INTERRUPT_LINE + 4] = {
	[PCI_COMMAND] = 0xff, 0x07,				/* command register, low 3 bits of high byte */
	[PCI_STATUS] = 0x00, 0xf9,				/* status: high byte holds RW1C bits */
	[PCI_CACHE_LINE_SIZE] = 0xff,
	[PCI_BASE_ADDRESS_0 ... PCI_CARDBUS_CIS - 1] = 0xff,	/* all six BARs */
	[PCI_ROM_ADDRESS] = 0x01, 0xf8, 0xff, 0xff,		/* ROM enable bit + address bits */
	[PCI_INTERRUPT_LINE] = 0xff,
};
0056
0057
0058
0059
0060
0061
0062
0063
0064
0065
0066
0067
/*
 * vgpu_pci_cfg_mem_write - apply a guest write to the virtual config space
 * @vgpu: target vGPU
 * @off: byte offset into the config space
 * @src: data the guest wrote
 * @bytes: number of bytes written
 *
 * Each byte inside the range of pci_cfg_space_rw_bmp is merged bit-by-bit:
 * only bits marked writable in the map take the guest's value, all other
 * bits keep their current contents.  The high byte of PCI_STATUS follows
 * write-1-to-clear semantics.  Writes to the PMCSR register are watched so
 * a guest transition to D3hot can be recorded in vgpu->d3_entered.
 */
static void vgpu_pci_cfg_mem_write(struct intel_vgpu *vgpu, unsigned int off,
				   u8 *src, unsigned int bytes)
{
	u8 *cfg_base = vgpu_cfg_space(vgpu);
	u8 mask, new, old;
	pci_power_t pwr;
	int i = 0;

	for (; i < bytes && (off + i < sizeof(pci_cfg_space_rw_bmp)); i++) {
		mask = pci_cfg_space_rw_bmp[off + i];
		old = cfg_base[off + i];
		new = src[i] & mask;

		/*
		 * The PCI_STATUS high byte contains RW1C (write-1-to-clear)
		 * bits: a guest write of 1 clears the bit, a write of 0
		 * leaves it alone.
		 */
		if (off + i == PCI_STATUS + 1)
			new = (~new & old) & mask;

		/* Merge writable bits with the preserved read-only bits. */
		cfg_base[off + i] = (old & ~mask) | new;
	}

	/* Bytes past the end of the bitmap are fully guest-writable. */
	if (i < bytes)
		memcpy(cfg_base + off + i, src + i, bytes - i);

	/* pmcsr_off is 0 when no PM capability was found at init time. */
	if (off == vgpu->cfg_space.pmcsr_off && vgpu->cfg_space.pmcsr_off) {
		pwr = (pci_power_t __force)(*(u16*)(&vgpu_cfg_space(vgpu)[off])
			& PCI_PM_CTRL_STATE_MASK);
		if (pwr == PCI_D3hot)
			vgpu->d3_entered = true;
		gvt_dbg_core("vgpu-%d power status changed to %d\n",
			     vgpu->id, pwr);
	}
}
0105
0106
0107
0108
0109
0110
0111
0112
0113
0114
0115
0116 int intel_vgpu_emulate_cfg_read(struct intel_vgpu *vgpu, unsigned int offset,
0117 void *p_data, unsigned int bytes)
0118 {
0119 struct drm_i915_private *i915 = vgpu->gvt->gt->i915;
0120
0121 if (drm_WARN_ON(&i915->drm, bytes > 4))
0122 return -EINVAL;
0123
0124 if (drm_WARN_ON(&i915->drm,
0125 offset + bytes > vgpu->gvt->device_info.cfg_space_size))
0126 return -EINVAL;
0127
0128 memcpy(p_data, vgpu_cfg_space(vgpu) + offset, bytes);
0129 return 0;
0130 }
0131
0132 static void map_aperture(struct intel_vgpu *vgpu, bool map)
0133 {
0134 if (map != vgpu->cfg_space.bar[INTEL_GVT_PCI_BAR_APERTURE].tracked)
0135 vgpu->cfg_space.bar[INTEL_GVT_PCI_BAR_APERTURE].tracked = map;
0136 }
0137
0138 static void trap_gttmmio(struct intel_vgpu *vgpu, bool trap)
0139 {
0140 if (trap != vgpu->cfg_space.bar[INTEL_GVT_PCI_BAR_GTTMMIO].tracked)
0141 vgpu->cfg_space.bar[INTEL_GVT_PCI_BAR_GTTMMIO].tracked = trap;
0142 }
0143
0144 static int emulate_pci_command_write(struct intel_vgpu *vgpu,
0145 unsigned int offset, void *p_data, unsigned int bytes)
0146 {
0147 u8 old = vgpu_cfg_space(vgpu)[offset];
0148 u8 new = *(u8 *)p_data;
0149 u8 changed = old ^ new;
0150
0151 vgpu_pci_cfg_mem_write(vgpu, offset, p_data, bytes);
0152 if (!(changed & PCI_COMMAND_MEMORY))
0153 return 0;
0154
0155 if (old & PCI_COMMAND_MEMORY) {
0156 trap_gttmmio(vgpu, false);
0157 map_aperture(vgpu, false);
0158 } else {
0159 trap_gttmmio(vgpu, true);
0160 map_aperture(vgpu, true);
0161 }
0162
0163 return 0;
0164 }
0165
0166 static int emulate_pci_rom_bar_write(struct intel_vgpu *vgpu,
0167 unsigned int offset, void *p_data, unsigned int bytes)
0168 {
0169 u32 *pval = (u32 *)(vgpu_cfg_space(vgpu) + offset);
0170 u32 new = *(u32 *)(p_data);
0171
0172 if ((new & PCI_ROM_ADDRESS_MASK) == PCI_ROM_ADDRESS_MASK)
0173
0174 *pval = 0;
0175 else
0176 vgpu_pci_cfg_mem_write(vgpu, offset, p_data, bytes);
0177 return 0;
0178 }
0179
/*
 * emulate_pci_bar_write - handle a guest write to one of BARs 0-5
 * @vgpu: target vGPU
 * @offset: BAR register offset (4-byte aligned)
 * @p_data: the 32-bit value the guest wrote
 * @bytes: access width
 *
 * Implements the standard PCI BAR sizing protocol: a write of all ones
 * makes the next read return the size mask; any other write programs a
 * new base address and refreshes the BAR tracking state.
 */
static void emulate_pci_bar_write(struct intel_vgpu *vgpu, unsigned int offset,
	void *p_data, unsigned int bytes)
{
	u32 new = *(u32 *)(p_data);
	/* 64-bit BARs occupy two dwords; the low dword is 8-byte aligned. */
	bool lo = IS_ALIGNED(offset, 8);
	u64 size;
	bool mmio_enabled =
		vgpu_cfg_space(vgpu)[PCI_COMMAND] & PCI_COMMAND_MEMORY;
	struct intel_vgpu_pci_bar *bars = vgpu->cfg_space.bar;

	/*
	 * PCI BAR sizing: the driver writes 0xffffffff, then reads back
	 * the register.  Read-only size bits come back as zero, so what
	 * is stored here is ~(size - 1) shifted to the dword being
	 * probed.
	 */
	if (new == 0xffffffff) {
		switch (offset) {
		case PCI_BASE_ADDRESS_0:
		case PCI_BASE_ADDRESS_1:
			size = ~(bars[INTEL_GVT_PCI_BAR_GTTMMIO].size -1);
			intel_vgpu_write_pci_bar(vgpu, offset,
						size >> (lo ? 0 : 32), lo);
			/*
			 * Untrap the old MMIO range while the BAR holds a
			 * size mask rather than a real address.
			 */
			trap_gttmmio(vgpu, false);
			break;
		case PCI_BASE_ADDRESS_2:
		case PCI_BASE_ADDRESS_3:
			size = ~(bars[INTEL_GVT_PCI_BAR_APERTURE].size -1);
			intel_vgpu_write_pci_bar(vgpu, offset,
						size >> (lo ? 0 : 32), lo);
			map_aperture(vgpu, false);
			break;
		default:
			/* Unimplemented BARs (4/5) always size to zero. */
			intel_vgpu_write_pci_bar(vgpu, offset, 0x0, false);
		}
	} else {
		switch (offset) {
		case PCI_BASE_ADDRESS_0:
		case PCI_BASE_ADDRESS_1:
			/*
			 * Untrap the old address range first, store the new
			 * base, then re-trap only if memory decoding is
			 * currently enabled in PCI_COMMAND.
			 */
			trap_gttmmio(vgpu, false);
			intel_vgpu_write_pci_bar(vgpu, offset, new, lo);
			trap_gttmmio(vgpu, mmio_enabled);
			break;
		case PCI_BASE_ADDRESS_2:
		case PCI_BASE_ADDRESS_3:
			map_aperture(vgpu, false);
			intel_vgpu_write_pci_bar(vgpu, offset, new, lo);
			map_aperture(vgpu, mmio_enabled);
			break;
		default:
			intel_vgpu_write_pci_bar(vgpu, offset, new, lo);
		}
	}
}
0244
0245
0246
0247
0248
0249
0250
0251
0252
0253
0254
/**
 * intel_vgpu_emulate_cfg_write - emulate a vGPU configuration space write
 * @vgpu: target vGPU
 * @offset: byte offset into the config space
 * @p_data: the data the guest wrote
 * @bytes: access width (at most 4)
 *
 * Dispatches the write to the register-specific handler (PCI_COMMAND,
 * ROM BAR, BARs 0-5, SWSCI, OpRegion) or to the generic masked write
 * path for everything else.
 *
 * Returns:
 * Zero on success, -EINVAL on an oversized or misaligned access.
 */
int intel_vgpu_emulate_cfg_write(struct intel_vgpu *vgpu, unsigned int offset,
	void *p_data, unsigned int bytes)
{
	struct drm_i915_private *i915 = vgpu->gvt->gt->i915;
	int ret;

	if (drm_WARN_ON(&i915->drm, bytes > 4))
		return -EINVAL;

	if (drm_WARN_ON(&i915->drm,
			offset + bytes > vgpu->gvt->device_info.cfg_space_size))
		return -EINVAL;

	/* First check if it's PCI_COMMAND */
	if (IS_ALIGNED(offset, 2) && offset == PCI_COMMAND) {
		/* PCI_COMMAND is a 16-bit register. */
		if (drm_WARN_ON(&i915->drm, bytes > 2))
			return -EINVAL;
		return emulate_pci_command_write(vgpu, offset, p_data, bytes);
	}

	/* Dispatch on the dword-aligned register base. */
	switch (rounddown(offset, 4)) {
	case PCI_ROM_ADDRESS:
		if (drm_WARN_ON(&i915->drm, !IS_ALIGNED(offset, 4)))
			return -EINVAL;
		return emulate_pci_rom_bar_write(vgpu, offset, p_data, bytes);

	case PCI_BASE_ADDRESS_0 ... PCI_BASE_ADDRESS_5:
		if (drm_WARN_ON(&i915->drm, !IS_ALIGNED(offset, 4)))
			return -EINVAL;
		emulate_pci_bar_write(vgpu, offset, p_data, bytes);
		break;
	case INTEL_GVT_PCI_SWSCI:
		/* SWSCI triggers an OpRegion request, nothing is stored. */
		if (drm_WARN_ON(&i915->drm, !IS_ALIGNED(offset, 4)))
			return -EINVAL;
		ret = intel_vgpu_emulate_opregion_request(vgpu, *(u32 *)p_data);
		if (ret)
			return ret;
		break;

	case INTEL_GVT_PCI_OPREGION:
		/* The guest programs the OpRegion base address here. */
		if (drm_WARN_ON(&i915->drm, !IS_ALIGNED(offset, 4)))
			return -EINVAL;
		ret = intel_vgpu_opregion_base_write_handler(vgpu,
						   *(u32 *)p_data);
		if (ret)
			return ret;

		vgpu_pci_cfg_mem_write(vgpu, offset, p_data, bytes);
		break;
	default:
		vgpu_pci_cfg_mem_write(vgpu, offset, p_data, bytes);
		break;
	}
	return 0;
}
0310
0311
0312
0313
0314
0315
0316
0317
/**
 * intel_vgpu_init_cfg_space - initialize a vGPU's virtual config space
 * @vgpu: target vGPU
 * @primary: whether this vGPU is the guest's primary display adapter
 *
 * Populates the virtual config space from the host firmware snapshot,
 * then adjusts it for the vGPU: non-primary devices get a non-VGA class
 * code, stolen-memory size bits are cleared, decoding is disabled, and
 * unused BARs are zeroed.  Also records the BAR sizes and locates the
 * PMCSR offset inside the PCI capability list for later power-state
 * tracking.
 */
void intel_vgpu_init_cfg_space(struct intel_vgpu *vgpu,
			       bool primary)
{
	struct intel_gvt *gvt = vgpu->gvt;
	struct pci_dev *pdev = to_pci_dev(gvt->gt->i915->drm.dev);
	const struct intel_gvt_device_info *info = &gvt->device_info;
	u16 *gmch_ctl;
	u8 next;

	memcpy(vgpu_cfg_space(vgpu), gvt->firmware.cfg_space,
	       info->cfg_space_size);

	/* A secondary adapter must not claim the VGA class code. */
	if (!primary) {
		vgpu_cfg_space(vgpu)[PCI_CLASS_DEVICE] =
			INTEL_GVT_PCI_CLASS_VGA_OTHER;
		vgpu_cfg_space(vgpu)[PCI_CLASS_PROG] =
			INTEL_GVT_PCI_CLASS_VGA_OTHER;
	}

	/* Show guest that there's no stolen graphics memory. */
	gmch_ctl = (u16 *)(vgpu_cfg_space(vgpu) + INTEL_GVT_PCI_GMCH_CONTROL);
	*gmch_ctl &= ~(BDW_GMCH_GMS_MASK << BDW_GMCH_GMS_SHIFT);

	intel_vgpu_write_pci_bar(vgpu, PCI_BASE_ADDRESS_2,
				 gvt_aperture_pa_base(gvt), true);

	/* Start with all decoding disabled; the guest enables it itself. */
	vgpu_cfg_space(vgpu)[PCI_COMMAND] &= ~(PCI_COMMAND_IO
					     | PCI_COMMAND_MEMORY
					     | PCI_COMMAND_MASTER);

	/*
	 * Clear the high dwords of the 64-bit BARs and the BARs we do
	 * not emulate, plus the OpRegion pointer.
	 */
	memset(vgpu_cfg_space(vgpu) + PCI_BASE_ADDRESS_1, 0, 4);
	memset(vgpu_cfg_space(vgpu) + PCI_BASE_ADDRESS_3, 0, 4);
	memset(vgpu_cfg_space(vgpu) + PCI_BASE_ADDRESS_4, 0, 8);
	memset(vgpu_cfg_space(vgpu) + INTEL_GVT_PCI_OPREGION, 0, 4);

	/* BAR sizes mirror the physical device's resources. */
	vgpu->cfg_space.bar[INTEL_GVT_PCI_BAR_GTTMMIO].size =
				pci_resource_len(pdev, 0);
	vgpu->cfg_space.bar[INTEL_GVT_PCI_BAR_APERTURE].size =
				pci_resource_len(pdev, 2);

	memset(vgpu_cfg_space(vgpu) + PCI_ROM_ADDRESS, 0, 4);

	/* Walk the capability list to find the PM capability's PMCSR. */
	vgpu->cfg_space.pmcsr_off = 0;
	if (vgpu_cfg_space(vgpu)[PCI_STATUS] & PCI_STATUS_CAP_LIST) {
		next = vgpu_cfg_space(vgpu)[PCI_CAPABILITY_LIST];
		do {
			if (vgpu_cfg_space(vgpu)[next + PCI_CAP_LIST_ID] == PCI_CAP_ID_PM) {
				vgpu->cfg_space.pmcsr_off = next + PCI_PM_CTRL;
				break;
			}
			next = vgpu_cfg_space(vgpu)[next + PCI_CAP_LIST_NEXT];
		} while (next);
	}
}
0375
0376
0377
0378
0379
0380
0381
0382 void intel_vgpu_reset_cfg_space(struct intel_vgpu *vgpu)
0383 {
0384 u8 cmd = vgpu_cfg_space(vgpu)[PCI_COMMAND];
0385 bool primary = vgpu_cfg_space(vgpu)[PCI_CLASS_DEVICE] !=
0386 INTEL_GVT_PCI_CLASS_VGA_OTHER;
0387
0388 if (cmd & PCI_COMMAND_MEMORY) {
0389 trap_gttmmio(vgpu, false);
0390 map_aperture(vgpu, false);
0391 }
0392
0393
0394
0395
0396
0397
0398 intel_vgpu_init_cfg_space(vgpu, primary);
0399 }