0001
0002
0003
0004
0005
0006
0007 #include "gem/i915_gem_pm.h"
0008 #include "gem/i915_gem_ttm_pm.h"
0009 #include "gt/intel_gt.h"
0010 #include "gt/intel_gt_pm.h"
0011 #include "gt/intel_gt_requests.h"
0012
0013 #include "i915_driver.h"
0014 #include "i915_drv.h"
0015
#if defined(CONFIG_X86)
#include <asm/smp.h>
#else
/*
 * wbinvd_on_all_cpus() is an x86-only whole-cache flush primitive provided
 * by <asm/smp.h>. On non-x86 builds there is no equivalent here, so the
 * fallback merely warns that the cache flush was skipped instead of
 * silently doing nothing.
 */
#define wbinvd_on_all_cpus() \
	pr_warn(DRIVER_NAME ": Missing cache flush in %s\n", __func__)
#endif
0022
/**
 * i915_gem_suspend - first-phase GEM suspend
 * @i915: i915 device instance
 *
 * Drops the auto wakeref held for GGTT userspace faults, flushes the
 * driver workqueue, asks the GT to prepare for suspend and finally drains
 * objects queued for freeing.
 */
void i915_gem_suspend(struct drm_i915_private *i915)
{
	GEM_TRACE("%s\n", dev_name(i915->drm.dev));

	/* Release the wakeref taken on behalf of userspace mmap faults. */
	intel_wakeref_auto(&to_gt(i915)->ggtt->userfault_wakeref, 0);
	flush_workqueue(i915->wq);

	/* Quiesce the GT before we start tearing state down. */
	intel_gt_suspend_prepare(to_gt(i915));

	i915_gem_drain_freed_objects(i915);
}
0043
0044 static int lmem_restore(struct drm_i915_private *i915, u32 flags)
0045 {
0046 struct intel_memory_region *mr;
0047 int ret = 0, id;
0048
0049 for_each_memory_region(mr, i915, id) {
0050 if (mr->type == INTEL_MEMORY_LOCAL) {
0051 ret = i915_ttm_restore_region(mr, flags);
0052 if (ret)
0053 break;
0054 }
0055 }
0056
0057 return ret;
0058 }
0059
0060 static int lmem_suspend(struct drm_i915_private *i915, u32 flags)
0061 {
0062 struct intel_memory_region *mr;
0063 int ret = 0, id;
0064
0065 for_each_memory_region(mr, i915, id) {
0066 if (mr->type == INTEL_MEMORY_LOCAL) {
0067 ret = i915_ttm_backup_region(mr, flags);
0068 if (ret)
0069 break;
0070 }
0071 }
0072
0073 return ret;
0074 }
0075
0076 static void lmem_recover(struct drm_i915_private *i915)
0077 {
0078 struct intel_memory_region *mr;
0079 int id;
0080
0081 for_each_memory_region(mr, i915, id)
0082 if (mr->type == INTEL_MEMORY_LOCAL)
0083 i915_ttm_recover_region(mr);
0084 }
0085
/**
 * i915_gem_backup_suspend - back up LMEM contents and suspend GEM
 * @i915: i915 device instance
 *
 * Backs up local-memory objects to system memory in three passes,
 * quiescing the GPU in between, so that device memory contents can be
 * restored later (used for power states where LMEM is lost). On failure
 * any partial backups are rolled back via lmem_recover().
 *
 * Returns: 0 on success, negative error code on failure.
 */
int i915_gem_backup_suspend(struct drm_i915_private *i915)
{
	int ret;

	/* First pass: back up what we can while the GPU is still usable. */
	ret = lmem_suspend(i915, I915_TTM_BACKUP_ALLOW_GPU);
	if (ret)
		goto out_recover;

	i915_gem_suspend(i915);

	/*
	 * Second pass: with GEM suspended, previously busy objects may now
	 * be movable; include pinned objects, still allowing the GPU to do
	 * the copy. NOTE(review): exact semantics of I915_TTM_BACKUP_PINNED
	 * live in i915_ttm_backup_region() — confirm there.
	 */
	ret = lmem_suspend(i915, I915_TTM_BACKUP_ALLOW_GPU |
			   I915_TTM_BACKUP_PINNED);
	if (ret)
		goto out_recover;

	/* Final pass: back up whatever remains without using the GPU. */
	ret = lmem_suspend(i915, I915_TTM_BACKUP_PINNED);
	if (ret)
		goto out_recover;

	return 0;

out_recover:
	/* Undo partial backups so the regions remain usable. */
	lmem_recover(i915);

	return ret;
}
0125
/**
 * i915_gem_suspend_late - final-phase GEM suspend
 * @i915: i915 device instance
 *
 * Completes GT suspend, then marks every object on the shrink and purge
 * lists as needing a CPU-write flush on next use. If any object was not
 * cache-coherent for reads and not already in the CPU read domain, flush
 * all CPU caches so memory holds the latest data across suspend.
 */
void i915_gem_suspend_late(struct drm_i915_private *i915)
{
	struct drm_i915_gem_object *obj;
	struct list_head *phases[] = {
		&i915->mm.shrink_list,
		&i915->mm.purge_list,
		NULL
	}, **phase;
	unsigned long flags;
	bool flush = false;

	intel_gt_suspend_late(to_gt(i915));

	/*
	 * Walk both object lists under the obj_lock; accumulate whether a
	 * wbinvd is needed, but issue it only after dropping the spinlock
	 * (wbinvd_on_all_cpus() is too heavy to run with the lock held).
	 */
	spin_lock_irqsave(&i915->mm.obj_lock, flags);
	for (phase = phases; *phase; phase++) {
		list_for_each_entry(obj, *phase, mm.link) {
			/*
			 * Objects that are not coherent for CPU reads and
			 * not already in the CPU domain may have stale
			 * cachelines; flag a full cache flush.
			 */
			if (!(obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_READ))
				flush |= (obj->read_domains & I915_GEM_DOMAIN_CPU) == 0;
			__start_cpu_write(obj);
		}
	}
	spin_unlock_irqrestore(&i915->mm.obj_lock, flags);
	if (flush)
		wbinvd_on_all_cpus();
}
0171
/**
 * i915_gem_freeze - prepare GEM for hibernation
 * @i915: i915 device instance
 *
 * Shrinks all shrinkable objects to minimise the amount of memory that
 * must be captured in the hibernation image. Always succeeds.
 *
 * Returns: 0.
 */
int i915_gem_freeze(struct drm_i915_private *i915)
{
	i915_gem_shrink_all(i915);

	return 0;
}
0181
/**
 * i915_gem_freeze_late - final GEM work before the hibernation image
 * @i915: i915 device instance
 *
 * Shrinks every object (holding a runtime-pm wakeref for the duration),
 * drains objects queued for freeing, flushes all CPU caches, and marks
 * the remaining shrinkable objects for a CPU-write flush on next use so
 * their contents are consistent in the hibernation image.
 *
 * Returns: 0.
 */
int i915_gem_freeze_late(struct drm_i915_private *i915)
{
	struct drm_i915_gem_object *obj;
	intel_wakeref_t wakeref;

	/*
	 * Shrink everything we can; the wakeref keeps the device awake
	 * while the shrinker may need to touch it.
	 */
	with_intel_runtime_pm(&i915->runtime_pm, wakeref)
		i915_gem_shrink(NULL, i915, -1UL, NULL, ~0);
	i915_gem_drain_freed_objects(i915);

	/*
	 * Unconditional full cache flush before walking the remaining
	 * objects; unlike i915_gem_suspend_late() no per-object
	 * coherency check is done here.
	 */
	wbinvd_on_all_cpus();
	list_for_each_entry(obj, &i915->mm.shrink_list, mm.link)
		__start_cpu_write(obj);

	return 0;
}
0212
/**
 * i915_gem_resume - restore GEM state after suspend
 * @i915: i915 device instance
 *
 * Mirrors i915_gem_backup_suspend(): first restores LMEM contents
 * without the GPU, then resumes the GT, then performs a second restore
 * pass that is allowed to use the GPU. Restore failures are warned
 * about (GEM_WARN_ON) but not propagated — there is no way to report
 * them to callers here.
 */
void i915_gem_resume(struct drm_i915_private *i915)
{
	int ret;

	GEM_TRACE("%s\n", dev_name(i915->drm.dev));

	/* CPU-only restore pass; the GT is not up yet. */
	ret = lmem_restore(i915, 0);
	GEM_WARN_ON(ret);

	intel_gt_resume(to_gt(i915));

	/* Second pass, now allowed to use the GPU for the copies. */
	ret = lmem_restore(i915, I915_TTM_BACKUP_ALLOW_GPU);
	GEM_WARN_ON(ret);
}