// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 */

#include <linux/string.h>

#include "i915_drv.h"
#include "intel_atomic.h"
#include "intel_display_types.h"
#include "intel_global_state.h"

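/*
 * The state of a global object is reference counted: the object itself
 * always holds a reference to its current state, and each atomic state
 * that duplicates the object holds references to the old and new copies.
 * The final reference frees the state via atomic_destroy_state().
 */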
static void __intel_atomic_global_state_free(struct kref *kref)
{
	struct intel_global_state *obj_state =
		container_of(kref, struct intel_global_state, ref);
	struct intel_global_obj *obj = obj_state->obj;

	obj->funcs->atomic_destroy_state(obj, obj_state);
}

static void intel_atomic_global_state_put(struct intel_global_state *obj_state)
{
	kref_put(&obj_state->ref, __intel_atomic_global_state_free);
}

static struct intel_global_state *
intel_atomic_global_state_get(struct intel_global_state *obj_state)
{
	kref_get(&obj_state->ref);

	return obj_state;
}

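/*
 * Register a global state object with the device. @state becomes the
 * object's initial state; it must have been allocated by the caller and
 * is released through @funcs->atomic_destroy_state() once the last
 * reference to it is dropped.
 */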
void intel_atomic_global_obj_init(struct drm_i915_private *dev_priv,
				  struct intel_global_obj *obj,
				  struct intel_global_state *state,
				  const struct intel_global_state_funcs *funcs)
{
	memset(obj, 0, sizeof(*obj));

	state->obj = obj;

	kref_init(&state->ref);

	obj->state = state;
	obj->funcs = funcs;
	list_add_tail(&obj->head, &dev_priv->global_obj_list);
}

void intel_atomic_global_obj_cleanup(struct drm_i915_private *dev_priv)
{
	struct intel_global_obj *obj, *next;

	list_for_each_entry_safe(obj, next, &dev_priv->global_obj_list, head) {
		list_del(&obj->head);

		drm_WARN_ON(&dev_priv->drm, kref_read(&obj->state->ref) != 1);
		intel_atomic_global_state_put(obj->state);
	}
}

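/*
 * Global state is protected by the CRTC modeset locks: holding every
 * CRTC lock grants exclusive (write) access, while holding any single
 * CRTC lock is sufficient for read access.
 */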
static void assert_global_state_write_locked(struct drm_i915_private *dev_priv)
{
	struct intel_crtc *crtc;

	for_each_intel_crtc(&dev_priv->drm, crtc)
		drm_modeset_lock_assert_held(&crtc->base.mutex);
}

static bool modeset_lock_is_held(struct drm_modeset_acquire_ctx *ctx,
				 struct drm_modeset_lock *lock)
{
	struct drm_modeset_lock *l;

	list_for_each_entry(l, &ctx->locked, head) {
		if (lock == l)
			return true;
	}

	return false;
}

static void assert_global_state_read_locked(struct intel_atomic_state *state)
{
	struct drm_modeset_acquire_ctx *ctx = state->base.acquire_ctx;
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct intel_crtc *crtc;

	for_each_intel_crtc(&dev_priv->drm, crtc) {
		if (modeset_lock_is_held(ctx, &crtc->base.mutex))
			return;
	}

	drm_WARN(&dev_priv->drm, 1, "Global state not read locked\n");
}

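/*
 * Get the global state of @obj for @state, duplicating the object's
 * current state on first use. Requires at least one CRTC lock (read
 * access); callers that modify the returned state must also take the
 * write lock via intel_atomic_lock_global_state().
 */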
struct intel_global_state *
intel_atomic_get_global_obj_state(struct intel_atomic_state *state,
				  struct intel_global_obj *obj)
{
	struct drm_i915_private *i915 = to_i915(state->base.dev);
	int index, num_objs, i;
	size_t size;
	struct __intel_global_objs_state *arr;
	struct intel_global_state *obj_state;

	for (i = 0; i < state->num_global_objs; i++)
		if (obj == state->global_objs[i].ptr)
			return state->global_objs[i].state;

	assert_global_state_read_locked(state);

	num_objs = state->num_global_objs + 1;
	size = sizeof(*state->global_objs) * num_objs;
	arr = krealloc(state->global_objs, size, GFP_KERNEL);
	if (!arr)
		return ERR_PTR(-ENOMEM);

	state->global_objs = arr;
	index = state->num_global_objs;
	memset(&state->global_objs[index], 0, sizeof(*state->global_objs));

	obj_state = obj->funcs->atomic_duplicate_state(obj);
	if (!obj_state)
		return ERR_PTR(-ENOMEM);

	obj_state->obj = obj;
	obj_state->changed = false;

	kref_init(&obj_state->ref);

	state->global_objs[index].state = obj_state;
	state->global_objs[index].old_state =
		intel_atomic_global_state_get(obj->state);
	state->global_objs[index].new_state = obj_state;
	state->global_objs[index].ptr = obj;
	obj_state->state = state;

	state->num_global_objs = num_objs;

	drm_dbg_atomic(&i915->drm, "Added new global object %p state %p to %p\n",
		       obj, obj_state, state);

	return obj_state;
}

struct intel_global_state *
intel_atomic_get_old_global_obj_state(struct intel_atomic_state *state,
				      struct intel_global_obj *obj)
{
	int i;

	for (i = 0; i < state->num_global_objs; i++)
		if (obj == state->global_objs[i].ptr)
			return state->global_objs[i].old_state;

	return NULL;
}

struct intel_global_state *
intel_atomic_get_new_global_obj_state(struct intel_atomic_state *state,
				      struct intel_global_obj *obj)
{
	int i;

	for (i = 0; i < state->num_global_objs; i++)
		if (obj == state->global_objs[i].ptr)
			return state->global_objs[i].new_state;

	return NULL;
}

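/*
 * Swap the newly computed global states into the objects during commit.
 * Only states marked as changed (ie. taken with the write lock) are
 * swapped; the old state stays referenced by the atomic state until
 * intel_atomic_clear_global_state() drops it.
 */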
void intel_atomic_swap_global_state(struct intel_atomic_state *state)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct intel_global_state *old_obj_state, *new_obj_state;
	struct intel_global_obj *obj;
	int i;

	for_each_oldnew_global_obj_in_state(state, obj, old_obj_state,
					    new_obj_state, i) {
		drm_WARN_ON(&dev_priv->drm, obj->state != old_obj_state);

		/*
		 * If the new state wasn't modified (and properly
		 * locked for write access) we throw it away.
		 */
		if (!new_obj_state->changed)
			continue;

		assert_global_state_write_locked(dev_priv);

		old_obj_state->state = state;
		new_obj_state->state = NULL;

		state->global_objs[i].state = old_obj_state;

		intel_atomic_global_state_put(obj->state);
		obj->state = intel_atomic_global_state_get(new_obj_state);
	}
}

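/* Drop all global object state references held by @state. */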
void intel_atomic_clear_global_state(struct intel_atomic_state *state)
{
	int i;

	for (i = 0; i < state->num_global_objs; i++) {
		intel_atomic_global_state_put(state->global_objs[i].old_state);
		intel_atomic_global_state_put(state->global_objs[i].new_state);

		state->global_objs[i].ptr = NULL;
		state->global_objs[i].state = NULL;
		state->global_objs[i].old_state = NULL;
		state->global_objs[i].new_state = NULL;
	}
	state->num_global_objs = 0;
}

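/*
 * Take the global state write lock (ie. every CRTC's modeset lock) and
 * mark @obj_state as changed so it gets swapped in during commit.
 */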
int intel_atomic_lock_global_state(struct intel_global_state *obj_state)
{
	struct intel_atomic_state *state = obj_state->state;
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct intel_crtc *crtc;

	for_each_intel_crtc(&dev_priv->drm, crtc) {
		int ret;

		ret = drm_modeset_lock(&crtc->base.mutex,
				       state->base.acquire_ctx);
		if (ret)
			return ret;
	}

	obj_state->changed = true;

	return 0;
}

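/*
 * Mark @obj_state as changed and add every CRTC's state to @state so
 * that the commit is ordered against all other commits touching any
 * CRTC.
 */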
int intel_atomic_serialize_global_state(struct intel_global_state *obj_state)
{
	struct intel_atomic_state *state = obj_state->state;
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct intel_crtc *crtc;

	for_each_intel_crtc(&dev_priv->drm, crtc) {
		struct intel_crtc_state *crtc_state;

		crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
		if (IS_ERR(crtc_state))
			return PTR_ERR(crtc_state);
	}

	obj_state->changed = true;

	return 0;
}