#ifndef __NVKM_BUS_HWSQ_H__
#define __NVKM_BUS_HWSQ_H__
#include <subdev/bus.h>

/* State for building and executing a hardware sequencer (HWSQ) script. */
struct hwsq {
	struct nvkm_subdev *subdev;
	struct nvkm_hwsq *hwsq;
	int sequence;
};

/* Shadow of a register, or of a strided group of registers. */
struct hwsq_reg {
	int sequence;	/* matches hwsq.sequence when the shadow is current */
	bool force;	/* set by hwsq_nuke() to force the next write */
	u32 addr;
	u32 stride;	/* in bytes, between sub-registers */
	u32 mask;	/* which sub-registers to write */
	u32 data;
};

/* Describe a group of registers at 'addr' + i * 'stride' for each bit i set in 'mask'. */
static inline struct hwsq_reg
hwsq_stride(u32 addr, u32 stride, u32 mask)
{
	return (struct hwsq_reg) {
		.sequence = 0,
		.force = 0,
		.addr = addr,
		.stride = stride,
		.mask = mask,
		.data = 0xdeadbeef,
	};
}

/* Describe a pair of registers at 'addr1' and 'addr2'. */
static inline struct hwsq_reg
hwsq_reg2(u32 addr1, u32 addr2)
{
	return (struct hwsq_reg) {
		.sequence = 0,
		.force = 0,
		.addr = addr1,
		.stride = addr2 - addr1,
		.mask = 0x3,
		.data = 0xdeadbeef,
	};
}

/* Describe a single register at 'addr'. */
static inline struct hwsq_reg
hwsq_reg(u32 addr)
{
	return (struct hwsq_reg) {
		.sequence = 0,
		.force = 0,
		.addr = addr,
		.stride = 0,
		.mask = 0x1,
		.data = 0xdeadbeef,
	};
}
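
/*
 * Illustrative only (addresses are hypothetical, not taken from real
 * hardware): hwsq_reg(0x001234) shadows a single register, so one
 * write op is emitted for it; hwsq_reg2(0x001234, 0x001238) shadows
 * both registers and emits two ops per hwsq_wr32(); and
 * hwsq_stride(0x001000, 0x20, 0xf) covers the four registers at
 * 0x001000, 0x001020, 0x001040 and 0x001060.
 */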

/*
 * Begin recording a new HWSQ script; bumping the sequence counter
 * invalidates any register shadows cached by a previous script.
 */
static inline int
hwsq_init(struct hwsq *ram, struct nvkm_subdev *subdev)
{
	int ret;

	ret = nvkm_hwsq_init(subdev, &ram->hwsq);
	if (ret)
		return ret;

	ram->sequence++;
	ram->subdev = subdev;
	return 0;
}

/* Finish recording and, if 'exec' is true, execute the script. */
static inline int
hwsq_exec(struct hwsq *ram, bool exec)
{
	int ret = 0;
	if (ram->subdev) {
		ret = nvkm_hwsq_fini(&ram->hwsq, exec);
		ram->subdev = NULL;
	}
	return ret;
}

/* Return the shadowed value, re-reading the register if the shadow is stale. */
static inline u32
hwsq_rd32(struct hwsq *ram, struct hwsq_reg *reg)
{
	struct nvkm_device *device = ram->subdev->device;
	if (reg->sequence != ram->sequence)
		reg->data = nvkm_rd32(device, reg->addr);
	return reg->data;
}

/* Queue a write of 'data' to each sub-register selected by reg->mask. */
static inline void
hwsq_wr32(struct hwsq *ram, struct hwsq_reg *reg, u32 data)
{
	u32 mask, off = 0;

	reg->sequence = ram->sequence;
	reg->data = data;

	for (mask = reg->mask; mask > 0; mask = (mask & ~1) >> 1) {
		if (mask & 1)
			nvkm_hwsq_wr32(ram->hwsq, reg->addr+off, reg->data);

		off += reg->stride;
	}
}

/* Force the next hwsq_mask() on this register to emit a write. */
static inline void
hwsq_nuke(struct hwsq *ram, struct hwsq_reg *reg)
{
	reg->force = true;
}

/*
 * Read-modify-write: queue a write only if the value would change (or
 * the shadow has been nuked); returns the previous value.
 */
static inline u32
hwsq_mask(struct hwsq *ram, struct hwsq_reg *reg, u32 mask, u32 data)
{
	u32 temp = hwsq_rd32(ram, reg);
	if (temp != ((temp & ~mask) | data) || reg->force)
		hwsq_wr32(ram, reg, (temp & ~mask) | data);
	return temp;
}
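
/*
 * A minimal sketch of the read-modify-write pattern; the 'hw' pointer,
 * register offset and field values below are hypothetical:
 *
 *	struct hwsq_reg r = hwsq_reg(0x001234);
 *
 *	hwsq_mask(hw, &r, 0x00000003, 0x00000001);  // update bits 1:0 only
 *	hwsq_nuke(hw, &r);                          // force the next write
 *	hwsq_mask(hw, &r, 0x00000000, 0x00000000);  // re-emit current value
 */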

/* Set sequencer flag 'flag' to 'data'. */
static inline void
hwsq_setf(struct hwsq *ram, u8 flag, int data)
{
	nvkm_hwsq_setf(ram->hwsq, flag, data);
}

/* Wait for sequencer flag 'flag' to equal 'data'. */
static inline void
hwsq_wait(struct hwsq *ram, u8 flag, u8 data)
{
	nvkm_hwsq_wait(ram->hwsq, flag, data);
}

/* Wait for the next vertical blanking period. */
static inline void
hwsq_wait_vblank(struct hwsq *ram)
{
	nvkm_hwsq_wait_vblank(ram->hwsq);
}

/* Delay the script for roughly 'nsec' nanoseconds. */
static inline void
hwsq_nsec(struct hwsq *ram, u32 nsec)
{
	nvkm_hwsq_nsec(ram->hwsq, nsec);
}
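
/*
 * A minimal usage sketch, assuming a caller with a struct hwsq ('hw')
 * in its private state; the register offset, flag number and delay
 * below are hypothetical:
 *
 *	struct hwsq_reg r_test = hwsq_reg(0x001234);
 *	int ret;
 *
 *	ret = hwsq_init(hw, subdev);
 *	if (ret)
 *		return ret;
 *	hwsq_setf(hw, 0x10, 0);                     // drop a flag
 *	hwsq_wr32(hw, &r_test, 0x00000001);         // queue a write
 *	hwsq_nsec(hw, 2000);                        // let things settle
 *	hwsq_setf(hw, 0x10, 1);                     // raise the flag again
 *	return hwsq_exec(hw, true);                 // build and run the script
 */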
#endif