// SPDX-License-Identifier: GPL-2.0
/*
 * Rockchip Video Decoder VP9 backend
 *
 * Copyright (C) 2019 Collabora, Ltd.
 *	Boris Brezillon <boris.brezillon@collabora.com>
 * Copyright (C) 2021 Collabora, Ltd.
 *	Andrzej Pietrasiewicz <andrzej.p@collabora.com>
 *
 * Copyright (C) 2016 Rockchip Electronics Co., Ltd.
 *	Alpha Lin <Alpha.Lin@rock-chips.com>
 */
#include <linux/kernel.h>
#include <linux/vmalloc.h>
#include <media/v4l2-mem2mem.h>
#include <media/v4l2-vp9.h>

#include "rkvdec.h"
#include "rkvdec-regs.h"

#define RKVDEC_VP9_PROBE_SIZE		4864
#define RKVDEC_VP9_COUNT_SIZE		13232
#define RKVDEC_VP9_MAX_SEGMAP_SIZE	73728
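
/*
 * Overview of the auxiliary buffers used below: the driver fills in a
 * probability table laid out in 128-bit words before each run, and the
 * hardware writes back raw symbol counts (RKVDEC_VP9_COUNT_SIZE bytes)
 * that are fed to the v4l2-vp9 helpers for backward probability
 * adaptation. Segmentation maps are double-buffered (hence segmap[2] in
 * rkvdec_vp9_priv_tbl): judging by the SEGIDCUR/SEGIDLAST bases programmed
 * in config_registers(), the hardware reads the previous frame's map while
 * writing the current one.
 */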

struct rkvdec_vp9_intra_mode_probs {
	u8 y_mode[105];
	u8 uv_mode[23];
};

struct rkvdec_vp9_intra_only_frame_probs {
	u8 coef_intra[4][2][128];
	struct rkvdec_vp9_intra_mode_probs intra_mode[10];
};

struct rkvdec_vp9_inter_frame_probs {
	u8 y_mode[4][9];
	u8 comp_mode[5];
	u8 comp_ref[5];
	u8 single_ref[5][2];
	u8 inter_mode[7][3];
	u8 interp_filter[4][2];
	u8 padding0[11];
	u8 coef[2][4][2][128];
	u8 uv_mode_0_2[3][9];
	u8 padding1[5];
	u8 uv_mode_3_5[3][9];
	u8 padding2[5];
	u8 uv_mode_6_8[3][9];
	u8 padding3[5];
	u8 uv_mode_9[9];
	u8 padding4[7];
	u8 padding5[16];
	struct {
		u8 joint[3];
		u8 sign[2];
		u8 classes[2][10];
		u8 class0_bit[2];
		u8 bits[2][10];
		u8 class0_fr[2][2][3];
		u8 fr[2][3];
		u8 class0_hp[2];
		u8 hp[2];
	} mv;
};

struct rkvdec_vp9_probs {
	u8 partition[16][3];
	u8 pred[3];
	u8 tree[7];
	u8 skip[3];
	u8 tx32[2][3];
	u8 tx16[2][2];
	u8 tx8[2][1];
	u8 is_inter[4];
	/* 128 bit alignment */
	u8 padding0[3];
	union {
		struct rkvdec_vp9_inter_frame_probs inter;
		struct rkvdec_vp9_intra_only_frame_probs intra_only;
	};
};

/* Data structure describing auxiliary buffer format. */
struct rkvdec_vp9_priv_tbl {
	struct rkvdec_vp9_probs probs;
	u8 segmap[2][RKVDEC_VP9_MAX_SEGMAP_SIZE];
};

struct rkvdec_vp9_refs_counts {
	u32 eob[2];
	u32 coeff[3];
};

struct rkvdec_vp9_inter_frame_symbol_counts {
	u32 partition[16][4];
	u32 skip[3][2];
	u32 inter[4][2];
	u32 tx32p[2][4];
	u32 tx16p[2][4];
	u32 tx8p[2][2];
	u32 y_mode[4][10];
	u32 uv_mode[10][10];
	u32 comp[5][2];
	u32 comp_ref[5][2];
	u32 single_ref[5][2][2];
	u32 mv_mode[7][4];
	u32 filter[4][3];
	u32 mv_joint[4];
	u32 sign[2][2];
	/* add 1 element for alignment */
	u32 classes[2][11 + 1];
	u32 class0[2][2];
	u32 bits[2][10][2];
	u32 class0_fp[2][2][4];
	u32 fp[2][4];
	u32 class0_hp[2][2];
	u32 hp[2][2];
	struct rkvdec_vp9_refs_counts ref_cnt[2][4][2][6][6];
};

struct rkvdec_vp9_intra_frame_symbol_counts {
	u32 partition[4][4][4];
	u32 skip[3][2];
	u32 intra[4][2];
	u32 tx32p[2][4];
	u32 tx16p[2][4];
	u32 tx8p[2][2];
	struct rkvdec_vp9_refs_counts ref_cnt[2][4][2][6][6];
};

struct rkvdec_vp9_run {
	struct rkvdec_run base;
	const struct v4l2_ctrl_vp9_frame *decode_params;
};

struct rkvdec_vp9_frame_info {
	u32 valid : 1;
	u32 segmapid : 1;
	u32 frame_context_idx : 2;
	u32 reference_mode : 2;
	u32 tx_mode : 3;
	u32 interpolation_filter : 3;
	u32 flags;
	u64 timestamp;
	struct v4l2_vp9_segmentation seg;
	struct v4l2_vp9_loop_filter lf;
};

struct rkvdec_vp9_ctx {
	struct rkvdec_aux_buf priv_tbl;
	struct rkvdec_aux_buf count_tbl;
	struct v4l2_vp9_frame_symbol_counts inter_cnts;
	struct v4l2_vp9_frame_symbol_counts intra_cnts;
	struct v4l2_vp9_frame_context probability_tables;
	struct v4l2_vp9_frame_context frame_context[4];
	struct rkvdec_vp9_frame_info cur;
	struct rkvdec_vp9_frame_info last;
};
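
/*
 * Pack one 6x6x3 block of coefficient probabilities into the layout the
 * hardware expects: every 27 probability bytes are followed by 5 padding
 * bytes, i.e. padded to 32 bytes (two 128-bit words). One plane is
 * 6 * 6 * 3 = 108 payload bytes, exactly four 27-byte groups, so it
 * occupies 4 * 32 = 128 bytes -- matching the [128] plane stride used in
 * the prob structs above.
 */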
static void write_coeff_plane(const u8 coef[6][6][3], u8 *coeff_plane)
{
	unsigned int idx = 0, byte_count = 0;
	int k, m, n;
	u8 p;

	for (k = 0; k < 6; k++) {
		for (m = 0; m < 6; m++) {
			for (n = 0; n < 3; n++) {
				p = coef[k][m][n];
				coeff_plane[idx++] = p;
				byte_count++;
				if (byte_count == 27) {
					idx += 5;
					byte_count = 0;
				}
			}
		}
	}
}

static void init_intra_only_probs(struct rkvdec_ctx *ctx,
				  const struct rkvdec_vp9_run *run)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct rkvdec_vp9_priv_tbl *tbl = vp9_ctx->priv_tbl.cpu;
	struct rkvdec_vp9_intra_only_frame_probs *rkprobs;
	const struct v4l2_vp9_frame_context *probs;
	unsigned int i, j, k;

	rkprobs = &tbl->probs.intra_only;
	probs = &vp9_ctx->probability_tables;

	/*
	 * intra-only probs: 149 x 128 bits, aligned to 152 x 128 bits.
	 * Coeff-related probs: 64 x 128 bits.
	 */
	for (i = 0; i < ARRAY_SIZE(probs->coef); i++) {
		for (j = 0; j < ARRAY_SIZE(probs->coef[0]); j++)
			write_coeff_plane(probs->coef[i][j][0],
					  rkprobs->coef_intra[i][j]);
	}

	/* intra mode probs: 80 x 128 bits */
	for (i = 0; i < ARRAY_SIZE(v4l2_vp9_kf_y_mode_prob); i++) {
		unsigned int byte_count = 0;
		int idx = 0;

		/* vp9_kf_y_mode_prob */
		for (j = 0; j < ARRAY_SIZE(v4l2_vp9_kf_y_mode_prob[0]); j++) {
			for (k = 0; k < ARRAY_SIZE(v4l2_vp9_kf_y_mode_prob[0][0]);
			     k++) {
				u8 val = v4l2_vp9_kf_y_mode_prob[i][j][k];

				rkprobs->intra_mode[i].y_mode[idx++] = val;
				byte_count++;
				if (byte_count == 27) {
					byte_count = 0;
					idx += 5;
				}
			}
		}
	}

	/*
	 * The keyframe UV mode probs are laid out linearly, in 23-byte
	 * chunks spread across the first intra_mode entries.
	 */
	for (i = 0; i < sizeof(v4l2_vp9_kf_uv_mode_prob); ++i) {
		const u8 *ptr = (const u8 *)v4l2_vp9_kf_uv_mode_prob;

		rkprobs->intra_mode[i / 23].uv_mode[i % 23] = ptr[i];
	}
}

static void init_inter_probs(struct rkvdec_ctx *ctx,
			     const struct rkvdec_vp9_run *run)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct rkvdec_vp9_priv_tbl *tbl = vp9_ctx->priv_tbl.cpu;
	struct rkvdec_vp9_inter_frame_probs *rkprobs;
	const struct v4l2_vp9_frame_context *probs;
	unsigned int i, j, k;

	rkprobs = &tbl->probs.inter;
	probs = &vp9_ctx->probability_tables;

	/*
	 * inter probs: 151 x 128 bits, aligned to 152 x 128 bits.
	 *
	 * intra y mode & inter block info: 6 x 128 bits
	 */
	memcpy(rkprobs->y_mode, probs->y_mode, sizeof(rkprobs->y_mode));
	memcpy(rkprobs->comp_mode, probs->comp_mode,
	       sizeof(rkprobs->comp_mode));
	memcpy(rkprobs->comp_ref, probs->comp_ref,
	       sizeof(rkprobs->comp_ref));
	memcpy(rkprobs->single_ref, probs->single_ref,
	       sizeof(rkprobs->single_ref));
	memcpy(rkprobs->inter_mode, probs->inter_mode,
	       sizeof(rkprobs->inter_mode));
	memcpy(rkprobs->interp_filter, probs->interp_filter,
	       sizeof(rkprobs->interp_filter));

	/* coeff-related probs: 128 x 128 bits */
	for (i = 0; i < ARRAY_SIZE(probs->coef); i++) {
		for (j = 0; j < ARRAY_SIZE(probs->coef[0]); j++) {
			for (k = 0; k < ARRAY_SIZE(probs->coef[0][0]); k++)
				write_coeff_plane(probs->coef[i][j][k],
						  rkprobs->coef[k][i][j]);
		}
	}

	/* intra uv mode probs */
	memcpy(rkprobs->uv_mode_0_2, &probs->uv_mode[0],
	       sizeof(rkprobs->uv_mode_0_2));
	memcpy(rkprobs->uv_mode_3_5, &probs->uv_mode[3],
	       sizeof(rkprobs->uv_mode_3_5));
	memcpy(rkprobs->uv_mode_6_8, &probs->uv_mode[6],
	       sizeof(rkprobs->uv_mode_6_8));
	memcpy(rkprobs->uv_mode_9, &probs->uv_mode[9],
	       sizeof(rkprobs->uv_mode_9));

	/* mv-related probs */
	memcpy(rkprobs->mv.joint, probs->mv.joint,
	       sizeof(rkprobs->mv.joint));
	memcpy(rkprobs->mv.sign, probs->mv.sign,
	       sizeof(rkprobs->mv.sign));
	memcpy(rkprobs->mv.classes, probs->mv.classes,
	       sizeof(rkprobs->mv.classes));
	memcpy(rkprobs->mv.class0_bit, probs->mv.class0_bit,
	       sizeof(rkprobs->mv.class0_bit));
	memcpy(rkprobs->mv.bits, probs->mv.bits,
	       sizeof(rkprobs->mv.bits));
	memcpy(rkprobs->mv.class0_fr, probs->mv.class0_fr,
	       sizeof(rkprobs->mv.class0_fr));
	memcpy(rkprobs->mv.fr, probs->mv.fr,
	       sizeof(rkprobs->mv.fr));
	memcpy(rkprobs->mv.class0_hp, probs->mv.class0_hp,
	       sizeof(rkprobs->mv.class0_hp));
	memcpy(rkprobs->mv.hp, probs->mv.hp,
	       sizeof(rkprobs->mv.hp));
}

static void init_probs(struct rkvdec_ctx *ctx,
		       const struct rkvdec_vp9_run *run)
{
	const struct v4l2_ctrl_vp9_frame *dec_params;
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct rkvdec_vp9_priv_tbl *tbl = vp9_ctx->priv_tbl.cpu;
	struct rkvdec_vp9_probs *rkprobs = &tbl->probs;
	const struct v4l2_vp9_segmentation *seg;
	const struct v4l2_vp9_frame_context *probs;
	bool intra_only;

	dec_params = run->decode_params;
	probs = &vp9_ctx->probability_tables;
	seg = &dec_params->seg;

	memset(rkprobs, 0, sizeof(*rkprobs));

	intra_only = !!(dec_params->flags &
			(V4L2_VP9_FRAME_FLAG_KEY_FRAME |
			 V4L2_VP9_FRAME_FLAG_INTRA_ONLY));

	/* sb info: 5 x 128 bits */
	memcpy(rkprobs->partition,
	       intra_only ? v4l2_vp9_kf_partition_probs : probs->partition,
	       sizeof(rkprobs->partition));

	memcpy(rkprobs->pred, seg->pred_probs, sizeof(rkprobs->pred));
	memcpy(rkprobs->tree, seg->tree_probs, sizeof(rkprobs->tree));
	memcpy(rkprobs->skip, probs->skip, sizeof(rkprobs->skip));
	memcpy(rkprobs->tx32, probs->tx32, sizeof(rkprobs->tx32));
	memcpy(rkprobs->tx16, probs->tx16, sizeof(rkprobs->tx16));
	memcpy(rkprobs->tx8, probs->tx8, sizeof(rkprobs->tx8));
	memcpy(rkprobs->is_inter, probs->is_inter, sizeof(rkprobs->is_inter));

	if (intra_only)
		init_intra_only_probs(ctx, run);
	else
		init_inter_probs(ctx, run);
}
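
/*
 * Register sets for the three VP9 reference slots (LAST, GOLDEN, ALTREF).
 * Only the LAST slot has a dedicated YUV stride register; for the other
 * two, a zero reg_yuv_stride means there is nothing to program (see
 * config_ref_registers()).
 */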
struct rkvdec_vp9_ref_reg {
	u32 reg_frm_size;
	u32 reg_hor_stride;
	u32 reg_y_stride;
	u32 reg_yuv_stride;
	u32 reg_ref_base;
};

static struct rkvdec_vp9_ref_reg ref_regs[] = {
	{
		.reg_frm_size = RKVDEC_REG_VP9_FRAME_SIZE(0),
		.reg_hor_stride = RKVDEC_VP9_HOR_VIRSTRIDE(0),
		.reg_y_stride = RKVDEC_VP9_LAST_FRAME_YSTRIDE,
		.reg_yuv_stride = RKVDEC_VP9_LAST_FRAME_YUVSTRIDE,
		.reg_ref_base = RKVDEC_REG_VP9_LAST_FRAME_BASE,
	},
	{
		.reg_frm_size = RKVDEC_REG_VP9_FRAME_SIZE(1),
		.reg_hor_stride = RKVDEC_VP9_HOR_VIRSTRIDE(1),
		.reg_y_stride = RKVDEC_VP9_GOLDEN_FRAME_YSTRIDE,
		.reg_yuv_stride = 0,
		.reg_ref_base = RKVDEC_REG_VP9_GOLDEN_FRAME_BASE,
	},
	{
		.reg_frm_size = RKVDEC_REG_VP9_FRAME_SIZE(2),
		.reg_hor_stride = RKVDEC_VP9_HOR_VIRSTRIDE(2),
		.reg_y_stride = RKVDEC_VP9_ALTREF_FRAME_YSTRIDE,
		.reg_yuv_stride = 0,
		.reg_ref_base = RKVDEC_REG_VP9_ALTREF_FRAME_BASE,
	}
};

static struct rkvdec_decoded_buffer *
get_ref_buf(struct rkvdec_ctx *ctx, struct vb2_v4l2_buffer *dst, u64 timestamp)
{
	struct v4l2_m2m_ctx *m2m_ctx = ctx->fh.m2m_ctx;
	struct vb2_queue *cap_q = &m2m_ctx->cap_q_ctx.q;
	struct vb2_buffer *buf;

	/*
	 * If a ref is unused or invalid, the address of the current
	 * destination buffer is returned.
	 */
	buf = vb2_find_buffer(cap_q, timestamp);
	if (!buf)
		buf = &dst->vb2_buf;

	return vb2_to_rkvdec_decoded_buf(buf);
}
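
/*
 * Judging by this helper and the RKVDEC_VP9_REF_COLMV_BASE write in
 * config_registers(), the co-located motion vectors live directly after
 * the decoded luma/chroma planes in the same capture buffer: the MV base
 * is the plane base plus the 64-row-aligned YUV payload size.
 */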
static dma_addr_t get_mv_base_addr(struct rkvdec_decoded_buffer *buf)
{
	unsigned int aligned_pitch, aligned_height, yuv_len;

	aligned_height = round_up(buf->vp9.height, 64);
	aligned_pitch = round_up(buf->vp9.width * buf->vp9.bit_depth, 512) / 8;
	yuv_len = (aligned_height * aligned_pitch * 3) / 2;

	return vb2_dma_contig_plane_dma_addr(&buf->base.vb.vb2_buf, 0) +
	       yuv_len;
}

static void config_ref_registers(struct rkvdec_ctx *ctx,
				 const struct rkvdec_vp9_run *run,
				 struct rkvdec_decoded_buffer *ref_buf,
				 struct rkvdec_vp9_ref_reg *ref_reg)
{
	unsigned int aligned_pitch, aligned_height, y_len, yuv_len;
	struct rkvdec_dev *rkvdec = ctx->dev;

	aligned_height = round_up(ref_buf->vp9.height, 64);
	writel_relaxed(RKVDEC_VP9_FRAMEWIDTH(ref_buf->vp9.width) |
		       RKVDEC_VP9_FRAMEHEIGHT(ref_buf->vp9.height),
		       rkvdec->regs + ref_reg->reg_frm_size);

	writel_relaxed(vb2_dma_contig_plane_dma_addr(&ref_buf->base.vb.vb2_buf, 0),
		       rkvdec->regs + ref_reg->reg_ref_base);

	if (&ref_buf->base.vb == run->base.bufs.dst)
		return;

	aligned_pitch = round_up(ref_buf->vp9.width * ref_buf->vp9.bit_depth, 512) / 8;
	y_len = aligned_height * aligned_pitch;
	yuv_len = (y_len * 3) / 2;

	writel_relaxed(RKVDEC_HOR_Y_VIRSTRIDE(aligned_pitch / 16) |
		       RKVDEC_HOR_UV_VIRSTRIDE(aligned_pitch / 16),
		       rkvdec->regs + ref_reg->reg_hor_stride);
	writel_relaxed(RKVDEC_VP9_REF_YSTRIDE(y_len / 16),
		       rkvdec->regs + ref_reg->reg_y_stride);

	if (!ref_reg->reg_yuv_stride)
		return;

	writel_relaxed(RKVDEC_VP9_REF_YUVSTRIDE(yuv_len / 16),
		       rkvdec->regs + ref_reg->reg_yuv_stride);
}
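
/*
 * Program the per-segment feature registers. Note that the segmentation
 * parameters of the previous frame are used whenever one is valid;
 * presumably the hardware applies the feature data of the frame the
 * segment map was coded against.
 */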
static void config_seg_registers(struct rkvdec_ctx *ctx, unsigned int segid)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	const struct v4l2_vp9_segmentation *seg;
	struct rkvdec_dev *rkvdec = ctx->dev;
	s16 feature_val;
	int feature_id;
	u32 val = 0;

	seg = vp9_ctx->last.valid ? &vp9_ctx->last.seg : &vp9_ctx->cur.seg;
	feature_id = V4L2_VP9_SEG_LVL_ALT_Q;
	if (v4l2_vp9_seg_feat_enabled(seg->feature_enabled, feature_id, segid)) {
		feature_val = seg->feature_data[segid][feature_id];
		val |= RKVDEC_SEGID_FRAME_QP_DELTA_EN(1) |
		       RKVDEC_SEGID_FRAME_QP_DELTA(feature_val);
	}

	feature_id = V4L2_VP9_SEG_LVL_ALT_L;
	if (v4l2_vp9_seg_feat_enabled(seg->feature_enabled, feature_id, segid)) {
		feature_val = seg->feature_data[segid][feature_id];
		val |= RKVDEC_SEGID_FRAME_LOOPFILTER_VALUE_EN(1) |
		       RKVDEC_SEGID_FRAME_LOOPFILTER_VALUE(feature_val);
	}

	feature_id = V4L2_VP9_SEG_LVL_REF_FRAME;
	if (v4l2_vp9_seg_feat_enabled(seg->feature_enabled, feature_id, segid)) {
		feature_val = seg->feature_data[segid][feature_id];
		val |= RKVDEC_SEGID_REFERINFO_EN(1) |
		       RKVDEC_SEGID_REFERINFO(feature_val);
	}

	feature_id = V4L2_VP9_SEG_LVL_SKIP;
	if (v4l2_vp9_seg_feat_enabled(seg->feature_enabled, feature_id, segid))
		val |= RKVDEC_SEGID_FRAME_SKIP_EN(1);

	if (!segid &&
	    (seg->flags & V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE))
		val |= RKVDEC_SEGID_ABS_DELTA(1);

	writel_relaxed(val, rkvdec->regs + RKVDEC_VP9_SEGID_GRP(segid));
}

static void update_dec_buf_info(struct rkvdec_decoded_buffer *buf,
				const struct v4l2_ctrl_vp9_frame *dec_params)
{
	buf->vp9.width = dec_params->frame_width_minus_1 + 1;
	buf->vp9.height = dec_params->frame_height_minus_1 + 1;
	buf->vp9.bit_depth = dec_params->bit_depth;
}

static void update_ctx_cur_info(struct rkvdec_vp9_ctx *vp9_ctx,
				struct rkvdec_decoded_buffer *buf,
				const struct v4l2_ctrl_vp9_frame *dec_params)
{
	vp9_ctx->cur.valid = true;
	vp9_ctx->cur.reference_mode = dec_params->reference_mode;
	vp9_ctx->cur.interpolation_filter = dec_params->interpolation_filter;
	vp9_ctx->cur.flags = dec_params->flags;
	vp9_ctx->cur.timestamp = buf->base.vb.vb2_buf.timestamp;
	vp9_ctx->cur.seg = dec_params->seg;
	vp9_ctx->cur.lf = dec_params->lf;
}

static void update_ctx_last_info(struct rkvdec_vp9_ctx *vp9_ctx)
{
	vp9_ctx->last = vp9_ctx->cur;
}

static void config_registers(struct rkvdec_ctx *ctx,
			     const struct rkvdec_vp9_run *run)
{
	unsigned int y_len, uv_len, yuv_len, bit_depth, aligned_height, aligned_pitch, stream_len;
	const struct v4l2_ctrl_vp9_frame *dec_params;
	struct rkvdec_decoded_buffer *ref_bufs[3];
	struct rkvdec_decoded_buffer *dst, *last, *mv_ref;
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	u32 val, last_frame_info = 0;
	const struct v4l2_vp9_segmentation *seg;
	struct rkvdec_dev *rkvdec = ctx->dev;
	dma_addr_t addr;
	bool intra_only;
	unsigned int i;

	dec_params = run->decode_params;
	dst = vb2_to_rkvdec_decoded_buf(&run->base.bufs.dst->vb2_buf);
	ref_bufs[0] = get_ref_buf(ctx, &dst->base.vb, dec_params->last_frame_ts);
	ref_bufs[1] = get_ref_buf(ctx, &dst->base.vb, dec_params->golden_frame_ts);
	ref_bufs[2] = get_ref_buf(ctx, &dst->base.vb, dec_params->alt_frame_ts);

	if (vp9_ctx->last.valid)
		last = get_ref_buf(ctx, &dst->base.vb, vp9_ctx->last.timestamp);
	else
		last = dst;

	update_dec_buf_info(dst, dec_params);
	update_ctx_cur_info(vp9_ctx, dst, dec_params);
	seg = &dec_params->seg;

	intra_only = !!(dec_params->flags &
			(V4L2_VP9_FRAME_FLAG_KEY_FRAME |
			 V4L2_VP9_FRAME_FLAG_INTRA_ONLY));

	writel_relaxed(RKVDEC_MODE(RKVDEC_MODE_VP9),
		       rkvdec->regs + RKVDEC_REG_SYSCTRL);

	bit_depth = dec_params->bit_depth;
	aligned_height = round_up(ctx->decoded_fmt.fmt.pix_mp.height, 64);

	aligned_pitch = round_up(ctx->decoded_fmt.fmt.pix_mp.width *
				 bit_depth,
				 512) / 8;
	y_len = aligned_height * aligned_pitch;
	uv_len = y_len / 2;
	yuv_len = y_len + uv_len;

	writel_relaxed(RKVDEC_Y_HOR_VIRSTRIDE(aligned_pitch / 16) |
		       RKVDEC_UV_HOR_VIRSTRIDE(aligned_pitch / 16),
		       rkvdec->regs + RKVDEC_REG_PICPAR);
	writel_relaxed(RKVDEC_Y_VIRSTRIDE(y_len / 16),
		       rkvdec->regs + RKVDEC_REG_Y_VIRSTRIDE);
	writel_relaxed(RKVDEC_YUV_VIRSTRIDE(yuv_len / 16),
		       rkvdec->regs + RKVDEC_REG_YUV_VIRSTRIDE);

	stream_len = vb2_get_plane_payload(&run->base.bufs.src->vb2_buf, 0);
	writel_relaxed(RKVDEC_STRM_LEN(stream_len),
		       rkvdec->regs + RKVDEC_REG_STRM_LEN);

	/*
	 * Reset the count buffer: the decoder only outputs intra-related
	 * syntax counts when decoding an intra frame, but the entropy
	 * update needs all the probabilities refreshed.
	 */
	if (intra_only)
		memset(vp9_ctx->count_tbl.cpu, 0, vp9_ctx->count_tbl.size);

	vp9_ctx->cur.segmapid = vp9_ctx->last.segmapid;
	if (!intra_only &&
	    !(dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT) &&
	    (!(seg->flags & V4L2_VP9_SEGMENTATION_FLAG_ENABLED) ||
	     (seg->flags & V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP)))
		vp9_ctx->cur.segmapid++;

	for (i = 0; i < ARRAY_SIZE(ref_bufs); i++)
		config_ref_registers(ctx, run, ref_bufs[i], &ref_regs[i]);

	for (i = 0; i < 8; i++)
		config_seg_registers(ctx, i);

	writel_relaxed(RKVDEC_VP9_TX_MODE(vp9_ctx->cur.tx_mode) |
		       RKVDEC_VP9_FRAME_REF_MODE(dec_params->reference_mode),
		       rkvdec->regs + RKVDEC_VP9_CPRHEADER_CONFIG);

	if (!intra_only) {
		const struct v4l2_vp9_loop_filter *lf;
		s8 delta;

		if (vp9_ctx->last.valid)
			lf = &vp9_ctx->last.lf;
		else
			lf = &vp9_ctx->cur.lf;

		val = 0;
		for (i = 0; i < ARRAY_SIZE(lf->ref_deltas); i++) {
			delta = lf->ref_deltas[i];
			val |= RKVDEC_REF_DELTAS_LASTFRAME(i, delta);
		}

		writel_relaxed(val,
			       rkvdec->regs + RKVDEC_VP9_REF_DELTAS_LASTFRAME);

		for (i = 0; i < ARRAY_SIZE(lf->mode_deltas); i++) {
			delta = lf->mode_deltas[i];
			last_frame_info |= RKVDEC_MODE_DELTAS_LASTFRAME(i,
									delta);
		}
	}

	if (vp9_ctx->last.valid && !intra_only &&
	    vp9_ctx->last.seg.flags & V4L2_VP9_SEGMENTATION_FLAG_ENABLED)
		last_frame_info |= RKVDEC_SEG_EN_LASTFRAME;

	if (vp9_ctx->last.valid &&
	    vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_SHOW_FRAME)
		last_frame_info |= RKVDEC_LAST_SHOW_FRAME;

	if (vp9_ctx->last.valid &&
	    vp9_ctx->last.flags &
	    (V4L2_VP9_FRAME_FLAG_KEY_FRAME | V4L2_VP9_FRAME_FLAG_INTRA_ONLY))
		last_frame_info |= RKVDEC_LAST_INTRA_ONLY;

	if (vp9_ctx->last.valid &&
	    last->vp9.width == dst->vp9.width &&
	    last->vp9.height == dst->vp9.height)
		last_frame_info |= RKVDEC_LAST_WIDHHEIGHT_EQCUR;

	writel_relaxed(last_frame_info,
		       rkvdec->regs + RKVDEC_VP9_INFO_LASTFRAME);

	writel_relaxed(stream_len - dec_params->compressed_header_size -
		       dec_params->uncompressed_header_size,
		       rkvdec->regs + RKVDEC_VP9_LASTTILE_SIZE);

	for (i = 0; !intra_only && i < ARRAY_SIZE(ref_bufs); i++) {
		unsigned int refw = ref_bufs[i]->vp9.width;
		unsigned int refh = ref_bufs[i]->vp9.height;
		u32 hscale, vscale;

		hscale = (refw << 14) / dst->vp9.width;
		vscale = (refh << 14) / dst->vp9.height;
		writel_relaxed(RKVDEC_VP9_REF_HOR_SCALE(hscale) |
			       RKVDEC_VP9_REF_VER_SCALE(vscale),
			       rkvdec->regs + RKVDEC_VP9_REF_SCALE(i));
	}

	addr = vb2_dma_contig_plane_dma_addr(&dst->base.vb.vb2_buf, 0);
	writel_relaxed(addr, rkvdec->regs + RKVDEC_REG_DECOUT_BASE);
	addr = vb2_dma_contig_plane_dma_addr(&run->base.bufs.src->vb2_buf, 0);
	writel_relaxed(addr, rkvdec->regs + RKVDEC_REG_STRM_RLC_BASE);
	writel_relaxed(vp9_ctx->priv_tbl.dma +
		       offsetof(struct rkvdec_vp9_priv_tbl, probs),
		       rkvdec->regs + RKVDEC_REG_CABACTBL_PROB_BASE);
	writel_relaxed(vp9_ctx->count_tbl.dma,
		       rkvdec->regs + RKVDEC_REG_VP9COUNT_BASE);

	writel_relaxed(vp9_ctx->priv_tbl.dma +
		       offsetof(struct rkvdec_vp9_priv_tbl, segmap) +
		       (RKVDEC_VP9_MAX_SEGMAP_SIZE * vp9_ctx->cur.segmapid),
		       rkvdec->regs + RKVDEC_REG_VP9_SEGIDCUR_BASE);
	writel_relaxed(vp9_ctx->priv_tbl.dma +
		       offsetof(struct rkvdec_vp9_priv_tbl, segmap) +
		       (RKVDEC_VP9_MAX_SEGMAP_SIZE * (!vp9_ctx->cur.segmapid)),
		       rkvdec->regs + RKVDEC_REG_VP9_SEGIDLAST_BASE);

	if (!intra_only &&
	    !(dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT) &&
	    vp9_ctx->last.valid)
		mv_ref = last;
	else
		mv_ref = dst;

	writel_relaxed(get_mv_base_addr(mv_ref),
		       rkvdec->regs + RKVDEC_VP9_REF_COLMV_BASE);

	writel_relaxed(ctx->decoded_fmt.fmt.pix_mp.width |
		       (ctx->decoded_fmt.fmt.pix_mp.height << 16),
		       rkvdec->regs + RKVDEC_REG_PERFORMANCE_CYCLE);
}

static int validate_dec_params(struct rkvdec_ctx *ctx,
			       const struct v4l2_ctrl_vp9_frame *dec_params)
{
	unsigned int aligned_width, aligned_height;

	/* We only support profile 0. */
	if (dec_params->profile != 0) {
		dev_err(ctx->dev->dev, "unsupported profile %d\n",
			dec_params->profile);
		return -EINVAL;
	}

	aligned_width = round_up(dec_params->frame_width_minus_1 + 1, 64);
	aligned_height = round_up(dec_params->frame_height_minus_1 + 1, 64);

	/*
	 * Userspace is expected to negotiate a new decoded format when the
	 * resolution changes, so reject bitstreams whose aligned resolution
	 * doesn't match the currently configured one.
	 */
	if (aligned_width != ctx->decoded_fmt.fmt.pix_mp.width ||
	    aligned_height != ctx->decoded_fmt.fmt.pix_mp.height) {
		dev_err(ctx->dev->dev,
			"unexpected bitstream resolution %dx%d\n",
			dec_params->frame_width_minus_1 + 1,
			dec_params->frame_height_minus_1 + 1);
		return -EINVAL;
	}

	return 0;
}

static int rkvdec_vp9_run_preamble(struct rkvdec_ctx *ctx,
				   struct rkvdec_vp9_run *run)
{
	const struct v4l2_ctrl_vp9_frame *dec_params;
	const struct v4l2_ctrl_vp9_compressed_hdr *prob_updates;
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct v4l2_ctrl *ctrl;
	unsigned int fctx_idx;
	int ret;

	/* v4l2-specific stuff */
	rkvdec_run_preamble(ctx, &run->base);

	ctrl = v4l2_ctrl_find(&ctx->ctrl_hdl,
			      V4L2_CID_STATELESS_VP9_FRAME);
	if (WARN_ON(!ctrl))
		return -EINVAL;
	dec_params = ctrl->p_cur.p;

	ret = validate_dec_params(ctx, dec_params);
	if (ret)
		return ret;

	run->decode_params = dec_params;

	ctrl = v4l2_ctrl_find(&ctx->ctrl_hdl, V4L2_CID_STATELESS_VP9_COMPRESSED_HDR);
	if (WARN_ON(!ctrl))
		return -EINVAL;
	prob_updates = ctrl->p_cur.p;
	vp9_ctx->cur.tx_mode = prob_updates->tx_mode;

	/*
	 * VP9 entropy coding uses four persistent frame contexts. Keyframes,
	 * intra-only frames and error-resilient frames reset some or all of
	 * them (see "reset_frame_context" in the VP9 spec);
	 * v4l2_vp9_reset_frame_ctx() applies those rules to the contexts
	 * buffered in vp9_ctx->frame_context and returns the index of the
	 * context the current frame uses.
	 */
	fctx_idx = v4l2_vp9_reset_frame_ctx(dec_params, vp9_ctx->frame_context);
	vp9_ctx->cur.frame_context_idx = fctx_idx;

	/* 6.1 frame(): load_probs() and load_probs2() */
	vp9_ctx->probability_tables = vp9_ctx->frame_context[fctx_idx];

	/*
	 * Apply the forward probability updates signalled in the compressed
	 * header on top of the selected context. Only this working copy is
	 * modified here; the saved contexts are refreshed in
	 * rkvdec_vp9_done(), after the backward adaptation based on the
	 * symbol counts produced by the hardware.
	 */
	v4l2_vp9_fw_update_probs(&vp9_ctx->probability_tables, prob_updates, dec_params);

	return 0;
}

static int rkvdec_vp9_run(struct rkvdec_ctx *ctx)
{
	struct rkvdec_dev *rkvdec = ctx->dev;
	struct rkvdec_vp9_run run = { };
	int ret;

	ret = rkvdec_vp9_run_preamble(ctx, &run);
	if (ret) {
		rkvdec_run_postamble(ctx, &run.base);
		return ret;
	}

	/* Prepare probs. */
	init_probs(ctx, &run);

	/* Configure hardware registers. */
	config_registers(ctx, &run);

	rkvdec_run_postamble(ctx, &run.base);

	schedule_delayed_work(&rkvdec->watchdog_work, msecs_to_jiffies(2000));

	writel(1, rkvdec->regs + RKVDEC_REG_PREF_LUMA_CACHE_COMMAND);
	writel(1, rkvdec->regs + RKVDEC_REG_PREF_CHR_CACHE_COMMAND);

	writel(0xe, rkvdec->regs + RKVDEC_REG_STRMD_ERR_EN);

	/* Start decoding! */
	writel(RKVDEC_INTERRUPT_DEC_E | RKVDEC_CONFIG_DEC_CLK_GATE_E |
	       RKVDEC_TIMEOUT_E | RKVDEC_BUF_EMPTY_E,
	       rkvdec->regs + RKVDEC_REG_INTERRUPT);

	return 0;
}

#define copy_tx_and_skip(p1, p2)				\
do {								\
	memcpy((p1)->tx8, (p2)->tx8, sizeof((p1)->tx8));	\
	memcpy((p1)->tx16, (p2)->tx16, sizeof((p1)->tx16));	\
	memcpy((p1)->tx32, (p2)->tx32, sizeof((p1)->tx32));	\
	memcpy((p1)->skip, (p2)->skip, sizeof((p1)->skip));	\
} while (0)
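
/*
 * Backward probability adaptation, modelled on VP9 spec 6.1.2
 * refresh_probs(): reload the saved frame context, adapt it using the
 * symbol counts the hardware wrote back, and store the result in the
 * context slot this frame referred to. For intra frames, the
 * forward-updated TX and skip probs must survive the reload, which is
 * what copy_tx_and_skip() above is for.
 */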
static void rkvdec_vp9_done(struct rkvdec_ctx *ctx,
			    struct vb2_v4l2_buffer *src_buf,
			    struct vb2_v4l2_buffer *dst_buf,
			    enum vb2_buffer_state result)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	unsigned int fctx_idx;

	/* Nothing to adapt on a failed decode run. */
	if (result == VB2_BUF_STATE_ERROR)
		goto out_update_last;

	/*
	 * VP9 spec, 6.1.2 refresh_probs(): the saved frame contexts are
	 * only refreshed when refresh_frame_context is set (error-resilient
	 * frames never set it), and the backward adaptation step is skipped
	 * in frame-parallel decoding mode.
	 */
	if (!(vp9_ctx->cur.flags & V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX))
		goto out_update_last;

	fctx_idx = vp9_ctx->cur.frame_context_idx;

	if (!(vp9_ctx->cur.flags & V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE)) {
		struct v4l2_vp9_frame_context *probs = &vp9_ctx->probability_tables;
		bool frame_is_intra = vp9_ctx->cur.flags &
		    (V4L2_VP9_FRAME_FLAG_KEY_FRAME | V4L2_VP9_FRAME_FLAG_INTRA_ONLY);
		struct tx_and_skip {
			u8 tx8[2][1];
			u8 tx16[2][2];
			u8 tx32[2][3];
			u8 skip[3];
		} _tx_skip, *tx_skip = &_tx_skip;
		struct v4l2_vp9_frame_symbol_counts *counts;

		/* buffer the forward-updated TX and skip probs */
		if (frame_is_intra)
			copy_tx_and_skip(tx_skip, probs);

		/* 6.1.2 refresh_probs(): load_probs() and load_probs2() */
		*probs = vp9_ctx->frame_context[fctx_idx];

		/* if the frame is intra, undo the effect of load_probs2() */
		if (frame_is_intra)
			copy_tx_and_skip(probs, tx_skip);

		counts = frame_is_intra ? &vp9_ctx->intra_cnts : &vp9_ctx->inter_cnts;
		v4l2_vp9_adapt_coef_probs(probs, counts,
					  !vp9_ctx->last.valid ||
					  vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME,
					  frame_is_intra);
		if (!frame_is_intra) {
			const struct rkvdec_vp9_inter_frame_symbol_counts *inter_cnts;
			u32 classes[2][11];
			int i;

			inter_cnts = vp9_ctx->count_tbl.cpu;
			/*
			 * The hardware pads each "classes" row to 12 entries
			 * while the v4l2-vp9 helper expects [2][11], so this
			 * is the one count that has to be repacked by hand.
			 */
			for (i = 0; i < ARRAY_SIZE(classes); ++i)
				memcpy(classes[i], inter_cnts->classes[i], sizeof(classes[0]));
			counts->classes = &classes;

			/* load_probs2() already done */
			v4l2_vp9_adapt_noncoef_probs(&vp9_ctx->probability_tables, counts,
						     vp9_ctx->cur.reference_mode,
						     vp9_ctx->cur.interpolation_filter,
						     vp9_ctx->cur.tx_mode, vp9_ctx->cur.flags);
		}
	}

	/* 6.1.2 refresh_probs(): save_probs(fctx_idx) */
	vp9_ctx->frame_context[fctx_idx] = vp9_ctx->probability_tables;

out_update_last:
	update_ctx_last_info(vp9_ctx);
}
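
/*
 * The hardware writes its symbol counts in one of two layouts, depending
 * on whether the decoded frame was inter or intra coded (intra frames
 * produce no inter-specific counts). Both layouts alias the same count
 * buffer; the pointer tables built below give the v4l2-vp9 helpers a view
 * of either layout, and rkvdec_vp9_done() picks whichever matches the
 * decoded frame.
 */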
static void rkvdec_init_v4l2_vp9_count_tbl(struct rkvdec_ctx *ctx)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct rkvdec_vp9_intra_frame_symbol_counts *intra_cnts = vp9_ctx->count_tbl.cpu;
	struct rkvdec_vp9_inter_frame_symbol_counts *inter_cnts = vp9_ctx->count_tbl.cpu;
	int i, j, k, l, m;

	vp9_ctx->inter_cnts.partition = &inter_cnts->partition;
	vp9_ctx->inter_cnts.skip = &inter_cnts->skip;
	vp9_ctx->inter_cnts.intra_inter = &inter_cnts->inter;
	vp9_ctx->inter_cnts.tx32p = &inter_cnts->tx32p;
	vp9_ctx->inter_cnts.tx16p = &inter_cnts->tx16p;
	vp9_ctx->inter_cnts.tx8p = &inter_cnts->tx8p;

	vp9_ctx->intra_cnts.partition = (u32 (*)[16][4])(&intra_cnts->partition);
	vp9_ctx->intra_cnts.skip = &intra_cnts->skip;
	vp9_ctx->intra_cnts.intra_inter = &intra_cnts->intra;
	vp9_ctx->intra_cnts.tx32p = &intra_cnts->tx32p;
	vp9_ctx->intra_cnts.tx16p = &intra_cnts->tx16p;
	vp9_ctx->intra_cnts.tx8p = &intra_cnts->tx8p;

	vp9_ctx->inter_cnts.y_mode = &inter_cnts->y_mode;
	vp9_ctx->inter_cnts.uv_mode = &inter_cnts->uv_mode;
	vp9_ctx->inter_cnts.comp = &inter_cnts->comp;
	vp9_ctx->inter_cnts.comp_ref = &inter_cnts->comp_ref;
	vp9_ctx->inter_cnts.single_ref = &inter_cnts->single_ref;
	vp9_ctx->inter_cnts.mv_mode = &inter_cnts->mv_mode;
	vp9_ctx->inter_cnts.filter = &inter_cnts->filter;
	vp9_ctx->inter_cnts.mv_joint = &inter_cnts->mv_joint;
	vp9_ctx->inter_cnts.sign = &inter_cnts->sign;

	/*
	 * The hardware pads each "classes" row for alignment (u32[2][12]
	 * instead of the u32[2][11] the v4l2-vp9 helper expects), so
	 * .classes cannot simply point into the count buffer; it is
	 * repacked per frame in rkvdec_vp9_done() instead.
	 */
	vp9_ctx->inter_cnts.class0 = &inter_cnts->class0;
	vp9_ctx->inter_cnts.bits = &inter_cnts->bits;
	vp9_ctx->inter_cnts.class0_fp = &inter_cnts->class0_fp;
	vp9_ctx->inter_cnts.fp = &inter_cnts->fp;
	vp9_ctx->inter_cnts.class0_hp = &inter_cnts->class0_hp;
	vp9_ctx->inter_cnts.hp = &inter_cnts->hp;

	/*
	 * Wire up the per-band coefficient and EOB count pointers. Note the
	 * index permutation (ref_cnt[k][i][j]...): the hardware groups the
	 * counts by intra/inter first, while the v4l2-vp9 helper indexes
	 * TX size first.
	 */
#define INNERMOST_LOOP \
	do { \
		for (m = 0; m < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff[0][0][0][0]); ++m) {\
			vp9_ctx->inter_cnts.coeff[i][j][k][l][m] = \
				&inter_cnts->ref_cnt[k][i][j][l][m].coeff; \
			vp9_ctx->inter_cnts.eob[i][j][k][l][m][0] = \
				&inter_cnts->ref_cnt[k][i][j][l][m].eob[0]; \
			vp9_ctx->inter_cnts.eob[i][j][k][l][m][1] = \
				&inter_cnts->ref_cnt[k][i][j][l][m].eob[1]; \
\
			vp9_ctx->intra_cnts.coeff[i][j][k][l][m] = \
				&intra_cnts->ref_cnt[k][i][j][l][m].coeff; \
			vp9_ctx->intra_cnts.eob[i][j][k][l][m][0] = \
				&intra_cnts->ref_cnt[k][i][j][l][m].eob[0]; \
			vp9_ctx->intra_cnts.eob[i][j][k][l][m][1] = \
				&intra_cnts->ref_cnt[k][i][j][l][m].eob[1]; \
		} \
	} while (0)

	for (i = 0; i < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff); ++i)
		for (j = 0; j < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff[0]); ++j)
			for (k = 0; k < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff[0][0]); ++k)
				for (l = 0; l < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff[0][0][0]); ++l)
					INNERMOST_LOOP;
#undef INNERMOST_LOOP
}

static int rkvdec_vp9_start(struct rkvdec_ctx *ctx)
{
	struct rkvdec_dev *rkvdec = ctx->dev;
	struct rkvdec_vp9_priv_tbl *priv_tbl;
	struct rkvdec_vp9_ctx *vp9_ctx;
	unsigned char *count_tbl;
	int ret;

	vp9_ctx = kzalloc(sizeof(*vp9_ctx), GFP_KERNEL);
	if (!vp9_ctx)
		return -ENOMEM;

	ctx->priv = vp9_ctx;

	priv_tbl = dma_alloc_coherent(rkvdec->dev, sizeof(*priv_tbl),
				      &vp9_ctx->priv_tbl.dma, GFP_KERNEL);
	if (!priv_tbl) {
		ret = -ENOMEM;
		goto err_free_ctx;
	}

	vp9_ctx->priv_tbl.size = sizeof(*priv_tbl);
	vp9_ctx->priv_tbl.cpu = priv_tbl;

	count_tbl = dma_alloc_coherent(rkvdec->dev, RKVDEC_VP9_COUNT_SIZE,
				       &vp9_ctx->count_tbl.dma, GFP_KERNEL);
	if (!count_tbl) {
		ret = -ENOMEM;
		goto err_free_priv_tbl;
	}

	vp9_ctx->count_tbl.size = RKVDEC_VP9_COUNT_SIZE;
	vp9_ctx->count_tbl.cpu = count_tbl;
	rkvdec_init_v4l2_vp9_count_tbl(ctx);

	return 0;

err_free_priv_tbl:
	dma_free_coherent(rkvdec->dev, vp9_ctx->priv_tbl.size,
			  vp9_ctx->priv_tbl.cpu, vp9_ctx->priv_tbl.dma);

err_free_ctx:
	kfree(vp9_ctx);
	return ret;
}

static void rkvdec_vp9_stop(struct rkvdec_ctx *ctx)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct rkvdec_dev *rkvdec = ctx->dev;

	dma_free_coherent(rkvdec->dev, vp9_ctx->count_tbl.size,
			  vp9_ctx->count_tbl.cpu, vp9_ctx->count_tbl.dma);
	dma_free_coherent(rkvdec->dev, vp9_ctx->priv_tbl.size,
			  vp9_ctx->priv_tbl.cpu, vp9_ctx->priv_tbl.dma);
	kfree(vp9_ctx);
}
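
/*
 * Default the coded (OUTPUT) buffer size to two bytes per pixel when
 * userspace doesn't provide one -- presumably a generous worst case for a
 * compressed VP9 frame at the negotiated resolution.
 */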
static int rkvdec_vp9_adjust_fmt(struct rkvdec_ctx *ctx,
				 struct v4l2_format *f)
{
	struct v4l2_pix_format_mplane *fmt = &f->fmt.pix_mp;

	fmt->num_planes = 1;
	if (!fmt->plane_fmt[0].sizeimage)
		fmt->plane_fmt[0].sizeimage = fmt->width * fmt->height * 2;
	return 0;
}

const struct rkvdec_coded_fmt_ops rkvdec_vp9_fmt_ops = {
	.adjust_fmt = rkvdec_vp9_adjust_fmt,
	.start = rkvdec_vp9_start,
	.stop = rkvdec_vp9_stop,
	.run = rkvdec_vp9_run,
	.done = rkvdec_vp9_done,
};