// SPDX-License-Identifier: (GPL-2.0 OR BSD-3-Clause)
/*
 * Wave5 series multi-standard codec IP - wave5 backend logic
 *
 * Copyright (C) 2021-2023 CHIPS&MEDIA INC
 */

#include <linux/iopoll.h>
#include <linux/bitfield.h>
#include "wave5-vpu.h"
#include "wave5.h"
#include "wave5-regdefine.h"

#define FIO_TIMEOUT 10000000
#define FIO_CTRL_READY BIT(31)
#define FIO_CTRL_WRITE BIT(16)
#define VPU_BUSY_CHECK_TIMEOUT 10000000
#define QUEUE_REPORT_MASK 0xffff

/* Encoder support fields */
#define FEATURE_HEVC10BIT_ENC BIT(3)
#define FEATURE_AVC10BIT_ENC BIT(11)
#define FEATURE_AVC_ENCODER BIT(1)
#define FEATURE_HEVC_ENCODER BIT(0)

/* Decoder support fields */
#define FEATURE_AVC_DECODER BIT(3)
#define FEATURE_HEVC_DECODER BIT(2)

#define FEATURE_BACKBONE BIT(16)
#define FEATURE_VCORE_BACKBONE BIT(22)
#define FEATURE_VCPU_BACKBONE BIT(28)

#define REMAP_CTRL_MAX_SIZE_BITS ((W5_REMAP_MAX_SIZE >> 12) & 0x1ff)
#define REMAP_CTRL_REGISTER_VALUE(index) ( \
	(BIT(31) | ((index) << 12) | BIT(11) | REMAP_CTRL_MAX_SIZE_BITS) \
)

#define FASTIO_ADDRESS_MASK GENMASK(15, 0)
#define SEQ_PARAM_PROFILE_MASK GENMASK(30, 24)

static void _wave5_print_reg_err(struct vpu_device *vpu_dev, u32 reg_fail_reason,
				 const char *func);
#define PRINT_REG_ERR(dev, reason) _wave5_print_reg_err((dev), (reason), __func__)

static inline const char *cmd_to_str(int cmd, bool is_dec)
{
	switch (cmd) {
	case W5_INIT_VPU:
		return "W5_INIT_VPU";
	case W5_WAKEUP_VPU:
		return "W5_WAKEUP_VPU";
	case W5_SLEEP_VPU:
		return "W5_SLEEP_VPU";
	case W5_CREATE_INSTANCE:
		return "W5_CREATE_INSTANCE";
	case W5_FLUSH_INSTANCE:
		return "W5_FLUSH_INSTANCE";
	case W5_DESTROY_INSTANCE:
		return "W5_DESTROY_INSTANCE";
	case W5_INIT_SEQ:
		return "W5_INIT_SEQ";
	case W5_SET_FB:
		return "W5_SET_FB";
	case W5_DEC_ENC_PIC:
		if (is_dec)
			return "W5_DEC_PIC";
		return "W5_ENC_PIC";
	case W5_ENC_SET_PARAM:
		return "W5_ENC_SET_PARAM";
	case W5_QUERY:
		return "W5_QUERY";
	case W5_UPDATE_BS:
		return "W5_UPDATE_BS";
	case W5_MAX_VPU_COMD:
		return "W5_MAX_VPU_COMD";
	default:
		return "UNKNOWN";
	}
}

static void _wave5_print_reg_err(struct vpu_device *vpu_dev, u32 reg_fail_reason,
				 const char *func)
{
	struct device *dev = vpu_dev->dev;
	u32 reg_val;

	switch (reg_fail_reason) {
	case WAVE5_SYSERR_QUEUEING_FAIL:
		reg_val = vpu_read_reg(vpu_dev, W5_RET_QUEUE_FAIL_REASON);
		dev_dbg(dev, "%s: queueing failure: 0x%x\n", func, reg_val);
		break;
	case WAVE5_SYSERR_RESULT_NOT_READY:
		dev_err(dev, "%s: result not ready: 0x%x\n", func, reg_fail_reason);
		break;
	case WAVE5_SYSERR_ACCESS_VIOLATION_HW:
		dev_err(dev, "%s: access violation: 0x%x\n", func, reg_fail_reason);
		break;
	case WAVE5_SYSERR_WATCHDOG_TIMEOUT:
		dev_err(dev, "%s: watchdog timeout: 0x%x\n", func, reg_fail_reason);
		break;
	case WAVE5_SYSERR_BUS_ERROR:
		dev_err(dev, "%s: bus error: 0x%x\n", func, reg_fail_reason);
		break;
	case WAVE5_SYSERR_DOUBLE_FAULT:
		dev_err(dev, "%s: double fault: 0x%x\n", func, reg_fail_reason);
		break;
	case WAVE5_SYSERR_VPU_STILL_RUNNING:
		dev_err(dev, "%s: still running: 0x%x\n", func, reg_fail_reason);
		break;
	case WAVE5_SYSERR_VLC_BUF_FULL:
		dev_err(dev, "%s: vlc buf full: 0x%x\n", func, reg_fail_reason);
		break;
	default:
		dev_err(dev, "%s: failure: 0x%x\n", func, reg_fail_reason);
		break;
	}
}

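/*
 * Fast I/O (FIO) accessors. Registers behind the FIO bridge are not
 * directly memory-mapped: the host writes the FIO address to
 * W5_VPU_FIO_CTRL_ADDR (with FIO_CTRL_WRITE set for stores), moves the
 * payload through W5_VPU_FIO_DATA, and polls FIO_CTRL_READY to detect
 * completion of the transaction.
 */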
static int wave5_wait_fio_readl(struct vpu_device *vpu_dev, u32 addr, u32 val)
{
	u32 ctrl;
	int ret;

	ctrl = addr & 0xffff;
	wave5_vdi_write_register(vpu_dev, W5_VPU_FIO_CTRL_ADDR, ctrl);
	ret = read_poll_timeout(wave5_vdi_read_register, ctrl, ctrl & FIO_CTRL_READY,
				0, FIO_TIMEOUT, false, vpu_dev, W5_VPU_FIO_CTRL_ADDR);
	if (ret)
		return ret;

	if (wave5_vdi_read_register(vpu_dev, W5_VPU_FIO_DATA) != val)
		return -ETIMEDOUT;

	return 0;
}

static void wave5_fio_writel(struct vpu_device *vpu_dev, unsigned int addr, unsigned int data)
{
	int ret;
	unsigned int ctrl;

	wave5_vdi_write_register(vpu_dev, W5_VPU_FIO_DATA, data);
	ctrl = FIELD_GET(FASTIO_ADDRESS_MASK, addr);
	ctrl |= FIO_CTRL_WRITE;
	wave5_vdi_write_register(vpu_dev, W5_VPU_FIO_CTRL_ADDR, ctrl);
	ret = read_poll_timeout(wave5_vdi_read_register, ctrl, ctrl & FIO_CTRL_READY, 0,
				FIO_TIMEOUT, false, vpu_dev, W5_VPU_FIO_CTRL_ADDR);
	if (ret)
		dev_dbg_ratelimited(vpu_dev->dev, "FIO write timeout: addr=0x%x data=%x\n",
				    addr, data);
}

static int wave5_wait_bus_busy(struct vpu_device *vpu_dev, unsigned int addr)
{
	u32 gdi_status_check_value = 0x3f;

	if (vpu_dev->product_code == WAVE521C_CODE ||
	    vpu_dev->product_code == WAVE521_CODE ||
	    vpu_dev->product_code == WAVE521E1_CODE)
		gdi_status_check_value = 0x00ff1f3f;

	return wave5_wait_fio_readl(vpu_dev, addr, gdi_status_check_value);
}

static int wave5_wait_vpu_busy(struct vpu_device *vpu_dev, unsigned int addr)
{
	u32 data;

	return read_poll_timeout(wave5_vdi_read_register, data, data == 0,
				 0, VPU_BUSY_CHECK_TIMEOUT, false, vpu_dev, addr);
}

static int wave5_wait_vcpu_bus_busy(struct vpu_device *vpu_dev, unsigned int addr)
{
	return wave5_wait_fio_readl(vpu_dev, addr, 0);
}

bool wave5_vpu_is_init(struct vpu_device *vpu_dev)
{
	return vpu_read_reg(vpu_dev, W5_VCPU_CUR_PC) != 0;
}

unsigned int wave5_vpu_get_product_id(struct vpu_device *vpu_dev)
{
	u32 val = vpu_read_reg(vpu_dev, W5_PRODUCT_NUMBER);

	switch (val) {
	case WAVE521C_CODE:
		return PRODUCT_ID_521;
	case WAVE521_CODE:
	case WAVE521C_DUAL_CODE:
	case WAVE521E1_CODE:
	case WAVE511_CODE:
	case WAVE517_CODE:
	case WAVE537_CODE:
		dev_err(vpu_dev->dev, "Unsupported product id (%x)\n", val);
		break;
	default:
		dev_err(vpu_dev->dev, "Invalid product id (%x)\n", val);
		break;
	}

	return PRODUCT_ID_NONE;
}

static void wave5_bit_issue_command(struct vpu_device *vpu_dev, struct vpu_instance *inst, u32 cmd)
{
	u32 instance_index;
	u32 codec_mode;

	if (inst) {
		instance_index = inst->id;
		codec_mode = inst->std;

		vpu_write_reg(vpu_dev, W5_CMD_INSTANCE_INFO, (codec_mode << 16) |
			      (instance_index & 0xffff));
		vpu_write_reg(vpu_dev, W5_VPU_BUSY_STATUS, 1);
	}

	vpu_write_reg(vpu_dev, W5_COMMAND, cmd);

	if (inst) {
		dev_dbg(vpu_dev->dev, "%s: cmd=0x%x (%s)\n", __func__, cmd,
			cmd_to_str(cmd, inst->type == VPU_INST_TYPE_DEC));
	} else {
		dev_dbg(vpu_dev->dev, "%s: cmd=0x%x\n", __func__, cmd);
	}

	vpu_write_reg(vpu_dev, W5_VPU_HOST_INT_REQ, 1);
}

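/*
 * After a command is issued, the firmware reports through W5_RET_SUCCESS
 * whether it could be queued; on failure W5_RET_FAIL_REASON carries one of
 * the WAVE5_SYSERR_* codes decoded by _wave5_print_reg_err() above.
 */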
static int wave5_vpu_firmware_command_queue_error_check(struct vpu_device *dev, u32 *fail_res)
{
	u32 reason = 0;

	/* Check if we were able to add a command into the VCPU QUEUE */
	if (!vpu_read_reg(dev, W5_RET_SUCCESS)) {
		reason = vpu_read_reg(dev, W5_RET_FAIL_REASON);
		PRINT_REG_ERR(dev, reason);

		/*
		 * The fail_res argument is either NULL or points to a u32
		 * initialized to 0. If it is NULL, just return -EIO.
		 * Otherwise, store the reason in *fail_res so that the
		 * caller can use it.
		 */
		if (fail_res)
			*fail_res = reason;
		else
			return -EIO;

		if (reason == WAVE5_SYSERR_VPU_STILL_RUNNING)
			return -EBUSY;
	}
	return 0;
}

static int send_firmware_command(struct vpu_instance *inst, u32 cmd, bool check_success,
				 u32 *queue_status, u32 *fail_result)
{
	int ret;

	wave5_bit_issue_command(inst->dev, inst, cmd);
	ret = wave5_wait_vpu_busy(inst->dev, W5_VPU_BUSY_STATUS);
	if (ret) {
		dev_warn(inst->dev->dev, "%s: command '%s' timed out\n", __func__,
			 cmd_to_str(cmd, inst->type == VPU_INST_TYPE_DEC));
		return -ETIMEDOUT;
	}

	if (queue_status)
		*queue_status = vpu_read_reg(inst->dev, W5_RET_QUEUE_STATUS);

	/*
	 * In some cases we want to send multiple commands before checking
	 * whether they are queued properly.
	 */
	if (!check_success)
		return 0;

	return wave5_vpu_firmware_command_queue_error_check(inst->dev, fail_result);
}

static int wave5_send_query(struct vpu_device *vpu_dev, struct vpu_instance *inst,
			    enum query_opt query_opt)
{
	int ret;

	vpu_write_reg(vpu_dev, W5_QUERY_OPTION, query_opt);
	vpu_write_reg(vpu_dev, W5_VPU_BUSY_STATUS, 1);
	wave5_bit_issue_command(vpu_dev, inst, W5_QUERY);

	ret = wave5_wait_vpu_busy(vpu_dev, W5_VPU_BUSY_STATUS);
	if (ret) {
		dev_warn(vpu_dev->dev, "command 'W5_QUERY' timed out, opt=0x%x\n", query_opt);
		return ret;
	}

	return wave5_vpu_firmware_command_queue_error_check(vpu_dev, NULL);
}

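/*
 * GET_VPU_INFO reports the static capabilities of the IP through the
 * W5_RET_* registers. W5_RET_PRODUCT_NAME packs the four-character product
 * name with its first character in the most significant byte (on a
 * little-endian host), hence the byte reversal below.
 */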
static int setup_wave5_properties(struct device *dev)
{
	struct vpu_device *vpu_dev = dev_get_drvdata(dev);
	struct vpu_attr *p_attr = &vpu_dev->attr;
	u32 reg_val;
	u8 *str;
	int ret;
	u32 hw_config_def0, hw_config_def1, hw_config_feature;

	ret = wave5_send_query(vpu_dev, NULL, GET_VPU_INFO);
	if (ret)
		return ret;

	reg_val = vpu_read_reg(vpu_dev, W5_RET_PRODUCT_NAME);
	str = (u8 *)&reg_val;
	p_attr->product_name[0] = str[3];
	p_attr->product_name[1] = str[2];
	p_attr->product_name[2] = str[1];
	p_attr->product_name[3] = str[0];
	p_attr->product_name[4] = 0;

	p_attr->product_id = wave5_vpu_get_product_id(vpu_dev);
	p_attr->product_version = vpu_read_reg(vpu_dev, W5_RET_PRODUCT_VERSION);
	p_attr->fw_version = vpu_read_reg(vpu_dev, W5_RET_FW_VERSION);
	p_attr->customer_id = vpu_read_reg(vpu_dev, W5_RET_CUSTOMER_ID);
	hw_config_def0 = vpu_read_reg(vpu_dev, W5_RET_STD_DEF0);
	hw_config_def1 = vpu_read_reg(vpu_dev, W5_RET_STD_DEF1);
	hw_config_feature = vpu_read_reg(vpu_dev, W5_RET_CONF_FEATURE);

	p_attr->support_hevc10bit_enc = FIELD_GET(FEATURE_HEVC10BIT_ENC, hw_config_feature);
	p_attr->support_avc10bit_enc = FIELD_GET(FEATURE_AVC10BIT_ENC, hw_config_feature);

	p_attr->support_decoders = FIELD_GET(FEATURE_AVC_DECODER, hw_config_def1) << STD_AVC;
	p_attr->support_decoders |= FIELD_GET(FEATURE_HEVC_DECODER, hw_config_def1) << STD_HEVC;
	p_attr->support_encoders = FIELD_GET(FEATURE_AVC_ENCODER, hw_config_def1) << STD_AVC;
	p_attr->support_encoders |= FIELD_GET(FEATURE_HEVC_ENCODER, hw_config_def1) << STD_HEVC;

	p_attr->support_backbone = FIELD_GET(FEATURE_BACKBONE, hw_config_def0);
	p_attr->support_vcpu_backbone = FIELD_GET(FEATURE_VCPU_BACKBONE, hw_config_def0);
	p_attr->support_vcore_backbone = FIELD_GET(FEATURE_VCORE_BACKBONE, hw_config_def0);

	return 0;
}

int wave5_vpu_get_version(struct vpu_device *vpu_dev, u32 *revision)
{
	u32 reg_val;
	int ret;

	ret = wave5_send_query(vpu_dev, NULL, GET_VPU_INFO);
	if (ret)
		return ret;

	reg_val = vpu_read_reg(vpu_dev, W5_RET_FW_VERSION);
	if (revision) {
		*revision = reg_val;
		return 0;
	}

	return -EINVAL;
}

static void remap_page(struct vpu_device *vpu_dev, dma_addr_t code_base, u32 index)
{
	vpu_write_reg(vpu_dev, W5_VPU_REMAP_CTRL, REMAP_CTRL_REGISTER_VALUE(index));
	vpu_write_reg(vpu_dev, W5_VPU_REMAP_VADDR, index * W5_REMAP_MAX_SIZE);
	vpu_write_reg(vpu_dev, W5_VPU_REMAP_PADDR, code_base + index * W5_REMAP_MAX_SIZE);
}

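/*
 * Boot sequence: download the firmware into the code buffer, point both
 * remap pages (W5_REMAP_INDEX0/1) at it, program the code/temp buffer
 * addresses and the interrupt mask, then start the VCPU with W5_INIT_VPU
 * and W5_VPU_REMAP_CORE_START. Reading REMAP_CTRL_REGISTER_VALUE() above:
 * bit 31 appears to enable the remap entry, the page index sits at bit 12
 * and the low nine bits carry the region size in 4KB units (an
 * interpretation of the macro, not taken from IP documentation).
 */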
int wave5_vpu_init(struct device *dev, u8 *fw, size_t size)
{
	struct vpu_buf *common_vb;
	dma_addr_t code_base, temp_base;
	u32 code_size, temp_size;
	u32 i, reg_val, reason_code;
	int ret;
	struct vpu_device *vpu_dev = dev_get_drvdata(dev);

	common_vb = &vpu_dev->common_mem;

	code_base = common_vb->daddr;
	/* ALIGN TO 4KB */
	code_size = (WAVE5_MAX_CODE_BUF_SIZE & ~0xfff);
	if (code_size < size * 2)
		return -EINVAL;

	temp_base = common_vb->daddr + WAVE5_TEMPBUF_OFFSET;
	temp_size = WAVE5_TEMPBUF_SIZE;

	ret = wave5_vdi_write_memory(vpu_dev, common_vb, 0, fw, size);
	if (ret < 0) {
		dev_err(vpu_dev->dev, "VPU init, Writing firmware to common buffer, fail: %d\n",
			ret);
		return ret;
	}

	vpu_write_reg(vpu_dev, W5_PO_CONF, 0);

	/* clear registers */
	for (i = W5_CMD_REG_BASE; i < W5_CMD_REG_END; i += 4)
		vpu_write_reg(vpu_dev, i, 0x00);

	remap_page(vpu_dev, code_base, W5_REMAP_INDEX0);
	remap_page(vpu_dev, code_base, W5_REMAP_INDEX1);

	vpu_write_reg(vpu_dev, W5_ADDR_CODE_BASE, code_base);
	vpu_write_reg(vpu_dev, W5_CODE_SIZE, code_size);
	vpu_write_reg(vpu_dev, W5_CODE_PARAM, (WAVE5_UPPER_PROC_AXI_ID << 4) | 0);
	vpu_write_reg(vpu_dev, W5_ADDR_TEMP_BASE, temp_base);
	vpu_write_reg(vpu_dev, W5_TEMP_SIZE, temp_size);

	/* These registers must be reset explicitly */
	vpu_write_reg(vpu_dev, W5_HW_OPTION, 0);
	wave5_fio_writel(vpu_dev, W5_BACKBONE_PROC_EXT_ADDR, 0);
	wave5_fio_writel(vpu_dev, W5_BACKBONE_AXI_PARAM, 0);
	vpu_write_reg(vpu_dev, W5_SEC_AXI_PARAM, 0);

	/* Encoder interrupt */
	reg_val = BIT(INT_WAVE5_ENC_SET_PARAM);
	reg_val |= BIT(INT_WAVE5_ENC_PIC);
	reg_val |= BIT(INT_WAVE5_BSBUF_FULL);
	/* Decoder interrupt */
	reg_val |= BIT(INT_WAVE5_INIT_SEQ);
	reg_val |= BIT(INT_WAVE5_DEC_PIC);
	reg_val |= BIT(INT_WAVE5_BSBUF_EMPTY);
	vpu_write_reg(vpu_dev, W5_VPU_VINT_ENABLE, reg_val);

	reg_val = vpu_read_reg(vpu_dev, W5_VPU_RET_VPU_CONFIG0);
	if (FIELD_GET(FEATURE_BACKBONE, reg_val)) {
		reg_val = ((WAVE5_PROC_AXI_ID << 28) |
			   (WAVE5_PRP_AXI_ID << 24) |
			   (WAVE5_FBD_Y_AXI_ID << 20) |
			   (WAVE5_FBC_Y_AXI_ID << 16) |
			   (WAVE5_FBD_C_AXI_ID << 12) |
			   (WAVE5_FBC_C_AXI_ID << 8) |
			   (WAVE5_PRI_AXI_ID << 4) |
			   WAVE5_SEC_AXI_ID);
		wave5_fio_writel(vpu_dev, W5_BACKBONE_PROG_AXI_ID, reg_val);
	}

	vpu_write_reg(vpu_dev, W5_VPU_BUSY_STATUS, 1);
	vpu_write_reg(vpu_dev, W5_COMMAND, W5_INIT_VPU);
	vpu_write_reg(vpu_dev, W5_VPU_REMAP_CORE_START, 1);
	ret = wave5_wait_vpu_busy(vpu_dev, W5_VPU_BUSY_STATUS);
	if (ret) {
		dev_err(vpu_dev->dev, "VPU init(W5_VPU_REMAP_CORE_START) timeout\n");
		return ret;
	}

	ret = wave5_vpu_firmware_command_queue_error_check(vpu_dev, &reason_code);
	if (ret)
		return ret;

	return setup_wave5_properties(dev);
}

int wave5_vpu_build_up_dec_param(struct vpu_instance *inst,
				 struct dec_open_param *param)
{
	int ret;
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	struct vpu_device *vpu_dev = inst->dev;

	p_dec_info->cycle_per_tick = 256;
	if (vpu_dev->sram_buf.size) {
		p_dec_info->sec_axi_info.use_bit_enable = 1;
		p_dec_info->sec_axi_info.use_ip_enable = 1;
		p_dec_info->sec_axi_info.use_lf_row_enable = 1;
	}
	switch (inst->std) {
	case W_HEVC_DEC:
		p_dec_info->seq_change_mask = SEQ_CHANGE_ENABLE_ALL_HEVC;
		break;
	case W_AVC_DEC:
		p_dec_info->seq_change_mask = SEQ_CHANGE_ENABLE_ALL_AVC;
		break;
	default:
		return -EINVAL;
	}

	p_dec_info->vb_work.size = WAVE521DEC_WORKBUF_SIZE;
	ret = wave5_vdi_allocate_dma_memory(inst->dev, &p_dec_info->vb_work);
	if (ret)
		return ret;

	vpu_write_reg(inst->dev, W5_CMD_DEC_VCORE_INFO, 1);

	wave5_vdi_clear_memory(inst->dev, &p_dec_info->vb_work);

	vpu_write_reg(inst->dev, W5_ADDR_WORK_BASE, p_dec_info->vb_work.daddr);
	vpu_write_reg(inst->dev, W5_WORK_SIZE, p_dec_info->vb_work.size);

	vpu_write_reg(inst->dev, W5_CMD_ADDR_SEC_AXI, vpu_dev->sram_buf.daddr);
	vpu_write_reg(inst->dev, W5_CMD_SEC_AXI_SIZE, vpu_dev->sram_buf.size);

	vpu_write_reg(inst->dev, W5_CMD_DEC_BS_START_ADDR, p_dec_info->stream_buf_start_addr);
	vpu_write_reg(inst->dev, W5_CMD_DEC_BS_SIZE, p_dec_info->stream_buf_size);

	/* NOTE: SDMA reads MSB first */
	vpu_write_reg(inst->dev, W5_CMD_BS_PARAM, BITSTREAM_ENDIANNESS_BIG_ENDIAN);
	/* This register must be reset explicitly */
	vpu_write_reg(inst->dev, W5_CMD_EXT_ADDR, 0);
	vpu_write_reg(inst->dev, W5_CMD_NUM_CQ_DEPTH_M1, (COMMAND_QUEUE_DEPTH - 1));

	ret = send_firmware_command(inst, W5_CREATE_INSTANCE, true, NULL, NULL);
	if (ret) {
		wave5_vdi_free_dma_memory(vpu_dev, &p_dec_info->vb_work);
		return ret;
	}

	p_dec_info->product_code = vpu_read_reg(inst->dev, W5_PRODUCT_NUMBER);

	return 0;
}

int wave5_vpu_hw_flush_instance(struct vpu_instance *inst)
{
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	u32 instance_queue_count, report_queue_count;
	u32 reg_val = 0;
	u32 fail_res = 0;
	int ret;

	ret = send_firmware_command(inst, W5_FLUSH_INSTANCE, true, &reg_val, &fail_res);
	if (ret)
		return ret;

	instance_queue_count = (reg_val >> 16) & 0xff;
	report_queue_count = (reg_val & QUEUE_REPORT_MASK);
	if (instance_queue_count != 0 || report_queue_count != 0) {
		dev_warn(inst->dev->dev,
			 "FLUSH_INSTANCE cmd didn't reset the amount of queued commands & reports\n");
	}

	/* reset our local copy of the counts */
	p_dec_info->instance_queue_count = 0;
	p_dec_info->report_queue_count = 0;

	return 0;
}

static u32 get_bitstream_options(struct dec_info *info)
{
	u32 bs_option = BSOPTION_ENABLE_EXPLICIT_END;

	if (info->stream_endflag)
		bs_option |= BSOPTION_HIGHLIGHT_STREAM_END;
	return bs_option;
}

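/*
 * The per-instance commands below go through the firmware command queue:
 * the host programs the W5_CMD_* registers and issues the command, then
 * reads the queue occupancy back from W5_RET_QUEUE_STATUS (upper half:
 * queued commands, lower half: pending reports).
 */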
int wave5_vpu_dec_init_seq(struct vpu_instance *inst)
{
	struct dec_info *p_dec_info;
	u32 cmd_option = INIT_SEQ_NORMAL;
	u32 reg_val, fail_res;
	int ret;

	if (!inst->codec_info)
		return -EINVAL;

	p_dec_info = &inst->codec_info->dec_info;

	vpu_write_reg(inst->dev, W5_BS_RD_PTR, p_dec_info->stream_rd_ptr);
	vpu_write_reg(inst->dev, W5_BS_WR_PTR, p_dec_info->stream_wr_ptr);

	vpu_write_reg(inst->dev, W5_BS_OPTION, get_bitstream_options(p_dec_info));

	vpu_write_reg(inst->dev, W5_COMMAND_OPTION, cmd_option);
	vpu_write_reg(inst->dev, W5_CMD_DEC_USER_MASK, p_dec_info->user_data_enable);

	ret = send_firmware_command(inst, W5_INIT_SEQ, true, &reg_val, &fail_res);
	if (ret)
		return ret;

	p_dec_info->instance_queue_count = (reg_val >> 16) & 0xff;
	p_dec_info->report_queue_count = (reg_val & QUEUE_REPORT_MASK);

	dev_dbg(inst->dev->dev, "%s: init seq sent (queue %u : %u)\n", __func__,
		p_dec_info->instance_queue_count, p_dec_info->report_queue_count);

	return 0;
}

static void wave5_get_dec_seq_result(struct vpu_instance *inst, struct dec_initial_info *info)
{
	u32 reg_val;
	u32 profile_compatibility_flag;
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;

	p_dec_info->stream_rd_ptr = wave5_dec_get_rd_ptr(inst);
	info->rd_ptr = p_dec_info->stream_rd_ptr;

	p_dec_info->frame_display_flag = vpu_read_reg(inst->dev, W5_RET_DEC_DISP_IDC);

	reg_val = vpu_read_reg(inst->dev, W5_RET_DEC_PIC_SIZE);
	info->pic_width = ((reg_val >> 16) & 0xffff);
	info->pic_height = (reg_val & 0xffff);
	info->min_frame_buffer_count = vpu_read_reg(inst->dev, W5_RET_DEC_NUM_REQUIRED_FB);

	reg_val = vpu_read_reg(inst->dev, W5_RET_DEC_CROP_LEFT_RIGHT);
	info->pic_crop_rect.left = (reg_val >> 16) & 0xffff;
	info->pic_crop_rect.right = reg_val & 0xffff;
	reg_val = vpu_read_reg(inst->dev, W5_RET_DEC_CROP_TOP_BOTTOM);
	info->pic_crop_rect.top = (reg_val >> 16) & 0xffff;
	info->pic_crop_rect.bottom = reg_val & 0xffff;

	reg_val = vpu_read_reg(inst->dev, W5_RET_DEC_COLOR_SAMPLE_INFO);
	info->luma_bitdepth = reg_val & 0xf;
	info->chroma_bitdepth = (reg_val >> 4) & 0xf;

	reg_val = vpu_read_reg(inst->dev, W5_RET_DEC_SEQ_PARAM);
	profile_compatibility_flag = (reg_val >> 12) & 0xff;
	info->profile = (reg_val >> 24) & 0x1f;

	if (inst->std == W_HEVC_DEC) {
		/* guessing profile */
		if (!info->profile) {
			if ((profile_compatibility_flag & 0x06) == 0x06)
				info->profile = HEVC_PROFILE_MAIN; /* main profile */
			else if (profile_compatibility_flag & 0x04)
				info->profile = HEVC_PROFILE_MAIN10; /* main10 profile */
			else if (profile_compatibility_flag & 0x08)
				/* main still picture profile */
				info->profile = HEVC_PROFILE_STILLPICTURE;
			else
				info->profile = HEVC_PROFILE_MAIN; /* for old version HM */
		}
	} else if (inst->std == W_AVC_DEC) {
		info->profile = FIELD_GET(SEQ_PARAM_PROFILE_MASK, reg_val);
	}

	info->vlc_buf_size = vpu_read_reg(inst->dev, W5_RET_VLC_BUF_SIZE);
	info->param_buf_size = vpu_read_reg(inst->dev, W5_RET_PARAM_BUF_SIZE);
	p_dec_info->vlc_buf_size = info->vlc_buf_size;
	p_dec_info->param_buf_size = info->param_buf_size;
}

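/*
 * GET_RESULT retrieves the outcome of a previously queued INIT_SEQ
 * command. A failed sequence init is not fatal at this layer: the error
 * reason is saved for the caller and the sequence results are still
 * parsed.
 */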
int wave5_vpu_dec_get_seq_info(struct vpu_instance *inst, struct dec_initial_info *info)
{
	int ret;
	u32 reg_val;
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;

	vpu_write_reg(inst->dev, W5_CMD_DEC_ADDR_REPORT_BASE, p_dec_info->user_data_buf_addr);
	vpu_write_reg(inst->dev, W5_CMD_DEC_REPORT_SIZE, p_dec_info->user_data_buf_size);
	vpu_write_reg(inst->dev, W5_CMD_DEC_REPORT_PARAM, REPORT_PARAM_ENDIANNESS_BIG_ENDIAN);

	/* send QUERY cmd */
	ret = wave5_send_query(inst->dev, inst, GET_RESULT);
	if (ret)
		return ret;

	reg_val = vpu_read_reg(inst->dev, W5_RET_QUEUE_STATUS);

	p_dec_info->instance_queue_count = (reg_val >> 16) & 0xff;
	p_dec_info->report_queue_count = (reg_val & QUEUE_REPORT_MASK);

	dev_dbg(inst->dev->dev, "%s: init seq complete (queue %u : %u)\n", __func__,
		p_dec_info->instance_queue_count, p_dec_info->report_queue_count);

	/* this is not a fatal error, set ret to -EIO but don't return immediately */
	if (vpu_read_reg(inst->dev, W5_RET_DEC_DECODING_SUCCESS) != 1) {
		info->seq_init_err_reason = vpu_read_reg(inst->dev, W5_RET_DEC_ERR_INFO);
		ret = -EIO;
	}

	wave5_get_dec_seq_result(inst, info);

	return ret;
}

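/*
 * Frame buffer registration. For COMPRESSED_FRAME_MAP the firmware also
 * needs per-frame motion-vector buffers and FBC luma/chroma offset tables;
 * linear buffers only carry their Y/Cb/Cr plane addresses. Buffers are
 * handed over in chunks of up to eight per W5_SET_FB command, with
 * W5_SFB_OPTION flagging the first and last chunk.
 */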
int wave5_vpu_dec_register_framebuffer(struct vpu_instance *inst, struct frame_buffer *fb_arr,
				       enum tiled_map_type map_type, unsigned int count)
{
	int ret;
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	struct dec_initial_info *init_info = &p_dec_info->initial_info;
	size_t remain, idx, j, i, cnt_8_chunk, size;
	u32 start_no, end_no;
	u32 reg_val, cbcr_interleave, nv21, pic_size;
	u32 addr_y, addr_cb, addr_cr;
	u32 mv_col_size, frame_width, frame_height, fbc_y_tbl_size, fbc_c_tbl_size;
	struct vpu_buf vb_buf;
	bool justified = WTL_RIGHT_JUSTIFIED;
	u32 format_no = WTL_PIXEL_8BIT;
	u32 color_format = 0;
	u32 pixel_order = 1;
	u32 bwb_flag = (map_type == LINEAR_FRAME_MAP) ? 1 : 0;

	cbcr_interleave = inst->cbcr_interleave;
	nv21 = inst->nv21;
	mv_col_size = 0;
	fbc_y_tbl_size = 0;
	fbc_c_tbl_size = 0;

	if (map_type >= COMPRESSED_FRAME_MAP) {
		cbcr_interleave = 0;
		nv21 = 0;

		switch (inst->std) {
		case W_HEVC_DEC:
			mv_col_size = WAVE5_DEC_HEVC_BUF_SIZE(init_info->pic_width,
							      init_info->pic_height);
			break;
		case W_AVC_DEC:
			mv_col_size = WAVE5_DEC_AVC_BUF_SIZE(init_info->pic_width,
							     init_info->pic_height);
			break;
		default:
			return -EINVAL;
		}

		if (inst->std == W_HEVC_DEC || inst->std == W_AVC_DEC) {
			size = ALIGN(ALIGN(mv_col_size, 16), BUFFER_MARGIN) + BUFFER_MARGIN;
			ret = wave5_vdi_allocate_array(inst->dev, p_dec_info->vb_mv, count, size);
			if (ret)
				goto free_mv_buffers;
		}

		frame_width = init_info->pic_width;
		frame_height = init_info->pic_height;
		fbc_y_tbl_size = ALIGN(WAVE5_FBC_LUMA_TABLE_SIZE(frame_width, frame_height), 16);
		fbc_c_tbl_size = ALIGN(WAVE5_FBC_CHROMA_TABLE_SIZE(frame_width, frame_height), 16);

		size = ALIGN(fbc_y_tbl_size, BUFFER_MARGIN) + BUFFER_MARGIN;
		ret = wave5_vdi_allocate_array(inst->dev, p_dec_info->vb_fbc_y_tbl, count, size);
		if (ret)
			goto free_fbc_y_tbl_buffers;

		size = ALIGN(fbc_c_tbl_size, BUFFER_MARGIN) + BUFFER_MARGIN;
		ret = wave5_vdi_allocate_array(inst->dev, p_dec_info->vb_fbc_c_tbl, count, size);
		if (ret)
			goto free_fbc_c_tbl_buffers;

		pic_size = (init_info->pic_width << 16) | (init_info->pic_height);

		vb_buf.size = (p_dec_info->vlc_buf_size * VLC_BUF_NUM) +
			      (p_dec_info->param_buf_size * COMMAND_QUEUE_DEPTH);
		vb_buf.daddr = 0;

		if (vb_buf.size != p_dec_info->vb_task.size) {
			wave5_vdi_free_dma_memory(inst->dev, &p_dec_info->vb_task);
			ret = wave5_vdi_allocate_dma_memory(inst->dev, &vb_buf);
			if (ret)
				goto free_fbc_c_tbl_buffers;

			p_dec_info->vb_task = vb_buf;
		}

		vpu_write_reg(inst->dev, W5_CMD_SET_FB_ADDR_TASK_BUF,
			      p_dec_info->vb_task.daddr);
		vpu_write_reg(inst->dev, W5_CMD_SET_FB_TASK_BUF_SIZE, vb_buf.size);
	} else {
		pic_size = (init_info->pic_width << 16) | (init_info->pic_height);

		if (inst->output_format == FORMAT_422)
			color_format = 1;
	}
	vpu_write_reg(inst->dev, W5_PIC_SIZE, pic_size);

	reg_val = (bwb_flag << 28) |
		  (pixel_order << 23) |
		  (justified << 22) |
		  (format_no << 20) |
		  (color_format << 19) |
		  (nv21 << 17) |
		  (cbcr_interleave << 16) |
		  (fb_arr[0].stride);
	vpu_write_reg(inst->dev, W5_COMMON_PIC_INFO, reg_val);

	remain = count;
	cnt_8_chunk = DIV_ROUND_UP(count, 8);
	idx = 0;
	for (j = 0; j < cnt_8_chunk; j++) {
		reg_val = (j == cnt_8_chunk - 1) << 4 | ((j == 0) << 3);
		vpu_write_reg(inst->dev, W5_SFB_OPTION, reg_val);
		start_no = j * 8;
		end_no = start_no + ((remain >= 8) ? 8 : remain) - 1;

		vpu_write_reg(inst->dev, W5_SET_FB_NUM, (start_no << 8) | end_no);

		for (i = 0; i < 8 && i < remain; i++) {
			addr_y = fb_arr[i + start_no].buf_y;
			addr_cb = fb_arr[i + start_no].buf_cb;
			addr_cr = fb_arr[i + start_no].buf_cr;
			vpu_write_reg(inst->dev, W5_ADDR_LUMA_BASE0 + (i << 4), addr_y);
			vpu_write_reg(inst->dev, W5_ADDR_CB_BASE0 + (i << 4), addr_cb);
			if (map_type >= COMPRESSED_FRAME_MAP) {
				/* luma FBC offset table */
				vpu_write_reg(inst->dev, W5_ADDR_FBC_Y_OFFSET0 + (i << 4),
					      p_dec_info->vb_fbc_y_tbl[idx].daddr);
				/* chroma FBC offset table */
				vpu_write_reg(inst->dev, W5_ADDR_FBC_C_OFFSET0 + (i << 4),
					      p_dec_info->vb_fbc_c_tbl[idx].daddr);
				vpu_write_reg(inst->dev, W5_ADDR_MV_COL0 + (i << 2),
					      p_dec_info->vb_mv[idx].daddr);
			} else {
				vpu_write_reg(inst->dev, W5_ADDR_CR_BASE0 + (i << 4), addr_cr);
				vpu_write_reg(inst->dev, W5_ADDR_FBC_C_OFFSET0 + (i << 4), 0);
				vpu_write_reg(inst->dev, W5_ADDR_MV_COL0 + (i << 2), 0);
			}
			idx++;
		}
		remain -= i;

		ret = send_firmware_command(inst, W5_SET_FB, false, NULL, NULL);
		if (ret)
			goto free_buffers;
	}

	reg_val = vpu_read_reg(inst->dev, W5_RET_SUCCESS);
	if (!reg_val) {
		ret = -EIO;
		goto free_buffers;
	}

	return 0;

free_buffers:
	wave5_vdi_free_dma_memory(inst->dev, &p_dec_info->vb_task);
free_fbc_c_tbl_buffers:
	for (i = 0; i < count; i++)
		wave5_vdi_free_dma_memory(inst->dev, &p_dec_info->vb_fbc_c_tbl[i]);
free_fbc_y_tbl_buffers:
	for (i = 0; i < count; i++)
		wave5_vdi_free_dma_memory(inst->dev, &p_dec_info->vb_fbc_y_tbl[i]);
free_mv_buffers:
	for (i = 0; i < count; i++)
		wave5_vdi_free_dma_memory(inst->dev, &p_dec_info->vb_mv[i]);
	return ret;
}

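/*
 * Queue a single decode command. Secondary AXI usage and the
 * sequence-change mask are (re)programmed for every picture, and when
 * reordering is disabled the firmware is told to force the frame buffer
 * latency through W5_CMD_DEC_FORCE_FB_LATENCY_PLUS1.
 */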
int wave5_vpu_decode(struct vpu_instance *inst, u32 *fail_res)
{
	u32 reg_val;
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	int ret;

	vpu_write_reg(inst->dev, W5_BS_RD_PTR, p_dec_info->stream_rd_ptr);
	vpu_write_reg(inst->dev, W5_BS_WR_PTR, p_dec_info->stream_wr_ptr);

	vpu_write_reg(inst->dev, W5_BS_OPTION, get_bitstream_options(p_dec_info));

	/* secondary AXI */
	reg_val = p_dec_info->sec_axi_info.use_bit_enable |
		  (p_dec_info->sec_axi_info.use_ip_enable << 9) |
		  (p_dec_info->sec_axi_info.use_lf_row_enable << 15);
	vpu_write_reg(inst->dev, W5_USE_SEC_AXI, reg_val);

	/* set attributes of user buffer */
	vpu_write_reg(inst->dev, W5_CMD_DEC_USER_MASK, p_dec_info->user_data_enable);

	vpu_write_reg(inst->dev, W5_COMMAND_OPTION, DEC_PIC_NORMAL);
	vpu_write_reg(inst->dev, W5_CMD_DEC_TEMPORAL_ID_PLUS1,
		      (p_dec_info->target_spatial_id << 9) |
		      (p_dec_info->temp_id_select_mode << 8) | p_dec_info->target_temp_id);
	vpu_write_reg(inst->dev, W5_CMD_SEQ_CHANGE_ENABLE_FLAG, p_dec_info->seq_change_mask);
	/* When reordering is disabled we force the latency of the framebuffers */
	vpu_write_reg(inst->dev, W5_CMD_DEC_FORCE_FB_LATENCY_PLUS1, !p_dec_info->reorder_enable);

	ret = send_firmware_command(inst, W5_DEC_ENC_PIC, true, &reg_val, fail_res);
	if (ret == -ETIMEDOUT)
		return ret;

	p_dec_info->instance_queue_count = (reg_val >> 16) & 0xff;
	p_dec_info->report_queue_count = (reg_val & QUEUE_REPORT_MASK);

	dev_dbg(inst->dev->dev, "%s: dec pic sent (queue %u : %u)\n", __func__,
		p_dec_info->instance_queue_count, p_dec_info->report_queue_count);

	if (ret)
		return ret;

	return 0;
}

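/*
 * Parse the decode result. W5_RET_DEC_PIC_TYPE reports I/P/B as individual
 * bits and the NAL unit type in bits 9:4, which is used to promote I
 * pictures to IDR (HEVC NUT 19/20, AVC NUT 5). Frame cycle counts are
 * derived from the host-command/decode-end tick registers, scaled by
 * cycle_per_tick.
 */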
int wave5_vpu_dec_get_result(struct vpu_instance *inst, struct dec_output_info *result)
{
	int ret;
	u32 index, nal_unit_type, reg_val, sub_layer_info;
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	struct vpu_device *vpu_dev = inst->dev;

	vpu_write_reg(inst->dev, W5_CMD_DEC_ADDR_REPORT_BASE, p_dec_info->user_data_buf_addr);
	vpu_write_reg(inst->dev, W5_CMD_DEC_REPORT_SIZE, p_dec_info->user_data_buf_size);
	vpu_write_reg(inst->dev, W5_CMD_DEC_REPORT_PARAM, REPORT_PARAM_ENDIANNESS_BIG_ENDIAN);

	/* send QUERY cmd */
	ret = wave5_send_query(vpu_dev, inst, GET_RESULT);
	if (ret)
		return ret;

	reg_val = vpu_read_reg(inst->dev, W5_RET_QUEUE_STATUS);

	p_dec_info->instance_queue_count = (reg_val >> 16) & 0xff;
	p_dec_info->report_queue_count = (reg_val & QUEUE_REPORT_MASK);

	dev_dbg(inst->dev->dev, "%s: dec pic complete (queue %u : %u)\n", __func__,
		p_dec_info->instance_queue_count, p_dec_info->report_queue_count);

	reg_val = vpu_read_reg(inst->dev, W5_RET_DEC_PIC_TYPE);

	nal_unit_type = (reg_val >> 4) & 0x3f;

	if (inst->std == W_HEVC_DEC) {
		if (reg_val & 0x04)
			result->pic_type = PIC_TYPE_B;
		else if (reg_val & 0x02)
			result->pic_type = PIC_TYPE_P;
		else if (reg_val & 0x01)
			result->pic_type = PIC_TYPE_I;
		else
			result->pic_type = PIC_TYPE_MAX;
		if ((nal_unit_type == 19 || nal_unit_type == 20) && result->pic_type == PIC_TYPE_I)
			/* IDR_W_RADL, IDR_N_LP */
			result->pic_type = PIC_TYPE_IDR;
	} else if (inst->std == W_AVC_DEC) {
		if (reg_val & 0x04)
			result->pic_type = PIC_TYPE_B;
		else if (reg_val & 0x02)
			result->pic_type = PIC_TYPE_P;
		else if (reg_val & 0x01)
			result->pic_type = PIC_TYPE_I;
		else
			result->pic_type = PIC_TYPE_MAX;
		if (nal_unit_type == 5 && result->pic_type == PIC_TYPE_I)
			result->pic_type = PIC_TYPE_IDR;
	}
	index = vpu_read_reg(inst->dev, W5_RET_DEC_DISPLAY_INDEX);
	result->index_frame_display = index;
	index = vpu_read_reg(inst->dev, W5_RET_DEC_DECODED_INDEX);
	result->index_frame_decoded = index;
	result->index_frame_decoded_for_tiled = index;

	sub_layer_info = vpu_read_reg(inst->dev, W5_RET_DEC_SUB_LAYER_INFO);
	result->temporal_id = sub_layer_info & 0x7;

	if (inst->std == W_HEVC_DEC || inst->std == W_AVC_DEC) {
		result->decoded_poc = -1;
		if (result->index_frame_decoded >= 0 ||
		    result->index_frame_decoded == DECODED_IDX_FLAG_SKIP)
			result->decoded_poc = vpu_read_reg(inst->dev, W5_RET_DEC_PIC_POC);
	}

	result->sequence_changed = vpu_read_reg(inst->dev, W5_RET_DEC_NOTIFICATION);
	reg_val = vpu_read_reg(inst->dev, W5_RET_DEC_PIC_SIZE);
	result->dec_pic_width = reg_val >> 16;
	result->dec_pic_height = reg_val & 0xffff;

	if (result->sequence_changed) {
		memcpy(&p_dec_info->new_seq_info, &p_dec_info->initial_info,
		       sizeof(struct dec_initial_info));
		wave5_get_dec_seq_result(inst, &p_dec_info->new_seq_info);
	}

	result->dec_host_cmd_tick = vpu_read_reg(inst->dev, W5_RET_DEC_HOST_CMD_TICK);
	result->dec_decode_end_tick = vpu_read_reg(inst->dev, W5_RET_DEC_DECODING_ENC_TICK);

	if (!p_dec_info->first_cycle_check) {
		result->frame_cycle =
			(result->dec_decode_end_tick - result->dec_host_cmd_tick) *
			p_dec_info->cycle_per_tick;
		vpu_dev->last_performance_cycles = result->dec_decode_end_tick;
		p_dec_info->first_cycle_check = true;
	} else if (result->index_frame_decoded_for_tiled != -1) {
		result->frame_cycle =
			(result->dec_decode_end_tick - vpu_dev->last_performance_cycles) *
			p_dec_info->cycle_per_tick;
		vpu_dev->last_performance_cycles = result->dec_decode_end_tick;
		if (vpu_dev->last_performance_cycles < result->dec_host_cmd_tick)
			result->frame_cycle =
				(result->dec_decode_end_tick - result->dec_host_cmd_tick) *
				p_dec_info->cycle_per_tick;
	}

	/* no remaining command. reset frame cycle. */
	if (p_dec_info->instance_queue_count == 0 && p_dec_info->report_queue_count == 0)
		p_dec_info->first_cycle_check = false;

	return 0;
}

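/*
 * Re-initialization after a firmware reload. If W5_VPU_REMAP_PADDR still
 * points at our code buffer, the firmware is already resident and only the
 * attribute query is redone; otherwise the full download, reset and
 * W5_INIT_VPU boot sequence is repeated.
 */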
int wave5_vpu_re_init(struct device *dev, u8 *fw, size_t size)
{
	struct vpu_buf *common_vb;
	dma_addr_t code_base, temp_base;
	dma_addr_t old_code_base, temp_size;
	u32 code_size, reason_code;
	u32 reg_val;
	struct vpu_device *vpu_dev = dev_get_drvdata(dev);

	common_vb = &vpu_dev->common_mem;

	code_base = common_vb->daddr;
	/* ALIGN TO 4KB */
	code_size = (WAVE5_MAX_CODE_BUF_SIZE & ~0xfff);
	if (code_size < size * 2)
		return -EINVAL;
	temp_base = common_vb->daddr + WAVE5_TEMPBUF_OFFSET;
	temp_size = WAVE5_TEMPBUF_SIZE;

	old_code_base = vpu_read_reg(vpu_dev, W5_VPU_REMAP_PADDR);

	if (old_code_base != code_base + W5_REMAP_INDEX1 * W5_REMAP_MAX_SIZE) {
		int ret;

		ret = wave5_vdi_write_memory(vpu_dev, common_vb, 0, fw, size);
		if (ret < 0) {
			dev_err(vpu_dev->dev,
				"VPU init, Writing firmware to common buffer, fail: %d\n", ret);
			return ret;
		}

		vpu_write_reg(vpu_dev, W5_PO_CONF, 0);

		ret = wave5_vpu_reset(dev, SW_RESET_ON_BOOT);
		if (ret < 0) {
			dev_err(vpu_dev->dev, "VPU init, Resetting the VPU, fail: %d\n", ret);
			return ret;
		}

		remap_page(vpu_dev, code_base, W5_REMAP_INDEX0);
		remap_page(vpu_dev, code_base, W5_REMAP_INDEX1);

		vpu_write_reg(vpu_dev, W5_ADDR_CODE_BASE, code_base);
		vpu_write_reg(vpu_dev, W5_CODE_SIZE, code_size);
		vpu_write_reg(vpu_dev, W5_CODE_PARAM, (WAVE5_UPPER_PROC_AXI_ID << 4) | 0);
		vpu_write_reg(vpu_dev, W5_ADDR_TEMP_BASE, temp_base);
		vpu_write_reg(vpu_dev, W5_TEMP_SIZE, temp_size);

		/* These registers must be reset explicitly */
		vpu_write_reg(vpu_dev, W5_HW_OPTION, 0);
		wave5_fio_writel(vpu_dev, W5_BACKBONE_PROC_EXT_ADDR, 0);
		wave5_fio_writel(vpu_dev, W5_BACKBONE_AXI_PARAM, 0);
		vpu_write_reg(vpu_dev, W5_SEC_AXI_PARAM, 0);

		/* Encoder interrupt */
		reg_val = BIT(INT_WAVE5_ENC_SET_PARAM);
		reg_val |= BIT(INT_WAVE5_ENC_PIC);
		reg_val |= BIT(INT_WAVE5_BSBUF_FULL);
		/* Decoder interrupt */
		reg_val |= BIT(INT_WAVE5_INIT_SEQ);
		reg_val |= BIT(INT_WAVE5_DEC_PIC);
		reg_val |= BIT(INT_WAVE5_BSBUF_EMPTY);
		vpu_write_reg(vpu_dev, W5_VPU_VINT_ENABLE, reg_val);

		reg_val = vpu_read_reg(vpu_dev, W5_VPU_RET_VPU_CONFIG0);
		if (FIELD_GET(FEATURE_BACKBONE, reg_val)) {
			reg_val = ((WAVE5_PROC_AXI_ID << 28) |
				   (WAVE5_PRP_AXI_ID << 24) |
				   (WAVE5_FBD_Y_AXI_ID << 20) |
				   (WAVE5_FBC_Y_AXI_ID << 16) |
				   (WAVE5_FBD_C_AXI_ID << 12) |
				   (WAVE5_FBC_C_AXI_ID << 8) |
				   (WAVE5_PRI_AXI_ID << 4) |
				   WAVE5_SEC_AXI_ID);
			wave5_fio_writel(vpu_dev, W5_BACKBONE_PROG_AXI_ID, reg_val);
		}

		vpu_write_reg(vpu_dev, W5_VPU_BUSY_STATUS, 1);
		vpu_write_reg(vpu_dev, W5_COMMAND, W5_INIT_VPU);
		vpu_write_reg(vpu_dev, W5_VPU_REMAP_CORE_START, 1);

		ret = wave5_wait_vpu_busy(vpu_dev, W5_VPU_BUSY_STATUS);
		if (ret) {
			dev_err(vpu_dev->dev, "VPU reinit(W5_VPU_REMAP_CORE_START) timeout\n");
			return ret;
		}

		ret = wave5_vpu_firmware_command_queue_error_check(vpu_dev, &reason_code);
		if (ret)
			return ret;
	}

	return setup_wave5_properties(dev);
}

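/*
 * Power management helper. On suspend (i_sleep_wake == true) the firmware
 * saves its state in response to W5_SLEEP_VPU; on resume the registers are
 * reprogrammed as in wave5_vpu_init(), but the core is restarted with
 * W5_WAKEUP_VPU so that the firmware image already present in the code
 * buffer is reused instead of being downloaded again.
 */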
static int wave5_vpu_sleep_wake(struct device *dev, bool i_sleep_wake, const uint16_t *code,
				size_t size)
{
	u32 reg_val;
	struct vpu_buf *common_vb;
	dma_addr_t code_base;
	u32 code_size, reason_code;
	struct vpu_device *vpu_dev = dev_get_drvdata(dev);
	int ret;

	if (i_sleep_wake) {
		ret = wave5_wait_vpu_busy(vpu_dev, W5_VPU_BUSY_STATUS);
		if (ret)
			return ret;

		/*
		 * Declare who has ownership of the host interface access:
		 * 1 = VPU
		 * 0 = Host processor
		 */
		vpu_write_reg(vpu_dev, W5_VPU_BUSY_STATUS, 1);
		vpu_write_reg(vpu_dev, W5_COMMAND, W5_SLEEP_VPU);
		/* Send an interrupt named HOST to the VPU */
		vpu_write_reg(vpu_dev, W5_VPU_HOST_INT_REQ, 1);

		ret = wave5_wait_vpu_busy(vpu_dev, W5_VPU_BUSY_STATUS);
		if (ret)
			return ret;

		ret = wave5_vpu_firmware_command_queue_error_check(vpu_dev, &reason_code);
		if (ret)
			return ret;
	} else { /* restore */
		common_vb = &vpu_dev->common_mem;

		code_base = common_vb->daddr;
		/* ALIGN TO 4KB */
		code_size = (WAVE5_MAX_CODE_BUF_SIZE & ~0xfff);
		if (code_size < size * 2) {
			dev_err(dev, "size too small\n");
			return -EINVAL;
		}

		/* Power on without DEBUG mode */
		vpu_write_reg(vpu_dev, W5_PO_CONF, 0);

		remap_page(vpu_dev, code_base, W5_REMAP_INDEX0);
		remap_page(vpu_dev, code_base, W5_REMAP_INDEX1);

		vpu_write_reg(vpu_dev, W5_ADDR_CODE_BASE, code_base);
		vpu_write_reg(vpu_dev, W5_CODE_SIZE, code_size);
		vpu_write_reg(vpu_dev, W5_CODE_PARAM, (WAVE5_UPPER_PROC_AXI_ID << 4) | 0);

		/* These registers must be reset explicitly */
		vpu_write_reg(vpu_dev, W5_HW_OPTION, 0);
		wave5_fio_writel(vpu_dev, W5_BACKBONE_PROC_EXT_ADDR, 0);
		wave5_fio_writel(vpu_dev, W5_BACKBONE_AXI_PARAM, 0);
		vpu_write_reg(vpu_dev, W5_SEC_AXI_PARAM, 0);

		/* Encoder interrupt */
		reg_val = BIT(INT_WAVE5_ENC_SET_PARAM);
		reg_val |= BIT(INT_WAVE5_ENC_PIC);
		reg_val |= BIT(INT_WAVE5_BSBUF_FULL);
		/* Decoder interrupt */
		reg_val |= BIT(INT_WAVE5_INIT_SEQ);
		reg_val |= BIT(INT_WAVE5_DEC_PIC);
		reg_val |= BIT(INT_WAVE5_BSBUF_EMPTY);
		vpu_write_reg(vpu_dev, W5_VPU_VINT_ENABLE, reg_val);

		reg_val = vpu_read_reg(vpu_dev, W5_VPU_RET_VPU_CONFIG0);
		if (FIELD_GET(FEATURE_BACKBONE, reg_val)) {
			reg_val = ((WAVE5_PROC_AXI_ID << 28) |
				   (WAVE5_PRP_AXI_ID << 24) |
				   (WAVE5_FBD_Y_AXI_ID << 20) |
				   (WAVE5_FBC_Y_AXI_ID << 16) |
				   (WAVE5_FBD_C_AXI_ID << 12) |
				   (WAVE5_FBC_C_AXI_ID << 8) |
				   (WAVE5_PRI_AXI_ID << 4) |
				   WAVE5_SEC_AXI_ID);
			wave5_fio_writel(vpu_dev, W5_BACKBONE_PROG_AXI_ID, reg_val);
		}

		vpu_write_reg(vpu_dev, W5_VPU_BUSY_STATUS, 1);
		vpu_write_reg(vpu_dev, W5_COMMAND, W5_WAKEUP_VPU);
		/* Start VPU after settings */
		vpu_write_reg(vpu_dev, W5_VPU_REMAP_CORE_START, 1);

		ret = wave5_wait_vpu_busy(vpu_dev, W5_VPU_BUSY_STATUS);
		if (ret) {
			dev_err(vpu_dev->dev, "VPU wakeup(W5_VPU_REMAP_CORE_START) timeout\n");
			return ret;
		}

		return wave5_vpu_firmware_command_queue_error_check(vpu_dev, &reason_code);
	}

	return 0;
}

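/*
 * Software reset. Outstanding bus traffic is drained (through the backbone
 * or the legacy GDI interface, depending on the IP revision) before
 * W5_VPU_RESET_REQ is asserted, and the bus controls are released again
 * once the reset has latched. SW_RESET_SAFETY additionally puts the
 * firmware to sleep beforehand; both SW_RESET_SAFETY and SW_RESET_FORCE
 * wake it afterwards.
 */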
int wave5_vpu_reset(struct device *dev, enum sw_reset_mode reset_mode)
{
	u32 val = 0;
	int ret = 0;
	struct vpu_device *vpu_dev = dev_get_drvdata(dev);
	struct vpu_attr *p_attr = &vpu_dev->attr;

	/* The VPU doesn't send a response; force the BUSY flag to 0 */
	vpu_write_reg(vpu_dev, W5_VPU_BUSY_STATUS, 0);

	if (reset_mode == SW_RESET_SAFETY) {
		ret = wave5_vpu_sleep_wake(dev, true, NULL, 0);
		if (ret)
			return ret;
	}

	val = vpu_read_reg(vpu_dev, W5_VPU_RET_VPU_CONFIG0);
	if ((val >> 16) & 0x1)
		p_attr->support_backbone = true;
	if ((val >> 22) & 0x1)
		p_attr->support_vcore_backbone = true;
	if ((val >> 28) & 0x1)
		p_attr->support_vcpu_backbone = true;

	/* waiting for completion of bus transaction */
	if (p_attr->support_backbone) {
		dev_dbg(dev, "%s: backbone supported\n", __func__);

		if (p_attr->support_vcore_backbone) {
			if (p_attr->support_vcpu_backbone) {
				/* step1 : disable request */
				wave5_fio_writel(vpu_dev, W5_BACKBONE_BUS_CTRL_VCPU, 0xFF);

				/* step2 : waiting for completion of bus transaction */
				ret = wave5_wait_vcpu_bus_busy(vpu_dev,
							       W5_BACKBONE_BUS_STATUS_VCPU);
				if (ret) {
					wave5_fio_writel(vpu_dev, W5_BACKBONE_BUS_CTRL_VCPU, 0x00);
					return ret;
				}
			}
			/* step1 : disable request */
			wave5_fio_writel(vpu_dev, W5_BACKBONE_BUS_CTRL_VCORE0, 0x7);

			/* step2 : waiting for completion of bus transaction */
			if (wave5_wait_bus_busy(vpu_dev, W5_BACKBONE_BUS_STATUS_VCORE0)) {
				wave5_fio_writel(vpu_dev, W5_BACKBONE_BUS_CTRL_VCORE0, 0x00);
				return -EBUSY;
			}
		} else {
			/* step1 : disable request */
			wave5_fio_writel(vpu_dev, W5_COMBINED_BACKBONE_BUS_CTRL, 0x7);

			/* step2 : waiting for completion of bus transaction */
			if (wave5_wait_bus_busy(vpu_dev, W5_COMBINED_BACKBONE_BUS_STATUS)) {
				wave5_fio_writel(vpu_dev, W5_COMBINED_BACKBONE_BUS_CTRL, 0x00);
				return -EBUSY;
			}
		}
	} else {
		dev_dbg(dev, "%s: backbone NOT supported\n", __func__);
		/* step1 : disable request */
		wave5_fio_writel(vpu_dev, W5_GDI_BUS_CTRL, 0x100);

		/* step2 : waiting for completion of bus transaction */
		ret = wave5_wait_bus_busy(vpu_dev, W5_GDI_BUS_STATUS);
		if (ret) {
			wave5_fio_writel(vpu_dev, W5_GDI_BUS_CTRL, 0x00);
			return ret;
		}
	}

	switch (reset_mode) {
	case SW_RESET_ON_BOOT:
	case SW_RESET_FORCE:
	case SW_RESET_SAFETY:
		val = W5_RST_BLOCK_ALL;
		break;
	default:
		return -EINVAL;
	}

	if (val) {
		vpu_write_reg(vpu_dev, W5_VPU_RESET_REQ, val);

		ret = wave5_wait_vpu_busy(vpu_dev, W5_VPU_RESET_STATUS);
		if (ret) {
			vpu_write_reg(vpu_dev, W5_VPU_RESET_REQ, 0);
			return ret;
		}
		vpu_write_reg(vpu_dev, W5_VPU_RESET_REQ, 0);
	}
	/* step3 : GDI_BUS_CTRL must be cleared once SW_RESET is done */
	if (p_attr->support_backbone) {
		if (p_attr->support_vcore_backbone) {
			if (p_attr->support_vcpu_backbone)
				wave5_fio_writel(vpu_dev, W5_BACKBONE_BUS_CTRL_VCPU, 0x00);
			wave5_fio_writel(vpu_dev, W5_BACKBONE_BUS_CTRL_VCORE0, 0x00);
		} else {
			wave5_fio_writel(vpu_dev, W5_COMBINED_BACKBONE_BUS_CTRL, 0x00);
		}
	} else {
		wave5_fio_writel(vpu_dev, W5_GDI_BUS_CTRL, 0x00);
	}
	if (reset_mode == SW_RESET_SAFETY || reset_mode == SW_RESET_FORCE)
		ret = wave5_vpu_sleep_wake(dev, false, NULL, 0);

	return ret;
}

int wave5_vpu_dec_finish_seq(struct vpu_instance *inst, u32 *fail_res)
{
	return send_firmware_command(inst, W5_DESTROY_INSTANCE, true, NULL, fail_res);
}

int wave5_vpu_dec_set_bitstream_flag(struct vpu_instance *inst, bool eos)
{
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;

	p_dec_info->stream_endflag = eos ? 1 : 0;
	vpu_write_reg(inst->dev, W5_BS_OPTION, get_bitstream_options(p_dec_info));
	vpu_write_reg(inst->dev, W5_BS_WR_PTR, p_dec_info->stream_wr_ptr);

	return send_firmware_command(inst, W5_UPDATE_BS, true, NULL, NULL);
}

int wave5_dec_clr_disp_flag(struct vpu_instance *inst, unsigned int index)
{
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	int ret;

	vpu_write_reg(inst->dev, W5_CMD_DEC_CLR_DISP_IDC, BIT(index));
	vpu_write_reg(inst->dev, W5_CMD_DEC_SET_DISP_IDC, 0);

	ret = wave5_send_query(inst->dev, inst, UPDATE_DISP_FLAG);
	if (ret)
		return ret;

	p_dec_info->frame_display_flag = vpu_read_reg(inst->dev, W5_RET_DEC_DISP_IDC);

	return 0;
}

int wave5_dec_set_disp_flag(struct vpu_instance *inst, unsigned int index)
{
	int ret;

	vpu_write_reg(inst->dev, W5_CMD_DEC_CLR_DISP_IDC, 0);
	vpu_write_reg(inst->dev, W5_CMD_DEC_SET_DISP_IDC, BIT(index));

	ret = wave5_send_query(inst->dev, inst, UPDATE_DISP_FLAG);
	if (ret)
		return ret;

	return 0;
}

int wave5_vpu_clear_interrupt(struct vpu_instance *inst, u32 flags)
{
	u32 interrupt_reason;

	interrupt_reason = vpu_read_reg(inst->dev, W5_VPU_VINT_REASON_USR);
	interrupt_reason &= ~flags;
	vpu_write_reg(inst->dev, W5_VPU_VINT_REASON_USR, interrupt_reason);

	return 0;
}

dma_addr_t wave5_dec_get_rd_ptr(struct vpu_instance *inst)
{
	int ret;

	ret = wave5_send_query(inst->dev, inst, GET_BS_RD_PTR);
	if (ret)
		return inst->codec_info->dec_info.stream_rd_ptr;

	return vpu_read_reg(inst->dev, W5_RET_QUERY_DEC_BS_RD_PTR);
}

int wave5_dec_set_rd_ptr(struct vpu_instance *inst, dma_addr_t addr)
{
	vpu_write_reg(inst->dev, W5_RET_QUERY_DEC_SET_BS_RD_PTR, addr);

	return wave5_send_query(inst->dev, inst, SET_BS_RD_PTR);
}

/************************************************************************/
/*                          ENCODER functions                           */
/************************************************************************/

int wave5_vpu_build_up_enc_param(struct device *dev, struct vpu_instance *inst,
				 struct enc_open_param *open_param)
{
	int ret;
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
	u32 reg_val;
	struct vpu_device *vpu_dev = dev_get_drvdata(dev);
	dma_addr_t buffer_addr;
	size_t buffer_size;

	p_enc_info->cycle_per_tick = 256;
	if (vpu_dev->sram_buf.size) {
		p_enc_info->sec_axi_info.use_enc_rdo_enable = 1;
		p_enc_info->sec_axi_info.use_enc_lf_enable = 1;
	}

	p_enc_info->vb_work.size = WAVE521ENC_WORKBUF_SIZE;
	ret = wave5_vdi_allocate_dma_memory(vpu_dev, &p_enc_info->vb_work);
	if (ret) {
		memset(&p_enc_info->vb_work, 0, sizeof(p_enc_info->vb_work));
		return ret;
	}

	wave5_vdi_clear_memory(vpu_dev, &p_enc_info->vb_work);

	vpu_write_reg(inst->dev, W5_ADDR_WORK_BASE, p_enc_info->vb_work.daddr);
	vpu_write_reg(inst->dev, W5_WORK_SIZE, p_enc_info->vb_work.size);

	vpu_write_reg(inst->dev, W5_CMD_ADDR_SEC_AXI, vpu_dev->sram_buf.daddr);
	vpu_write_reg(inst->dev, W5_CMD_SEC_AXI_SIZE, vpu_dev->sram_buf.size);

	reg_val = (open_param->line_buf_int_en << 6) | BITSTREAM_ENDIANNESS_BIG_ENDIAN;
	vpu_write_reg(inst->dev, W5_CMD_BS_PARAM, reg_val);
	vpu_write_reg(inst->dev, W5_CMD_EXT_ADDR, 0);
	vpu_write_reg(inst->dev, W5_CMD_NUM_CQ_DEPTH_M1, (COMMAND_QUEUE_DEPTH - 1));

	/* This register must be reset explicitly */
	vpu_write_reg(inst->dev, W5_CMD_ENC_SRC_OPTIONS, 0);
	vpu_write_reg(inst->dev, W5_CMD_ENC_VCORE_INFO, 1);

	ret = send_firmware_command(inst, W5_CREATE_INSTANCE, true, NULL, NULL);
	if (ret)
		goto free_vb_work;

	buffer_addr = open_param->bitstream_buffer;
	buffer_size = open_param->bitstream_buffer_size;
	p_enc_info->stream_rd_ptr = buffer_addr;
	p_enc_info->stream_wr_ptr = buffer_addr;
	p_enc_info->line_buf_int_en = open_param->line_buf_int_en;
	p_enc_info->stream_buf_start_addr = buffer_addr;
	p_enc_info->stream_buf_size = buffer_size;
	p_enc_info->stream_buf_end_addr = buffer_addr + buffer_size;
	p_enc_info->stride = 0;
	p_enc_info->initial_info_obtained = false;
	p_enc_info->product_code = vpu_read_reg(inst->dev, W5_PRODUCT_NUMBER);

	return 0;
free_vb_work:
	if (wave5_vdi_free_dma_memory(vpu_dev, &p_enc_info->vb_work))
		memset(&p_enc_info->vb_work, 0, sizeof(p_enc_info->vb_work));
	return ret;
}

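/*
 * The encoder aligns the source to CTU/macroblock boundaries (32 for HEVC,
 * 16 for AVC), so right/bottom padding must be folded into the conformance
 * window. When the picture is rotated or mirrored (prp_mode), the four
 * crop offsets are permuted accordingly before being handed to the
 * firmware.
 */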
static void wave5_set_enc_crop_info(u32 codec, struct enc_wave_param *param, int rot_mode,
				    int src_width, int src_height)
{
	int aligned_width = (codec == W_HEVC_ENC) ? ALIGN(src_width, 32) : ALIGN(src_width, 16);
	int aligned_height = (codec == W_HEVC_ENC) ? ALIGN(src_height, 32) : ALIGN(src_height, 16);
	int pad_right, pad_bot;
	int crop_right, crop_left, crop_top, crop_bot;
	int prp_mode = rot_mode >> 1; /* remove prp_enable bit */

	if (codec == W_HEVC_ENC &&
	    (!rot_mode || prp_mode == 14)) /* prp_mode 14 : hor_mir && ver_mir && rot_180 */
		return;

	pad_right = aligned_width - src_width;
	pad_bot = aligned_height - src_height;

	if (param->conf_win_right > 0)
		crop_right = param->conf_win_right + pad_right;
	else
		crop_right = pad_right;

	if (param->conf_win_bot > 0)
		crop_bot = param->conf_win_bot + pad_bot;
	else
		crop_bot = pad_bot;

	crop_top = param->conf_win_top;
	crop_left = param->conf_win_left;

	param->conf_win_top = crop_top;
	param->conf_win_left = crop_left;
	param->conf_win_bot = crop_bot;
	param->conf_win_right = crop_right;

	switch (prp_mode) {
	case 0:
		return;
	case 1:
	case 15:
		param->conf_win_top = crop_right;
		param->conf_win_left = crop_top;
		param->conf_win_bot = crop_left;
		param->conf_win_right = crop_bot;
		break;
	case 2:
	case 12:
		param->conf_win_top = crop_bot;
		param->conf_win_left = crop_right;
		param->conf_win_bot = crop_top;
		param->conf_win_right = crop_left;
		break;
	case 3:
	case 13:
		param->conf_win_top = crop_left;
		param->conf_win_left = crop_bot;
		param->conf_win_bot = crop_right;
		param->conf_win_right = crop_top;
		break;
	case 4:
	case 10:
		param->conf_win_top = crop_bot;
		param->conf_win_bot = crop_top;
		break;
	case 8:
	case 6:
		param->conf_win_left = crop_right;
		param->conf_win_right = crop_left;
		break;
	case 5:
	case 11:
		param->conf_win_top = crop_left;
		param->conf_win_left = crop_top;
		param->conf_win_bot = crop_right;
		param->conf_win_right = crop_bot;
		break;
	case 7:
	case 9:
		param->conf_win_top = crop_right;
		param->conf_win_left = crop_bot;
		param->conf_win_bot = crop_left;
		param->conf_win_right = crop_top;
		break;
	default:
		WARN(1, "Invalid prp_mode: %d, must be in the range 1 - 15\n", prp_mode);
	}
}

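/*
 * ENC_SET_PARAM with OPT_COMMON programs the whole sequence state in one
 * command: SPS fields (profile/level/tier/bit depth), PPS fields (loop
 * filter, QP offsets), GOP structure, intra refresh and rate control, each
 * packed into one of the W5_CMD_ENC_SEQ_* registers below.
 */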
int wave5_vpu_enc_init_seq(struct vpu_instance *inst)
{
	u32 reg_val = 0, rot_mir_mode, fixed_cu_size_mode = 0x7;
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
	struct enc_open_param *p_open_param = &p_enc_info->open_param;
	struct enc_wave_param *p_param = &p_open_param->wave_param;

	/*
	 * OPT_COMMON:
	 * the last SET_PARAM command should be called with OPT_COMMON
	 */
	rot_mir_mode = 0;
	if (p_enc_info->rotation_enable) {
		switch (p_enc_info->rotation_angle) {
		case 0:
			rot_mir_mode |= NONE_ROTATE;
			break;
		case 90:
			rot_mir_mode |= ROT_CLOCKWISE_90;
			break;
		case 180:
			rot_mir_mode |= ROT_CLOCKWISE_180;
			break;
		case 270:
			rot_mir_mode |= ROT_CLOCKWISE_270;
			break;
		}
	}

	if (p_enc_info->mirror_enable) {
		switch (p_enc_info->mirror_direction) {
		case MIRDIR_NONE:
			rot_mir_mode |= NONE_ROTATE;
			break;
		case MIRDIR_VER:
			rot_mir_mode |= MIR_VER_FLIP;
			break;
		case MIRDIR_HOR:
			rot_mir_mode |= MIR_HOR_FLIP;
			break;
		case MIRDIR_HOR_VER:
			rot_mir_mode |= MIR_HOR_VER_FLIP;
			break;
		}
	}

	wave5_set_enc_crop_info(inst->std, p_param, rot_mir_mode, p_open_param->pic_width,
				p_open_param->pic_height);

	/* SET_PARAM + COMMON */
	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_SET_PARAM_OPTION, OPT_COMMON);
	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_SRC_SIZE, p_open_param->pic_height << 16 |
		      p_open_param->pic_width);
	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_CUSTOM_MAP_ENDIAN, VDI_LITTLE_ENDIAN);

	reg_val = p_param->profile |
		  (p_param->level << 3) |
		  (p_param->internal_bit_depth << 14);
	if (inst->std == W_HEVC_ENC)
		reg_val |= (p_param->tier << 12) |
			   (p_param->tmvp_enable << 23) |
			   (p_param->sao_enable << 24) |
			   (p_param->skip_intra_trans << 25) |
			   (p_param->strong_intra_smooth_enable << 27) |
			   (p_param->en_still_picture << 30);
	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_SPS_PARAM, reg_val);

	reg_val = (p_param->lossless_enable) |
		  (p_param->const_intra_pred_flag << 1) |
		  (p_param->lf_cross_slice_boundary_enable << 2) |
		  (p_param->wpp_enable << 4) |
		  (p_param->disable_deblk << 5) |
		  ((p_param->beta_offset_div2 & 0xF) << 6) |
		  ((p_param->tc_offset_div2 & 0xF) << 10) |
		  ((p_param->chroma_cb_qp_offset & 0x1F) << 14) |
		  ((p_param->chroma_cr_qp_offset & 0x1F) << 19) |
		  (p_param->transform8x8_enable << 29) |
		  (p_param->entropy_coding_mode << 30);
	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_PPS_PARAM, reg_val);

	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_GOP_PARAM, p_param->gop_preset_idx);

	if (inst->std == W_AVC_ENC)
		vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_INTRA_PARAM, p_param->intra_qp |
			      ((p_param->intra_period & 0x7ff) << 6) |
			      ((p_param->avc_idr_period & 0x7ff) << 17));
	else if (inst->std == W_HEVC_ENC)
		vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_INTRA_PARAM,
			      p_param->decoding_refresh_type | (p_param->intra_qp << 3) |
			      (p_param->intra_period << 16));

	reg_val = (p_param->rdo_skip << 2) |
		  (p_param->lambda_scaling_enable << 3) |
		  (fixed_cu_size_mode << 5) |
		  (p_param->intra_nx_n_enable << 8) |
		  (p_param->max_num_merge << 18);

	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_RDO_PARAM, reg_val);

	if (inst->std == W_AVC_ENC)
		vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_INTRA_REFRESH,
			      p_param->intra_mb_refresh_arg << 16 | p_param->intra_mb_refresh_mode);
	else if (inst->std == W_HEVC_ENC)
		vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_INTRA_REFRESH,
			      p_param->intra_refresh_arg << 16 | p_param->intra_refresh_mode);

	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_RC_FRAME_RATE, p_open_param->frame_rate_info);
	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_RC_TARGET_RATE, p_open_param->bit_rate);

	reg_val = p_open_param->rc_enable |
		  (p_param->hvs_qp_enable << 2) |
		  (p_param->hvs_qp_scale << 4) |
		  ((p_param->initial_rc_qp & 0x3F) << 14) |
		  (p_open_param->vbv_buffer_size << 20);
	if (inst->std == W_AVC_ENC)
		reg_val |= (p_param->mb_level_rc_enable << 1);
	else if (inst->std == W_HEVC_ENC)
		reg_val |= (p_param->cu_level_rc_enable << 1);
	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_RC_PARAM, reg_val);

	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_RC_WEIGHT_PARAM,
		      p_param->rc_weight_buf << 8 | p_param->rc_weight_param);

	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_RC_MIN_MAX_QP, p_param->min_qp_i |
		      (p_param->max_qp_i << 6) | (p_param->hvs_max_delta_qp << 12));

	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_RC_INTER_MIN_MAX_QP, p_param->min_qp_p |
		      (p_param->max_qp_p << 6) | (p_param->min_qp_b << 12) |
		      (p_param->max_qp_b << 18));

	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_RC_BIT_RATIO_LAYER_0_3, 0);
	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_RC_BIT_RATIO_LAYER_4_7, 0);
	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_ROT_PARAM, rot_mir_mode);

	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_BG_PARAM, 0);
	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_CUSTOM_LAMBDA_ADDR, 0);
	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_CONF_WIN_TOP_BOT,
		      p_param->conf_win_bot << 16 | p_param->conf_win_top);
	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_CONF_WIN_LEFT_RIGHT,
		      p_param->conf_win_right << 16 | p_param->conf_win_left);

	if (inst->std == W_AVC_ENC)
		vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_INDEPENDENT_SLICE,
			      p_param->avc_slice_arg << 16 | p_param->avc_slice_mode);
	else if (inst->std == W_HEVC_ENC)
		vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_INDEPENDENT_SLICE,
			      p_param->independ_slice_mode_arg << 16 |
			      p_param->independ_slice_mode);

	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_USER_SCALING_LIST_ADDR, 0);
	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_NUM_UNITS_IN_TICK, 0);
	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_TIME_SCALE, 0);
	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_NUM_TICKS_POC_DIFF_ONE, 0);

	if (inst->std == W_HEVC_ENC) {
		vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_CUSTOM_MD_PU04, 0);
		vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_CUSTOM_MD_PU08, 0);
		vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_CUSTOM_MD_PU16, 0);
		vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_CUSTOM_MD_PU32, 0);
		vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_CUSTOM_MD_CU08, 0);
		vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_CUSTOM_MD_CU16, 0);
		vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_CUSTOM_MD_CU32, 0);
		vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_DEPENDENT_SLICE,
			      p_param->depend_slice_mode_arg << 16 | p_param->depend_slice_mode);

		vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_NR_PARAM, 0);

		vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_NR_WEIGHT,
			      p_param->nr_intra_weight_y |
			      (p_param->nr_intra_weight_cb << 5) |
			      (p_param->nr_intra_weight_cr << 10) |
			      (p_param->nr_inter_weight_y << 15) |
			      (p_param->nr_inter_weight_cb << 20) |
			      (p_param->nr_inter_weight_cr << 25));
	}
	vpu_write_reg(inst->dev, W5_CMD_ENC_SEQ_VUI_HRD_PARAM, 0);

	return send_firmware_command(inst, W5_ENC_SET_PARAM, true, NULL, NULL);
}

int wave5_vpu_enc_get_seq_info(struct vpu_instance *inst, struct enc_initial_info *info)
{
	int ret;
	u32 reg_val;
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;

	/* send QUERY cmd */
	ret = wave5_send_query(inst->dev, inst, GET_RESULT);
	if (ret)
		return ret;

	dev_dbg(inst->dev->dev, "%s: init seq\n", __func__);

	reg_val = vpu_read_reg(inst->dev, W5_RET_QUEUE_STATUS);

	p_enc_info->instance_queue_count = (reg_val >> 16) & 0xff;
	p_enc_info->report_queue_count = (reg_val & QUEUE_REPORT_MASK);

	if (vpu_read_reg(inst->dev, W5_RET_ENC_ENCODING_SUCCESS) != 1) {
		info->seq_init_err_reason = vpu_read_reg(inst->dev, W5_RET_ENC_ERR_INFO);
		ret = -EIO;
	} else {
		info->warn_info = vpu_read_reg(inst->dev, W5_RET_ENC_WARN_INFO);
	}

	info->min_frame_buffer_count = vpu_read_reg(inst->dev, W5_RET_ENC_NUM_REQUIRED_FB);
	info->min_src_frame_count = vpu_read_reg(inst->dev, W5_RET_ENC_MIN_SRC_BUF_NUM);
	info->vlc_buf_size = vpu_read_reg(inst->dev, W5_RET_VLC_BUF_SIZE);
	info->param_buf_size = vpu_read_reg(inst->dev, W5_RET_PARAM_BUF_SIZE);
	p_enc_info->vlc_buf_size = info->vlc_buf_size;
	p_enc_info->param_buf_size = info->param_buf_size;

	return ret;
}

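/*
 * Stride helpers for the compressed (FBC) reference buffers. Reading the
 * expressions: the 16-aligned width is scaled by 4 (8-bit) or 5 (10-bit)
 * and the result is rounded up to a multiple of 32. Worked example for a
 * width of 1920 at 8 bits: ALIGN(1920, 16) * 4 = 7680, ALIGN(7680, 32) =
 * 7680.
 */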
	info->min_frame_buffer_count = vpu_read_reg(inst->dev, W5_RET_ENC_NUM_REQUIRED_FB);
	info->min_src_frame_count = vpu_read_reg(inst->dev, W5_RET_ENC_MIN_SRC_BUF_NUM);
	info->vlc_buf_size = vpu_read_reg(inst->dev, W5_RET_VLC_BUF_SIZE);
	info->param_buf_size = vpu_read_reg(inst->dev, W5_RET_PARAM_BUF_SIZE);
	p_enc_info->vlc_buf_size = info->vlc_buf_size;
	p_enc_info->param_buf_size = info->param_buf_size;

	return ret;
}

static u32 calculate_luma_stride(u32 width, u32 bit_depth)
{
	return ALIGN(ALIGN(width, 16) * ((bit_depth > 8) ? 5 : 4), 32);
}

static u32 calculate_chroma_stride(u32 width, u32 bit_depth)
{
	return ALIGN(ALIGN(width / 2, 16) * ((bit_depth > 8) ? 5 : 4), 32);
}
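
/*
 * Rough worked example for the stride helpers above (illustrative only):
 * for width 1920, calculate_luma_stride() yields
 * ALIGN(ALIGN(1920, 16) * 4, 32) = 7680 at 8-bit depth, and
 * ALIGN(ALIGN(1920, 16) * 5, 32) = 9600 at 10-bit depth.
 */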
int wave5_vpu_enc_register_framebuffer(struct device *dev, struct vpu_instance *inst,
				       struct frame_buffer *fb_arr, enum tiled_map_type map_type,
				       unsigned int count)
{
	struct vpu_device *vpu_dev = dev_get_drvdata(dev);
	int ret = 0;
	u32 stride;
	u32 start_no, end_no;
	size_t remain, idx, j, i, cnt_8_chunk;
	u32 reg_val = 0, pic_size = 0, mv_col_size, fbc_y_tbl_size, fbc_c_tbl_size;
	u32 sub_sampled_size = 0;
	u32 luma_stride, chroma_stride;
	u32 buf_height = 0, buf_width = 0;
	u32 bit_depth;
	bool avc_encoding = (inst->std == W_AVC_ENC);
	struct vpu_buf vb_mv = {0};
	struct vpu_buf vb_fbc_y_tbl = {0};
	struct vpu_buf vb_fbc_c_tbl = {0};
	struct vpu_buf vb_sub_sam_buf = {0};
	struct vpu_buf vb_task = {0};
	struct enc_open_param *p_open_param;
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;

	p_open_param = &p_enc_info->open_param;
	mv_col_size = 0;
	fbc_y_tbl_size = 0;
	fbc_c_tbl_size = 0;
	stride = p_enc_info->stride;
	bit_depth = p_open_param->wave_param.internal_bit_depth;

	if (avc_encoding) {
		buf_width = ALIGN(p_open_param->pic_width, 16);
		buf_height = ALIGN(p_open_param->pic_height, 16);

		if ((p_enc_info->rotation_angle || p_enc_info->mirror_direction) &&
		    !(p_enc_info->rotation_angle == 180 &&
		      p_enc_info->mirror_direction == MIRDIR_HOR_VER)) {
			buf_width = ALIGN(p_open_param->pic_width, 16);
			buf_height = ALIGN(p_open_param->pic_height, 16);
		}

		if (p_enc_info->rotation_angle == 90 || p_enc_info->rotation_angle == 270) {
			buf_width = ALIGN(p_open_param->pic_height, 16);
			buf_height = ALIGN(p_open_param->pic_width, 16);
		}
	} else {
		buf_width = ALIGN(p_open_param->pic_width, 8);
		buf_height = ALIGN(p_open_param->pic_height, 8);

		if ((p_enc_info->rotation_angle || p_enc_info->mirror_direction) &&
		    !(p_enc_info->rotation_angle == 180 &&
		      p_enc_info->mirror_direction == MIRDIR_HOR_VER)) {
			buf_width = ALIGN(p_open_param->pic_width, 32);
			buf_height = ALIGN(p_open_param->pic_height, 32);
		}

		if (p_enc_info->rotation_angle == 90 || p_enc_info->rotation_angle == 270) {
			buf_width = ALIGN(p_open_param->pic_height, 32);
			buf_height = ALIGN(p_open_param->pic_width, 32);
		}
	}

	pic_size = (buf_width << 16) | buf_height;

	if (avc_encoding) {
		mv_col_size = WAVE5_ENC_AVC_BUF_SIZE(buf_width, buf_height);
		vb_mv.daddr = 0;
		vb_mv.size = ALIGN(mv_col_size * count, BUFFER_MARGIN) + BUFFER_MARGIN;
	} else {
		mv_col_size = WAVE5_ENC_HEVC_BUF_SIZE(buf_width, buf_height);
		mv_col_size = ALIGN(mv_col_size, 16);
		vb_mv.daddr = 0;
		vb_mv.size = ALIGN(mv_col_size * count, BUFFER_MARGIN) + BUFFER_MARGIN;
	}

	ret = wave5_vdi_allocate_dma_memory(vpu_dev, &vb_mv);
	if (ret)
		return ret;

	p_enc_info->vb_mv = vb_mv;

	fbc_y_tbl_size = ALIGN(WAVE5_FBC_LUMA_TABLE_SIZE(buf_width, buf_height), 16);
	fbc_c_tbl_size = ALIGN(WAVE5_FBC_CHROMA_TABLE_SIZE(buf_width, buf_height), 16);

	vb_fbc_y_tbl.daddr = 0;
	vb_fbc_y_tbl.size = ALIGN(fbc_y_tbl_size * count, BUFFER_MARGIN) + BUFFER_MARGIN;
	ret = wave5_vdi_allocate_dma_memory(vpu_dev, &vb_fbc_y_tbl);
	if (ret)
		goto free_vb_fbc_y_tbl;

	p_enc_info->vb_fbc_y_tbl = vb_fbc_y_tbl;

	vb_fbc_c_tbl.daddr = 0;
	vb_fbc_c_tbl.size = ALIGN(fbc_c_tbl_size * count, BUFFER_MARGIN) + BUFFER_MARGIN;
	ret = wave5_vdi_allocate_dma_memory(vpu_dev, &vb_fbc_c_tbl);
	if (ret)
		goto free_vb_fbc_c_tbl;

	p_enc_info->vb_fbc_c_tbl = vb_fbc_c_tbl;

	if (avc_encoding)
		sub_sampled_size = WAVE5_SUBSAMPLED_ONE_SIZE_AVC(buf_width, buf_height);
	else
		sub_sampled_size = WAVE5_SUBSAMPLED_ONE_SIZE(buf_width, buf_height);
	vb_sub_sam_buf.size = ALIGN(sub_sampled_size * count, BUFFER_MARGIN) + BUFFER_MARGIN;
	vb_sub_sam_buf.daddr = 0;
	ret = wave5_vdi_allocate_dma_memory(vpu_dev, &vb_sub_sam_buf);
	if (ret)
		goto free_vb_sam_buf;

	p_enc_info->vb_sub_sam_buf = vb_sub_sam_buf;

	vb_task.size = (p_enc_info->vlc_buf_size * VLC_BUF_NUM) +
		       (p_enc_info->param_buf_size * COMMAND_QUEUE_DEPTH);
	vb_task.daddr = 0;
	if (p_enc_info->vb_task.size == 0) {
		ret = wave5_vdi_allocate_dma_memory(vpu_dev, &vb_task);
		if (ret)
			goto free_vb_task;

		p_enc_info->vb_task = vb_task;

		vpu_write_reg(inst->dev, W5_CMD_SET_FB_ADDR_TASK_BUF,
			      p_enc_info->vb_task.daddr);
		vpu_write_reg(inst->dev, W5_CMD_SET_FB_TASK_BUF_SIZE, vb_task.size);
	}

	/* set sub-sampled buffer base addr */
	vpu_write_reg(inst->dev, W5_ADDR_SUB_SAMPLED_FB_BASE, vb_sub_sam_buf.daddr);
	/* set sub-sampled buffer size for one frame */
	vpu_write_reg(inst->dev, W5_SUB_SAMPLED_ONE_FB_SIZE, sub_sampled_size);

	vpu_write_reg(inst->dev, W5_PIC_SIZE, pic_size);

	/* set stride of luma/chroma for compressed buffer */
	if ((p_enc_info->rotation_angle || p_enc_info->mirror_direction) &&
	    !(p_enc_info->rotation_angle == 180 &&
	      p_enc_info->mirror_direction == MIRDIR_HOR_VER)) {
		luma_stride = calculate_luma_stride(buf_width, bit_depth);
		chroma_stride = calculate_chroma_stride(buf_width / 2, bit_depth);
	} else {
		luma_stride = calculate_luma_stride(p_open_param->pic_width, bit_depth);
		chroma_stride = calculate_chroma_stride(p_open_param->pic_width / 2, bit_depth);
	}

	vpu_write_reg(inst->dev, W5_FBC_STRIDE, luma_stride << 16 | chroma_stride);
	vpu_write_reg(inst->dev, W5_COMMON_PIC_INFO, stride);

	remain = count;
	cnt_8_chunk = DIV_ROUND_UP(count, 8);
	idx = 0;
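	/*
	 * Program the frame buffers in batches of up to eight, since only
	 * eight base-address register slots are written per W5_SET_FB command
	 * below. As can be read off the code: W5_SFB_OPTION flags the first
	 * batch (bit 3) and the last batch (bit 4), and W5_SET_FB_NUM carries
	 * the start index in bits [15:8] and the end index in bits [7:0].
	 */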
	for (j = 0; j < cnt_8_chunk; j++) {
		reg_val = (j == cnt_8_chunk - 1) << 4 | ((j == 0) << 3);
		vpu_write_reg(inst->dev, W5_SFB_OPTION, reg_val);
		start_no = j * 8;
		end_no = start_no + ((remain >= 8) ? 8 : remain) - 1;

		vpu_write_reg(inst->dev, W5_SET_FB_NUM, (start_no << 8) | end_no);

		for (i = 0; i < 8 && i < remain; i++) {
			vpu_write_reg(inst->dev, W5_ADDR_LUMA_BASE0 + (i << 4),
				      fb_arr[i + start_no].buf_y);
			vpu_write_reg(inst->dev, W5_ADDR_CB_BASE0 + (i << 4),
				      fb_arr[i + start_no].buf_cb);
			/* luma FBC offset table */
			vpu_write_reg(inst->dev, W5_ADDR_FBC_Y_OFFSET0 + (i << 4),
				      vb_fbc_y_tbl.daddr + idx * fbc_y_tbl_size);
			/* chroma FBC offset table */
			vpu_write_reg(inst->dev, W5_ADDR_FBC_C_OFFSET0 + (i << 4),
				      vb_fbc_c_tbl.daddr + idx * fbc_c_tbl_size);

			vpu_write_reg(inst->dev, W5_ADDR_MV_COL0 + (i << 2),
				      vb_mv.daddr + idx * mv_col_size);
			idx++;
		}
		remain -= i;

		ret = send_firmware_command(inst, W5_SET_FB, false, NULL, NULL);
		if (ret)
			goto free_vb_mem;
	}

	ret = wave5_vpu_firmware_command_queue_error_check(vpu_dev, NULL);
	if (ret)
		goto free_vb_mem;

	return ret;

free_vb_mem:
	wave5_vdi_free_dma_memory(vpu_dev, &vb_task);
free_vb_task:
	wave5_vdi_free_dma_memory(vpu_dev, &vb_sub_sam_buf);
free_vb_sam_buf:
	wave5_vdi_free_dma_memory(vpu_dev, &vb_fbc_c_tbl);
free_vb_fbc_c_tbl:
	wave5_vdi_free_dma_memory(vpu_dev, &vb_fbc_y_tbl);
free_vb_fbc_y_tbl:
	wave5_vdi_free_dma_memory(vpu_dev, &vb_mv);
	return ret;
}

int wave5_vpu_encode(struct vpu_instance *inst, struct enc_param *option, u32 *fail_res)
{
	u32 src_frame_format;
	u32 reg_val = 0;
	u32 src_stride_c = 0;
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
	struct frame_buffer *p_src_frame = option->source_frame;
	struct enc_open_param *p_open_param = &p_enc_info->open_param;
	bool justified = WTL_RIGHT_JUSTIFIED;
	u32 format_no = WTL_PIXEL_8BIT;
	int ret;

	vpu_write_reg(inst->dev, W5_CMD_ENC_BS_START_ADDR, option->pic_stream_buffer_addr);
	vpu_write_reg(inst->dev, W5_CMD_ENC_BS_SIZE, option->pic_stream_buffer_size);
	p_enc_info->stream_buf_start_addr = option->pic_stream_buffer_addr;
	p_enc_info->stream_buf_size = option->pic_stream_buffer_size;
	p_enc_info->stream_buf_end_addr =
		option->pic_stream_buffer_addr + option->pic_stream_buffer_size;

	vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_SRC_AXI_SEL, DEFAULT_SRC_AXI);
	/* secondary AXI */
	reg_val = (p_enc_info->sec_axi_info.use_enc_rdo_enable << 11) |
		  (p_enc_info->sec_axi_info.use_enc_lf_enable << 15);
	vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_USE_SEC_AXI, reg_val);

	vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_REPORT_PARAM, 0);
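
	/*
	 * Bit layout of W5_CMD_ENC_PIC_CODE_OPTION, as can be read off the
	 * shifts used below (illustrative summary): bit 0 implicit header
	 * encode, bit 1 VCL, bit 2 VPS, bit 3 SPS, bit 4 PPS, bit 5 AUD,
	 * bit 6 EOS, bit 7 EOB.
	 */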
	/*
	 * CODEOPT_ENC_VCL implicitly encodes the required headers along with
	 * the VCL data when generating the bitstream. (Use ENC_PUT_VIDEO_HEADER
	 * with give_command to encode only a header.)
	 */
	if (option->code_option.implicit_header_encode)
		vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_CODE_OPTION,
			      CODEOPT_ENC_HEADER_IMPLICIT | CODEOPT_ENC_VCL |
			      (option->code_option.encode_aud << 5) |
			      (option->code_option.encode_eos << 6) |
			      (option->code_option.encode_eob << 7));
	else
		vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_CODE_OPTION,
			      option->code_option.implicit_header_encode |
			      (option->code_option.encode_vcl << 1) |
			      (option->code_option.encode_vps << 2) |
			      (option->code_option.encode_sps << 3) |
			      (option->code_option.encode_pps << 4) |
			      (option->code_option.encode_aud << 5) |
			      (option->code_option.encode_eos << 6) |
			      (option->code_option.encode_eob << 7));

	vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_PIC_PARAM, 0);

	if (option->src_end_flag)
		/* no more source images */
		vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_SRC_PIC_IDX, 0xFFFFFFFF);
	else
		vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_SRC_PIC_IDX, option->src_idx);

	vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_SRC_ADDR_Y, p_src_frame->buf_y);
	vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_SRC_ADDR_U, p_src_frame->buf_cb);
	vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_SRC_ADDR_V, p_src_frame->buf_cr);

	switch (p_open_param->src_format) {
	case FORMAT_420:
	case FORMAT_422:
	case FORMAT_YUYV:
	case FORMAT_YVYU:
	case FORMAT_UYVY:
	case FORMAT_VYUY:
		justified = WTL_LEFT_JUSTIFIED;
		format_no = WTL_PIXEL_8BIT;
		src_stride_c = inst->cbcr_interleave ? p_src_frame->stride :
			       (p_src_frame->stride / 2);
		src_stride_c = (p_open_param->src_format == FORMAT_422) ? src_stride_c * 2 :
			       src_stride_c;
		break;
	case FORMAT_420_P10_16BIT_MSB:
	case FORMAT_422_P10_16BIT_MSB:
	case FORMAT_YUYV_P10_16BIT_MSB:
	case FORMAT_YVYU_P10_16BIT_MSB:
	case FORMAT_UYVY_P10_16BIT_MSB:
	case FORMAT_VYUY_P10_16BIT_MSB:
		justified = WTL_RIGHT_JUSTIFIED;
		format_no = WTL_PIXEL_16BIT;
		src_stride_c = inst->cbcr_interleave ? p_src_frame->stride :
			       (p_src_frame->stride / 2);
		src_stride_c = (p_open_param->src_format ==
				FORMAT_422_P10_16BIT_MSB) ? src_stride_c * 2 : src_stride_c;
		break;
	case FORMAT_420_P10_16BIT_LSB:
	case FORMAT_422_P10_16BIT_LSB:
	case FORMAT_YUYV_P10_16BIT_LSB:
	case FORMAT_YVYU_P10_16BIT_LSB:
	case FORMAT_UYVY_P10_16BIT_LSB:
	case FORMAT_VYUY_P10_16BIT_LSB:
		justified = WTL_LEFT_JUSTIFIED;
		format_no = WTL_PIXEL_16BIT;
		src_stride_c = inst->cbcr_interleave ? p_src_frame->stride :
			       (p_src_frame->stride / 2);
		src_stride_c = (p_open_param->src_format ==
				FORMAT_422_P10_16BIT_LSB) ? src_stride_c * 2 : src_stride_c;
		break;
	case FORMAT_420_P10_32BIT_MSB:
	case FORMAT_422_P10_32BIT_MSB:
	case FORMAT_YUYV_P10_32BIT_MSB:
	case FORMAT_YVYU_P10_32BIT_MSB:
	case FORMAT_UYVY_P10_32BIT_MSB:
	case FORMAT_VYUY_P10_32BIT_MSB:
		justified = WTL_RIGHT_JUSTIFIED;
		format_no = WTL_PIXEL_32BIT;
		src_stride_c = inst->cbcr_interleave ? p_src_frame->stride :
			       ALIGN(p_src_frame->stride / 2, 16) * BIT(inst->cbcr_interleave);
		src_stride_c = (p_open_param->src_format ==
				FORMAT_422_P10_32BIT_MSB) ? src_stride_c * 2 : src_stride_c;
		break;
	case FORMAT_420_P10_32BIT_LSB:
	case FORMAT_422_P10_32BIT_LSB:
	case FORMAT_YUYV_P10_32BIT_LSB:
	case FORMAT_YVYU_P10_32BIT_LSB:
	case FORMAT_UYVY_P10_32BIT_LSB:
	case FORMAT_VYUY_P10_32BIT_LSB:
		justified = WTL_LEFT_JUSTIFIED;
		format_no = WTL_PIXEL_32BIT;
		src_stride_c = inst->cbcr_interleave ? p_src_frame->stride :
			       ALIGN(p_src_frame->stride / 2, 16) * BIT(inst->cbcr_interleave);
		src_stride_c = (p_open_param->src_format ==
				FORMAT_422_P10_32BIT_LSB) ? src_stride_c * 2 : src_stride_c;
		break;
	default:
		return -EINVAL;
	}
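
	/*
	 * Source frame format code, as used below: bit 1 selects Cb/Cr
	 * interleave and bit 0 NV21 ordering for planar input; the values
	 * 4-7 override this for the packed YUYV/YVYU/UYVY/VYUY layouts.
	 */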
	src_frame_format = (inst->cbcr_interleave << 1) | (inst->nv21);
	switch (p_open_param->packed_format) {
	case PACKED_YUYV:
		src_frame_format = 4;
		break;
	case PACKED_YVYU:
		src_frame_format = 5;
		break;
	case PACKED_UYVY:
		src_frame_format = 6;
		break;
	case PACKED_VYUY:
		src_frame_format = 7;
		break;
	default:
		break;
	}

	vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_SRC_STRIDE,
		      (p_src_frame->stride << 16) | src_stride_c);
	vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_SRC_FORMAT, src_frame_format |
		      (format_no << 3) | (justified << 5) | (PIC_SRC_ENDIANNESS_BIG_ENDIAN << 6));

	vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_CUSTOM_MAP_OPTION_ADDR, 0);
	vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_CUSTOM_MAP_OPTION_PARAM, 0);
	vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_LONGTERM_PIC, 0);
	vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_WP_PIXEL_SIGMA_Y, 0);
	vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_WP_PIXEL_SIGMA_C, 0);
	vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_WP_PIXEL_MEAN_Y, 0);
	vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_WP_PIXEL_MEAN_C, 0);
	vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_PREFIX_SEI_INFO, 0);
	vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_PREFIX_SEI_NAL_ADDR, 0);
	vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_SUFFIX_SEI_INFO, 0);
	vpu_write_reg(inst->dev, W5_CMD_ENC_PIC_SUFFIX_SEI_NAL_ADDR, 0);

	ret = send_firmware_command(inst, W5_DEC_ENC_PIC, true, &reg_val, fail_res);
	if (ret == -ETIMEDOUT)
		return ret;

	p_enc_info->instance_queue_count = (reg_val >> 16) & 0xff;
	p_enc_info->report_queue_count = (reg_val & QUEUE_REPORT_MASK);

	if (ret)
		return ret;

	return 0;
}

int wave5_vpu_enc_get_result(struct vpu_instance *inst, struct enc_output_info *result)
{
	int ret;
	u32 encoding_success;
	u32 reg_val;
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
	struct vpu_device *vpu_dev = inst->dev;

	ret = wave5_send_query(inst->dev, inst, GET_RESULT);
	if (ret)
		return ret;

	dev_dbg(inst->dev->dev, "%s: enc pic complete\n", __func__);

	reg_val = vpu_read_reg(inst->dev, W5_RET_QUEUE_STATUS);

	p_enc_info->instance_queue_count = (reg_val >> 16) & 0xff;
	p_enc_info->report_queue_count = (reg_val & QUEUE_REPORT_MASK);

	encoding_success = vpu_read_reg(inst->dev, W5_RET_ENC_ENCODING_SUCCESS);
	if (!encoding_success) {
		result->error_reason = vpu_read_reg(inst->dev, W5_RET_ENC_ERR_INFO);
		return -EIO;
	}

	result->warn_info = vpu_read_reg(inst->dev, W5_RET_ENC_WARN_INFO);

	reg_val = vpu_read_reg(inst->dev, W5_RET_ENC_PIC_TYPE);
	result->pic_type = reg_val & 0xFFFF;

	result->enc_vcl_nut = vpu_read_reg(inst->dev, W5_RET_ENC_VCL_NUT);
	/*
	 * To get the reconstructed frame, use the following index with
	 * inst->frame_buf.
	 */
	result->recon_frame_index = vpu_read_reg(inst->dev, W5_RET_ENC_PIC_IDX);
	result->enc_pic_byte = vpu_read_reg(inst->dev, W5_RET_ENC_PIC_BYTE);
	result->enc_src_idx = vpu_read_reg(inst->dev, W5_RET_ENC_USED_SRC_IDX);
	p_enc_info->stream_wr_ptr = vpu_read_reg(inst->dev, W5_RET_ENC_WR_PTR);
	p_enc_info->stream_rd_ptr = vpu_read_reg(inst->dev, W5_RET_ENC_RD_PTR);

	result->bitstream_buffer = vpu_read_reg(inst->dev, W5_RET_ENC_RD_PTR);
	result->rd_ptr = p_enc_info->stream_rd_ptr;
	result->wr_ptr = p_enc_info->stream_wr_ptr;

	/* result for header-only (no VCL) encoding */
	if (result->recon_frame_index == RECON_IDX_FLAG_HEADER_ONLY)
		result->bitstream_size = result->enc_pic_byte;
	else if (result->recon_frame_index < 0)
		result->bitstream_size = 0;
	else
		result->bitstream_size = result->enc_pic_byte;

	result->enc_host_cmd_tick = vpu_read_reg(inst->dev, W5_RET_ENC_HOST_CMD_TICK);
	result->enc_encode_end_tick = vpu_read_reg(inst->dev, W5_RET_ENC_ENCODING_END_TICK);

	if (!p_enc_info->first_cycle_check) {
		result->frame_cycle = (result->enc_encode_end_tick - result->enc_host_cmd_tick) *
				      p_enc_info->cycle_per_tick;
		p_enc_info->first_cycle_check = true;
	} else {
		result->frame_cycle =
			(result->enc_encode_end_tick - vpu_dev->last_performance_cycles) *
			p_enc_info->cycle_per_tick;
		if (vpu_dev->last_performance_cycles < result->enc_host_cmd_tick)
			result->frame_cycle = (result->enc_encode_end_tick -
					       result->enc_host_cmd_tick) *
					      p_enc_info->cycle_per_tick;
	}
	vpu_dev->last_performance_cycles = result->enc_encode_end_tick;

	return 0;
}

int wave5_vpu_enc_finish_seq(struct vpu_instance *inst, u32 *fail_res)
{
	return send_firmware_command(inst, W5_DESTROY_INSTANCE, true, NULL, fail_res);
}

static bool wave5_vpu_enc_check_common_param_valid(struct vpu_instance *inst,
						   struct enc_open_param *open_param)
{
	bool low_delay = true;
	struct enc_wave_param *param = &open_param->wave_param;
	struct vpu_device *vpu_dev = inst->dev;
	struct device *dev = vpu_dev->dev;
	u32 num_ctu_row = (open_param->pic_height + 64 - 1) / 64;
	u32 num_ctu_col = (open_param->pic_width + 64 - 1) / 64;
	u32 ctu_sz = num_ctu_col * num_ctu_row;

	if (inst->std == W_HEVC_ENC && low_delay &&
	    param->decoding_refresh_type == DEC_REFRESH_TYPE_CRA) {
		dev_warn(dev,
			 "dec_refresh_type(CRA) shouldn't be used together with low delay GOP\n");
		dev_warn(dev, "Suggested configuration parameter: decoding refresh type (IDR)\n");
		param->decoding_refresh_type = DEC_REFRESH_TYPE_IDR;
	}

	if (param->wpp_enable && param->independ_slice_mode) {
		unsigned int num_ctb_in_width = ALIGN(open_param->pic_width, 64) >> 6;

		if (param->independ_slice_mode_arg % num_ctb_in_width) {
			dev_err(dev, "independ_slice_mode_arg %u must be a multiple of %u\n",
				param->independ_slice_mode_arg, num_ctb_in_width);
			return false;
		}
	}

	/* multi-slice & wpp */
	if (param->wpp_enable && param->depend_slice_mode) {
		dev_err(dev, "wpp_enable && depend_slice_mode cannot be used simultaneously\n");
		return false;
	}
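
	/*
	 * As the checks below enforce: dependent slice segments require an
	 * enclosing independent slice, and in DEPEND_SLICE_MODE_RECOMMENDED
	 * the independent slice argument must not be smaller than the
	 * dependent one.
	 */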
	if (!param->independ_slice_mode && param->depend_slice_mode) {
		dev_err(dev, "depend_slice_mode requires independ_slice_mode\n");
		return false;
	} else if (param->independ_slice_mode &&
		   param->depend_slice_mode == DEPEND_SLICE_MODE_RECOMMENDED &&
		   param->independ_slice_mode_arg < param->depend_slice_mode_arg) {
		dev_err(dev, "independ_slice_mode_arg: %u must not be smaller than depend_slice_mode_arg: %u\n",
			param->independ_slice_mode_arg, param->depend_slice_mode_arg);
		return false;
	}

	if (param->independ_slice_mode && param->independ_slice_mode_arg > 65535) {
		dev_err(dev, "independ_slice_mode_arg: %u must not exceed 65535\n",
			param->independ_slice_mode_arg);
		return false;
	}

	if (param->depend_slice_mode && param->depend_slice_mode_arg > 65535) {
		dev_err(dev, "depend_slice_mode_arg: %u must not exceed 65535\n",
			param->depend_slice_mode_arg);
		return false;
	}

	if (param->conf_win_top % 2) {
		dev_err(dev, "conf_win_top: %u, must be a multiple of 2\n", param->conf_win_top);
		return false;
	}

	if (param->conf_win_bot % 2) {
		dev_err(dev, "conf_win_bot: %u, must be a multiple of 2\n", param->conf_win_bot);
		return false;
	}

	if (param->conf_win_left % 2) {
		dev_err(dev, "conf_win_left: %u, must be a multiple of 2\n", param->conf_win_left);
		return false;
	}

	if (param->conf_win_right % 2) {
		dev_err(dev, "conf_win_right: %u, must be a multiple of 2\n",
			param->conf_win_right);
		return false;
	}

	if (param->lossless_enable && open_param->rc_enable) {
		dev_err(dev, "option rate_control cannot be used with lossless_coding\n");
		return false;
	}

	if (param->lossless_enable && !param->skip_intra_trans) {
		dev_err(dev, "option intra_trans_skip must be enabled with lossless_coding\n");
		return false;
	}

	/* intra refresh */
	if (param->intra_refresh_mode && param->intra_refresh_arg == 0) {
		dev_err(dev, "Invalid refresh argument, mode: %u, refresh: %u must be > 0\n",
			param->intra_refresh_mode, param->intra_refresh_arg);
		return false;
	}
	switch (param->intra_refresh_mode) {
	case REFRESH_MODE_CTU_ROWS:
		if (param->intra_refresh_arg > num_ctu_row)
			goto invalid_refresh_argument;
		break;
	case REFRESH_MODE_CTU_COLUMNS:
		if (param->intra_refresh_arg > num_ctu_col)
			goto invalid_refresh_argument;
		break;
	case REFRESH_MODE_CTU_STEP_SIZE:
		if (param->intra_refresh_arg > ctu_sz)
			goto invalid_refresh_argument;
		break;
	case REFRESH_MODE_CTUS:
		if (param->intra_refresh_arg > ctu_sz)
			goto invalid_refresh_argument;
		if (param->lossless_enable) {
			dev_err(dev, "mode: %u cannot be used with lossless_enable\n",
				param->intra_refresh_mode);
			return false;
		}
	}
	return true;

invalid_refresh_argument:
	dev_err(dev, "Invalid refresh argument, mode: %u, refresh: %u > W(%u)xH(%u)\n",
		param->intra_refresh_mode, param->intra_refresh_arg,
		num_ctu_col, num_ctu_row);
	return false;
}

static bool wave5_vpu_enc_check_param_valid(struct vpu_device *vpu_dev,
					    struct enc_open_param *open_param)
{
	struct enc_wave_param *param = &open_param->wave_param;

	if (open_param->rc_enable) {
		if (param->min_qp_i > param->max_qp_i || param->min_qp_p > param->max_qp_p ||
		    param->min_qp_b > param->max_qp_b) {
			dev_err(vpu_dev->dev, "Configuration failed because min_qp is greater than max_qp\n");
			dev_err(vpu_dev->dev, "Suggested configuration parameters: min_qp = max_qp\n");
			return false;
		}
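
		/*
		 * A target bit rate at or below the frame rate would leave
		 * less than one bit per frame, so reject it here.
		 */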
		if (open_param->bit_rate <= (int)open_param->frame_rate_info) {
			dev_err(vpu_dev->dev,
				"enc_bit_rate: %u must be greater than the frame_rate: %u\n",
				open_param->bit_rate, (int)open_param->frame_rate_info);
			return false;
		}
	}

	return true;
}

int wave5_vpu_enc_check_open_param(struct vpu_instance *inst, struct enc_open_param *open_param)
{
	u32 pic_width;
	u32 pic_height;
	s32 product_id = inst->dev->product;
	struct vpu_attr *p_attr = &inst->dev->attr;
	struct enc_wave_param *param;

	if (!open_param)
		return -EINVAL;

	param = &open_param->wave_param;
	pic_width = open_param->pic_width;
	pic_height = open_param->pic_height;

	if (inst->id >= MAX_NUM_INSTANCE) {
		dev_err(inst->dev->dev, "Too many simultaneous instances: %d (max: %u)\n",
			inst->id, MAX_NUM_INSTANCE);
		return -EOPNOTSUPP;
	}

	if (inst->std != W_HEVC_ENC &&
	    !(inst->std == W_AVC_ENC && product_id == PRODUCT_ID_521)) {
		dev_err(inst->dev->dev, "Unsupported encoder-codec & product combination\n");
		return -EOPNOTSUPP;
	}

	if (param->internal_bit_depth == 10) {
		if (inst->std == W_HEVC_ENC && !p_attr->support_hevc10bit_enc) {
			dev_err(inst->dev->dev,
				"Flag support_hevc10bit_enc must be set to encode 10bit HEVC\n");
			return -EOPNOTSUPP;
		} else if (inst->std == W_AVC_ENC && !p_attr->support_avc10bit_enc) {
			dev_err(inst->dev->dev,
				"Flag support_avc10bit_enc must be set to encode 10bit AVC\n");
			return -EOPNOTSUPP;
		}
	}

	if (!open_param->frame_rate_info) {
		dev_err(inst->dev->dev, "No frame rate information\n");
		return -EINVAL;
	}

	if (open_param->bit_rate > MAX_BIT_RATE) {
		dev_err(inst->dev->dev, "Invalid encoding bit-rate: %u (valid: 0-%u)\n",
			open_param->bit_rate, MAX_BIT_RATE);
		return -EINVAL;
	}

	if (pic_width < W5_MIN_ENC_PIC_WIDTH || pic_width > W5_MAX_ENC_PIC_WIDTH ||
	    pic_height < W5_MIN_ENC_PIC_HEIGHT || pic_height > W5_MAX_ENC_PIC_HEIGHT) {
		dev_err(inst->dev->dev, "Invalid encoding dimension: %ux%u\n",
			pic_width, pic_height);
		return -EINVAL;
	}

	if (param->profile) {
		if (inst->std == W_HEVC_ENC) {
			if ((param->profile != HEVC_PROFILE_MAIN ||
			     (param->profile == HEVC_PROFILE_MAIN &&
			      param->internal_bit_depth > 8)) &&
			    (param->profile != HEVC_PROFILE_MAIN10 ||
			     (param->profile == HEVC_PROFILE_MAIN10 &&
			      param->internal_bit_depth < 10)) &&
			    param->profile != HEVC_PROFILE_STILLPICTURE) {
				dev_err(inst->dev->dev,
					"Invalid HEVC encoding profile: %u (bit-depth: %u)\n",
					param->profile, param->internal_bit_depth);
				return -EINVAL;
			}
		} else if (inst->std == W_AVC_ENC) {
			if ((param->internal_bit_depth > 8 &&
			     param->profile != H264_PROFILE_HIGH10)) {
				dev_err(inst->dev->dev,
					"Invalid AVC encoding profile: %u (bit-depth: %u)\n",
					param->profile, param->internal_bit_depth);
				return -EINVAL;
			}
		}
	}

	if (param->decoding_refresh_type > DEC_REFRESH_TYPE_IDR) {
		dev_err(inst->dev->dev, "Invalid decoding refresh type: %u (valid: 0-2)\n",
			param->decoding_refresh_type);
		return -EINVAL;
	}

	if (param->intra_refresh_mode > REFRESH_MODE_CTUS) {
		dev_err(inst->dev->dev, "Invalid intra refresh mode: %d (valid: 0-4)\n",
			param->intra_refresh_mode);
		return -EINVAL;
	}

	if (inst->std == W_HEVC_ENC && param->independ_slice_mode &&
	    param->depend_slice_mode > DEPEND_SLICE_MODE_BOOST) {
		dev_err(inst->dev->dev,
			"Can't combine slice modes: independent and fast dependent for HEVC\n");
		return -EINVAL;
	}

	if (!param->disable_deblk) {
		if (param->beta_offset_div2 < -6 || param->beta_offset_div2 > 6) {
			dev_err(inst->dev->dev, "Invalid beta offset: %d (valid: -6 to 6)\n",
				param->beta_offset_div2);
			return -EINVAL;
		}

		if (param->tc_offset_div2 < -6 || param->tc_offset_div2 > 6) {
			dev_err(inst->dev->dev, "Invalid tc offset: %d (valid: -6 to 6)\n",
				param->tc_offset_div2);
			return -EINVAL;
		}
	}

	if (param->intra_qp > MAX_INTRA_QP) {
		dev_err(inst->dev->dev,
			"Invalid intra quantization parameter: %u (valid: 0-%u)\n",
			param->intra_qp, MAX_INTRA_QP);
		return -EINVAL;
	}

	if (open_param->rc_enable) {
		if (param->min_qp_i > MAX_INTRA_QP || param->max_qp_i > MAX_INTRA_QP ||
		    param->min_qp_p > MAX_INTRA_QP || param->max_qp_p > MAX_INTRA_QP ||
		    param->min_qp_b > MAX_INTRA_QP || param->max_qp_b > MAX_INTRA_QP) {
			dev_err(inst->dev->dev,
				"Invalid quantization parameter min/max values: "
				"I: %u-%u, P: %u-%u, B: %u-%u (valid for each: 0-%u)\n",
				param->min_qp_i, param->max_qp_i, param->min_qp_p, param->max_qp_p,
				param->min_qp_b, param->max_qp_b, MAX_INTRA_QP);
			return -EINVAL;
		}

		if (param->hvs_qp_enable && param->hvs_max_delta_qp > MAX_HVS_MAX_DELTA_QP) {
			dev_err(inst->dev->dev,
				"Invalid HVS max delta quantization parameter: %u (valid: 0-%u)\n",
				param->hvs_max_delta_qp, MAX_HVS_MAX_DELTA_QP);
			return -EINVAL;
		}

		if (open_param->vbv_buffer_size < MIN_VBV_BUFFER_SIZE ||
		    open_param->vbv_buffer_size > MAX_VBV_BUFFER_SIZE) {
			dev_err(inst->dev->dev, "VBV buffer size: %u (valid: %u-%u)\n",
				open_param->vbv_buffer_size, MIN_VBV_BUFFER_SIZE,
				MAX_VBV_BUFFER_SIZE);
			return -EINVAL;
		}
	}

	if (!wave5_vpu_enc_check_common_param_valid(inst, open_param))
		return -EINVAL;

	if (!wave5_vpu_enc_check_param_valid(inst->dev, open_param))
		return -EINVAL;

	if (param->chroma_cb_qp_offset < -12 || param->chroma_cb_qp_offset > 12) {
		dev_err(inst->dev->dev,
			"Invalid chroma Cb quantization parameter offset: %d (valid: -12 to 12)\n",
			param->chroma_cb_qp_offset);
		return -EINVAL;
	}

	if (param->chroma_cr_qp_offset < -12 || param->chroma_cr_qp_offset > 12) {
		dev_err(inst->dev->dev,
			"Invalid chroma Cr quantization parameter offset: %d (valid: -12 to 12)\n",
			param->chroma_cr_qp_offset);
		return -EINVAL;
	}

	if (param->intra_refresh_mode == REFRESH_MODE_CTU_STEP_SIZE && !param->intra_refresh_arg) {
		dev_err(inst->dev->dev,
			"Intra refresh mode CTU step-size requires an argument\n");
		return -EINVAL;
	}

	if (inst->std == W_HEVC_ENC) {
		if (param->nr_intra_weight_y > MAX_INTRA_WEIGHT ||
		    param->nr_intra_weight_cb > MAX_INTRA_WEIGHT ||
		    param->nr_intra_weight_cr > MAX_INTRA_WEIGHT) {
			dev_err(inst->dev->dev,
				"Invalid intra weight Y(%u) Cb(%u) Cr(%u) (valid: 0-%u)\n",
				param->nr_intra_weight_y, param->nr_intra_weight_cb,
				param->nr_intra_weight_cr, MAX_INTRA_WEIGHT);
			return -EINVAL;
		}

		if (param->nr_inter_weight_y > MAX_INTER_WEIGHT ||
		    param->nr_inter_weight_cb > MAX_INTER_WEIGHT ||
		    param->nr_inter_weight_cr > MAX_INTER_WEIGHT) {
			dev_err(inst->dev->dev,
				"Invalid inter weight Y(%u) Cb(%u) Cr(%u) (valid: 0-%u)\n",
				param->nr_inter_weight_y, param->nr_inter_weight_cb,
				param->nr_inter_weight_cr, MAX_INTER_WEIGHT);
			return -EINVAL;
		}
	}

	return 0;
}