Lines Matching refs:enc

519 struct rkvenc_dev *enc = to_rkvenc_dev(mpp); in rkvenc_isr() local
545 if (enc->aux_iova != -1) { in rkvenc_isr()
546 iommu_unmap(mpp->iommu_info->domain, enc->aux_iova, IOMMU_PAGE_SIZE); in rkvenc_isr()
547 enc->aux_iova = -1; in rkvenc_isr()
729 struct rkvenc_dev *enc = to_rkvenc_dev(mpp); in rkvenc_procfs_remove() local
731 if (enc->procfs) { in rkvenc_procfs_remove()
732 proc_remove(enc->procfs); in rkvenc_procfs_remove()
733 enc->procfs = NULL; in rkvenc_procfs_remove()
806 struct rkvenc_dev *enc = to_rkvenc_dev(mpp); in rkvenc_procfs_init() local
808 enc->procfs = proc_mkdir(mpp->dev->of_node->name, mpp->srv->procfs); in rkvenc_procfs_init()
809 if (IS_ERR_OR_NULL(enc->procfs)) { in rkvenc_procfs_init()
811 enc->procfs = NULL; in rkvenc_procfs_init()
816 mpp_procfs_create_common(enc->procfs, mpp); in rkvenc_procfs_init()
820 enc->procfs, &enc->aclk_info.debug_rate_hz); in rkvenc_procfs_init()
822 enc->procfs, &enc->core_clk_info.debug_rate_hz); in rkvenc_procfs_init()
824 enc->procfs, &mpp->session_max_buffers); in rkvenc_procfs_init()
827 enc->procfs, rkvenc_show_session_info, mpp); in rkvenc_procfs_init()
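
The rkvenc_procfs_remove()/rkvenc_procfs_init() lines above show the driver creating a per-device directory under the mpp service's procfs root and treating failure as non-fatal by nulling the pointer, which keeps the remove path a simple "if set, remove" check. A minimal sketch of that pairing; struct rkvenc_dev, struct mpp_dev, to_rkvenc_dev() and mpp_procfs_create_common() are the driver's own types and helpers, assumed here, and the individual u32/seq entries are omitted.

#include <linux/proc_fs.h>
#include <linux/err.h>

static int rkvenc_procfs_init(struct mpp_dev *mpp)
{
	struct rkvenc_dev *enc = to_rkvenc_dev(mpp);

	enc->procfs = proc_mkdir(mpp->dev->of_node->name, mpp->srv->procfs);
	if (IS_ERR_OR_NULL(enc->procfs)) {
		/* procfs is optional: remember the failure and carry on */
		enc->procfs = NULL;
		return -EINVAL;
	}

	mpp_procfs_create_common(enc->procfs, mpp);
	return 0;
}

static void rkvenc_procfs_remove(struct mpp_dev *mpp)
{
	struct rkvenc_dev *enc = to_rkvenc_dev(mpp);

	if (enc->procfs) {
		proc_remove(enc->procfs);
		enc->procfs = NULL;
	}
}
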
856 struct rkvenc_dev *enc = dev_get_drvdata(dev); in rkvenc_devfreq_target() local
857 struct devfreq *devfreq = enc->devfreq; in rkvenc_devfreq_target()
871 enc->core_last_rate_hz = target_freq; in rkvenc_devfreq_target()
872 if (enc->volt == target_volt) in rkvenc_devfreq_target()
874 ret = regulator_set_voltage(enc->vdd, target_volt, INT_MAX); in rkvenc_devfreq_target()
880 enc->volt = target_volt; in rkvenc_devfreq_target()
885 ret = regulator_set_voltage(enc->vdd, target_volt, INT_MAX); in rkvenc_devfreq_target()
893 clk_set_rate(enc->core_clk_info.clk, target_freq); in rkvenc_devfreq_target()
895 enc->core_last_rate_hz = target_freq; in rkvenc_devfreq_target()
898 ret = regulator_set_voltage(enc->vdd, target_volt, INT_MAX); in rkvenc_devfreq_target()
904 enc->volt = target_volt; in rkvenc_devfreq_target()
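
rkvenc_devfreq_target() follows the usual DVFS ordering: when scaling up, raise the "venc" regulator before the core clock; when scaling down, lower the clock first and the voltage afterwards, caching the result in core_last_rate_hz and volt. A generic sketch of that ordering, with the OPP lookup simplified and the error unwinding (restoring the old voltage if a later step fails) left out; the field names are taken from the listing, the rest is an assumption.

#include <linux/devfreq.h>
#include <linux/pm_opp.h>
#include <linux/regulator/consumer.h>
#include <linux/clk.h>

static int rkvenc_devfreq_target(struct device *dev, unsigned long *freq,
				 u32 flags)
{
	struct rkvenc_dev *enc = dev_get_drvdata(dev);
	struct dev_pm_opp *opp;
	unsigned long target_volt, target_freq = *freq;
	int ret;

	/* snap the request to a valid OPP and read its voltage */
	opp = devfreq_recommended_opp(dev, &target_freq, flags);
	if (IS_ERR(opp))
		return PTR_ERR(opp);
	target_volt = dev_pm_opp_get_voltage(opp);
	dev_pm_opp_put(opp);

	if (target_freq > enc->core_last_rate_hz) {
		/* scaling up: raise the voltage before the clock */
		ret = regulator_set_voltage(enc->vdd, target_volt, INT_MAX);
		if (ret)
			return ret;
		clk_set_rate(enc->core_clk_info.clk, target_freq);
	} else {
		/* scaling down: lower the clock before the voltage */
		clk_set_rate(enc->core_clk_info.clk, target_freq);
		ret = regulator_set_voltage(enc->vdd, target_volt, INT_MAX);
		if (ret)
			return ret;
	}

	enc->core_last_rate_hz = target_freq;
	enc->volt = target_volt;
	*freq = target_freq;
	return 0;
}

The ordering matters because the silicon must never run faster than the current voltage supports: the supply only ever changes while the clock is at the lower of the two rates.
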
918 struct rkvenc_dev *enc = dev_get_drvdata(dev); in rkvenc_devfreq_get_cur_freq() local
920 *freq = enc->core_last_rate_hz; in rkvenc_devfreq_get_cur_freq()
933 struct rkvenc_dev *enc = df->data; in devfreq_venc_ondemand_func() local
935 if (enc) in devfreq_venc_ondemand_func()
936 *freq = enc->core_rate_hz; in devfreq_venc_ondemand_func()
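
devfreq_venc_ondemand_func() is the get_target_freq hook of the driver's private "venc_ondemand" governor: it simply reports the core rate the driver last computed (see rkvenc_set_freq() further down). A sketch assuming the stock devfreq governor callback signature; the governor registration itself is elsewhere in the driver and not shown.

static int devfreq_venc_ondemand_func(struct devfreq *df, unsigned long *freq)
{
	struct rkvenc_dev *enc = df->data;

	/* report the rate rkvenc_set_freq() asked for; fall back to the
	 * current rate if no driver data is attached */
	if (enc)
		*freq = enc->core_rate_hz;
	else
		*freq = df->previous_freq;

	return 0;
}
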
958 struct rkvenc_dev *enc = devfreq->data; in rkvenc_get_static_power() local
960 if (!enc->model_data) in rkvenc_get_static_power()
963 return rockchip_ipa_get_static_power(enc->model_data, in rkvenc_get_static_power()
1021 struct rkvenc_dev *enc = to_rkvenc_dev(mpp); in rkvenc_devfreq_init() local
1022 struct clk *clk_core = enc->core_clk_info.clk; in rkvenc_devfreq_init()
1030 enc->vdd = devm_regulator_get_optional(mpp->dev, "venc"); in rkvenc_devfreq_init()
1031 if (IS_ERR_OR_NULL(enc->vdd)) { in rkvenc_devfreq_init()
1032 if (PTR_ERR(enc->vdd) == -EPROBE_DEFER) { in rkvenc_devfreq_init()
1057 enc->devfreq = devm_devfreq_add_device(mpp->dev, in rkvenc_devfreq_init()
1059 "venc_ondemand", (void *)enc); in rkvenc_devfreq_init()
1060 if (IS_ERR(enc->devfreq)) { in rkvenc_devfreq_init()
1061 ret = PTR_ERR(enc->devfreq); in rkvenc_devfreq_init()
1062 enc->devfreq = NULL; in rkvenc_devfreq_init()
1065 enc->devfreq->last_status.total_time = 1; in rkvenc_devfreq_init()
1066 enc->devfreq->last_status.busy_time = 1; in rkvenc_devfreq_init()
1068 devfreq_register_opp_notifier(mpp->dev, enc->devfreq); in rkvenc_devfreq_init()
1072 enc->model_data = rockchip_ipa_power_model_init(mpp->dev, in rkvenc_devfreq_init()
1074 if (IS_ERR_OR_NULL(enc->model_data)) { in rkvenc_devfreq_init()
1075 enc->model_data = NULL; in rkvenc_devfreq_init()
1077 } else if (enc->model_data->dynamic_coefficient) { in rkvenc_devfreq_init()
1079 enc->model_data->dynamic_coefficient; in rkvenc_devfreq_init()
1086 enc->devfreq_cooling = in rkvenc_devfreq_init()
1088 enc->devfreq, venc_dcp); in rkvenc_devfreq_init()
1089 if (IS_ERR_OR_NULL(enc->devfreq_cooling)) in rkvenc_devfreq_init()
1092 enc_mdevp.data = enc->devfreq; in rkvenc_devfreq_init()
1093 enc->mdev_info = rockchip_system_monitor_register(mpp->dev, &enc_mdevp); in rkvenc_devfreq_init()
1094 if (IS_ERR(enc->mdev_info)) { in rkvenc_devfreq_init()
1096 enc->mdev_info = NULL; in rkvenc_devfreq_init()
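
rkvenc_devfreq_init() treats DVFS as optional: a missing "venc" regulator (anything other than -EPROBE_DEFER) or a failed devfreq registration is absorbed by nulling the pointer, so the rest of the driver can just test "if (enc->devfreq)". A sketch of that error-tolerant registration; rkvenc_devfreq_profile stands in for the driver's devfreq_dev_profile, and the IPA power model, devfreq cooling device and system-monitor registration seen above are omitted.

static int rkvenc_devfreq_init(struct mpp_dev *mpp)
{
	struct rkvenc_dev *enc = to_rkvenc_dev(mpp);
	int ret;

	enc->vdd = devm_regulator_get_optional(mpp->dev, "venc");
	if (IS_ERR_OR_NULL(enc->vdd)) {
		if (PTR_ERR(enc->vdd) == -EPROBE_DEFER)
			return -EPROBE_DEFER;	/* supply not ready yet */
		enc->vdd = NULL;		/* no supply: run without DVFS */
		return 0;
	}

	enc->devfreq = devm_devfreq_add_device(mpp->dev, &rkvenc_devfreq_profile,
					       "venc_ondemand", enc);
	if (IS_ERR(enc->devfreq)) {
		ret = PTR_ERR(enc->devfreq);
		enc->devfreq = NULL;
		return ret;
	}

	/* seed last_status so the first load calculation cannot divide by 0 */
	enc->devfreq->last_status.total_time = 1;
	enc->devfreq->last_status.busy_time = 1;

	devfreq_register_opp_notifier(mpp->dev, enc->devfreq);
	return 0;
}
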
1113 struct rkvenc_dev *enc = to_rkvenc_dev(mpp); in rkvenc_devfreq_remove() local
1115 if (enc->mdev_info) in rkvenc_devfreq_remove()
1116 rockchip_system_monitor_unregister(enc->mdev_info); in rkvenc_devfreq_remove()
1117 if (enc->devfreq) { in rkvenc_devfreq_remove()
1118 devfreq_unregister_opp_notifier(mpp->dev, enc->devfreq); in rkvenc_devfreq_remove()
1130 struct rkvenc_dev *enc = container_of(work_s, struct rkvenc_dev, iommu_work); in rkvenc_iommu_handle_work() local
1131 struct mpp_dev *mpp = &enc->mpp; in rkvenc_iommu_handle_work()
1139 if (enc->aux_iova != -1) { in rkvenc_iommu_handle_work()
1140 iommu_unmap(mpp->iommu_info->domain, enc->aux_iova, IOMMU_PAGE_SIZE); in rkvenc_iommu_handle_work()
1141 enc->aux_iova = -1; in rkvenc_iommu_handle_work()
1144 page_iova = round_down(enc->fault_iova, SZ_4K); in rkvenc_iommu_handle_work()
1146 page_to_phys(enc->aux_page), IOMMU_PAGE_SIZE, in rkvenc_iommu_handle_work()
1151 enc->aux_iova = page_iova; in rkvenc_iommu_handle_work()
1164 struct rkvenc_dev *enc = to_rkvenc_dev(mpp); in rkvenc_iommu_fault_handle() local
1169 enc->fault_iova = iova; in rkvenc_iommu_fault_handle()
1171 queue_work(enc->iommu_wq, &enc->iommu_work); in rkvenc_iommu_fault_handle()
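
rkvenc_iommu_fault_handle() and rkvenc_iommu_handle_work() implement the aux-page trick: the fault handler only records the faulting IOVA and queues work, the worker maps a spare page over the faulting 4K region so the encoder can finish the frame, and the ISR (lines 545-547 above) tears the mapping down again. A sketch assuming the iommu_set_fault_handler() callback signature, the five-argument iommu_map() form, and that the handler's arg carries the mpp_dev; IOMMU_PAGE_SIZE is a driver macro.

#include <linux/iommu.h>
#include <linux/sizes.h>

static void rkvenc_iommu_handle_work(struct work_struct *work_s)
{
	struct rkvenc_dev *enc = container_of(work_s, struct rkvenc_dev, iommu_work);
	struct mpp_dev *mpp = &enc->mpp;
	unsigned long page_iova;

	/* drop the previously borrowed mapping, if any */
	if (enc->aux_iova != -1) {
		iommu_unmap(mpp->iommu_info->domain, enc->aux_iova, IOMMU_PAGE_SIZE);
		enc->aux_iova = -1;
	}

	/* back the faulting 4K region with the spare page */
	page_iova = round_down(enc->fault_iova, SZ_4K);
	if (!iommu_map(mpp->iommu_info->domain, page_iova,
		       page_to_phys(enc->aux_page), IOMMU_PAGE_SIZE,
		       IOMMU_READ | IOMMU_WRITE))
		enc->aux_iova = page_iova;
}

static int rkvenc_iommu_fault_handle(struct iommu_domain *domain,
				     struct device *iommu_dev,
				     unsigned long iova, int status, void *arg)
{
	struct mpp_dev *mpp = arg;	/* assumption: arg carries the mpp_dev */
	struct rkvenc_dev *enc = to_rkvenc_dev(mpp);

	/* defer the mapping to process context, where iommu_map() may sleep */
	enc->fault_iova = iova;
	queue_work(enc->iommu_wq, &enc->iommu_work);

	return 0;
}
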
1180 struct rkvenc_dev *enc = to_rkvenc_dev(mpp); in rkvenc_init() local
1186 ret = mpp_get_clk_info(mpp, &enc->aclk_info, "aclk_vcodec"); in rkvenc_init()
1189 ret = mpp_get_clk_info(mpp, &enc->hclk_info, "hclk_vcodec"); in rkvenc_init()
1192 ret = mpp_get_clk_info(mpp, &enc->core_clk_info, "clk_core"); in rkvenc_init()
1198 &enc->default_max_load); in rkvenc_init()
1200 mpp_set_clk_info_rate_hz(&enc->aclk_info, CLK_MODE_DEFAULT, 300 * MHZ); in rkvenc_init()
1201 mpp_set_clk_info_rate_hz(&enc->core_clk_info, CLK_MODE_DEFAULT, 600 * MHZ); in rkvenc_init()
1204 enc->rst_a = mpp_reset_control_get(mpp, RST_TYPE_A, "video_a"); in rkvenc_init()
1205 if (!enc->rst_a) in rkvenc_init()
1207 enc->rst_h = mpp_reset_control_get(mpp, RST_TYPE_H, "video_h"); in rkvenc_init()
1208 if (!enc->rst_h) in rkvenc_init()
1210 enc->rst_core = mpp_reset_control_get(mpp, RST_TYPE_CORE, "video_core"); in rkvenc_init()
1211 if (!enc->rst_core) in rkvenc_init()
1221 enc->aux_page = alloc_page(GFP_KERNEL); in rkvenc_init()
1222 if (!enc->aux_page) { in rkvenc_init()
1226 enc->aux_iova = -1; in rkvenc_init()
1228 enc->iommu_wq = create_singlethread_workqueue("iommu_wq"); in rkvenc_init()
1229 if (!enc->iommu_wq) { in rkvenc_init()
1233 INIT_WORK(&enc->iommu_work, rkvenc_iommu_handle_work); in rkvenc_init()
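
rkvenc_init() fetches the three clocks ("aclk_vcodec", "hclk_vcodec", "clk_core"), applies the 300/600 MHz defaults, takes the three reset lines, and then prepares the fault-recovery resources used above: one spare page, an aux_iova sentinel of -1, and a single-threaded workqueue. A sketch of just that last part; the helper name is invented for the sketch, since in the driver this sits inline in rkvenc_init() with goto-based error labels.

static int rkvenc_init_fault_resources(struct rkvenc_dev *enc)
{
	enc->aux_page = alloc_page(GFP_KERNEL);
	if (!enc->aux_page)
		return -ENOMEM;
	enc->aux_iova = -1;	/* sentinel: no aux mapping installed */

	enc->iommu_wq = create_singlethread_workqueue("iommu_wq");
	if (!enc->iommu_wq) {
		__free_page(enc->aux_page);
		enc->aux_page = NULL;
		return -ENOMEM;
	}
	INIT_WORK(&enc->iommu_work, rkvenc_iommu_handle_work);

	return 0;
}

rkvenc_exit() (lines 1248-1258 above) mirrors this: free the page, unmap any live aux_iova, and destroy the workqueue.
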
1242 struct rkvenc_dev *enc = to_rkvenc_dev(mpp); in rkvenc_exit() local
1248 if (enc->aux_page) in rkvenc_exit()
1249 __free_page(enc->aux_page); in rkvenc_exit()
1251 if (enc->aux_iova != -1) { in rkvenc_exit()
1252 iommu_unmap(mpp->iommu_info->domain, enc->aux_iova, IOMMU_PAGE_SIZE); in rkvenc_exit()
1253 enc->aux_iova = -1; in rkvenc_exit()
1256 if (enc->iommu_wq) { in rkvenc_exit()
1257 destroy_workqueue(enc->iommu_wq); in rkvenc_exit()
1258 enc->iommu_wq = NULL; in rkvenc_exit()
1266 struct rkvenc_dev *enc = to_rkvenc_dev(mpp); in rkvenc_reset() local
1271 if (enc->devfreq) in rkvenc_reset()
1272 mutex_lock(&enc->devfreq->lock); in rkvenc_reset()
1274 mpp_clk_set_rate(&enc->aclk_info, CLK_MODE_REDUCE); in rkvenc_reset()
1275 mpp_clk_set_rate(&enc->core_clk_info, CLK_MODE_REDUCE); in rkvenc_reset()
1284 if (enc->rst_a && enc->rst_h && enc->rst_core) { in rkvenc_reset()
1286 mpp_safe_reset(enc->rst_a); in rkvenc_reset()
1287 mpp_safe_reset(enc->rst_h); in rkvenc_reset()
1288 mpp_safe_reset(enc->rst_core); in rkvenc_reset()
1290 mpp_safe_unreset(enc->rst_a); in rkvenc_reset()
1291 mpp_safe_unreset(enc->rst_h); in rkvenc_reset()
1292 mpp_safe_unreset(enc->rst_core); in rkvenc_reset()
1296 if (enc->devfreq) in rkvenc_reset()
1297 mutex_unlock(&enc->devfreq->lock); in rkvenc_reset()
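
rkvenc_reset() serializes against devfreq by taking devfreq->lock, drops both clocks to their reduced rates, and pulses all three reset lines together. A sketch, treating mpp_safe_reset()/mpp_safe_unreset() and mpp_clk_set_rate() as the driver's wrappers around reset_control_assert()/reset_control_deassert() and clk_set_rate(); the register-level disable/IRQ handling around the resets is omitted.

static int rkvenc_reset(struct mpp_dev *mpp)
{
	struct rkvenc_dev *enc = to_rkvenc_dev(mpp);

	/* keep devfreq from retuning the core clock mid-reset */
	if (enc->devfreq)
		mutex_lock(&enc->devfreq->lock);

	/* slow the clocks down for the reset */
	mpp_clk_set_rate(&enc->aclk_info, CLK_MODE_REDUCE);
	mpp_clk_set_rate(&enc->core_clk_info, CLK_MODE_REDUCE);

	if (enc->rst_a && enc->rst_h && enc->rst_core) {
		/* assert all three lines, then release them together */
		mpp_safe_reset(enc->rst_a);
		mpp_safe_reset(enc->rst_h);
		mpp_safe_reset(enc->rst_core);

		mpp_safe_unreset(enc->rst_a);
		mpp_safe_unreset(enc->rst_h);
		mpp_safe_unreset(enc->rst_core);
	}

	if (enc->devfreq)
		mutex_unlock(&enc->devfreq->lock);

	return 0;
}
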
1307 struct rkvenc_dev *enc = to_rkvenc_dev(mpp); in rkvenc_clk_on() local
1309 mpp_clk_safe_enable(enc->aclk_info.clk); in rkvenc_clk_on()
1310 mpp_clk_safe_enable(enc->hclk_info.clk); in rkvenc_clk_on()
1311 mpp_clk_safe_enable(enc->core_clk_info.clk); in rkvenc_clk_on()
1318 struct rkvenc_dev *enc = to_rkvenc_dev(mpp); in rkvenc_clk_off() local
1320 clk_disable_unprepare(enc->aclk_info.clk); in rkvenc_clk_off()
1321 clk_disable_unprepare(enc->hclk_info.clk); in rkvenc_clk_off()
1322 clk_disable_unprepare(enc->core_clk_info.clk); in rkvenc_clk_off()
1333 struct rkvenc_dev *enc = to_rkvenc_dev(mpp); in rkvenc_get_freq() local
1337 if (!enc->default_max_load) in rkvenc_get_freq()
1354 if (workload > enc->default_max_load) in rkvenc_get_freq()
1366 struct rkvenc_dev *enc = to_rkvenc_dev(mpp); in rkvenc_set_freq() local
1369 mpp_clk_set_rate(&enc->aclk_info, task->clk_mode); in rkvenc_set_freq()
1372 if (enc->devfreq) { in rkvenc_set_freq()
1375 mutex_lock(&enc->devfreq->lock); in rkvenc_set_freq()
1376 core_rate_hz = mpp_get_clk_info_rate_hz(&enc->core_clk_info, task->clk_mode); in rkvenc_set_freq()
1377 if (enc->core_rate_hz != core_rate_hz) { in rkvenc_set_freq()
1378 enc->core_rate_hz = core_rate_hz; in rkvenc_set_freq()
1379 update_devfreq(enc->devfreq); in rkvenc_set_freq()
1385 clk_set_rate(enc->core_clk_info.clk, enc->core_last_rate_hz); in rkvenc_set_freq()
1387 mutex_unlock(&enc->devfreq->lock); in rkvenc_set_freq()
1391 mpp_clk_set_rate(&enc->core_clk_info, task->clk_mode); in rkvenc_set_freq()
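
rkvenc_set_freq() programs the aclk directly but routes the core clock through devfreq: it stores the wanted rate in core_rate_hz and calls update_devfreq() under devfreq->lock so the venc_ondemand governor (sketched earlier) applies it; without devfreq it just sets the clock. A sketch with the task/clk_mode plumbing simplified and the return type assumed.

static int rkvenc_set_freq(struct mpp_dev *mpp, struct mpp_task *task)
{
	struct rkvenc_dev *enc = to_rkvenc_dev(mpp);

	mpp_clk_set_rate(&enc->aclk_info, task->clk_mode);

	if (enc->devfreq) {
		unsigned long core_rate_hz;

		mutex_lock(&enc->devfreq->lock);
		core_rate_hz = mpp_get_clk_info_rate_hz(&enc->core_clk_info,
							task->clk_mode);
		if (enc->core_rate_hz != core_rate_hz) {
			/* hand the new target to the venc_ondemand governor */
			enc->core_rate_hz = core_rate_hz;
			update_devfreq(enc->devfreq);
		} else {
			/* re-apply whatever devfreq picked last time */
			clk_set_rate(enc->core_clk_info.clk, enc->core_last_rate_hz);
		}
		mutex_unlock(&enc->devfreq->lock);
		return 0;
	}

	mpp_clk_set_rate(&enc->core_clk_info, task->clk_mode);
	return 0;
}
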
1440 struct rkvenc_dev *enc = NULL; in rkvenc_probe() local
1446 enc = devm_kzalloc(dev, sizeof(*enc), GFP_KERNEL); in rkvenc_probe()
1447 if (!enc) in rkvenc_probe()
1449 mpp = &enc->mpp; in rkvenc_probe()