Lines matching refs: job
(each entry: source line number, matching source line, enclosing function; "argument" marks lines where job is a function parameter)

373 				 struct rga_job *job)  in rga_mm_map_dma_buffer()  argument
383 scheduler = job ? job->scheduler : in rga_mm_map_dma_buffer()
525 struct rga_job *job, int write_flag) in rga_mm_map_virt_addr() argument
535 scheduler = job ? job->scheduler : in rga_mm_map_virt_addr()
542 internal_buffer->current_mm = job ? job->mm : current->mm; in rga_mm_map_virt_addr()
673 struct rga_job *job) in rga_mm_map_phys_addr() argument
682 scheduler = job ? job->scheduler : in rga_mm_map_phys_addr()
765 struct rga_job *job, int write_flag) in rga_mm_map_buffer() argument
777 ret = rga_mm_map_dma_buffer(external_buffer, internal_buffer, job); in rga_mm_map_buffer()
790 ret = rga_mm_map_virt_addr(external_buffer, internal_buffer, job, write_flag); in rga_mm_map_buffer()
803 ret = rga_mm_map_phys_addr(external_buffer, internal_buffer, job); in rga_mm_map_buffer()
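The rga_mm_map_buffer() references above outline a dispatcher that hands the external buffer to one of three mappers. A minimal sketch of that dispatch follows; the memory-type field and the RGA_DMA_BUFFER / RGA_VIRTUAL_ADDRESS / RGA_PHYSICAL_ADDRESS constants are assumptions, and only the three callees and their argument lists come from the listing.

        /*
         * Sketch only: the type field and the RGA_* constants are assumed;
         * the callees and their arguments are taken from the listing above.
         */
        static int rga_mm_map_buffer_sketch(struct rga_external_buffer *external_buffer,
                                            struct rga_internal_buffer *internal_buffer,
                                            struct rga_job *job, int write_flag)
        {
                switch (external_buffer->type) {
                case RGA_DMA_BUFFER:            /* assumed constant */
                        return rga_mm_map_dma_buffer(external_buffer, internal_buffer, job);
                case RGA_VIRTUAL_ADDRESS:       /* assumed constant */
                        return rga_mm_map_virt_addr(external_buffer, internal_buffer,
                                                    job, write_flag);
                case RGA_PHYSICAL_ADDRESS:      /* assumed constant */
                        return rga_mm_map_phys_addr(external_buffer, internal_buffer, job);
                default:
                        return -EINVAL;
                }
        }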
1031 static bool rga_mm_is_need_mmu(struct rga_job *job, struct rga_internal_buffer *buffer) in rga_mm_is_need_mmu() argument
1033 if (buffer == NULL || job == NULL || job->scheduler == NULL) in rga_mm_is_need_mmu()
1037 if (job->scheduler->data->mmu == RGA_IOMMU) in rga_mm_is_need_mmu()
1049 static int rga_mm_set_mmu_flag(struct rga_job *job) in rga_mm_set_mmu_flag() argument
1057 src_mmu_en = rga_mm_is_need_mmu(job, job->src_buffer.addr); in rga_mm_set_mmu_flag()
1058 src1_mmu_en = rga_mm_is_need_mmu(job, job->src1_buffer.addr); in rga_mm_set_mmu_flag()
1059 dst_mmu_en = rga_mm_is_need_mmu(job, job->dst_buffer.addr); in rga_mm_set_mmu_flag()
1060 els_mmu_en = rga_mm_is_need_mmu(job, job->els_buffer.addr); in rga_mm_set_mmu_flag()
1062 mmu_info = &job->rga_command_base.mmu_info; in rga_mm_set_mmu_flag()
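The rga_mm_set_mmu_flag() references show one rga_mm_is_need_mmu() query per channel feeding job->rga_command_base.mmu_info. The sketch below shows one way the four results could be folded into the flag word; the per-channel bit positions 8..11 mirror the shifts visible in rga_mm_get_external_buffer() further down, while the low enable bit and the exact layout are assumptions.

        /*
         * Sketch only: bits 8..11 match the '>> 8' .. '>> 11' shifts seen in
         * rga_mm_get_external_buffer(); the low enable bit is an assumption.
         */
        static int rga_mm_set_mmu_flag_sketch(struct rga_job *job)
        {
                bool src_mmu_en, src1_mmu_en, dst_mmu_en, els_mmu_en;
                u32 flag = 0;

                src_mmu_en  = rga_mm_is_need_mmu(job, job->src_buffer.addr);
                src1_mmu_en = rga_mm_is_need_mmu(job, job->src1_buffer.addr);
                dst_mmu_en  = rga_mm_is_need_mmu(job, job->dst_buffer.addr);
                els_mmu_en  = rga_mm_is_need_mmu(job, job->els_buffer.addr);

                if (src_mmu_en || src1_mmu_en || dst_mmu_en || els_mmu_en)
                        flag |= 0x1;                    /* assumed global enable bit */

                flag |= src_mmu_en  << 8;               /* src0, matches '>> 8' below */
                flag |= src1_mmu_en << 9;               /* src1, matches '>> 9' below */
                flag |= dst_mmu_en  << 10;              /* dst,  matches '>> 10' below */
                flag |= els_mmu_en  << 11;              /* els,  matches '>> 11' below */

                job->rga_command_base.mmu_info.mmu_flag = flag;

                return 0;
        }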
1135 static int rga_mm_set_mmu_base(struct rga_job *job, in rga_mm_set_mmu_base() argument
1182 if (job->flags & RGA_JOB_USE_HANDLE) { in rga_mm_set_mmu_base()
1247 if (job->flags & RGA_JOB_USE_HANDLE) { in rga_mm_set_mmu_base()
1293 if (job->flags & RGA_JOB_USE_HANDLE) in rga_mm_set_mmu_base()
1299 struct rga_job *job, in rga_mm_sync_dma_sg_for_device() argument
1308 __func__, __LINE__, job->core); in rga_mm_sync_dma_sg_for_device()
1315 __func__, __LINE__, job->core); in rga_mm_sync_dma_sg_for_device()
1325 struct rga_job *job, in rga_mm_sync_dma_sg_for_cpu() argument
1334 __func__, __LINE__, job->core); in rga_mm_sync_dma_sg_for_cpu()
1341 __func__, __LINE__, job->core); in rga_mm_sync_dma_sg_for_cpu()
1350 static int rga_mm_get_buffer_info(struct rga_job *job, in rga_mm_get_buffer_info() argument
1356 switch (job->scheduler->data->mmu) { in rga_mm_get_buffer_info()
1361 job->core, internal_buffer->type); in rga_mm_get_buffer_info()
1396 struct rga_job *job, in rga_mm_get_buffer() argument
1430 ret = rga_mm_get_buffer_info(job, internal_buffer, channel_addr); in rga_mm_get_buffer()
1450 ret = rga_mm_sync_dma_sg_for_device(internal_buffer, job, dir); in rga_mm_get_buffer()
1469 struct rga_job *job, in rga_mm_put_buffer() argument
1474 if (rga_mm_sync_dma_sg_for_cpu(internal_buffer, job, dir)) in rga_mm_put_buffer()
1483 struct rga_job *job, in rga_mm_put_channel_handle_info() argument
1488 rga_mm_put_buffer(mm, job, job_buf->y_addr, dir); in rga_mm_put_channel_handle_info()
1490 rga_mm_put_buffer(mm, job, job_buf->uv_addr, dir); in rga_mm_put_channel_handle_info()
1492 rga_mm_put_buffer(mm, job, job_buf->v_addr, dir); in rga_mm_put_channel_handle_info()
1499 struct rga_job *job, in rga_mm_get_channel_handle_info() argument
1520 ret = rga_mm_get_buffer(mm, job, handle, &img->yrgb_addr, in rga_mm_get_channel_handle_info()
1530 ret = rga_mm_get_buffer(mm, job, handle, &img->uv_addr, in rga_mm_get_channel_handle_info()
1540 ret = rga_mm_get_buffer(mm, job, handle, &img->v_addr, in rga_mm_get_channel_handle_info()
1550 ret = rga_mm_get_buffer(mm, job, handle, &img->yrgb_addr, in rga_mm_get_channel_handle_info()
1561 if (job->scheduler->data->mmu == RGA_MMU && in rga_mm_get_channel_handle_info()
1562 rga_mm_is_need_mmu(job, job_buf->addr)) { in rga_mm_get_channel_handle_info()
1563 ret = rga_mm_set_mmu_base(job, img, job_buf); in rga_mm_get_channel_handle_info()
1567 rga_mm_put_channel_handle_info(mm, job, job_buf, dir); in rga_mm_get_channel_handle_info()
1575 static int rga_mm_get_handle_info(struct rga_job *job) in rga_mm_get_handle_info() argument
1582 req = &job->rga_command_base; in rga_mm_get_handle_info()
1586 ret = rga_mm_get_channel_handle_info(mm, job, &req->src, in rga_mm_get_handle_info()
1587 &job->src_buffer, in rga_mm_get_handle_info()
1596 ret = rga_mm_get_channel_handle_info(mm, job, &req->dst, in rga_mm_get_handle_info()
1597 &job->dst_buffer, in rga_mm_get_handle_info()
1613 ret = rga_mm_get_channel_handle_info(mm, job, &req->pat, in rga_mm_get_handle_info()
1614 &job->src1_buffer, in rga_mm_get_handle_info()
1617 ret = rga_mm_get_channel_handle_info(mm, job, &req->pat, in rga_mm_get_handle_info()
1618 &job->els_buffer, in rga_mm_get_handle_info()
1627 rga_mm_set_mmu_flag(job); in rga_mm_get_handle_info()
1632 static void rga_mm_put_handle_info(struct rga_job *job) in rga_mm_put_handle_info() argument
1636 rga_mm_put_channel_handle_info(mm, job, &job->src_buffer, DMA_NONE); in rga_mm_put_handle_info()
1637 rga_mm_put_channel_handle_info(mm, job, &job->dst_buffer, DMA_FROM_DEVICE); in rga_mm_put_handle_info()
1638 rga_mm_put_channel_handle_info(mm, job, &job->src1_buffer, DMA_NONE); in rga_mm_put_handle_info()
1639 rga_mm_put_channel_handle_info(mm, job, &job->els_buffer, DMA_NONE); in rga_mm_put_handle_info()
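Together, rga_mm_get_handle_info() and rga_mm_put_handle_info() bracket the handle-based path: resolve each channel's handles, set the MMU flags, and release everything when the job is torn down. A sketch of the get side follows; the struct rga_req type name, the rga_drvdata->mm lookup, the DMA directions, the trailing direction argument, and the error unwinding are assumptions, and the render-mode test mirrors the one visible in rga_mm_get_external_buffer() below.

        /*
         * Sketch only: call pattern from the listing; directions, the mm
         * lookup and the unwinding are assumptions.
         */
        static int rga_mm_get_handle_info_sketch(struct rga_job *job)
        {
                struct rga_req *req = &job->rga_command_base;
                struct rga_mm *mm = rga_drvdata->mm;    /* assumed lookup */
                int ret;

                ret = rga_mm_get_channel_handle_info(mm, job, &req->src,
                                                     &job->src_buffer, DMA_TO_DEVICE);
                if (ret < 0)
                        return ret;

                ret = rga_mm_get_channel_handle_info(mm, job, &req->dst,
                                                     &job->dst_buffer, DMA_FROM_DEVICE);
                if (ret < 0)
                        goto err_put;

                /* req->pat backs either els or src1, depending on the render mode. */
                if (req->render_mode == UPDATE_PALETTE_TABLE_MODE)
                        ret = rga_mm_get_channel_handle_info(mm, job, &req->pat,
                                                             &job->els_buffer, DMA_TO_DEVICE);
                else if (req->bsfilter_flag)
                        ret = rga_mm_get_channel_handle_info(mm, job, &req->pat,
                                                             &job->src1_buffer, DMA_TO_DEVICE);
                if (ret < 0)
                        goto err_put;

                rga_mm_set_mmu_flag(job);

                return 0;

        err_put:
                rga_mm_put_handle_info(job);
                return ret;
        }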
1696 static void rga_mm_put_external_buffer(struct rga_job *job) in rga_mm_put_external_buffer() argument
1698 if (job->src_buffer.ex_addr) in rga_mm_put_external_buffer()
1699 rga_mm_put_channel_external_buffer(&job->src_buffer); in rga_mm_put_external_buffer()
1700 if (job->src1_buffer.ex_addr) in rga_mm_put_external_buffer()
1701 rga_mm_put_channel_external_buffer(&job->src1_buffer); in rga_mm_put_external_buffer()
1702 if (job->dst_buffer.ex_addr) in rga_mm_put_external_buffer()
1703 rga_mm_put_channel_external_buffer(&job->dst_buffer); in rga_mm_put_external_buffer()
1704 if (job->els_buffer.ex_addr) in rga_mm_put_external_buffer()
1705 rga_mm_put_channel_external_buffer(&job->els_buffer); in rga_mm_put_external_buffer()
1708 static int rga_mm_get_external_buffer(struct rga_job *job) in rga_mm_get_external_buffer() argument
1718 if (job->rga_command_base.render_mode != COLOR_FILL_MODE) in rga_mm_get_external_buffer()
1719 src0 = &job->rga_command_base.src; in rga_mm_get_external_buffer()
1721 if (job->rga_command_base.render_mode != UPDATE_PALETTE_TABLE_MODE) in rga_mm_get_external_buffer()
1722 src1 = job->rga_command_base.bsfilter_flag ? in rga_mm_get_external_buffer()
1723 &job->rga_command_base.pat : NULL; in rga_mm_get_external_buffer()
1725 els = &job->rga_command_base.pat; in rga_mm_get_external_buffer()
1727 dst = &job->rga_command_base.dst; in rga_mm_get_external_buffer()
1730 mmu_flag = ((job->rga_command_base.mmu_info.mmu_flag >> 8) & 1); in rga_mm_get_external_buffer()
1731 ret = rga_mm_get_channel_external_buffer(mmu_flag, src0, &job->src_buffer); in rga_mm_get_external_buffer()
1739 mmu_flag = ((job->rga_command_base.mmu_info.mmu_flag >> 10) & 1); in rga_mm_get_external_buffer()
1740 ret = rga_mm_get_channel_external_buffer(mmu_flag, dst, &job->dst_buffer); in rga_mm_get_external_buffer()
1748 mmu_flag = ((job->rga_command_base.mmu_info.mmu_flag >> 9) & 1); in rga_mm_get_external_buffer()
1749 ret = rga_mm_get_channel_external_buffer(mmu_flag, src1, &job->src1_buffer); in rga_mm_get_external_buffer()
1757 mmu_flag = ((job->rga_command_base.mmu_info.mmu_flag >> 11) & 1); in rga_mm_get_external_buffer()
1758 ret = rga_mm_get_channel_external_buffer(mmu_flag, els, &job->els_buffer); in rga_mm_get_external_buffer()
1767 rga_mm_put_external_buffer(job); in rga_mm_get_external_buffer()
1771 static void rga_mm_unmap_channel_job_buffer(struct rga_job *job, in rga_mm_unmap_channel_job_buffer() argument
1776 if (rga_mm_sync_dma_sg_for_cpu(job_buffer->addr, job, dir)) in rga_mm_unmap_channel_job_buffer()
1785 static int rga_mm_map_channel_job_buffer(struct rga_job *job, in rga_mm_map_channel_job_buffer() argument
1800 ret = rga_mm_map_buffer(job_buffer->ex_addr, buffer, job, write_flag); in rga_mm_map_channel_job_buffer()
1806 ret = rga_mm_get_buffer_info(job, buffer, &img->yrgb_addr); in rga_mm_map_channel_job_buffer()
1813 ret = rga_mm_sync_dma_sg_for_device(buffer, job, dir); in rga_mm_map_channel_job_buffer()
1824 if (job->scheduler->data->mmu == RGA_MMU && in rga_mm_map_channel_job_buffer()
1825 rga_mm_is_need_mmu(job, job_buffer->addr)) { in rga_mm_map_channel_job_buffer()
1826 ret = rga_mm_set_mmu_base(job, img, job_buffer); in rga_mm_map_channel_job_buffer()
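rga_mm_map_channel_job_buffer() ties the non-handle path together for one channel: map the external buffer, fill in the hardware address, sync the scatterlist for the device, and set up a software-MMU page-table base when the core has no IOMMU. The sketch below follows that order; the struct rga_img_info_t parameter type, the kzalloc of the internal buffer, and the cleanup on failure are not shown by the listing and are guesses.

        /*
         * Sketch only: the sequence map -> get_buffer_info -> sync-for-device
         * -> optional MMU base comes from the references above; allocation
         * and error handling are assumptions.
         */
        static int rga_mm_map_channel_job_buffer_sketch(struct rga_job *job,
                                                        struct rga_img_info_t *img,
                                                        struct rga_job_buffer *job_buffer,
                                                        enum dma_data_direction dir,
                                                        int write_flag)
        {
                struct rga_internal_buffer *buffer;
                int ret;

                buffer = kzalloc(sizeof(*buffer), GFP_KERNEL);  /* assumed allocation */
                if (!buffer)
                        return -ENOMEM;

                ret = rga_mm_map_buffer(job_buffer->ex_addr, buffer, job, write_flag);
                if (ret < 0)
                        goto err_free;

                ret = rga_mm_get_buffer_info(job, buffer, &img->yrgb_addr);
                if (ret < 0)
                        goto err_free;

                ret = rga_mm_sync_dma_sg_for_device(buffer, job, dir);
                if (ret < 0)
                        goto err_free;

                job_buffer->addr = buffer;

                /* Software MMU (RGA_MMU) needs a page-table base; IOMMU cores do not. */
                if (job->scheduler->data->mmu == RGA_MMU &&
                    rga_mm_is_need_mmu(job, job_buffer->addr)) {
                        ret = rga_mm_set_mmu_base(job, img, job_buffer);
                        if (ret < 0)
                                goto err_free;
                }

                return 0;

        err_free:
                /* the real driver presumably also unmaps on the later failures;
                 * that cleanup is omitted from this sketch */
                kfree(buffer);
                return ret;
        }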
1844 static void rga_mm_unmap_buffer_info(struct rga_job *job) in rga_mm_unmap_buffer_info() argument
1846 if (job->src_buffer.addr) in rga_mm_unmap_buffer_info()
1847 rga_mm_unmap_channel_job_buffer(job, &job->src_buffer, DMA_NONE); in rga_mm_unmap_buffer_info()
1848 if (job->dst_buffer.addr) in rga_mm_unmap_buffer_info()
1849 rga_mm_unmap_channel_job_buffer(job, &job->dst_buffer, DMA_FROM_DEVICE); in rga_mm_unmap_buffer_info()
1850 if (job->src1_buffer.addr) in rga_mm_unmap_buffer_info()
1851 rga_mm_unmap_channel_job_buffer(job, &job->src1_buffer, DMA_NONE); in rga_mm_unmap_buffer_info()
1852 if (job->els_buffer.addr) in rga_mm_unmap_buffer_info()
1853 rga_mm_unmap_channel_job_buffer(job, &job->els_buffer, DMA_NONE); in rga_mm_unmap_buffer_info()
1855 rga_mm_put_external_buffer(job); in rga_mm_unmap_buffer_info()
1858 static int rga_mm_map_buffer_info(struct rga_job *job) in rga_mm_map_buffer_info() argument
1864 ret = rga_mm_get_external_buffer(job); in rga_mm_map_buffer_info()
1870 req = &job->rga_command_base; in rga_mm_map_buffer_info()
1872 if (likely(job->src_buffer.ex_addr)) { in rga_mm_map_buffer_info()
1873 ret = rga_mm_map_channel_job_buffer(job, &req->src, in rga_mm_map_buffer_info()
1874 &job->src_buffer, in rga_mm_map_buffer_info()
1882 if (likely(job->dst_buffer.ex_addr)) { in rga_mm_map_buffer_info()
1883 ret = rga_mm_map_channel_job_buffer(job, &req->dst, in rga_mm_map_buffer_info()
1884 &job->dst_buffer, in rga_mm_map_buffer_info()
1892 if (job->src1_buffer.ex_addr) { in rga_mm_map_buffer_info()
1898 ret = rga_mm_map_channel_job_buffer(job, &req->pat, in rga_mm_map_buffer_info()
1899 &job->src1_buffer, in rga_mm_map_buffer_info()
1907 if (job->els_buffer.ex_addr) { in rga_mm_map_buffer_info()
1908 ret = rga_mm_map_channel_job_buffer(job, &req->pat, in rga_mm_map_buffer_info()
1909 &job->els_buffer, in rga_mm_map_buffer_info()
1917 rga_mm_set_mmu_flag(job); in rga_mm_map_buffer_info()
1921 rga_mm_unmap_buffer_info(job); in rga_mm_map_buffer_info()
1926 int rga_mm_map_job_info(struct rga_job *job) in rga_mm_map_job_info() argument
1930 if (job->flags & RGA_JOB_USE_HANDLE) { in rga_mm_map_job_info()
1931 ret = rga_mm_get_handle_info(job); in rga_mm_map_job_info()
1937 ret = rga_mm_map_buffer_info(job); in rga_mm_map_job_info()
1947 void rga_mm_unmap_job_info(struct rga_job *job) in rga_mm_unmap_job_info() argument
1949 if (job->flags & RGA_JOB_USE_HANDLE) in rga_mm_unmap_job_info()
1950 rga_mm_put_handle_info(job); in rga_mm_unmap_job_info()
1952 rga_mm_unmap_buffer_info(job); in rga_mm_unmap_job_info()
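Finally, the two exported entry points select between the handle path and the direct-mapping path on RGA_JOB_USE_HANDLE. The sketch below is reconstructed from the references above; the else branches and any error reporting fall in line-number gaps the search does not show, so their exact form is assumed.

        /*
         * Sketch of the exported entry points; the else branches are inferred
         * from the gaps in the line numbers above.
         */
        int rga_mm_map_job_info(struct rga_job *job)
        {
                int ret;

                if (job->flags & RGA_JOB_USE_HANDLE)
                        ret = rga_mm_get_handle_info(job);
                else
                        ret = rga_mm_map_buffer_info(job);

                return ret;
        }

        void rga_mm_unmap_job_info(struct rga_job *job)
        {
                if (job->flags & RGA_JOB_USE_HANDLE)
                        rga_mm_put_handle_info(job);
                else
                        rga_mm_unmap_buffer_info(job);
        }

Keeping both acquisition styles behind one map/unmap pair means the scheduler and the hardware backends never need to know whether a buffer arrived as a handle, a dma-buf, a virtual address, or a physical address.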