
Searched refs:ctxs (Results 1 – 17 of 17) sorted by relevance

/OK3568_Linux_fs/kernel/drivers/video/fbdev/omap2/omapfb/
vrfb.c
67 static struct vrfb_ctx *ctxs; variable
88 omap2_sms_write_rot_control(ctxs[ctx].control, ctx); in restore_hw_context()
89 omap2_sms_write_rot_size(ctxs[ctx].size, ctx); in restore_hw_context()
90 omap2_sms_write_rot_physical_ba(ctxs[ctx].physical_ba, ctx); in restore_hw_context()
211 ctxs[ctx].physical_ba = paddr; in omap_vrfb_setup()
212 ctxs[ctx].size = size; in omap_vrfb_setup()
213 ctxs[ctx].control = control; in omap_vrfb_setup()
308 paddr = ctxs[ctx].base + SMS_ROT_VIRT_BASE(rot); in omap_vrfb_request_ctx()
348 ctxs = devm_kcalloc(&pdev->dev, in vrfb_probe()
352 if (!ctxs) in vrfb_probe()
[all …]
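
The vrfb.c hits show a driver-private array of per-context register shadows: omap_vrfb_setup() records the programmed values and restore_hw_context() replays them into the hardware. Below is a minimal user-space sketch of that shadow-and-replay pattern; the names vrfb_ctx_shadow and hw_write() are hypothetical stand-ins for the driver's structures and the omap2_sms_write_*() accessors.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical shadow of the rotation-engine registers for one context. */
struct vrfb_ctx_shadow {
    uint32_t control;
    uint32_t size;
    uint32_t physical_ba;
};

static struct vrfb_ctx_shadow *ctxs;   /* one entry per hardware context */

/* Stand-in for the real register writes (omap2_sms_write_* in the driver). */
static void hw_write(const char *reg, unsigned int ctx, uint32_t val)
{
    printf("ctx %u: %s <- 0x%08x\n", ctx, reg, val);
}

/* Record the programmed values so they can be replayed after a power cycle. */
static void vrfb_setup(unsigned int ctx, uint32_t paddr, uint32_t size, uint32_t control)
{
    ctxs[ctx].physical_ba = paddr;
    ctxs[ctx].size        = size;
    ctxs[ctx].control     = control;
}

/* Replay the shadowed values into the hardware, as restore_hw_context() does. */
static void restore_hw_context(unsigned int ctx)
{
    hw_write("ROT_CONTROL",     ctx, ctxs[ctx].control);
    hw_write("ROT_SIZE",        ctx, ctxs[ctx].size);
    hw_write("ROT_PHYSICAL_BA", ctx, ctxs[ctx].physical_ba);
}

int main(void)
{
    unsigned int num_ctxs = 4;

    ctxs = calloc(num_ctxs, sizeof(*ctxs));   /* devm_kcalloc() in the driver */
    if (!ctxs)
        return 1;

    vrfb_setup(1, 0x80000000u, 0x00100000u, 0x5u);
    restore_hw_context(1);

    free(ctxs);
    return 0;
}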
/OK3568_Linux_fs/kernel/arch/x86/mm/
tlb.c
197 this_cpu_write(cpu_tlbstate.ctxs[asid].ctx_id, 0); in clear_asid_other()
220 if (this_cpu_read(cpu_tlbstate.ctxs[asid].ctx_id) != in choose_new_asid()
225 *need_flush = (this_cpu_read(cpu_tlbstate.ctxs[asid].tlb_gen) < in choose_new_asid()
487 VM_WARN_ON(this_cpu_read(cpu_tlbstate.ctxs[prev_asid].ctx_id) != in switch_mm_irqs_off()
515 if (this_cpu_read(cpu_tlbstate.ctxs[prev_asid].tlb_gen) == in switch_mm_irqs_off()
559 this_cpu_write(cpu_tlbstate.ctxs[new_asid].ctx_id, next->context.ctx_id); in switch_mm_irqs_off()
560 this_cpu_write(cpu_tlbstate.ctxs[new_asid].tlb_gen, next_tlb_gen); in switch_mm_irqs_off()
642 this_cpu_write(cpu_tlbstate.ctxs[0].ctx_id, mm->context.ctx_id); in initialize_tlbstate_and_flush()
643 this_cpu_write(cpu_tlbstate.ctxs[0].tlb_gen, tlb_gen); in initialize_tlbstate_and_flush()
646 this_cpu_write(cpu_tlbstate.ctxs[i].ctx_id, 0); in initialize_tlbstate_and_flush()
[all …]
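
Here cpu_tlbstate.ctxs[] is a small per-ASID table of (ctx_id, tlb_gen) pairs that choose_new_asid() scans for a slot already holding the incoming mm, flushing only when the recorded generation is stale. The following is a simplified single-CPU sketch of that lookup with a hypothetical choose_asid(); the real code uses per-CPU accessors and a victim-selection policy when no slot matches.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define TLB_NR_DYN_ASIDS 6   /* small fixed-size table, as in the kernel */

/* Simplified stand-in for struct tlb_context in tlbflush.h. */
struct tlb_context {
    uint64_t ctx_id;   /* which mm last used this ASID (0 = unused) */
    uint64_t tlb_gen;  /* TLB generation last seen by this ASID */
};

static struct tlb_context ctxs[TLB_NR_DYN_ASIDS];

/*
 * Pick an ASID for the mm identified by ctx_id, mirroring the shape of
 * choose_new_asid(): reuse a slot whose ctx_id matches and flush only if
 * its recorded generation is behind next_tlb_gen.
 */
static int choose_asid(uint64_t ctx_id, uint64_t next_tlb_gen, bool *need_flush)
{
    for (int asid = 0; asid < TLB_NR_DYN_ASIDS; asid++) {
        if (ctxs[asid].ctx_id != ctx_id)
            continue;
        *need_flush = ctxs[asid].tlb_gen < next_tlb_gen;
        return asid;
    }

    /* No match: the real implementation picks a victim slot to recycle. */
    *need_flush = true;
    return 0;
}

int main(void)
{
    bool flush;

    ctxs[2].ctx_id = 42;
    ctxs[2].tlb_gen = 7;

    int asid = choose_asid(42, 9, &flush);
    printf("asid=%d need_flush=%d\n", asid, flush);
    return 0;
}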
/OK3568_Linux_fs/external/mpp/test/
mpi_dec_multi_test.c
589 MpiDecMultiCtxInfo *ctxs = NULL; in main() local
605 ctxs = mpp_calloc(MpiDecMultiCtxInfo, cmd->nthreads); in main()
606 if (NULL == ctxs) { in main()
612 ctxs[i].cmd = cmd; in main()
614 ret = pthread_create(&ctxs[i].thd, NULL, multi_dec_decode, &ctxs[i]); in main()
629 ctxs[i].ctx.loop_end = 1; in main()
633 pthread_join(ctxs[i].thd, NULL); in main()
636 MpiDecMultiCtxRet *dec_ret = &ctxs[i].ret; in main()
644 mpp_free(ctxs); in main()
645 ctxs = NULL; in main()
mpi_enc_mt_test.cpp
1012 MpiEncMtCtxInfo *ctxs = NULL; in enc_test_mt() local
1017 ctxs = mpp_calloc(MpiEncMtCtxInfo, cmd->nthreads); in enc_test_mt()
1018 if (NULL == ctxs) { in enc_test_mt()
1024 ctxs[i].cmd = cmd; in enc_test_mt()
1025 ctxs[i].name = name; in enc_test_mt()
1026 ctxs[i].chn = i; in enc_test_mt()
1028 ret = mt_test_ctx_init(&ctxs[i]); in enc_test_mt()
1034 ret = mt_test_res_init(&ctxs[i]); in enc_test_mt()
1040 ret = pthread_create(&ctxs[i].thd_out, NULL, enc_test_output, &ctxs[i]); in enc_test_mt()
1046 ret = pthread_create(&ctxs[i].thd_in, NULL, enc_test_input, &ctxs[i]); in enc_test_mt()
[all …]
mpi_enc_test.c
1000 MpiEncMultiCtxInfo *ctxs = NULL; in enc_test_multi() local
1005 ctxs = mpp_calloc(MpiEncMultiCtxInfo, cmd->nthreads); in enc_test_multi()
1006 if (NULL == ctxs) { in enc_test_multi()
1012 ctxs[i].cmd = cmd; in enc_test_multi()
1013 ctxs[i].name = name; in enc_test_multi()
1014 ctxs[i].chn = i; in enc_test_multi()
1016 ret = pthread_create(&ctxs[i].thd, NULL, enc_test, &ctxs[i]); in enc_test_multi()
1031 ctxs[i].ctx.loop_end = 1; in enc_test_multi()
1035 pthread_join(ctxs[i].thd, NULL); in enc_test_multi()
1038 MpiEncMultiCtxRet *enc_ret = &ctxs[i].ret; in enc_test_multi()
[all …]
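
All three mpp tests follow the same shape: calloc an array of per-thread context structs, hand &ctxs[i] to each worker via pthread_create(), flip loop_end to stop the workers, then join and read the per-thread results out of the array. Here is a runnable sketch of that pattern with hypothetical worker_ctx/worker() names (build with -pthread).

#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical per-thread context, loosely modeled on MpiDecMultiCtxInfo. */
struct worker_ctx {
    int          chn;        /* channel index */
    volatile int loop_end;   /* set by main() to request shutdown */
    long         frames;     /* per-thread result, read after join */
    pthread_t    thd;
};

static void *worker(void *arg)
{
    struct worker_ctx *ctx = arg;

    while (!ctx->loop_end)
        ctx->frames++;       /* real code would decode/encode one frame here */
    return NULL;
}

int main(void)
{
    int nthreads = 4;
    int created = 0;

    struct worker_ctx *ctxs = calloc(nthreads, sizeof(*ctxs));  /* mpp_calloc() */
    if (!ctxs)
        return 1;

    for (int i = 0; i < nthreads; i++) {
        ctxs[i].chn = i;
        if (pthread_create(&ctxs[i].thd, NULL, worker, &ctxs[i]))
            break;
        created++;
    }

    /* Ask every started worker to stop, then collect results. */
    for (int i = 0; i < created; i++)
        ctxs[i].loop_end = 1;

    for (int i = 0; i < created; i++) {
        pthread_join(ctxs[i].thd, NULL);
        printf("chn %d: %ld iterations\n", i, ctxs[i].frames);
    }

    free(ctxs);
    return 0;
}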
/OK3568_Linux_fs/kernel/block/
blk-mq-sysfs.c
20 struct blk_mq_ctxs *ctxs = container_of(kobj, struct blk_mq_ctxs, kobj); in blk_mq_sysfs_release() local
22 free_percpu(ctxs->queue_ctx); in blk_mq_sysfs_release()
23 kfree(ctxs); in blk_mq_sysfs_release()
31 kobject_put(&ctx->ctxs->kobj); in blk_mq_ctx_sysfs_release()
44 kfree(hctx->ctxs); in blk_mq_hw_sysfs_release()
blk-mq.c
1013 struct blk_mq_ctx *ctx = hctx->ctxs[bitnr]; in flush_busy_ctx()
1048 struct blk_mq_ctx *ctx = hctx->ctxs[bitnr]; in dispatch_rq_from_ctx()
2812 hctx->ctxs = kmalloc_array_node(nr_cpu_ids, sizeof(void *), in blk_mq_alloc_hctx()
2814 if (!hctx->ctxs) in blk_mq_alloc_hctx()
2839 kfree(hctx->ctxs); in blk_mq_alloc_hctx()
2964 hctx->ctxs[hctx->nr_ctx++] = ctx; in blk_mq_map_swqueue()
3084 struct blk_mq_ctxs *ctxs; in blk_mq_alloc_ctxs() local
3087 ctxs = kzalloc(sizeof(*ctxs), GFP_KERNEL); in blk_mq_alloc_ctxs()
3088 if (!ctxs) in blk_mq_alloc_ctxs()
3091 ctxs->queue_ctx = alloc_percpu(struct blk_mq_ctx); in blk_mq_alloc_ctxs()
[all …]
blk-mq.h
36 struct blk_mq_ctxs *ctxs; member
blk-mq-sched.c
220 return hctx->ctxs[idx]; in blk_mq_next_ctx()
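
In blk-mq, each hardware queue keeps hctx->ctxs, a pointer array sized for nr_cpu_ids, and blk_mq_map_swqueue() appends the software context of every CPU mapped to that queue. The following user-space sketch shows that two-level mapping with hypothetical sw_ctx/hw_ctx types; the kernel allocates the software contexts per-CPU and sizes the pointer array with kmalloc_array_node().

#include <stdio.h>
#include <stdlib.h>

/* Simplified analogue of blk_mq_ctx: one software queue per CPU. */
struct sw_ctx {
    int cpu;
};

/* Simplified analogue of blk_mq_hw_ctx: collects the sw contexts it serves. */
struct hw_ctx {
    struct sw_ctx **ctxs;   /* pointer array, like hctx->ctxs */
    unsigned int nr_ctx;
};

int main(void)
{
    int nr_cpus = 8, nr_hw = 2;

    /* One software context per CPU (alloc_percpu() in the kernel). */
    struct sw_ctx *sw = calloc(nr_cpus, sizeof(*sw));
    struct hw_ctx *hw = calloc(nr_hw, sizeof(*hw));
    if (!sw || !hw)
        return 1;

    for (int i = 0; i < nr_hw; i++) {
        /* kmalloc_array_node(nr_cpu_ids, sizeof(void *), ...) in blk_mq_alloc_hctx() */
        hw[i].ctxs = calloc(nr_cpus, sizeof(*hw[i].ctxs));
        if (!hw[i].ctxs)
            return 1;
    }

    /* Map each CPU's software context to a hardware queue (blk_mq_map_swqueue()). */
    for (int cpu = 0; cpu < nr_cpus; cpu++) {
        struct hw_ctx *h = &hw[cpu % nr_hw];

        sw[cpu].cpu = cpu;
        h->ctxs[h->nr_ctx++] = &sw[cpu];
    }

    for (int i = 0; i < nr_hw; i++)
        printf("hw queue %d serves %u software contexts\n", i, hw[i].nr_ctx);

    for (int i = 0; i < nr_hw; i++)
        free(hw[i].ctxs);
    free(hw);
    free(sw);
    return 0;
}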
/OK3568_Linux_fs/kernel/drivers/iommu/arm/arm-smmu/
qcom_iommu.c
57 struct qcom_iommu_ctx *ctxs[]; /* indexed by asid-1 */ member
99 return qcom_iommu->ctxs[asid - 1]; in to_ctx()
725 qcom_iommu->ctxs[ctx->asid - 1] = ctx; in qcom_iommu_ctx_probe()
737 qcom_iommu->ctxs[ctx->asid - 1] = NULL; in qcom_iommu_ctx_remove()
786 qcom_iommu = devm_kzalloc(dev, struct_size(qcom_iommu, ctxs, max_asid), in qcom_iommu_device_probe()
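
qcom_iommu.c sizes its device structure with struct_size() so the ctxs[] flexible array member holds one slot per ASID, and to_ctx() indexes it with asid - 1 because ASID numbering starts at 1. Below is a user-space sketch of the same layout with hypothetical iommu_dev/to_ctx() names; calloc stands in for devm_kzalloc() + struct_size().

#include <stdio.h>
#include <stdlib.h>

struct ctx {
    unsigned int asid;
};

/* Container ending in a flexible array member, as in the driver. */
struct iommu_dev {
    unsigned int max_asid;
    struct ctx *ctxs[];     /* indexed by asid - 1 */
};

/* ASIDs start at 1, so slot lookup subtracts one (compare to_ctx()). */
static struct ctx *to_ctx(struct iommu_dev *dev, unsigned int asid)
{
    if (asid == 0 || asid > dev->max_asid)
        return NULL;
    return dev->ctxs[asid - 1];
}

int main(void)
{
    unsigned int max_asid = 4;

    /* devm_kzalloc(dev, struct_size(qcom_iommu, ctxs, max_asid), ...) in the driver. */
    struct iommu_dev *dev = calloc(1, sizeof(*dev) + max_asid * sizeof(struct ctx *));
    if (!dev)
        return 1;
    dev->max_asid = max_asid;

    struct ctx c = { .asid = 2 };
    dev->ctxs[c.asid - 1] = &c;          /* registration, as in qcom_iommu_ctx_probe() */

    printf("asid 2 -> %p\n", (void *)to_ctx(dev, 2));

    dev->ctxs[c.asid - 1] = NULL;        /* teardown, as in qcom_iommu_ctx_remove() */
    free(dev);
    return 0;
}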
/OK3568_Linux_fs/kernel/include/linux/
damon.h
496 int damon_start(struct damon_ctx **ctxs, int nr_ctxs);
497 int damon_stop(struct damon_ctx **ctxs, int nr_ctxs);
blk-mq.h
96 struct blk_mq_ctx **ctxs; member
580 ({ ctx = (hctx)->ctxs[(i)]; 1; }); (i)++)
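
The blk-mq.h hit at line 580 looks like the tail of an hctx_for_each_ctx()-style iterator macro: a GNU statement expression assigns ctx from the ctxs[] array and yields 1 so the loop condition stays true while the index is in range. Here is a self-contained sketch of that macro shape over hypothetical sw_ctx/hw_ctx types; it requires gcc or clang for the statement-expression extension.

#include <stdio.h>

struct sw_ctx { int cpu; };

struct hw_ctx {
    struct sw_ctx **ctxs;
    unsigned int nr_ctx;
};

/*
 * Iterate over every software context attached to a hardware queue.
 * The ({ ...; 1; }) block performs the assignment and always evaluates
 * to true, so only the bound check can end the loop.
 */
#define hctx_for_each_ctx(hctx, ctx, i)                          \
    for ((i) = 0; (i) < (hctx)->nr_ctx &&                        \
         ({ (ctx) = (hctx)->ctxs[(i)]; 1; }); (i)++)

int main(void)
{
    struct sw_ctx a = { .cpu = 0 }, b = { .cpu = 3 };
    struct sw_ctx *table[] = { &a, &b };
    struct hw_ctx hctx = { .ctxs = table, .nr_ctx = 2 };

    struct sw_ctx *ctx;
    unsigned int i;

    hctx_for_each_ctx(&hctx, ctx, i)
        printf("slot %u maps cpu %d\n", i, ctx->cpu);

    return 0;
}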
/OK3568_Linux_fs/kernel/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
nv04.h
9 unsigned ctxs:5; member
dmanv04.c
101 u32 cm = ((1ULL << c->bits) - 1) << c->ctxs; in nv04_fifo_dma_fini()
104 nvkm_wo32(fctx, c->ctxp + data, cv | (rv << c->ctxs)); in nv04_fifo_dma_fini()
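
In the nouveau nv04 FIFO code, ctxs is a 5-bit field giving the bit position of a register field inside a context word: dmanv04.c builds the mask as ((1ULL << c->bits) - 1) << c->ctxs and ORs the shifted value back in. A small sketch of that width/shift descriptor follows, with hypothetical ramfc_field/field_insert() names.

#include <stdint.h>
#include <stdio.h>

/* Field descriptor like the nv04 layout entry: width in bits and shift. */
struct ramfc_field {
    unsigned bits:6;
    unsigned ctxs:5;   /* bit position of the field inside the 32-bit word */
};

/* Build the field mask the way nv04_fifo_dma_fini() does. */
static uint32_t field_mask(const struct ramfc_field *c)
{
    return ((1ULL << c->bits) - 1) << c->ctxs;
}

/* Replace the field inside word cv with value rv. */
static uint32_t field_insert(const struct ramfc_field *c, uint32_t cv, uint32_t rv)
{
    return (cv & ~field_mask(c)) | (rv << c->ctxs);
}

int main(void)
{
    struct ramfc_field f = { .bits = 8, .ctxs = 16 };
    uint32_t word = 0x12345678;

    printf("mask   = 0x%08x\n", field_mask(&f));
    printf("insert = 0x%08x\n", field_insert(&f, word, 0xAB));
    return 0;
}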
/OK3568_Linux_fs/kernel/arch/x86/include/asm/
tlbflush.h
152 struct tlb_context ctxs[TLB_NR_DYN_ASIDS]; member
/OK3568_Linux_fs/kernel/mm/damon/
core.c
413 int damon_start(struct damon_ctx **ctxs, int nr_ctxs) in damon_start() argument
425 err = __damon_start(ctxs[i]); in damon_start()
466 int damon_stop(struct damon_ctx **ctxs, int nr_ctxs) in damon_stop() argument
472 err = __damon_stop(ctxs[i]); in damon_stop()
/OK3568_Linux_fs/kernel/kernel/
workqueue.c
5315 LIST_HEAD(ctxs); in workqueue_apply_unbound_cpumask()
5335 list_add_tail(&ctx->list, &ctxs); in workqueue_apply_unbound_cpumask()
5338 list_for_each_entry_safe(ctx, n, &ctxs, list) { in workqueue_apply_unbound_cpumask()
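
workqueue_apply_unbound_cpumask() uses ctxs as an on-stack list head rather than an array: candidate apply contexts are queued with list_add_tail() and later walked with list_for_each_entry_safe(), which tolerates deleting the current entry. A kernel-style sketch of that prepare-then-commit list pattern, with a hypothetical apply_ctx/apply_all():

#include <linux/errno.h>
#include <linux/list.h>
#include <linux/slab.h>

/* Hypothetical per-item context collected on a local list. */
struct apply_ctx {
    int id;
    struct list_head list;
};

static int apply_all(int n)
{
    LIST_HEAD(ctxs);               /* on-stack list head, no array needed */
    struct apply_ctx *ctx, *tmp;
    int i, ret = 0;

    /* Phase 1: prepare one ctx per item and queue it locally. */
    for (i = 0; i < n; i++) {
        ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
        if (!ctx) {
            ret = -ENOMEM;
            break;
        }
        ctx->id = i;
        list_add_tail(&ctx->list, &ctxs);
    }

    /* Phase 2: commit (or simply discard on error), deleting as we walk. */
    list_for_each_entry_safe(ctx, tmp, &ctxs, list) {
        list_del(&ctx->list);
        kfree(ctx);
    }

    return ret;
}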