| /optee_os/core/kernel/ |
| mutex_lockdep.c | 44 cpu_spin_unlock_xrestore(&graph_lock, exceptions); in mutex_lock_check() 54 cpu_spin_unlock_xrestore(&graph_lock, exceptions); in mutex_trylock_check() 64 cpu_spin_unlock_xrestore(&graph_lock, exceptions); in mutex_unlock_check() 72 cpu_spin_unlock_xrestore(&graph_lock, exceptions); in mutex_destroy_check()
|
| notif.c | 52 cpu_spin_unlock_xrestore(&notif_lock, old_itr_status); in notif_async_is_started() 69 cpu_spin_unlock_xrestore(&notif_lock, old_itr_status); in notif_register_driver() 80 cpu_spin_unlock_xrestore(&notif_lock, old_itr_status); in notif_unregister_driver() 112 cpu_spin_unlock_xrestore(&notif_lock, old_itr_status); in notif_deliver_atomic_event() 139 cpu_spin_unlock_xrestore(&notif_lock, old_itr_status); in notif_deliver_event() 153 cpu_spin_unlock_xrestore(&notif_lock, old_itr_status); in notif_deliver_event()
|
| wait_queue.c | 75 cpu_spin_unlock_xrestore(&wq_spin_lock, old_itr_status); in wq_wait_init_condvar() 91 cpu_spin_unlock_xrestore(&wq_spin_lock, old_itr_status); in wq_wait_final_helper() 145 cpu_spin_unlock_xrestore(&wq_spin_lock, old_itr_status); in wq_wake_next() 190 cpu_spin_unlock_xrestore(&wq_spin_lock, old_itr_status); in wq_promote_condvar() 208 cpu_spin_unlock_xrestore(&wq_spin_lock, old_itr_status); in wq_have_condvar() 222 cpu_spin_unlock_xrestore(&wq_spin_lock, old_itr_status); in wq_is_empty()
|
| notif_default.c | 72 cpu_spin_unlock_xrestore(&notif_default_lock, old_itr_status); in notif_alloc_async_value() 96 cpu_spin_unlock_xrestore(&notif_default_lock, old_itr_status); in notif_free_async_value() 128 cpu_spin_unlock_xrestore(&notif_default_lock, old_itr_status); in notif_get_value() 155 cpu_spin_unlock_xrestore(&notif_default_lock, old_itr_status); in notif_send_async()
|
| mutex.c | 57 cpu_spin_unlock_xrestore(&m->spin_lock, old_itr_status); in __mutex_lock() 107 cpu_spin_unlock_xrestore(&m->spin_lock, old_itr_status); in __mutex_unlock() 143 cpu_spin_unlock_xrestore(&m->spin_lock, old_itr_status); in __mutex_trylock() 166 cpu_spin_unlock_xrestore(&m->spin_lock, old_itr_status); in __mutex_read_unlock() 202 cpu_spin_unlock_xrestore(&m->spin_lock, old_itr_status); in __mutex_read_lock() 231 cpu_spin_unlock_xrestore(&m->spin_lock, old_itr_status); in __mutex_read_trylock() 400 cpu_spin_unlock_xrestore(&cv->spin_lock, old_itr_status); in cv_signal() 467 cpu_spin_unlock_xrestore(&m->spin_lock, old_itr_status); in __condvar_wait_timeout()
|
| callout.c | 78 cpu_spin_unlock_xrestore(&callout_lock, state); in callout_rem() 104 cpu_spin_unlock_xrestore(&callout_lock, state); in callout_add() 145 cpu_spin_unlock_xrestore(&callout_lock, state); in callout_service_init()
|
| pm.c | 66 cpu_spin_unlock_xrestore(&pm_list_lock, exceptions); in register_pm_cb() 88 cpu_spin_unlock_xrestore(&pm_list_lock, exceptions); in unregister_pm_cb()
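
Every kernel-side hit above follows the same save/restore discipline: cpu_spin_lock_xsave() masks native exceptions, takes the spinlock and returns the previous exception mask, and cpu_spin_unlock_xrestore() releases the lock and restores that mask. Below is a minimal sketch of the pattern, assuming only the helpers declared in <kernel/spinlock.h>; demo_lock, demo_count and demo_bump() are hypothetical names, not code from the files listed here.

    #include <kernel/spinlock.h>
    #include <stdint.h>

    static unsigned int demo_lock = SPINLOCK_UNLOCK; /* hypothetical lock word */
    static unsigned int demo_count;                  /* data the lock protects */

    static void demo_bump(void)
    {
            /* Mask exceptions and take the spinlock; keep the old mask */
            uint32_t exceptions = cpu_spin_lock_xsave(&demo_lock);

            demo_count++; /* critical section */

            /* Drop the lock, then restore the saved exception mask */
            cpu_spin_unlock_xrestore(&demo_lock, exceptions);
    }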
|
| /optee_os/core/mm/ |
| mobj_dyn_shm.c | 146 cpu_spin_unlock_xrestore(&reg_shm_map_lock, exceptions); in reg_shm_free_helper() 167 cpu_spin_unlock_xrestore(&reg_shm_slist_lock, exceptions); in mobj_reg_shm_free() 175 cpu_spin_unlock_xrestore(&reg_shm_slist_lock, exceptions); in mobj_reg_shm_free() 214 cpu_spin_unlock_xrestore(&reg_shm_map_lock, exceptions); in mobj_reg_shm_inc_map() 241 cpu_spin_unlock_xrestore(&reg_shm_map_lock, exceptions); in mobj_reg_shm_inc_map() 266 cpu_spin_unlock_xrestore(&reg_shm_map_lock, exceptions); in mobj_reg_shm_dec_map() 388 cpu_spin_unlock_xrestore(&reg_shm_slist_lock, exceptions); in mobj_reg_shm_alloc() 404 cpu_spin_unlock_xrestore(&reg_shm_slist_lock, exceptions); in mobj_reg_shm_unguard() 447 cpu_spin_unlock_xrestore(&reg_shm_slist_lock, exceptions); in mobj_reg_shm_get_by_cookie() 470 cpu_spin_unlock_xrestore(&reg_shm_slist_lock, exceptions); in mobj_reg_shm_release_by_cookie() [all …]
|
| tee_mm.c | 106 cpu_spin_unlock_xrestore(&pool->lock, exceptions); in tee_mm_get_pool_stats() 204 cpu_spin_unlock_xrestore(&pool->lock, exceptions); in tee_mm_alloc_flags() 207 cpu_spin_unlock_xrestore(&pool->lock, exceptions); in tee_mm_alloc_flags() 281 cpu_spin_unlock_xrestore(&pool->lock, exceptions); in tee_mm_alloc2() 284 cpu_spin_unlock_xrestore(&pool->lock, exceptions); in tee_mm_alloc2() 308 cpu_spin_unlock_xrestore(&p->pool->lock, exceptions); in tee_mm_free() 337 cpu_spin_unlock_xrestore(&pool->lock, exceptions); in tee_mm_is_empty() 358 cpu_spin_unlock_xrestore(&((tee_mm_pool_t *)pool)->lock, in tee_mm_find() 364 cpu_spin_unlock_xrestore(&((tee_mm_pool_t *)pool)->lock, exceptions); in tee_mm_find()
|
| /optee_os/core/arch/arm/mm/ |
| sp_mem.c | 173 cpu_spin_unlock_xrestore(&sp_mem_lock, exceptions); in inactivate() 205 cpu_spin_unlock_xrestore(&sp_mem_lock, exceptions); in sp_mem_get() 235 cpu_spin_unlock_xrestore(&sp_mem_lock, exceptions); in sp_mem_new() 248 cpu_spin_unlock_xrestore(&sp_mem_lock, exceptions); in sp_mem_new() 259 cpu_spin_unlock_xrestore(&sp_mem_lock, exceptions); in sp_mem_add() 281 cpu_spin_unlock_xrestore(&sp_mem_lock, in sp_mem_is_shared() 289 cpu_spin_unlock_xrestore(&sp_mem_lock, exceptions); in sp_mem_is_shared() 334 cpu_spin_unlock_xrestore(&sp_mem_lock, exceptions); in sp_mem_remove()
|
| mobj_ffa.c | 264 cpu_spin_unlock_xrestore(&shm_lock, exceptions); in mobj_ffa_sel1_spmc_new() 392 cpu_spin_unlock_xrestore(&shm_lock, exceptions); in mobj_ffa_sel1_spmc_delete() 507 cpu_spin_unlock_xrestore(&shm_lock, exceptions); in mobj_ffa_push_to_inactive() 565 cpu_spin_unlock_xrestore(&shm_lock, exceptions); in mobj_ffa_sel1_spmc_reclaim() 635 cpu_spin_unlock_xrestore(&shm_lock, exceptions); in mobj_ffa_unregister_by_cookie() 731 cpu_spin_unlock_xrestore(&shm_lock, exceptions); in mobj_ffa_get_by_cookie() 824 cpu_spin_unlock_xrestore(&shm_lock, exceptions); in ffa_inactivate() 874 cpu_spin_unlock_xrestore(&shm_lock, exceptions); in ffa_shm_inc_map() 901 cpu_spin_unlock_xrestore(&shm_lock, exceptions); in ffa_shm_inc_map() 917 cpu_spin_unlock_xrestore(&shm_lock, exceptions); in ffa_shm_dec_map() [all …]
|
| core_mmu.c | 171 cpu_spin_unlock_xrestore(&g_asid_spinlock, exceptions); in asid_alloc() 189 cpu_spin_unlock_xrestore(&g_asid_spinlock, exceptions); in asid_free()
|
| /optee_os/core/arch/arm/kernel/ |
| timer_a64.c | 31 cpu_spin_unlock_xrestore(&timer_lock, exceptions); in generic_timer_start() 43 cpu_spin_unlock_xrestore(&timer_lock, exceptions); in generic_timer_stop()
|
| virtualization.c | 343 cpu_spin_unlock_xrestore(&prtn_list_lock, exceptions); in virt_guest_created() 413 cpu_spin_unlock_xrestore(&prtn_list_lock, exceptions); in virt_next_guest() 438 cpu_spin_unlock_xrestore(&prtn_list_lock, exceptions); in virt_get_guest() 462 cpu_spin_unlock_xrestore(&prtn_list_lock, exceptions); in virt_put_guest() 495 cpu_spin_unlock_xrestore(&prtn_list_lock, exceptions); in virt_guest_destroyed() 502 cpu_spin_unlock_xrestore(&prtn_list_lock, exceptions); in virt_guest_destroyed() 618 cpu_spin_unlock_xrestore(&prtn_list_lock, exceptions); in virt_add_cookie_to_current_guest() 636 cpu_spin_unlock_xrestore(&prtn_list_lock, exceptions); in virt_remove_cookie() 650 cpu_spin_unlock_xrestore(&prtn_list_lock, exceptions); in virt_find_guest_by_cookie() 710 cpu_spin_unlock_xrestore(&prtn_list_lock, exceptions); in virt_reclaim_cookie_from_destroyed_guest()
|
| spmc_sp_handler.c | 683 cpu_spin_unlock_xrestore(&mem_ref_lock, exceptions); in ffa_mem_retrieve() 693 cpu_spin_unlock_xrestore(&mem_ref_lock, exceptions); in ffa_mem_retrieve() 709 cpu_spin_unlock_xrestore(&mem_ref_lock, exceptions); in ffa_mem_retrieve() 714 cpu_spin_unlock_xrestore(&mem_ref_lock, exceptions); in ffa_mem_retrieve() 767 cpu_spin_unlock_xrestore(&rxtx->spinlock, exceptions); in ffa_mem_relinquish() 780 cpu_spin_unlock_xrestore(&mem_ref_lock, exceptions); in ffa_mem_relinquish() 787 cpu_spin_unlock_xrestore(&mem_ref_lock, exceptions); in ffa_mem_relinquish() 794 cpu_spin_unlock_xrestore(&rxtx->spinlock, exceptions); in ffa_mem_relinquish() 798 cpu_spin_unlock_xrestore(&mem_ref_lock, exceptions); in ffa_mem_relinquish() 854 cpu_spin_unlock_xrestore(&mem_ref_lock, exceptions); in ffa_mem_reclaim() [all …]
|
| /optee_os/core/drivers/wdt/ |
| watchdog_sm.c | 30 cpu_spin_unlock_xrestore(&wdt_lock, exceptions); in __wdt_sm_handler() 44 cpu_spin_unlock_xrestore(&wdt_lock, exceptions); in __wdt_sm_handler()
|
| /optee_os/core/drivers/ |
| hi16xx_rng.c | 81 cpu_spin_unlock_xrestore(&rng_lock, exceptions); in hw_get_random_bytes() 94 cpu_spin_unlock_xrestore(&rng_lock, exceptions); in hw_get_random_bytes()
|
| stm32_exti.c | 183 cpu_spin_unlock_xrestore(&exti->lock, exceptions); in stm32_exti_set_type() 197 cpu_spin_unlock_xrestore(&exti->lock, exceptions); in stm32_exti_mask() 212 cpu_spin_unlock_xrestore(&exti->lock, exceptions); in stm32_exti_unmask() 226 cpu_spin_unlock_xrestore(&exti->lock, exceptions); in stm32_exti_enable_wake() 240 cpu_spin_unlock_xrestore(&exti->lock, exceptions); in stm32_exti_disable_wake() 254 cpu_spin_unlock_xrestore(&exti->lock, exceptions); in stm32_exti_clear() 268 cpu_spin_unlock_xrestore(&exti->lock, exceptions); in stm32_exti_set_tz()
|
| hisi_trng.c | 37 cpu_spin_unlock_xrestore(&trng_lock, exceptions); in trng_read()
|
| atmel_trng.c | 43 cpu_spin_unlock_xrestore(&trng_lock, exceptions); in atmel_trng_read32()
|
| /optee_os/core/arch/arm/plat-stm32mp2/ |
| stm32_util.h | 15 #define may_spin_unlock(lock, exceptions) cpu_spin_unlock_xrestore(lock, \
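
The plat-stm32mp2 hit is a wrapper macro rather than a direct call, and the snippet is cut off at the line continuation, so only the forwarding to cpu_spin_unlock_xrestore() is visible. A hedged sketch of how such a wrapper pair might be used, assuming a matching may_spin_lock() that returns the saved exception mask; that counterpart and the demo_* names are assumptions, not taken from the file.

    #include <stm32_util.h> /* assumed home of the may_spin_lock()/may_spin_unlock() pair */
    #include <stdint.h>

    static unsigned int demo_lock; /* hypothetical lock word */

    static void demo_update(void)
    {
            /* Same xsave/xrestore contract as the direct calls listed above */
            uint32_t exceptions = may_spin_lock(&demo_lock);

            /* ... critical section ... */

            may_spin_unlock(&demo_lock, exceptions);
    }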
|
| /optee_os/core/drivers/rstctrl/ |
| rstctrl.c | 28 cpu_spin_unlock_xrestore(&rstctrl_lock, exceptions); in rstctrl_get_exclusive()
|
| /optee_os/core/arch/arm/plat-rcar/ |
| hw_rng.c | 51 cpu_spin_unlock_xrestore(&spin_lock, exceptions); in hw_get_random_bytes()
|
| /optee_os/core/drivers/imx/mu/ |
| imx_mu.c | 173 cpu_spin_unlock_xrestore(&mu_spinlock, exceptions); in imx_mu_init() 191 cpu_spin_unlock_xrestore(&mu_spinlock, exceptions); in imx_mu_call()
|
| /optee_os/core/arch/arm/plat-synquacer/ |
| rng_pta.c | 253 cpu_spin_unlock_xrestore(&entropy_lock, exceptions); in rng_collect_entropy() 310 cpu_spin_unlock_xrestore(&entropy_lock, exceptions); in rng_get_entropy()
|