Lines Matching refs:sop

647 struct sembuf *sop; in perform_atomic_semop_slow() local
656 for (sop = sops; sop < sops + nsops; sop++) { in perform_atomic_semop_slow()
657 int idx = array_index_nospec(sop->sem_num, sma->sem_nsems); in perform_atomic_semop_slow()
659 sem_op = sop->sem_op; in perform_atomic_semop_slow()
671 if (sop->sem_flg & SEM_UNDO) { in perform_atomic_semop_slow()
672 int undo = un->semadj[sop->sem_num] - sem_op; in perform_atomic_semop_slow()
676 un->semadj[sop->sem_num] = undo; in perform_atomic_semop_slow()
682 sop--; in perform_atomic_semop_slow()
684 while (sop >= sops) { in perform_atomic_semop_slow()
685 ipc_update_pid(&sma->sems[sop->sem_num].sempid, pid); in perform_atomic_semop_slow()
686 sop--; in perform_atomic_semop_slow()
696 q->blocking = sop; in perform_atomic_semop_slow()
698 if (sop->sem_flg & IPC_NOWAIT) in perform_atomic_semop_slow()
704 sop--; in perform_atomic_semop_slow()
705 while (sop >= sops) { in perform_atomic_semop_slow()
706 sem_op = sop->sem_op; in perform_atomic_semop_slow()
707 sma->sems[sop->sem_num].semval -= sem_op; in perform_atomic_semop_slow()
708 if (sop->sem_flg & SEM_UNDO) in perform_atomic_semop_slow()
709 un->semadj[sop->sem_num] += sem_op; in perform_atomic_semop_slow()
710 sop--; in perform_atomic_semop_slow()
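
The references above (source lines 647-710) trace the slow path's single walk over the sops array: each operation is applied tentatively, and on a would-block condition every operation already applied is unwound in reverse before the caller is told to sleep or to fail with -EAGAIN. Below is a minimal userspace sketch of that try/commit/rollback pattern; the types, macros, and the function name (sem_slot, sem_set, op, FLG_UNDO, FLG_NOWAIT, try_semops_slow) are hypothetical stand-ins for the kernel's struct sem, struct sem_array, struct sembuf, SEM_UNDO and IPC_NOWAIT, and the kernel's range checks (SEMVMX/-ERANGE) and wait-queue bookkeeping (q->blocking) are omitted.

#include <stddef.h>
#include <errno.h>

struct sem_slot { int semval; int sempid; };                  /* stand-in for struct sem       */
struct sem_set  { struct sem_slot *sems; size_t nsems; };     /* stand-in for struct sem_array */
struct op       { unsigned short num; short op; short flg; }; /* shaped like struct sembuf     */

#define FLG_UNDO   0x1000   /* stand-in for SEM_UNDO   */
#define FLG_NOWAIT 0x0800   /* stand-in for IPC_NOWAIT */

/*
 * Apply all operations or none of them.  Returns 0 on success, 1 if the
 * caller should sleep, -EAGAIN if the blocking op asked not to wait.
 */
static int try_semops_slow(struct sem_set *sma, struct op *sops, size_t nsops,
                           int *semadj, int pid)
{
    struct op *sop;

    for (sop = sops; sop < sops + nsops; sop++) {
        struct sem_slot *curr = &sma->sems[sop->num];
        int result = curr->semval;

        if (sop->op == 0 && result != 0)
            goto would_block;             /* wait-for-zero on a nonzero semaphore */
        result += sop->op;
        if (result < 0)
            goto would_block;             /* would drive the count negative */

        if (sop->flg & FLG_UNDO)
            semadj[sop->num] -= sop->op;  /* remember adjustment for process exit */
        curr->semval = result;            /* tentatively apply this op */
    }

    /* Everything succeeded: stamp the caller's pid on each touched semaphore,
     * walking backwards the same way the listing above does. */
    sop--;
    while (sop >= sops) {
        sma->sems[sop->num].sempid = pid;
        sop--;
    }
    return 0;

would_block:
    {
        int nowait = sop->flg & FLG_NOWAIT; /* decide before walking back past sop */

        /* Undo every op applied so far, in reverse order. */
        sop--;
        while (sop >= sops) {
            sma->sems[sop->num].semval -= sop->op;
            if (sop->flg & FLG_UNDO)
                semadj[sop->num] += sop->op;
            sop--;
        }
        return nowait ? -EAGAIN : 1;
    }
}
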
719 struct sembuf *sop; in perform_atomic_semop() local
737 for (sop = sops; sop < sops + nsops; sop++) { in perform_atomic_semop()
738 int idx = array_index_nospec(sop->sem_num, sma->sem_nsems); in perform_atomic_semop()
741 sem_op = sop->sem_op; in perform_atomic_semop()
754 if (sop->sem_flg & SEM_UNDO) { in perform_atomic_semop()
755 int undo = un->semadj[sop->sem_num] - sem_op; in perform_atomic_semop()
763 for (sop = sops; sop < sops + nsops; sop++) { in perform_atomic_semop()
764 curr = &sma->sems[sop->sem_num]; in perform_atomic_semop()
765 sem_op = sop->sem_op; in perform_atomic_semop()
768 if (sop->sem_flg & SEM_UNDO) { in perform_atomic_semop()
769 int undo = un->semadj[sop->sem_num] - sem_op; in perform_atomic_semop()
771 un->semadj[sop->sem_num] = undo; in perform_atomic_semop()
780 q->blocking = sop; in perform_atomic_semop()
781 return sop->sem_flg & IPC_NOWAIT ? -EAGAIN : 1; in perform_atomic_semop()
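
The perform_atomic_semop() references (source lines 719-781) show the fast variant of the same check: a first loop only reads semval and the undo values to verify that every operation can complete, and a second loop (lines 763-771) commits the updates, so no rollback is ever needed. That is only safe when no semaphore is altered twice in one call, which the duplicate scan in do_semtimedop() further down establishes. A sketch of the two-pass shape, reusing the hypothetical types and flags from the previous block (try_semops_fast is likewise an invented name):

/* Two-pass variant: validate everything first, then commit.  Safe only when
 * no semaphore index appears twice among the altering ops. */
static int try_semops_fast(struct sem_set *sma, struct op *sops, size_t nsops,
                           int *semadj)
{
    struct op *sop;

    /* Pass 1: dry run, nothing is written. */
    for (sop = sops; sop < sops + nsops; sop++) {
        int result = sma->sems[sop->num].semval;

        if (sop->op == 0 && result != 0)
            goto would_block;             /* wait-for-zero on a nonzero value */
        if (result + sop->op < 0)
            goto would_block;             /* would drive the count negative   */
    }

    /* Pass 2: every op is known to succeed, so commit them all. */
    for (sop = sops; sop < sops + nsops; sop++) {
        sma->sems[sop->num].semval += sop->op;
        if (sop->flg & FLG_UNDO)
            semadj[sop->num] -= sop->op;  /* record exit-time adjustment */
    }
    return 0;

would_block:
    return (sop->flg & FLG_NOWAIT) ? -EAGAIN : 1;
}
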
1073 struct sembuf *sop = q->blocking; in check_qop() local
1086 if (sop->sem_num != semnum) in check_qop()
1089 if (count_zero && sop->sem_op == 0) in check_qop()
1091 if (!count_zero && sop->sem_op < 0) in check_qop()
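
The check_qop() references (source lines 1073-1091) inspect the operation a sleeper recorded as blocking (q->blocking, set in the two functions above) to decide whether that sleeper counts toward semctl()'s GETNCNT (waiting for the value to increase) or GETZCNT (waiting for zero) on a given semaphore. A condensed sketch of that predicate, again using the hypothetical struct op and an invented function name:

/* Does the blocking op recorded for a sleeper count against semaphore
 * semnum?  count_zero selects GETZCNT semantics, otherwise GETNCNT. */
static int sleeper_counts(const struct op *blocking, unsigned short semnum,
                          int count_zero)
{
    if (blocking->num != semnum)
        return 0;                    /* sleeping on a different semaphore */
    if (count_zero)
        return blocking->op == 0;    /* GETZCNT: wait-for-zero operations */
    return blocking->op < 0;         /* GETNCNT: decrement operations     */
}
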
1989 struct sembuf *sops = fast_sops, *sop; in do_semtimedop() local
2024 for (sop = sops; sop < sops + nsops; sop++) { in do_semtimedop()
2025 unsigned long mask = 1ULL << ((sop->sem_num) % BITS_PER_LONG); in do_semtimedop()
2027 if (sop->sem_num >= max) in do_semtimedop()
2028 max = sop->sem_num; in do_semtimedop()
2029 if (sop->sem_flg & SEM_UNDO) in do_semtimedop()
2040 if (sop->sem_op != 0) { in do_semtimedop()
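
Finally, the do_semtimedop() references (source lines 1989-2040) belong to the pre-scan of the user-supplied array: one pass computes the highest semaphore index, notes whether any operation uses SEM_UNDO or alters a value, and uses a single-word bitmask (indexed modulo BITS_PER_LONG, so it can over-report but never miss) to flag a semaphore that is altered more than once, which forces the slow rollback path above. A sketch of that scan with the same hypothetical types; ULONG_BITS and scan_sops are invented names:

#include <limits.h>
#define ULONG_BITS (sizeof(unsigned long) * CHAR_BIT)   /* like BITS_PER_LONG */

/* One pass over the request to learn what it needs before taking locks. */
static void scan_sops(const struct op *sops, size_t nsops,
                      unsigned short *max, int *undos, int *alter, int *dupsop)
{
    unsigned long seen = 0;      /* coarse bitmap of altered semaphore numbers */
    const struct op *sop;

    *max = 0;
    *undos = *alter = *dupsop = 0;

    for (sop = sops; sop < sops + nsops; sop++) {
        unsigned long mask = 1UL << (sop->num % ULONG_BITS);

        if (sop->num >= *max)
            *max = sop->num;     /* highest semaphore index touched */
        if (sop->flg & FLG_UNDO)
            *undos = 1;          /* at least one op asks for undo on exit */
        if (seen & mask)
            *dupsop = 1;         /* same (hashed) index altered twice */
        if (sop->op != 0) {
            *alter = 1;          /* the request modifies semaphore values */
            seen |= mask;        /* only altering ops matter for duplicates */
        }
    }
}
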