Lines matching refs: asid

38 #define asid2idx(asid)		((asid) & ~ASID_MASK)  argument
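
From the listing, mm->context.id packs the allocator generation into the upper bits and the hardware ASID into the low asid_bits bits; asid2idx() strips the generation so the value can index the allocation bitmap. A minimal user-space sketch of that packing, assuming 16-bit ASIDs (the constants and sample values are illustrative, not the kernel's):

#include <stdint.h>
#include <stdio.h>

static unsigned int asid_bits = 16;	/* assumed: the CPU reports 16-bit ASIDs */

/* The generation lives above asid_bits; ~ASID_MASK keeps only the hardware ASID. */
#define ASID_MASK		(~(((uint64_t)1 << asid_bits) - 1))
#define asid2idx(asid)		((asid) & ~ASID_MASK)

int main(void)
{
	uint64_t ctx = ((uint64_t)3 << asid_bits) | 0x42;	/* generation 3, ASID 0x42 */

	printf("bitmap index = 0x%llx\n", (unsigned long long)asid2idx(ctx));
	return 0;
}
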
44 u32 asid; in get_cpu_asid_bits() local
54 asid = 8; in get_cpu_asid_bits()
57 asid = 16; in get_cpu_asid_bits()
60 return asid; in get_cpu_asid_bits()
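
get_cpu_asid_bits() maps the ID_AA64MMFR0_EL1.ASIDBits field (bits [7:4]: 0 means 8-bit ASIDs, 2 means 16-bit) to a bit count, falling back to 8 for unrecognised encodings. A rough stand-alone model of that decode; the register value is faked here rather than read from the system register:

#include <stdint.h>
#include <stdio.h>

/* ID_AA64MMFR0_EL1.ASIDBits: 0b0000 = 8-bit ASIDs, 0b0010 = 16-bit ASIDs. */
static uint32_t decode_asid_bits(uint64_t mmfr0)
{
	switch ((mmfr0 >> 4) & 0xf) {
	case 2:
		return 16;
	case 0:
	default:
		return 8;	/* unknown encodings are treated conservatively */
	}
}

int main(void)
{
	uint64_t fake_mmfr0 = (uint64_t)2 << 4;	/* pretend the CPU reports 16-bit ASIDs */

	printf("asid bits = %u\n", decode_asid_bits(fake_mmfr0));
	return 0;
}
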
66 u32 asid = get_cpu_asid_bits(); in verify_cpu_asid_bits() local
68 if (asid < asid_bits) { in verify_cpu_asid_bits()
74 smp_processor_id(), asid, asid_bits); in verify_cpu_asid_bits()
101 #define asid_gen_match(asid) \ argument
102 (!(((asid) ^ atomic64_read(&asid_generation)) >> asid_bits))
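
asid_gen_match() XORs a context.id value against the global asid_generation and shifts the result right by asid_bits: any surviving high bit means the ASID was handed out in an older generation and must not be used as-is. The same check in a small stand-alone sketch, with plain 64-bit values standing in for the kernel's atomics:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static unsigned int asid_bits = 16;
static uint64_t asid_generation = (uint64_t)5 << 16;	/* pretend we are in generation 5 */

/* Non-zero high bits after the XOR mean the ASID belongs to an older generation. */
static bool asid_gen_match(uint64_t asid)
{
	return !((asid ^ asid_generation) >> asid_bits);
}

int main(void)
{
	uint64_t fresh = asid_generation | 0x17;	/* current generation, ASID 0x17 */
	uint64_t stale = ((uint64_t)4 << 16) | 0x17;	/* same ASID, older generation */

	printf("fresh: %d, stale: %d\n", asid_gen_match(fresh), asid_gen_match(stale));
	return 0;
}
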
107 u64 asid; in flush_context() local
113 asid = atomic64_xchg_relaxed(&per_cpu(active_asids, i), 0); in flush_context()
121 if (asid == 0) in flush_context()
122 asid = per_cpu(reserved_asids, i); in flush_context()
123 __set_bit(asid2idx(asid), asid_map); in flush_context()
124 per_cpu(reserved_asids, i) = asid; in flush_context()
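
On rollover, flush_context() swaps each CPU's active_asids slot to 0, falls back to that CPU's reserved_asids entry when the slot was already 0 (the CPU had rolled over or was idle), and marks the surviving ASID both in the allocation bitmap and as reserved, so it stays valid across the generation bump. A simplified single-threaded model of that harvesting loop; plain arrays replace the percpu atomics, and the pinned-ASID map the real function starts from is omitted:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define NR_CPUS		4			/* assumed CPU count for the model */
#define ASID_BITS	16
#define NUM_ASIDS	(1UL << ASID_BITS)

static uint64_t active_asids[NR_CPUS];
static uint64_t reserved_asids[NR_CPUS];
static unsigned char asid_map[NUM_ASIDS / 8];	/* allocation bitmap, one bit per ASID */

static void set_bit_in_map(uint64_t idx)
{
	asid_map[idx / 8] |= 1u << (idx % 8);
}

/* Harvest the ASIDs that are live on each CPU so they survive the rollover. */
static void flush_context(void)
{
	memset(asid_map, 0, sizeof(asid_map));	/* start from an empty map */

	for (int cpu = 0; cpu < NR_CPUS; cpu++) {
		uint64_t asid = active_asids[cpu];	/* stands in for atomic64_xchg(..., 0) */

		active_asids[cpu] = 0;
		if (asid == 0)				/* CPU already rolled over: keep its old reservation */
			asid = reserved_asids[cpu];
		set_bit_in_map(asid & (NUM_ASIDS - 1));	/* asid2idx(): drop the generation */
		reserved_asids[cpu] = asid;
	}
}

int main(void)
{
	active_asids[0] = 0x10042;		/* generation 1, ASID 0x42 live on CPU0 */
	reserved_asids[1] = 0x10007;		/* CPU1 idle, but its last ASID stays reserved */
	flush_context();
	printf("reserved on cpu0 = 0x%llx\n", (unsigned long long)reserved_asids[0]);
	return 0;
}
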
134 static bool check_update_reserved_asid(u64 asid, u64 newasid) in check_update_reserved_asid() argument
149 if (per_cpu(reserved_asids, cpu) == asid) { in check_update_reserved_asid()
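
check_update_reserved_asid() scans every CPU's reserved_asids slot: a hit means the old ASID survived the last rollover, and each matching slot is retagged with the new generation value so the mm can keep the same hardware ASID. A tiny stand-alone model of that scan (NR_CPUS and the sample values are made up):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define NR_CPUS	4

static uint64_t reserved_asids[NR_CPUS];

/* If asid is reserved anywhere, retag every matching slot with newasid and report a hit. */
static bool check_update_reserved_asid(uint64_t asid, uint64_t newasid)
{
	bool hit = false;

	for (int cpu = 0; cpu < NR_CPUS; cpu++) {
		if (reserved_asids[cpu] == asid) {
			hit = true;
			reserved_asids[cpu] = newasid;
		}
	}
	return hit;
}

int main(void)
{
	reserved_asids[2] = 0x10042;		/* generation 1, ASID 0x42 */
	printf("hit = %d\n", check_update_reserved_asid(0x10042, 0x20042));
	return 0;
}
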
161 u64 asid = atomic64_read(&mm->context.id); in new_context() local
164 if (asid != 0) { in new_context()
165 u64 newasid = generation | (asid & ~ASID_MASK); in new_context()
171 if (check_update_reserved_asid(asid, newasid)) in new_context()
186 if (!__test_and_set_bit(asid2idx(asid), asid_map)) in new_context()
197 asid = find_next_zero_bit(asid_map, NUM_USER_ASIDS, cur_idx); in new_context()
198 if (asid != NUM_USER_ASIDS) in new_context()
207 asid = find_next_zero_bit(asid_map, NUM_USER_ASIDS, 1); in new_context()
210 __set_bit(asid, asid_map); in new_context()
211 cur_idx = asid; in new_context()
212 return idx2asid(asid) | generation; in new_context()
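
new_context() roughly does three things: first it tries to keep the ASID the mm already had, retagged with the current generation, provided the value is still reserved or its bit is still clear in the map; failing that it searches for a free bit from cur_idx onwards; and if the map is full it bumps the generation, flushes, and retries from index 1 (index 0 is never allocated). A compressed sketch of that decision flow with a deliberately tiny 8-bit ASID space; locking, pinned ASIDs and check_update_reserved_asid() are folded into a plain bitmap check:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define ASID_BITS	8			/* small space so rollover is easy to reach */
#define NUM_ASIDS	(1u << ASID_BITS)

static uint64_t generation = 1u << ASID_BITS;
static bool asid_map[NUM_ASIDS];		/* one flag per ASID index */
static unsigned int cur_idx = 1;

static unsigned int find_free(unsigned int from)
{
	for (unsigned int i = from; i < NUM_ASIDS; i++)
		if (!asid_map[i])
			return i;
	return NUM_ASIDS;			/* nothing free */
}

/* Allocate a generation-tagged ASID for an mm whose previous value was old. */
static uint64_t new_context(uint64_t old)
{
	unsigned int idx;

	if (old != 0) {
		unsigned int old_idx = old & (NUM_ASIDS - 1);

		/* Reuse the old ASID under the new generation if nobody took it. */
		if (!asid_map[old_idx]) {
			asid_map[old_idx] = true;
			return generation | old_idx;
		}
	}

	idx = find_free(cur_idx);
	if (idx == NUM_ASIDS) {
		/* Out of ASIDs: start a new generation, flush, retry from index 1. */
		generation += NUM_ASIDS;
		for (unsigned int i = 1; i < NUM_ASIDS; i++)
			asid_map[i] = false;	/* flush_context() would re-mark live ASIDs here */
		idx = find_free(1);
	}

	asid_map[idx] = true;
	cur_idx = idx;
	return generation | idx;
}

int main(void)
{
	printf("first asid = 0x%llx\n", (unsigned long long)new_context(0));
	return 0;
}
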
219 u64 asid, old_active_asid; in check_and_switch_context() local
224 asid = atomic64_read(&mm->context.id); in check_and_switch_context()
241 if (old_active_asid && asid_gen_match(asid) && in check_and_switch_context()
243 old_active_asid, asid)) in check_and_switch_context()
248 asid = atomic64_read(&mm->context.id); in check_and_switch_context()
249 if (!asid_gen_match(asid)) { in check_and_switch_context()
250 asid = new_context(mm); in check_and_switch_context()
251 atomic64_set(&mm->context.id, asid); in check_and_switch_context()
258 atomic64_set(this_cpu_ptr(&active_asids), asid); in check_and_switch_context()
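
check_and_switch_context() first attempts a lockless fast path: if the mm's generation still matches and a compare-and-swap moves this CPU's active_asids slot from its old value to the mm's ASID, no lock or TLB maintenance is needed. Otherwise it falls back to the slow path under the allocator's spinlock, re-reads context.id, allocates a fresh ASID when the generation is stale, and publishes the result in active_asids. A rough model of that two-step pattern using C11 atomics in place of atomic64 and the percpu machinery; the new_context() call is reduced to a one-line stand-in:

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define ASID_BITS	16

static _Atomic uint64_t asid_generation = (uint64_t)1 << ASID_BITS;
static _Atomic uint64_t active_asid;		/* this CPU's slot; the kernel keeps one per CPU */
static _Atomic uint64_t mm_context_id = ((uint64_t)1 << ASID_BITS) | 0x42;

static bool asid_gen_match(uint64_t asid)
{
	return !((asid ^ atomic_load(&asid_generation)) >> ASID_BITS);
}

static void check_and_switch_context(void)
{
	uint64_t asid = atomic_load(&mm_context_id);
	uint64_t old = atomic_load(&active_asid);

	/* Fast path: generation still current and nobody zeroed our slot under us. */
	if (old && asid_gen_match(asid) &&
	    atomic_compare_exchange_strong(&active_asid, &old, asid)) {
		printf("fast path, asid=0x%llx\n", (unsigned long long)asid);
		return;
	}

	/* Slow path: in the kernel this runs under the lock and may flush TLBs. */
	asid = atomic_load(&mm_context_id);
	if (!asid_gen_match(asid)) {
		/* Stand-in for new_context(): retag the old ASID with the current generation. */
		asid = atomic_load(&asid_generation) | (asid & ((1u << ASID_BITS) - 1));
		atomic_store(&mm_context_id, asid);
	}
	atomic_store(&active_asid, asid);
	printf("slow path, asid=0x%llx\n", (unsigned long long)asid);
}

int main(void)
{
	check_and_switch_context();
	return 0;
}
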
276 u64 asid; in arm64_mm_context_get() local
283 asid = atomic64_read(&mm->context.id); in arm64_mm_context_get()
289 asid = 0; in arm64_mm_context_get()
293 if (!asid_gen_match(asid)) { in arm64_mm_context_get()
298 asid = new_context(mm); in arm64_mm_context_get()
299 atomic64_set(&mm->context.id, asid); in arm64_mm_context_get()
303 __set_bit(asid2idx(asid), pinned_asid_map); in arm64_mm_context_get()
309 asid &= ~ASID_MASK; in arm64_mm_context_get()
312 if (asid && arm64_kernel_unmapped_at_el0()) in arm64_mm_context_get()
313 asid |= 1; in arm64_mm_context_get()
315 return asid; in arm64_mm_context_get()
322 u64 asid = atomic64_read(&mm->context.id); in arm64_mm_context_put() local
330 __clear_bit(asid2idx(asid), pinned_asid_map); in arm64_mm_context_put()
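
arm64_mm_context_get() and arm64_mm_context_put() pin and unpin an mm's ASID: the first pin records the ASID in pinned_asid_map so rollover never reclaims it, the returned value has the generation stripped, and with KPTI (arm64_kernel_unmapped_at_el0()) bit 0 is set to form the userspace flavour of the ASID; the last put clears the pinned bit again. A bare-bones refcounting model of that pair; the lock, the pin limit and the stale-generation reallocation handled by the real functions are left out:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define ASID_BITS	16
#define ASID_LOW_MASK	(((uint64_t)1 << ASID_BITS) - 1)

static bool kpti_enabled = true;		/* stands in for arm64_kernel_unmapped_at_el0() */
static bool pinned_asid_map[1u << ASID_BITS];

struct mm_model {
	uint64_t context_id;			/* generation | asid, as in mm->context.id */
	unsigned long pinned;			/* pin refcount */
};

/* Pin the mm's ASID so rollover never reclaims it; returns the hardware ASID. */
static uint64_t mm_context_get(struct mm_model *mm)
{
	uint64_t asid = mm->context_id;

	if (mm->pinned == 0)			/* first pin: remember it in the pinned map */
		pinned_asid_map[asid & ASID_LOW_MASK] = true;
	mm->pinned++;

	asid &= ASID_LOW_MASK;			/* strip the generation */
	if (asid && kpti_enabled)
		asid |= 1;			/* bit 0 marks the userspace ASID under KPTI */
	return asid;
}

static void mm_context_put(struct mm_model *mm)
{
	if (--mm->pinned == 0)			/* last unpin: rollover may reclaim it again */
		pinned_asid_map[mm->context_id & ASID_LOW_MASK] = false;
}

int main(void)
{
	struct mm_model mm = { .context_id = ((uint64_t)1 << ASID_BITS) | 0x42 };
	uint64_t hw = mm_context_get(&mm);

	printf("pinned hw asid = 0x%llx\n", (unsigned long long)hw);
	mm_context_put(&mm);
	return 0;
}
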
352 unsigned long asid = ASID(mm); in cpu_do_switch_mm() local
356 if (system_supports_cnp() && asid) in cpu_do_switch_mm()
361 ttbr0 |= FIELD_PREP(TTBR_ASID_MASK, asid); in cpu_do_switch_mm()
365 ttbr1 |= FIELD_PREP(TTBR_ASID_MASK, asid); in cpu_do_switch_mm()
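
cpu_do_switch_mm() writes the hardware ASID into the ASID field of the TTBR values it builds, bits [63:48], via FIELD_PREP(TTBR_ASID_MASK, asid); per the listing the ASID goes into both a ttbr0 copy and ttbr1. A small sketch of that field packing with a local FIELD_PREP() equivalent (the GCC/Clang __builtin_ctzll builtin supplies the shift, and the sample TTBR value is made up):

#include <stdint.h>
#include <stdio.h>

/* TTBRx_EL1.ASID occupies bits [63:48]. */
#define TTBR_ASID_MASK	(0xffffULL << 48)

/* Minimal stand-in for the kernel's FIELD_PREP(): shift a value into a mask's position. */
#define FIELD_PREP(mask, val)	(((uint64_t)(val) << __builtin_ctzll(mask)) & (mask))

int main(void)
{
	uint64_t ttbr1 = 0x40201000;		/* pretend: page-table base address bits */
	uint64_t asid = 0x42;

	ttbr1 &= ~TTBR_ASID_MASK;		/* clear any stale ASID */
	ttbr1 |= FIELD_PREP(TTBR_ASID_MASK, asid);

	printf("ttbr1 = 0x%016llx\n", (unsigned long long)ttbr1);
	return 0;
}
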