Lines matching refs:gmap (cross-reference listing for arch/s390/mm/gmap.c: each entry shows the source line number, the matched code, and the enclosing function; "argument" and "local" mark lines where gmap is a function parameter or a local variable)
35 static struct gmap *gmap_alloc(unsigned long limit) in gmap_alloc()
37 struct gmap *gmap; in gmap_alloc() local
59 gmap = kzalloc(sizeof(struct gmap), GFP_KERNEL); in gmap_alloc()
60 if (!gmap) in gmap_alloc()
62 INIT_LIST_HEAD(&gmap->crst_list); in gmap_alloc()
63 INIT_LIST_HEAD(&gmap->children); in gmap_alloc()
64 INIT_LIST_HEAD(&gmap->pt_list); in gmap_alloc()
65 INIT_RADIX_TREE(&gmap->guest_to_host, GFP_KERNEL); in gmap_alloc()
66 INIT_RADIX_TREE(&gmap->host_to_guest, GFP_ATOMIC); in gmap_alloc()
67 INIT_RADIX_TREE(&gmap->host_to_rmap, GFP_ATOMIC); in gmap_alloc()
68 spin_lock_init(&gmap->guest_table_lock); in gmap_alloc()
69 spin_lock_init(&gmap->shadow_lock); in gmap_alloc()
70 refcount_set(&gmap->ref_count, 1); in gmap_alloc()
75 list_add(&page->lru, &gmap->crst_list); in gmap_alloc()
78 gmap->table = table; in gmap_alloc()
79 gmap->asce = atype | _ASCE_TABLE_LENGTH | in gmap_alloc()
81 gmap->asce_end = limit; in gmap_alloc()
82 return gmap; in gmap_alloc()
85 kfree(gmap); in gmap_alloc()
97 struct gmap *gmap_create(struct mm_struct *mm, unsigned long limit) in gmap_create()
99 struct gmap *gmap; in gmap_create() local
102 gmap = gmap_alloc(limit); in gmap_create()
103 if (!gmap) in gmap_create()
105 gmap->mm = mm; in gmap_create()
107 list_add_rcu(&gmap->list, &mm->context.gmap_list); in gmap_create()
109 gmap_asce = gmap->asce; in gmap_create()
114 return gmap; in gmap_create()
118 static void gmap_flush_tlb(struct gmap *gmap) in gmap_flush_tlb() argument
121 __tlb_flush_idte(gmap->asce); in gmap_flush_tlb()
183 static void gmap_free(struct gmap *gmap) in gmap_free() argument
188 if (!(gmap_is_shadow(gmap) && gmap->removed)) in gmap_free()
189 gmap_flush_tlb(gmap); in gmap_free()
191 list_for_each_entry_safe(page, next, &gmap->crst_list, lru) in gmap_free()
193 gmap_radix_tree_free(&gmap->guest_to_host); in gmap_free()
194 gmap_radix_tree_free(&gmap->host_to_guest); in gmap_free()
197 if (gmap_is_shadow(gmap)) { in gmap_free()
199 list_for_each_entry_safe(page, next, &gmap->pt_list, lru) in gmap_free()
201 gmap_rmap_radix_tree_free(&gmap->host_to_rmap); in gmap_free()
203 gmap_put(gmap->parent); in gmap_free()
206 kfree(gmap); in gmap_free()
215 struct gmap *gmap_get(struct gmap *gmap) in gmap_get() argument
217 refcount_inc(&gmap->ref_count); in gmap_get()
218 return gmap; in gmap_get()
228 void gmap_put(struct gmap *gmap) in gmap_put() argument
230 if (refcount_dec_and_test(&gmap->ref_count)) in gmap_put()
231 gmap_free(gmap); in gmap_put()
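The three functions above make up the reference-counting scheme: gmap_alloc() starts ref_count at 1, gmap_get() bumps it, and gmap_put() frees the gmap once the last reference is dropped. A minimal caller sketch of that pattern; the helper name and the work done while holding the reference are hypothetical:

#include <asm/gmap.h>

/* Hypothetical helper: pin a gmap while operating on it. */
static void example_with_gmap_ref(struct gmap *g)
{
        struct gmap *ref = gmap_get(g);         /* refcount_inc(&gmap->ref_count) */

        /* ... work that must not race with gmap_free() ... */

        gmap_put(ref);                          /* gmap_free() runs on the final put */
}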
239 void gmap_remove(struct gmap *gmap) in gmap_remove() argument
241 struct gmap *sg, *next; in gmap_remove()
245 if (!list_empty(&gmap->children)) { in gmap_remove()
246 spin_lock(&gmap->shadow_lock); in gmap_remove()
247 list_for_each_entry_safe(sg, next, &gmap->children, list) { in gmap_remove()
251 spin_unlock(&gmap->shadow_lock); in gmap_remove()
254 spin_lock(&gmap->mm->context.lock); in gmap_remove()
255 list_del_rcu(&gmap->list); in gmap_remove()
256 if (list_empty(&gmap->mm->context.gmap_list)) in gmap_remove()
258 else if (list_is_singular(&gmap->mm->context.gmap_list)) in gmap_remove()
259 gmap_asce = list_first_entry(&gmap->mm->context.gmap_list, in gmap_remove()
260 struct gmap, list)->asce; in gmap_remove()
263 WRITE_ONCE(gmap->mm->context.gmap_asce, gmap_asce); in gmap_remove()
264 spin_unlock(&gmap->mm->context.lock); in gmap_remove()
267 gmap_put(gmap); in gmap_remove()
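Together with gmap_create() above, these matches outline the lifecycle: gmap_create() wraps gmap_alloc() and links the new gmap into mm->context.gmap_list under RCU, while gmap_remove() detaches any shadow children, unlinks the gmap, updates context.gmap_asce, and drops the creation reference via gmap_put(). A hedged lifecycle sketch; the helper name and limit value are illustrative, and the NULL-on-failure convention is inferred from the !gmap check in gmap_create():

#include <linux/mm_types.h>
#include <asm/gmap.h>

/* Hypothetical lifecycle: create a guest address space, use it, tear it down. */
static int example_gmap_lifecycle(struct mm_struct *mm)
{
        struct gmap *g = gmap_create(mm, (1UL << 42) - 1);  /* 4 TB limit, illustrative */

        if (!g)
                return -ENOMEM;

        /* ... map segments, resolve faults, run the guest ... */

        gmap_remove(g);         /* unlink from mm and drop the creation reference */
        return 0;
}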
275 void gmap_enable(struct gmap *gmap) in gmap_enable() argument
277 S390_lowcore.gmap = (unsigned long) gmap; in gmap_enable()
285 void gmap_disable(struct gmap *gmap) in gmap_disable() argument
287 S390_lowcore.gmap = 0UL; in gmap_disable()
296 struct gmap *gmap_get_enabled(void) in gmap_get_enabled()
298 return (struct gmap *) S390_lowcore.gmap; in gmap_get_enabled()
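gmap_enable(), gmap_disable() and gmap_get_enabled() just store and read a per-CPU pointer in S390_lowcore.gmap, which the low-level fault path uses to find the currently active guest address space. A sketch of the bracketing pattern; the guest-entry step in the middle is a placeholder:

#include <linux/bug.h>
#include <asm/gmap.h>

/* Hypothetical: make a gmap the active one on this CPU around guest execution. */
static void example_run_guest_on_this_cpu(struct gmap *g)
{
        gmap_enable(g);                         /* S390_lowcore.gmap = (unsigned long) g */

        /* ... enter SIE / run guest code that may fault through this gmap ... */

        WARN_ON(gmap_get_enabled() != g);       /* reads S390_lowcore.gmap back */
        gmap_disable(g);                        /* S390_lowcore.gmap = 0UL */
}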
305 static int gmap_alloc_table(struct gmap *gmap, unsigned long *table, in gmap_alloc_table() argument
317 spin_lock(&gmap->guest_table_lock); in gmap_alloc_table()
319 list_add(&page->lru, &gmap->crst_list); in gmap_alloc_table()
325 spin_unlock(&gmap->guest_table_lock); in gmap_alloc_table()
356 static int __gmap_unlink_by_vmaddr(struct gmap *gmap, unsigned long vmaddr) in __gmap_unlink_by_vmaddr() argument
361 BUG_ON(gmap_is_shadow(gmap)); in __gmap_unlink_by_vmaddr()
362 spin_lock(&gmap->guest_table_lock); in __gmap_unlink_by_vmaddr()
363 entry = radix_tree_delete(&gmap->host_to_guest, vmaddr >> PMD_SHIFT); in __gmap_unlink_by_vmaddr()
368 spin_unlock(&gmap->guest_table_lock); in __gmap_unlink_by_vmaddr()
379 static int __gmap_unmap_by_gaddr(struct gmap *gmap, unsigned long gaddr) in __gmap_unmap_by_gaddr() argument
383 vmaddr = (unsigned long) radix_tree_delete(&gmap->guest_to_host, in __gmap_unmap_by_gaddr()
385 return vmaddr ? __gmap_unlink_by_vmaddr(gmap, vmaddr) : 0; in __gmap_unmap_by_gaddr()
396 int gmap_unmap_segment(struct gmap *gmap, unsigned long to, unsigned long len) in gmap_unmap_segment() argument
401 BUG_ON(gmap_is_shadow(gmap)); in gmap_unmap_segment()
408 mmap_write_lock(gmap->mm); in gmap_unmap_segment()
410 flush |= __gmap_unmap_by_gaddr(gmap, to + off); in gmap_unmap_segment()
411 mmap_write_unlock(gmap->mm); in gmap_unmap_segment()
413 gmap_flush_tlb(gmap); in gmap_unmap_segment()
427 int gmap_map_segment(struct gmap *gmap, unsigned long from, in gmap_map_segment() argument
433 BUG_ON(gmap_is_shadow(gmap)); in gmap_map_segment()
437 from + len - 1 > TASK_SIZE_MAX || to + len - 1 > gmap->asce_end) in gmap_map_segment()
441 mmap_write_lock(gmap->mm); in gmap_map_segment()
444 flush |= __gmap_unmap_by_gaddr(gmap, to + off); in gmap_map_segment()
446 if (radix_tree_insert(&gmap->guest_to_host, in gmap_map_segment()
451 mmap_write_unlock(gmap->mm); in gmap_map_segment()
453 gmap_flush_tlb(gmap); in gmap_map_segment()
456 gmap_unmap_segment(gmap, to, len); in gmap_map_segment()
474 unsigned long __gmap_translate(struct gmap *gmap, unsigned long gaddr) in __gmap_translate() argument
479 radix_tree_lookup(&gmap->guest_to_host, gaddr >> PMD_SHIFT); in __gmap_translate()
494 unsigned long gmap_translate(struct gmap *gmap, unsigned long gaddr) in gmap_translate() argument
498 mmap_read_lock(gmap->mm); in gmap_translate()
499 rc = __gmap_translate(gmap, gaddr); in gmap_translate()
500 mmap_read_unlock(gmap->mm); in gmap_translate()
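gmap_map_segment() fills the guest_to_host radix tree segment by segment (clearing any previous mapping first), and gmap_translate()/__gmap_translate() look that mapping up again, with gmap_translate() taking mmap_read_lock() around the walk. A caller sketch under two assumptions not visible in the matches: from/to/len are segment aligned, and a failed translation is reported as a negative value in the unsigned long return:

#include <linux/err.h>
#include <asm/gmap.h>

/* Hypothetical: map a host range into the guest and translate one address back. */
static int example_map_and_translate(struct gmap *g, unsigned long host_from,
                                     unsigned long guest_to, unsigned long len)
{
        unsigned long host_addr;
        int rc;

        rc = gmap_map_segment(g, host_from, guest_to, len);
        if (rc)
                return rc;

        host_addr = gmap_translate(g, guest_to);
        if (IS_ERR_VALUE(host_addr))            /* assumed error convention */
                return (int)host_addr;

        /* host_addr is the host virtual address backing guest_to. */
        return 0;
}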
514 struct gmap *gmap; in gmap_unlink() local
518 list_for_each_entry_rcu(gmap, &mm->context.gmap_list, list) { in gmap_unlink()
519 flush = __gmap_unlink_by_vmaddr(gmap, vmaddr); in gmap_unlink()
521 gmap_flush_tlb(gmap); in gmap_unlink()
526 static void gmap_pmdp_xchg(struct gmap *gmap, pmd_t *old, pmd_t new,
540 int __gmap_link(struct gmap *gmap, unsigned long gaddr, unsigned long vmaddr) in __gmap_link() argument
552 BUG_ON(gmap_is_shadow(gmap)); in __gmap_link()
554 table = gmap->table; in __gmap_link()
555 if ((gmap->asce & _ASCE_TYPE_MASK) >= _ASCE_TYPE_REGION1) { in __gmap_link()
558 gmap_alloc_table(gmap, table, _REGION2_ENTRY_EMPTY, in __gmap_link()
563 if ((gmap->asce & _ASCE_TYPE_MASK) >= _ASCE_TYPE_REGION2) { in __gmap_link()
566 gmap_alloc_table(gmap, table, _REGION3_ENTRY_EMPTY, in __gmap_link()
571 if ((gmap->asce & _ASCE_TYPE_MASK) >= _ASCE_TYPE_REGION3) { in __gmap_link()
574 gmap_alloc_table(gmap, table, _SEGMENT_ENTRY_EMPTY, in __gmap_link()
581 mm = gmap->mm; in __gmap_link()
594 if (pmd_large(*pmd) && !gmap->mm->context.allow_gmap_hpage_1m) in __gmap_link()
601 spin_lock(&gmap->guest_table_lock); in __gmap_link()
603 rc = radix_tree_insert(&gmap->host_to_guest, in __gmap_link()
619 gmap_pmdp_xchg(gmap, (pmd_t *)table, __pmd(unprot), gaddr); in __gmap_link()
621 spin_unlock(&gmap->guest_table_lock); in __gmap_link()
636 int gmap_fault(struct gmap *gmap, unsigned long gaddr, in gmap_fault() argument
643 mmap_read_lock(gmap->mm); in gmap_fault()
647 vmaddr = __gmap_translate(gmap, gaddr); in gmap_fault()
652 if (fixup_user_fault(gmap->mm, vmaddr, fault_flags, in gmap_fault()
664 rc = __gmap_link(gmap, gaddr, vmaddr); in gmap_fault()
666 mmap_read_unlock(gmap->mm); in gmap_fault()
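gmap_fault() resolves a guest address in the three steps visible above: translate the guest address with __gmap_translate(), fault the backing host page in with fixup_user_fault(), then wire it into the guest ASCE with __gmap_link(). A hedged wrapper sketch; the fault_flags name is taken from the fixup_user_fault() call shown above, and the helper itself is hypothetical:

#include <linux/mm.h>
#include <asm/gmap.h>

/* Hypothetical: resolve a guest page fault, optionally for a write access. */
static int example_resolve_guest_fault(struct gmap *g, unsigned long gaddr,
                                       bool write)
{
        unsigned int fault_flags = write ? FAULT_FLAG_WRITE : 0;

        return gmap_fault(g, gaddr, fault_flags);
}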
674 void __gmap_zap(struct gmap *gmap, unsigned long gaddr) in __gmap_zap() argument
681 vmaddr = (unsigned long) radix_tree_lookup(&gmap->guest_to_host, in __gmap_zap()
686 ptep = get_locked_pte(gmap->mm, vmaddr, &ptl); in __gmap_zap()
688 ptep_zap_unused(gmap->mm, vmaddr, ptep, 0); in __gmap_zap()
695 void gmap_discard(struct gmap *gmap, unsigned long from, unsigned long to) in gmap_discard() argument
700 mmap_read_lock(gmap->mm); in gmap_discard()
705 radix_tree_lookup(&gmap->guest_to_host, in gmap_discard()
711 vma = find_vma(gmap->mm, vmaddr); in gmap_discard()
723 mmap_read_unlock(gmap->mm); in gmap_discard()
761 static void gmap_call_notifier(struct gmap *gmap, unsigned long start, in gmap_call_notifier() argument
767 nb->notifier_call(gmap, start, end); in gmap_call_notifier()
788 static inline unsigned long *gmap_table_walk(struct gmap *gmap, in gmap_table_walk() argument
791 const int asce_type = gmap->asce & _ASCE_TYPE_MASK; in gmap_table_walk()
792 unsigned long *table = gmap->table; in gmap_table_walk()
794 if (gmap_is_shadow(gmap) && gmap->removed) in gmap_table_walk()
850 static pte_t *gmap_pte_op_walk(struct gmap *gmap, unsigned long gaddr, in gmap_pte_op_walk() argument
855 BUG_ON(gmap_is_shadow(gmap)); in gmap_pte_op_walk()
857 table = gmap_table_walk(gmap, gaddr, 1); /* get segment pointer */ in gmap_pte_op_walk()
860 return pte_alloc_map_lock(gmap->mm, (pmd_t *) table, gaddr, ptl); in gmap_pte_op_walk()
874 static int gmap_pte_op_fixup(struct gmap *gmap, unsigned long gaddr, in gmap_pte_op_fixup() argument
877 struct mm_struct *mm = gmap->mm; in gmap_pte_op_fixup()
881 BUG_ON(gmap_is_shadow(gmap)); in gmap_pte_op_fixup()
889 return __gmap_link(gmap, gaddr, vmaddr); in gmap_pte_op_fixup()
910 static inline pmd_t *gmap_pmd_op_walk(struct gmap *gmap, unsigned long gaddr) in gmap_pmd_op_walk() argument
914 BUG_ON(gmap_is_shadow(gmap)); in gmap_pmd_op_walk()
915 pmdp = (pmd_t *) gmap_table_walk(gmap, gaddr, 1); in gmap_pmd_op_walk()
920 if (!gmap->mm->context.allow_gmap_hpage_1m) in gmap_pmd_op_walk()
923 spin_lock(&gmap->guest_table_lock); in gmap_pmd_op_walk()
925 spin_unlock(&gmap->guest_table_lock); in gmap_pmd_op_walk()
931 spin_unlock(&gmap->guest_table_lock); in gmap_pmd_op_walk()
940 static inline void gmap_pmd_op_end(struct gmap *gmap, pmd_t *pmdp) in gmap_pmd_op_end() argument
943 spin_unlock(&gmap->guest_table_lock); in gmap_pmd_op_end()
960 static int gmap_protect_pmd(struct gmap *gmap, unsigned long gaddr, in gmap_protect_pmd() argument
973 gmap_pmdp_xchg(gmap, pmdp, new, gaddr); in gmap_protect_pmd()
979 gmap_pmdp_xchg(gmap, pmdp, new, gaddr); in gmap_protect_pmd()
1005 static int gmap_protect_pte(struct gmap *gmap, unsigned long gaddr, in gmap_protect_pte() argument
1016 ptep = pte_alloc_map_lock(gmap->mm, pmdp, gaddr, &ptl); in gmap_protect_pte()
1023 rc = ptep_force_prot(gmap->mm, gaddr, ptep, prot, pbits); in gmap_protect_pte()
1041 static int gmap_protect_range(struct gmap *gmap, unsigned long gaddr, in gmap_protect_range() argument
1048 BUG_ON(gmap_is_shadow(gmap)); in gmap_protect_range()
1051 pmdp = gmap_pmd_op_walk(gmap, gaddr); in gmap_protect_range()
1054 rc = gmap_protect_pte(gmap, gaddr, pmdp, prot, in gmap_protect_range()
1061 rc = gmap_protect_pmd(gmap, gaddr, pmdp, prot, in gmap_protect_range()
1069 gmap_pmd_op_end(gmap, pmdp); in gmap_protect_range()
1076 vmaddr = __gmap_translate(gmap, gaddr); in gmap_protect_range()
1079 rc = gmap_pte_op_fixup(gmap, gaddr, vmaddr, prot); in gmap_protect_range()
1101 int gmap_mprotect_notify(struct gmap *gmap, unsigned long gaddr, in gmap_mprotect_notify() argument
1106 if ((gaddr & ~PAGE_MASK) || (len & ~PAGE_MASK) || gmap_is_shadow(gmap)) in gmap_mprotect_notify()
1110 mmap_read_lock(gmap->mm); in gmap_mprotect_notify()
1111 rc = gmap_protect_range(gmap, gaddr, len, prot, GMAP_NOTIFY_MPROT); in gmap_mprotect_notify()
1112 mmap_read_unlock(gmap->mm); in gmap_mprotect_notify()
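gmap_mprotect_notify() rejects unaligned addresses or lengths and shadow gmaps, then calls gmap_protect_range() with GMAP_NOTIFY_MPROT under mmap_read_lock(), so the notifier chain driven by gmap_call_notifier() fires when the guest later touches the range. A caller sketch; the helper name and the PROT_READ choice are illustrative:

#include <linux/mm.h>
#include <linux/mman.h>
#include <asm/gmap.h>

/* Hypothetical: write-protect one guest page and rely on the registered
 * notifier to report the next write access. */
static int example_protect_for_notification(struct gmap *g, unsigned long gaddr)
{
        if (gaddr & ~PAGE_MASK)         /* must be page aligned, per the check above */
                return -EINVAL;

        return gmap_mprotect_notify(g, gaddr, PAGE_SIZE, PROT_READ);
}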
1130 int gmap_read_table(struct gmap *gmap, unsigned long gaddr, unsigned long *val) in gmap_read_table() argument
1137 if (gmap_is_shadow(gmap)) in gmap_read_table()
1142 ptep = gmap_pte_op_walk(gmap, gaddr, &ptl); in gmap_read_table()
1157 vmaddr = __gmap_translate(gmap, gaddr); in gmap_read_table()
1162 rc = gmap_pte_op_fixup(gmap, gaddr, vmaddr, PROT_READ); in gmap_read_table()
1178 static inline void gmap_insert_rmap(struct gmap *sg, unsigned long vmaddr, in gmap_insert_rmap()
1206 static int gmap_protect_rmap(struct gmap *sg, unsigned long raddr, in gmap_protect_rmap()
1209 struct gmap *parent; in gmap_protect_rmap()
1287 static void gmap_unshadow_page(struct gmap *sg, unsigned long raddr) in gmap_unshadow_page()
1307 static void __gmap_unshadow_pgt(struct gmap *sg, unsigned long raddr, in __gmap_unshadow_pgt()
1324 static void gmap_unshadow_pgt(struct gmap *sg, unsigned long raddr) in gmap_unshadow_pgt()
1353 static void __gmap_unshadow_sgt(struct gmap *sg, unsigned long raddr, in __gmap_unshadow_sgt()
1381 static void gmap_unshadow_sgt(struct gmap *sg, unsigned long raddr) in gmap_unshadow_sgt()
1410 static void __gmap_unshadow_r3t(struct gmap *sg, unsigned long raddr, in __gmap_unshadow_r3t()
1438 static void gmap_unshadow_r3t(struct gmap *sg, unsigned long raddr) in gmap_unshadow_r3t()
1467 static void __gmap_unshadow_r2t(struct gmap *sg, unsigned long raddr, in __gmap_unshadow_r2t()
1495 static void gmap_unshadow_r2t(struct gmap *sg, unsigned long raddr) in gmap_unshadow_r2t()
1524 static void __gmap_unshadow_r1t(struct gmap *sg, unsigned long raddr, in __gmap_unshadow_r1t()
1554 static void gmap_unshadow(struct gmap *sg) in gmap_unshadow()
1591 static struct gmap *gmap_find_shadow(struct gmap *parent, unsigned long asce, in gmap_find_shadow()
1594 struct gmap *sg; in gmap_find_shadow()
1620 int gmap_shadow_valid(struct gmap *sg, unsigned long asce, int edat_level) in gmap_shadow_valid()
1643 struct gmap *gmap_shadow(struct gmap *parent, unsigned long asce, in gmap_shadow()
1646 struct gmap *sg, *new; in gmap_shadow()
1735 int gmap_shadow_r2t(struct gmap *sg, unsigned long saddr, unsigned long r2t, in gmap_shadow_r2t()
1819 int gmap_shadow_r3t(struct gmap *sg, unsigned long saddr, unsigned long r3t, in gmap_shadow_r3t()
1903 int gmap_shadow_sgt(struct gmap *sg, unsigned long saddr, unsigned long sgt, in gmap_shadow_sgt()
1987 int gmap_shadow_pgt_lookup(struct gmap *sg, unsigned long saddr, in gmap_shadow_pgt_lookup()
2027 int gmap_shadow_pgt(struct gmap *sg, unsigned long saddr, unsigned long pgt, in gmap_shadow_pgt()
2106 int gmap_shadow_page(struct gmap *sg, unsigned long saddr, pte_t pte) in gmap_shadow_page()
2108 struct gmap *parent; in gmap_shadow_page()
2174 static void gmap_shadow_notify(struct gmap *sg, unsigned long vmaddr, in gmap_shadow_notify()
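The gmap_shadow*() family builds the nested (VSIE) translation: gmap_shadow() returns a shadow gmap for a given guest ASCE and EDAT level, reusing an existing one when gmap_find_shadow()/gmap_shadow_valid() match, and gmap_shadow_r2t()/_r3t()/_sgt()/_pgt()/_page() then mirror the guest's own tables into it. A lookup sketch; the edat_level parameter mirrors gmap_shadow_valid() above, and the error convention (ERR_PTR vs. NULL) is not visible in the listing, so it is checked defensively:

#include <linux/err.h>
#include <asm/gmap.h>

/* Hypothetical: obtain (or reuse) a shadow gmap for a nested guest ASCE. */
static struct gmap *example_get_shadow(struct gmap *parent,
                                       unsigned long guest_asce, int edat_level)
{
        struct gmap *sg = gmap_shadow(parent, guest_asce, edat_level);

        if (IS_ERR_OR_NULL(sg))         /* defensive: error convention assumed */
                return NULL;
        return sg;
}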
2241 struct gmap *gmap, *sg, *next; in ptep_notify() local
2246 list_for_each_entry_rcu(gmap, &mm->context.gmap_list, list) { in ptep_notify()
2247 spin_lock(&gmap->guest_table_lock); in ptep_notify()
2248 table = radix_tree_lookup(&gmap->host_to_guest, in ptep_notify()
2252 spin_unlock(&gmap->guest_table_lock); in ptep_notify()
2256 if (!list_empty(&gmap->children) && (bits & PGSTE_VSIE_BIT)) { in ptep_notify()
2257 spin_lock(&gmap->shadow_lock); in ptep_notify()
2259 &gmap->children, list) in ptep_notify()
2261 spin_unlock(&gmap->shadow_lock); in ptep_notify()
2264 gmap_call_notifier(gmap, gaddr, gaddr + PAGE_SIZE - 1); in ptep_notify()
2270 static void pmdp_notify_gmap(struct gmap *gmap, pmd_t *pmdp, in pmdp_notify_gmap() argument
2274 gmap_call_notifier(gmap, gaddr, gaddr + HPAGE_SIZE - 1); in pmdp_notify_gmap()
2287 static void gmap_pmdp_xchg(struct gmap *gmap, pmd_t *pmdp, pmd_t new, in gmap_pmdp_xchg() argument
2291 pmdp_notify_gmap(gmap, pmdp, gaddr); in gmap_pmdp_xchg()
2294 __pmdp_idte(gaddr, (pmd_t *)pmdp, IDTE_GUEST_ASCE, gmap->asce, in gmap_pmdp_xchg()
2307 struct gmap *gmap; in gmap_pmdp_clear() local
2311 list_for_each_entry_rcu(gmap, &mm->context.gmap_list, list) { in gmap_pmdp_clear()
2312 spin_lock(&gmap->guest_table_lock); in gmap_pmdp_clear()
2313 pmdp = (pmd_t *)radix_tree_delete(&gmap->host_to_guest, in gmap_pmdp_clear()
2317 pmdp_notify_gmap(gmap, pmdp, gaddr); in gmap_pmdp_clear()
2324 spin_unlock(&gmap->guest_table_lock); in gmap_pmdp_clear()
2360 struct gmap *gmap; in gmap_pmdp_idte_local() local
2364 list_for_each_entry_rcu(gmap, &mm->context.gmap_list, list) { in gmap_pmdp_idte_local()
2365 spin_lock(&gmap->guest_table_lock); in gmap_pmdp_idte_local()
2366 entry = radix_tree_delete(&gmap->host_to_guest, in gmap_pmdp_idte_local()
2371 pmdp_notify_gmap(gmap, pmdp, gaddr); in gmap_pmdp_idte_local()
2376 gmap->asce, IDTE_LOCAL); in gmap_pmdp_idte_local()
2381 spin_unlock(&gmap->guest_table_lock); in gmap_pmdp_idte_local()
2395 struct gmap *gmap; in gmap_pmdp_idte_global() local
2399 list_for_each_entry_rcu(gmap, &mm->context.gmap_list, list) { in gmap_pmdp_idte_global()
2400 spin_lock(&gmap->guest_table_lock); in gmap_pmdp_idte_global()
2401 entry = radix_tree_delete(&gmap->host_to_guest, in gmap_pmdp_idte_global()
2406 pmdp_notify_gmap(gmap, pmdp, gaddr); in gmap_pmdp_idte_global()
2411 gmap->asce, IDTE_GLOBAL); in gmap_pmdp_idte_global()
2418 spin_unlock(&gmap->guest_table_lock); in gmap_pmdp_idte_global()
2433 static bool gmap_test_and_clear_dirty_pmd(struct gmap *gmap, pmd_t *pmdp, in gmap_test_and_clear_dirty_pmd() argument
2446 gmap_protect_pmd(gmap, gaddr, pmdp, PROT_READ, 0); in gmap_test_and_clear_dirty_pmd()
2460 void gmap_sync_dirty_log_pmd(struct gmap *gmap, unsigned long bitmap[4], in gmap_sync_dirty_log_pmd() argument
2468 pmdp = gmap_pmd_op_walk(gmap, gaddr); in gmap_sync_dirty_log_pmd()
2473 if (gmap_test_and_clear_dirty_pmd(gmap, pmdp, gaddr)) in gmap_sync_dirty_log_pmd()
2477 ptep = pte_alloc_map_lock(gmap->mm, pmdp, vmaddr, &ptl); in gmap_sync_dirty_log_pmd()
2480 if (ptep_test_and_clear_uc(gmap->mm, vmaddr, ptep)) in gmap_sync_dirty_log_pmd()
2485 gmap_pmd_op_end(gmap, pmdp); in gmap_sync_dirty_log_pmd()
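gmap_test_and_clear_dirty_pmd() and gmap_sync_dirty_log_pmd() implement dirty logging one segment at a time: a dirty large pmd marks the whole bitmap and is re-protected read-only via gmap_protect_pmd(), otherwise the individual ptes are tested with ptep_test_and_clear_uc(). A caller sketch, assuming the truncated signature continues with the guest and host addresses used inside (gaddr, vmaddr) and that bitmap[4] covers the 256 4 KB pages of one 1 MB segment:

#include <linux/bitmap.h>
#include <asm/gmap.h>

/* Hypothetical: count how many pages of one guest segment were dirtied. */
static unsigned int example_count_dirty_pages(struct gmap *g, unsigned long gaddr,
                                              unsigned long vmaddr)
{
        unsigned long bitmap[4] = { 0, 0, 0, 0 };   /* 4 * 64 = 256 page bits */

        gmap_sync_dirty_log_pmd(g, bitmap, gaddr, vmaddr);
        return bitmap_weight(bitmap, 256);
}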
2742 void s390_unlist_old_asce(struct gmap *gmap) in s390_unlist_old_asce() argument
2746 old = virt_to_page(gmap->table); in s390_unlist_old_asce()
2747 spin_lock(&gmap->guest_table_lock); in s390_unlist_old_asce()
2764 spin_unlock(&gmap->guest_table_lock); in s390_unlist_old_asce()
2778 int s390_replace_asce(struct gmap *gmap) in s390_replace_asce() argument
2784 s390_unlist_old_asce(gmap); in s390_replace_asce()
2790 memcpy(table, gmap->table, 1UL << (CRST_ALLOC_ORDER + PAGE_SHIFT)); in s390_replace_asce()
2797 spin_lock(&gmap->guest_table_lock); in s390_replace_asce()
2798 list_add(&page->lru, &gmap->crst_list); in s390_replace_asce()
2799 spin_unlock(&gmap->guest_table_lock); in s390_replace_asce()
2802 asce = (gmap->asce & ~_ASCE_ORIGIN) | __pa(table); in s390_replace_asce()
2803 WRITE_ONCE(gmap->asce, asce); in s390_replace_asce()
2804 WRITE_ONCE(gmap->mm->context.gmap_asce, asce); in s390_replace_asce()
2805 WRITE_ONCE(gmap->table, table); in s390_replace_asce()