Lines matching full:slots (identifier search, mm/z3fold.c)

90 	 * be enough slots to hold all possible variants
106 * @slots: pointer to the structure holding buddy slots
120 struct z3fold_buddy_slots *slots; member
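
The matches at source lines 90, 106 and 120 are the slot-array comment, the kerneldoc for the z3fold_header field, and the member declaration itself. Below is a minimal sketch of struct z3fold_buddy_slots as these matches imply it; the BUDDY_MASK value and the exact field order are assumptions, not verbatim source.

/*
 * Sketch only (not verbatim z3fold.c): one slot per possible buddy, a back
 * link to the pool whose low bits double as flag bits (e.g. HANDLES_NOFREE,
 * see line 322), and the rwlock taken by every slot reader and writer.
 */
#define BUDDY_MASK	0x3		/* assumed value */

struct z3fold_buddy_slots {
	/* there should be enough slots to hold all possible variants */
	unsigned long slot[BUDDY_MASK + 1];
	unsigned long pool;		/* back link to the pool + flag bits */
	rwlock_t lock;
};
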
213 struct z3fold_buddy_slots *slots; in alloc_slots() local
215 slots = kmem_cache_zalloc(pool->c_handle, in alloc_slots()
218 if (slots) { in alloc_slots()
220 kmemleak_not_leak(slots); in alloc_slots()
221 slots->pool = (unsigned long)pool; in alloc_slots()
222 rwlock_init(&slots->lock); in alloc_slots()
225 return slots; in alloc_slots()
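
Lines 213-225 cover slot allocation. A sketch of alloc_slots() reconstructed around those matches follows; the GFP-flag handling on the elided line 216 is an assumption.

/* Sketch around lines 213-225; relies on the surrounding z3fold.c context. */
static inline struct z3fold_buddy_slots *alloc_slots(struct z3fold_pool *pool,
						     gfp_t gfp)
{
	struct z3fold_buddy_slots *slots;

	/* line 215; the real call likely masks off some GFP flags */
	slots = kmem_cache_zalloc(pool->c_handle, gfp);

	if (slots) {
		/* freed separately in free_handle(), so not a leak */
		kmemleak_not_leak(slots);
		slots->pool = (unsigned long)pool;	/* back link, line 221 */
		rwlock_init(&slots->lock);		/* line 222 */
	}
	return slots;
}
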
260 struct z3fold_buddy_slots *slots; in __get_z3fold_header() local
265 slots = handle_to_slots(handle); in __get_z3fold_header()
269 read_lock(&slots->lock); in __get_z3fold_header()
274 read_unlock(&slots->lock); in __get_z3fold_header()
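
Lines 260-274 show the read side: a handle is resolved to its z3fold_header under the slots read lock. A simplified sketch of that path is below; the real __get_z3fold_header() also deals with headless pages and optional page locking, and masking the encoded value down to the page-aligned header is an assumption consistent with the encoder at lines 456-478.

/* Simplified sketch of the lookup in __get_z3fold_header(), lines 260-274. */
static struct z3fold_header *header_lookup_sketch(unsigned long handle)
{
	struct z3fold_buddy_slots *slots = handle_to_slots(handle);
	struct z3fold_header *zhdr;
	unsigned long addr;

	read_lock(&slots->lock);
	addr = *(unsigned long *)handle;	/* the handle is the address of a slot entry */
	zhdr = (struct z3fold_header *)(addr & PAGE_MASK);
	read_unlock(&slots->lock);

	return zhdr;
}
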
308 struct z3fold_buddy_slots *slots; in free_handle() local
318 slots = handle_to_slots(handle); in free_handle()
319 write_lock(&slots->lock); in free_handle()
322 if (test_bit(HANDLES_NOFREE, &slots->pool)) { in free_handle()
323 write_unlock(&slots->lock); in free_handle()
327 if (zhdr->slots != slots) in free_handle()
332 if (slots->slot[i]) { in free_handle()
337 write_unlock(&slots->lock); in free_handle()
340 struct z3fold_pool *pool = slots_to_pool(slots); in free_handle()
342 if (zhdr->slots == slots) in free_handle()
343 zhdr->slots = NULL; in free_handle()
344 kmem_cache_free(pool->c_handle, slots); in free_handle()
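
Lines 308-344 are the teardown path: the slot entry is cleared under the write lock, nothing more is done if the HANDLES_NOFREE bit is set in slots->pool (the reclaim case at line 1338), and the slots structure itself goes back to the cache once every slot is empty. A sketch following those matches is below; the clearing of the entry and the loop bounds are assumptions, and the foreign-handle bookkeeping at line 327 is omitted.

/* Sketch around lines 308-344; simplified, headless handles omitted. */
static void free_handle_sketch(unsigned long handle, struct z3fold_header *zhdr)
{
	struct z3fold_buddy_slots *slots = handle_to_slots(handle);
	bool is_free = true;
	int i;

	write_lock(&slots->lock);
	*(unsigned long *)handle = 0;		/* assumed: clear this slot entry */

	if (test_bit(HANDLES_NOFREE, &slots->pool)) {
		write_unlock(&slots->lock);	/* on-stack slots, never freed here */
		return;
	}

	for (i = 0; i <= BUDDY_MASK; i++)
		if (slots->slot[i]) {		/* line 332: still in use */
			is_free = false;
			break;
		}
	write_unlock(&slots->lock);

	if (is_free) {
		struct z3fold_pool *pool = slots_to_pool(slots);

		if (zhdr->slots == slots)
			zhdr->slots = NULL;
		kmem_cache_free(pool->c_handle, slots);
	}
}
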
401 struct z3fold_buddy_slots *slots; in init_z3fold_page() local
412 slots = alloc_slots(pool, gfp); in init_z3fold_page()
413 if (!slots) in init_z3fold_page()
426 zhdr->slots = slots; in init_z3fold_page()
456 struct z3fold_buddy_slots *slots, in __encode_handle() argument
475 write_lock(&slots->lock); in __encode_handle()
476 slots->slot[idx] = h; in __encode_handle()
477 write_unlock(&slots->lock); in __encode_handle()
478 return (unsigned long)&slots->slot[idx]; in __encode_handle()
483 return __encode_handle(zhdr, zhdr->slots, bud); in encode_handle()
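
Lines 456-483 show how handles are produced: the buddy's location is encoded into one slot entry under the write lock, and the address of that entry, not the encoded value itself, is handed out. That indirection is what lets compaction move an object and fix up the slot without invalidating outstanding handles. A simplified sketch follows; the encoding details, including the __idx() helper and any size bits for the LAST buddy, are assumptions.

/* Sketch around lines 456-478; encoding details are assumed, not verbatim. */
static unsigned long encode_handle_sketch(struct z3fold_header *zhdr,
					  struct z3fold_buddy_slots *slots,
					  enum buddy bud)
{
	unsigned long h = (unsigned long)zhdr;
	int idx = __idx(zhdr, bud);	/* assumed helper: per-buddy slot index */

	h += idx;			/* assumed: buddy index kept in the low bits */

	write_lock(&slots->lock);
	slots->slot[idx] = h;		/* line 476 */
	write_unlock(&slots->lock);

	return (unsigned long)&slots->slot[idx];	/* handle = address of the slot entry */
}
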
489 struct z3fold_buddy_slots *slots = handle_to_slots(handle); in handle_to_chunks() local
492 read_lock(&slots->lock); in handle_to_chunks()
494 read_unlock(&slots->lock); in handle_to_chunks()
506 struct z3fold_buddy_slots *slots = handle_to_slots(handle); in handle_to_buddy() local
509 read_lock(&slots->lock); in handle_to_buddy()
512 read_unlock(&slots->lock); in handle_to_buddy()
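
Lines 489-512 are the cheap decoders: both take only the read lock, load the encoded value out of the slot entry, and derive the chunk count or buddy index from its low bits. A sketch of the chunk decoder is below; BUDDY_SHIFT and the exact bit layout are assumptions mirroring the encoder above.

/* Sketch around lines 489-494; bit layout is an assumption. */
static unsigned short handle_to_chunks_sketch(unsigned long handle)
{
	struct z3fold_buddy_slots *slots = handle_to_slots(handle);
	unsigned long addr;

	read_lock(&slots->lock);
	addr = *(unsigned long *)handle;
	read_unlock(&slots->lock);

	return (addr & ~PAGE_MASK) >> BUDDY_SHIFT;	/* assumed size encoding */
}
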
688 * No need to protect slots here -- all the slots are "local" and in compact_single_buddy()
691 if (zhdr->first_chunks && zhdr->slots->slot[first_idx]) { in compact_single_buddy()
694 old_handle = (unsigned long)&zhdr->slots->slot[first_idx]; in compact_single_buddy()
696 } else if (zhdr->middle_chunks && zhdr->slots->slot[middle_idx]) { in compact_single_buddy()
699 old_handle = (unsigned long)&zhdr->slots->slot[middle_idx]; in compact_single_buddy()
701 } else if (zhdr->last_chunks && zhdr->slots->slot[last_idx]) { in compact_single_buddy()
704 old_handle = (unsigned long)&zhdr->slots->slot[last_idx]; in compact_single_buddy()
742 write_lock(&zhdr->slots->lock); in compact_single_buddy()
748 write_unlock(&zhdr->slots->lock); in compact_single_buddy()
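
Lines 688-748 are the compaction path. Slots that only serve the page being compacted ("local" slots) can be inspected without the lock, but once the object has been copied to its new home the write lock is taken so the slot entry the old handle points to can be rewritten in place. A heavily simplified sketch of that final fix-up is below; new_zhdr and new_bud are hypothetical names for the destination, and the re-encoding is assumed to mirror __encode_handle().

/* Sketch of the slot fix-up around lines 742-748 (hypothetical names). */
write_lock(&zhdr->slots->lock);
/* assumed: re-encode the buddy's new location into the same slot entry,
 * so the previously issued handle keeps resolving correctly */
*(unsigned long *)old_handle = (unsigned long)new_zhdr + __idx(new_zhdr, new_bud);
write_unlock(&zhdr->slots->lock);
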
970 if (zhdr && !zhdr->slots) in __z3fold_alloc()
971 zhdr->slots = alloc_slots(pool, in __z3fold_alloc()
1335 struct z3fold_buddy_slots slots __attribute__((aligned(SLOTS_ALIGN))); in z3fold_reclaim_page() local
1337 rwlock_init(&slots.lock); in z3fold_reclaim_page()
1338 slots.pool = (unsigned long)pool | (1 << HANDLES_NOFREE); in z3fold_reclaim_page()
1411 * use our local slots structure because z3fold_free in z3fold_reclaim_page()
1412 * can zero out zhdr->slots and we can't do much in z3fold_reclaim_page()
1418 memset(slots.slot, 0, sizeof(slots.slot)); in z3fold_reclaim_page()
1420 first_handle = __encode_handle(zhdr, &slots, in z3fold_reclaim_page()
1423 middle_handle = __encode_handle(zhdr, &slots, in z3fold_reclaim_page()
1426 last_handle = __encode_handle(zhdr, &slots, in z3fold_reclaim_page()
1465 struct z3fold_buddy_slots *slots = zhdr->slots; in z3fold_reclaim_page() local
1469 kmem_cache_free(pool->c_handle, slots); in z3fold_reclaim_page()
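
Lines 1335-1426 show the reclaim trick: instead of allocating, z3fold_reclaim_page() builds a slots structure on the stack, tags its pool back link with HANDLES_NOFREE so free_handle() will not try to kmem_cache_free() it (line 322), and re-encodes the page's buddies into it before calling out to the eviction handler. The page's original slots are released explicitly at line 1469. A condensed sketch of the setup follows; the per-buddy guards and the FIRST/MIDDLE/LAST arguments are assumptions filled in around the truncated lines 1420-1426.

/* Condensed sketch of the on-stack slots setup in z3fold_reclaim_page(). */
struct z3fold_buddy_slots slots __attribute__((aligned(SLOTS_ALIGN)));

rwlock_init(&slots.lock);
slots.pool = (unsigned long)pool | (1 << HANDLES_NOFREE);

/* per page under reclaim: start from a clean slot array ... */
memset(slots.slot, 0, sizeof(slots.slot));

/* ... and hand out handles that point into the on-stack structure */
if (zhdr->first_chunks)			/* assumed guard */
	first_handle = __encode_handle(zhdr, &slots, FIRST);
if (zhdr->middle_chunks)		/* assumed guard */
	middle_handle = __encode_handle(zhdr, &slots, MIDDLE);
if (zhdr->last_chunks)			/* assumed guard */
	last_handle = __encode_handle(zhdr, &slots, LAST);
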