Lines matching refs: chunk — cross-reference listing; the leading numbers are line numbers in drivers/gpu/drm/nouveau/nouveau_dmem.c, and "local" marks a local-variable declaration.

95 struct nouveau_dmem_chunk *chunk = nouveau_page_to_chunk(page); in page_to_drm() local
97 return chunk->drm; in page_to_drm()
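These two matched lines are the entire body of page_to_drm(): map a device-private page back to its owning chunk, then return that chunk's driver instance. A minimal sketch of the helper as the listing suggests it (the static signature is reconstructed, not shown above):

static struct nouveau_drm *page_to_drm(struct page *page)
{
        /* Every device-private page belongs to exactly one chunk. */
        struct nouveau_dmem_chunk *chunk = nouveau_page_to_chunk(page);

        return chunk->drm;
}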
102 struct nouveau_dmem_chunk *chunk = nouveau_page_to_chunk(page); in nouveau_dmem_page_addr() local
104 chunk->pagemap.range.start; in nouveau_dmem_page_addr()
106 return chunk->bo->offset + off; in nouveau_dmem_page_addr()
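Line 104 is the tail of a two-line expression: the page's physical address minus the chunk's range start gives the offset of the page within the chunk, which is then added to the backing buffer object's VRAM offset. A sketch of the whole function with the split expression rejoined (the first half of the `off` computation is inferred from the arithmetic, not shown in the listing):

unsigned long nouveau_dmem_page_addr(struct page *page)
{
        struct nouveau_dmem_chunk *chunk = nouveau_page_to_chunk(page);
        /* Offset of this page within the chunk's device-private range. */
        unsigned long off = (page_to_pfn(page) << PAGE_SHIFT) -
                            chunk->pagemap.range.start;

        /* VRAM address = chunk's buffer-object base + in-chunk offset. */
        return chunk->bo->offset + off;
}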
111 struct nouveau_dmem_chunk *chunk = nouveau_page_to_chunk(page); in nouveau_dmem_page_free() local
112 struct nouveau_dmem *dmem = chunk->drm->dmem; in nouveau_dmem_page_free()
118 WARN_ON(!chunk->callocated); in nouveau_dmem_page_free()
119 chunk->callocated--; in nouveau_dmem_page_free()
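nouveau_dmem_page_free() is the dev_pagemap_ops page-free callback: it returns the page to a per-device free list and drops the chunk's allocation count; the WARN_ON on line 118 catches an underflow of that count. A sketch assuming dmem keeps a spinlock-protected free_pages list chained through zone_device_data (the lock and list fields are reconstructed; only lines 111-119 appear in the listing):

static void nouveau_dmem_page_free(struct page *page)
{
        struct nouveau_dmem_chunk *chunk = nouveau_page_to_chunk(page);
        struct nouveau_dmem *dmem = chunk->drm->dmem;

        spin_lock(&dmem->lock);
        /* Push the page onto the free list via its zone_device_data. */
        page->zone_device_data = dmem->free_pages;
        dmem->free_pages = page;

        WARN_ON(!chunk->callocated);    /* count must not underflow */
        chunk->callocated--;
        spin_unlock(&dmem->lock);
}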
229 struct nouveau_dmem_chunk *chunk; in nouveau_dmem_chunk_alloc() local
236 chunk = kzalloc(sizeof(*chunk), GFP_KERNEL); in nouveau_dmem_chunk_alloc()
237 if (chunk == NULL) { in nouveau_dmem_chunk_alloc()
250 chunk->drm = drm; in nouveau_dmem_chunk_alloc()
251 chunk->pagemap.type = MEMORY_DEVICE_PRIVATE; in nouveau_dmem_chunk_alloc()
252 chunk->pagemap.range.start = res->start; in nouveau_dmem_chunk_alloc()
253 chunk->pagemap.range.end = res->end; in nouveau_dmem_chunk_alloc()
254 chunk->pagemap.nr_range = 1; in nouveau_dmem_chunk_alloc()
255 chunk->pagemap.ops = &nouveau_dmem_pagemap_ops; in nouveau_dmem_chunk_alloc()
256 chunk->pagemap.owner = drm->dev; in nouveau_dmem_chunk_alloc()
260 &chunk->bo); in nouveau_dmem_chunk_alloc()
264 ret = nouveau_bo_pin(chunk->bo, NOUVEAU_GEM_DOMAIN_VRAM, false); in nouveau_dmem_chunk_alloc()
268 ptr = memremap_pages(&chunk->pagemap, numa_node_id()); in nouveau_dmem_chunk_alloc()
275 list_add(&chunk->list, &drm->dmem->chunks); in nouveau_dmem_chunk_alloc()
278 pfn_first = chunk->pagemap.range.start >> PAGE_SHIFT; in nouveau_dmem_chunk_alloc()
286 chunk->callocated++; in nouveau_dmem_chunk_alloc()
295 nouveau_bo_unpin(chunk->bo); in nouveau_dmem_chunk_alloc()
297 nouveau_bo_ref(NULL, &chunk->bo); in nouveau_dmem_chunk_alloc()
299 release_mem_region(chunk->pagemap.range.start, range_len(&chunk->pagemap.range)); in nouveau_dmem_chunk_alloc()
301 kfree(chunk); in nouveau_dmem_chunk_alloc()
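The matched lines from nouveau_dmem_chunk_alloc() trace the whole chunk lifecycle in one direction: allocate the chunk, claim a free physical range, describe it as MEMORY_DEVICE_PRIVATE, back it with a pinned VRAM buffer object, create the struct pages with memremap_pages(), link the chunk into the device list, and seed the allocator. Lines 295-301 are the error unwinding, in exact reverse order. A sketch of how these lines fit together; the return-code checks, goto labels, locking, free-list seeding, and the DMEM_CHUNK_SIZE/DMEM_CHUNK_NPAGES constants are reconstructed assumptions, not verbatim source:

static int
nouveau_dmem_chunk_alloc(struct nouveau_drm *drm, struct page **ppage)
{
        struct nouveau_dmem_chunk *chunk;
        struct resource *res;
        struct page *page;
        void *ptr;
        unsigned long i, pfn_first;
        int ret;

        chunk = kzalloc(sizeof(*chunk), GFP_KERNEL);
        if (chunk == NULL) {
                ret = -ENOMEM;
                goto out;
        }

        /* Claim unused physical address space to host the struct pages
         * (assumed: request_free_mem_region(), sized DMEM_CHUNK_SIZE). */
        res = request_free_mem_region(&iomem_resource, DMEM_CHUNK_SIZE,
                                      "nouveau_dmem");
        if (IS_ERR(res)) {
                ret = PTR_ERR(res);
                goto out_free;
        }

        /* Describe the range as device-private memory (lines 250-256). */
        chunk->drm = drm;
        chunk->pagemap.type = MEMORY_DEVICE_PRIVATE;
        chunk->pagemap.range.start = res->start;
        chunk->pagemap.range.end = res->end;
        chunk->pagemap.nr_range = 1;
        chunk->pagemap.ops = &nouveau_dmem_pagemap_ops;
        chunk->pagemap.owner = drm->dev;

        /* Back the chunk with a VRAM buffer object (assumed argument
         * order for nouveau_bo_new()) and pin it (line 264). */
        ret = nouveau_bo_new(&drm->client, DMEM_CHUNK_SIZE, 0,
                             NOUVEAU_GEM_DOMAIN_VRAM, 0, 0, NULL, NULL,
                             &chunk->bo);
        if (ret)
                goto out_release;

        ret = nouveau_bo_pin(chunk->bo, NOUVEAU_GEM_DOMAIN_VRAM, false);
        if (ret)
                goto out_bo_free;

        /* Create the struct pages for the device-private range. */
        ptr = memremap_pages(&chunk->pagemap, numa_node_id());
        if (IS_ERR(ptr)) {
                ret = PTR_ERR(ptr);
                goto out_bo_unpin;
        }

        mutex_lock(&drm->dmem->mutex);
        list_add(&chunk->list, &drm->dmem->chunks);     /* line 275 */
        mutex_unlock(&drm->dmem->mutex);

        /* Seed the free list with all pages but one; hand the last page
         * to the caller and count it as allocated (line 286). */
        pfn_first = chunk->pagemap.range.start >> PAGE_SHIFT;
        page = pfn_to_page(pfn_first);
        spin_lock(&drm->dmem->lock);
        for (i = 0; i < DMEM_CHUNK_NPAGES - 1; ++i, ++page) {
                page->zone_device_data = drm->dmem->free_pages;
                drm->dmem->free_pages = page;
        }
        *ppage = page;
        chunk->callocated++;
        spin_unlock(&drm->dmem->lock);

        return 0;

out_bo_unpin:
        nouveau_bo_unpin(chunk->bo);            /* line 295 */
out_bo_free:
        nouveau_bo_ref(NULL, &chunk->bo);       /* line 297 */
out_release:
        release_mem_region(chunk->pagemap.range.start,
                           range_len(&chunk->pagemap.range));
out_free:
        kfree(chunk);
out:
        return ret;
}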
309 struct nouveau_dmem_chunk *chunk; in nouveau_dmem_page_alloc_locked() local
317 chunk = nouveau_page_to_chunk(page); in nouveau_dmem_page_alloc_locked()
318 chunk->callocated++; in nouveau_dmem_page_alloc_locked()
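Lines 317-318 are the fast path of the page allocator: pop a page off the free list, resolve its chunk, and bump callocated. A sketch assuming the same free-list layout as in nouveau_dmem_page_free() above; the slow path that grows the pool via nouveau_dmem_chunk_alloc() and the final lock_page() are reconstructed:

static struct page *
nouveau_dmem_page_alloc_locked(struct nouveau_drm *drm)
{
        struct nouveau_dmem_chunk *chunk;
        struct page *page = NULL;
        int ret;

        spin_lock(&drm->dmem->lock);
        if (drm->dmem->free_pages) {
                /* Fast path: reuse a previously freed device page. */
                page = drm->dmem->free_pages;
                drm->dmem->free_pages = page->zone_device_data;
                chunk = nouveau_page_to_chunk(page);    /* line 317 */
                chunk->callocated++;                    /* line 318 */
                spin_unlock(&drm->dmem->lock);
        } else {
                /* Slow path: grow the pool by one whole chunk. */
                spin_unlock(&drm->dmem->lock);
                ret = nouveau_dmem_chunk_alloc(drm, &page);
                if (ret)
                        return NULL;
        }

        lock_page(page);        /* returned page is locked, hence "_locked" */
        return page;
}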
342 struct nouveau_dmem_chunk *chunk; in nouveau_dmem_resume() local
349 list_for_each_entry(chunk, &drm->dmem->chunks, list) { in nouveau_dmem_resume()
350 ret = nouveau_bo_pin(chunk->bo, NOUVEAU_GEM_DOMAIN_VRAM, false); in nouveau_dmem_resume()
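On resume, every chunk's backing buffer object has to be pinned back into VRAM, so the function simply walks the chunks list. Sketch; the NULL guard and the mutex around the walk are assumptions consistent with the list usage elsewhere in the listing:

void nouveau_dmem_resume(struct nouveau_drm *drm)
{
        struct nouveau_dmem_chunk *chunk;
        int ret;

        if (drm->dmem == NULL)
                return;

        mutex_lock(&drm->dmem->mutex);
        list_for_each_entry(chunk, &drm->dmem->chunks, list) {
                /* Re-pin each chunk's backing object into VRAM. */
                ret = nouveau_bo_pin(chunk->bo, NOUVEAU_GEM_DOMAIN_VRAM,
                                     false);
                WARN_ON(ret);   /* pin failure is not otherwise handled */
        }
        mutex_unlock(&drm->dmem->mutex);
}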
360 struct nouveau_dmem_chunk *chunk; in nouveau_dmem_suspend() local
366 list_for_each_entry(chunk, &drm->dmem->chunks, list) in nouveau_dmem_suspend()
367 nouveau_bo_unpin(chunk->bo); in nouveau_dmem_suspend()
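Suspend is the mirror image: unpin every chunk's buffer object so the VRAM contents can be evicted. Sketch under the same locking assumption:

void nouveau_dmem_suspend(struct nouveau_drm *drm)
{
        struct nouveau_dmem_chunk *chunk;

        if (drm->dmem == NULL)
                return;

        mutex_lock(&drm->dmem->mutex);
        list_for_each_entry(chunk, &drm->dmem->chunks, list)
                nouveau_bo_unpin(chunk->bo);    /* line 367 */
        mutex_unlock(&drm->dmem->mutex);
}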
374 struct nouveau_dmem_chunk *chunk, *tmp; in nouveau_dmem_fini() local
381 list_for_each_entry_safe(chunk, tmp, &drm->dmem->chunks, list) { in nouveau_dmem_fini()
382 nouveau_bo_unpin(chunk->bo); in nouveau_dmem_fini()
383 nouveau_bo_ref(NULL, &chunk->bo); in nouveau_dmem_fini()
384 list_del(&chunk->list); in nouveau_dmem_fini()
385 memunmap_pages(&chunk->pagemap); in nouveau_dmem_fini()
386 release_mem_region(chunk->pagemap.range.start, in nouveau_dmem_fini()
387 range_len(&chunk->pagemap.range)); in nouveau_dmem_fini()
388 kfree(chunk); in nouveau_dmem_fini()
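Teardown undoes nouveau_dmem_chunk_alloc() step by step in reverse: unpin and drop the buffer object, unlink the chunk, destroy the struct pages with memunmap_pages(), return the physical range, and free the chunk. The _safe iterator is required because each iteration deletes the entry it stands on. Sketch; only the guard and locking are reconstructed:

void nouveau_dmem_fini(struct nouveau_drm *drm)
{
        struct nouveau_dmem_chunk *chunk, *tmp;

        if (drm->dmem == NULL)
                return;

        mutex_lock(&drm->dmem->mutex);
        /* _safe variant: the loop body frees the current entry. */
        list_for_each_entry_safe(chunk, tmp, &drm->dmem->chunks, list) {
                nouveau_bo_unpin(chunk->bo);
                nouveau_bo_ref(NULL, &chunk->bo);
                list_del(&chunk->list);
                memunmap_pages(&chunk->pagemap);  /* kills the struct pages */
                release_mem_region(chunk->pagemap.range.start,
                                   range_len(&chunk->pagemap.range));
                kfree(chunk);
        }
        mutex_unlock(&drm->dmem->mutex);
}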