Lines matching refs:chunk — each match shows its source line number, the matched code, and the enclosing function; "local" and "argument" mark the line where chunk is declared in that function.
53 static void i40iw_free_vmalloc_mem(struct i40iw_hw *hw, struct i40iw_chunk *chunk);
63 struct i40iw_chunk *chunk; in i40iw_destroy_pble_pool() local
68 chunk = list_entry(clist, struct i40iw_chunk, list); in i40iw_destroy_pble_pool()
69 if (chunk->type == I40IW_VMALLOC) in i40iw_destroy_pble_pool()
70 i40iw_free_vmalloc_mem(dev->hw, chunk); in i40iw_destroy_pble_pool()
71 kfree(chunk); in i40iw_destroy_pble_pool()
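
A minimal sketch of how the matched lines (63-71) plausibly fit together: i40iw_destroy_pble_pool() walks the pool's chunk list, returns vmalloc-backed chunks through i40iw_free_vmalloc_mem(), and frees each descriptor. The signature and the list_for_each_safe() skeleton are assumptions; only the loop body appears in the listing.

void i40iw_destroy_pble_pool(struct i40iw_sc_dev *dev,
                             struct i40iw_hmc_pble_rsrc *pble_rsrc)
{
        struct list_head *clist;
        struct list_head *tlist;
        struct i40iw_chunk *chunk;

        /* _safe iterator: each node is kfree()d while the list is walked */
        list_for_each_safe(clist, tlist, &pble_rsrc->pinfo.clist) {
                chunk = list_entry(clist, struct i40iw_chunk, list);
                if (chunk->type == I40IW_VMALLOC)
                        i40iw_free_vmalloc_mem(dev->hw, chunk);
                /* only vmalloc chunks own their memory; coherent chunks
                 * point into SD backing pages released by HMC teardown */
                kfree(chunk);
        }
}
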
138 struct i40iw_chunk *chunk = info->chunk; in add_sd_direct() local
151 chunk->type = I40IW_DMA_COHERENT; in add_sd_direct()
155 chunk->size = info->pages << I40IW_HMC_PAGED_BP_SHIFT; in add_sd_direct()
156 chunk->vaddr = ((u8 *)sd_entry->u.bp.addr.va + offset); in add_sd_direct()
157 chunk->fpm_addr = pble_rsrc->next_fpm_addr; in add_sd_direct()
159 chunk->size, chunk->size, chunk->vaddr, chunk->fpm_addr); in add_sd_direct()
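
Source lines 138-159 show add_sd_direct() carving a chunk directly out of a DMA-coherent segment-descriptor backing page; line 159 is the tail of a debug print of the resulting fields. A hedged fragment of that setup follows: the offset derivation from a relative PD index is an assumption, and only the four chunk-field assignments are in the listing.

struct i40iw_chunk *chunk = info->chunk;
u32 offset = rel_pd_idx << I40IW_HMC_PAGED_BP_SHIFT;  /* assumed: position inside the SD page */

chunk->type = I40IW_DMA_COHERENT;               /* memory lives in the coherent SD backing page */
chunk->size = info->pages << I40IW_HMC_PAGED_BP_SHIFT; /* info->pages backing pages' worth of PBLE bytes */
chunk->vaddr = ((u8 *)sd_entry->u.bp.addr.va + offset); /* CPU view inside that page */
chunk->fpm_addr = pble_rsrc->next_fpm_addr;     /* device-side (FPM) address of the chunk */
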
168 static void i40iw_free_vmalloc_mem(struct i40iw_hw *hw, struct i40iw_chunk *chunk) in i40iw_free_vmalloc_mem() argument
173 if (!chunk->pg_cnt) in i40iw_free_vmalloc_mem()
175 for (i = 0; i < chunk->pg_cnt; i++) in i40iw_free_vmalloc_mem()
176 dma_unmap_page(&pcidev->dev, chunk->dmaaddrs[i], PAGE_SIZE, DMA_BIDIRECTIONAL); in i40iw_free_vmalloc_mem()
179 kfree(chunk->dmaaddrs); in i40iw_free_vmalloc_mem()
180 chunk->dmaaddrs = NULL; in i40iw_free_vmalloc_mem()
181 vfree(chunk->vaddr); in i40iw_free_vmalloc_mem()
182 chunk->vaddr = NULL; in i40iw_free_vmalloc_mem()
183 chunk->type = 0; in i40iw_free_vmalloc_mem()
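
Lines 168-183 give nearly the whole body of i40iw_free_vmalloc_mem(), the undo side of the vmalloc path: unmap whatever was DMA-mapped, then release the address array and the vmalloc buffer and neutralize the chunk. The sketch below stitches the matched lines together; the pcidev lookup through the hw handle and the done label are the only fill-ins.

static void i40iw_free_vmalloc_mem(struct i40iw_hw *hw, struct i40iw_chunk *chunk)
{
        struct pci_dev *pcidev = (struct pci_dev *)hw->dev_context;  /* assumed field */
        int i;

        if (!chunk->pg_cnt)
                goto done;
        /* pg_cnt counts only successfully mapped pages, so no unmapped
         * (or error-valued) handle is ever passed to dma_unmap_page() */
        for (i = 0; i < chunk->pg_cnt; i++)
                dma_unmap_page(&pcidev->dev, chunk->dmaaddrs[i], PAGE_SIZE, DMA_BIDIRECTIONAL);

done:
        kfree(chunk->dmaaddrs);         /* kfree(NULL) is a no-op, so partial setups are fine */
        chunk->dmaaddrs = NULL;
        vfree(chunk->vaddr);            /* likewise vfree(NULL) */
        chunk->vaddr = NULL;
        chunk->type = 0;                /* chunk no longer owns any memory */
}
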
193 struct i40iw_chunk *chunk, in i40iw_get_vmalloc_mem() argument
202 chunk->dmaaddrs = kzalloc(pg_cnt << 3, GFP_KERNEL); in i40iw_get_vmalloc_mem()
203 if (!chunk->dmaaddrs) in i40iw_get_vmalloc_mem()
206 chunk->vaddr = vmalloc(size); in i40iw_get_vmalloc_mem()
207 if (!chunk->vaddr) { in i40iw_get_vmalloc_mem()
208 kfree(chunk->dmaaddrs); in i40iw_get_vmalloc_mem()
209 chunk->dmaaddrs = NULL; in i40iw_get_vmalloc_mem()
212 chunk->size = size; in i40iw_get_vmalloc_mem()
213 addr = (u8 *)chunk->vaddr; in i40iw_get_vmalloc_mem()
218 chunk->dmaaddrs[i] = dma_map_page(&pcidev->dev, page, 0, in i40iw_get_vmalloc_mem()
220 if (dma_mapping_error(&pcidev->dev, chunk->dmaaddrs[i])) in i40iw_get_vmalloc_mem()
225 chunk->pg_cnt = i; in i40iw_get_vmalloc_mem()
226 chunk->type = I40IW_VMALLOC; in i40iw_get_vmalloc_mem()
230 i40iw_free_vmalloc_mem(hw, chunk); in i40iw_get_vmalloc_mem()
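
Lines 193-230 sketch the matching allocator, i40iw_get_vmalloc_mem(): reserve one slot per page for the DMA handles (pg_cnt << 3 bytes, assuming a 64-bit dma_addr_t), vmalloc the buffer, then map it page by page, recording in pg_cnt how many pages actually mapped so a partial failure can unwind through i40iw_free_vmalloc_mem(). In the reconstruction below, the loop skeleton, the vmalloc_to_page() lookup, and the error codes are assumptions; the allocations, the dma_map_page()/dma_mapping_error() pair, and the unwind call are all in the listing.

static enum i40iw_status_code i40iw_get_vmalloc_mem(struct i40iw_hw *hw,
                                                    struct i40iw_chunk *chunk,
                                                    int pg_cnt)
{
        struct pci_dev *pcidev = (struct pci_dev *)hw->dev_context;  /* assumed field */
        struct page *page;
        u8 *addr;
        u32 size;
        int i;

        /* pg_cnt << 3 == pg_cnt * 8: one 8-byte dma_addr_t slot per page */
        chunk->dmaaddrs = kzalloc(pg_cnt << 3, GFP_KERNEL);
        if (!chunk->dmaaddrs)
                return I40IW_ERR_NO_MEMORY;
        size = PAGE_SIZE * pg_cnt;
        chunk->vaddr = vmalloc(size);
        if (!chunk->vaddr) {
                kfree(chunk->dmaaddrs);
                chunk->dmaaddrs = NULL;
                return I40IW_ERR_NO_MEMORY;
        }
        chunk->size = size;
        addr = (u8 *)chunk->vaddr;
        for (i = 0; i < pg_cnt; i++) {
                /* vmalloc memory is only virtually contiguous: map page-wise */
                page = vmalloc_to_page((void *)addr);
                if (!page)
                        break;
                chunk->dmaaddrs[i] = dma_map_page(&pcidev->dev, page, 0,
                                                  PAGE_SIZE, DMA_BIDIRECTIONAL);
                if (dma_mapping_error(&pcidev->dev, chunk->dmaaddrs[i]))
                        break;
                addr += PAGE_SIZE;
        }

        chunk->pg_cnt = i;              /* exactly i pages mapped; the free path relies on this */
        chunk->type = I40IW_VMALLOC;
        if (i == pg_cnt)
                return 0;

        i40iw_free_vmalloc_mem(hw, chunk);  /* partial failure: unmap/free what we got */
        return I40IW_ERR_NO_MEMORY;
}
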
259 struct i40iw_chunk *chunk = info->chunk; in add_bp_pages() local
266 status = i40iw_get_vmalloc_mem(dev->hw, chunk, info->pages); in add_bp_pages()
284 addr = chunk->vaddr; in add_bp_pages()
286 mem.pa = chunk->dmaaddrs[i]; in add_bp_pages()
312 chunk->fpm_addr = pble_rsrc->next_fpm_addr; in add_bp_pages()
315 i40iw_free_vmalloc_mem(dev->hw, chunk); in add_bp_pages()
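
Lines 259-315 outline add_bp_pages(), the path taken when a chunk is backed by vmalloc pages rather than a coherent SD page: grab a mapped chunk, feed each page's bus address to the HMC page-descriptor machinery, and record the chunk's FPM address, releasing everything on failure. A sketch under stated assumptions: the i40iw_dma_mem descriptor fields, the i40iw_add_pd_table_entry() call, and the index bookkeeping are inferred from the surrounding HMC API, and the SD-table setup between the allocation and the loop is omitted.

static enum i40iw_status_code add_bp_pages(struct i40iw_sc_dev *dev,
                                           struct i40iw_hmc_pble_rsrc *pble_rsrc,
                                           struct i40iw_add_page_info *info)
{
        struct i40iw_chunk *chunk = info->chunk;
        struct i40iw_hmc_info *hmc_info = info->hmc_info;   /* assumed field */
        struct i40iw_dma_mem mem;                           /* assumed descriptor type */
        enum i40iw_status_code status;
        u32 pd_idx = info->idx.pd_idx;                      /* assumed field */
        u8 *addr;
        u32 i;

        status = i40iw_get_vmalloc_mem(dev->hw, chunk, info->pages);
        if (status)
                return I40IW_ERR_NO_MEMORY;

        /* SD-table setup for the paged descriptor omitted here */

        addr = chunk->vaddr;
        for (i = 0; i < info->pages; i++) {
                mem.pa = chunk->dmaaddrs[i];    /* per-page bus address for the HMC */
                mem.size = PAGE_SIZE;
                mem.va = (void *)addr;
                status = i40iw_add_pd_table_entry(dev->hw, hmc_info, pd_idx++, &mem);
                if (status)
                        goto error;
                addr += PAGE_SIZE;
        }

        chunk->fpm_addr = pble_rsrc->next_fpm_addr;  /* chunk is now live at this FPM address */
        return 0;

error:
        i40iw_free_vmalloc_mem(dev->hw, chunk);      /* unmap and free everything taken so far */
        return status;
}
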
329 struct i40iw_chunk *chunk; in add_pble_pool() local
343 chunk = kzalloc(sizeof(*chunk), GFP_KERNEL); in add_pble_pool()
344 if (!chunk) in add_pble_pool()
347 chunk->fpm_addr = pble_rsrc->next_fpm_addr; in add_pble_pool()
353 info.chunk = chunk; in add_pble_pool()
385 if (gen_pool_add_virt(pble_rsrc->pinfo.pool, (unsigned long)chunk->vaddr, in add_pble_pool()
386 (phys_addr_t)chunk->fpm_addr, chunk->size, -1)) { in add_pble_pool()
391 pble_rsrc->next_fpm_addr += chunk->size; in add_pble_pool()
393 pble_rsrc->next_fpm_addr, chunk->size, chunk->size); in add_pble_pool()
394 pble_rsrc->unallocated_pble -= (chunk->size >> 3); in add_pble_pool()
408 list_add(&chunk->list, &pble_rsrc->pinfo.clist); in add_pble_pool()
411 kfree(chunk); in add_pble_pool()
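
Lines 329-411 show the orchestration in add_pble_pool(): kzalloc a chunk descriptor, stamp it with the current FPM cursor, run one of the two fill paths via info.chunk, then publish the chunk's virtual range into the genpool keyed by its FPM address, advance next_fpm_addr, debit unallocated_pble (size >> 3: one 8-byte PBLE per 8 bytes of chunk), and queue the chunk for teardown; line 411 is the error-path kfree(). A sketch of that tail; the error label and the status handling are assumptions, while the genpool call, the accounting, and the list_add() are in the listing.

/* gen_pool_add_virt() registers the chunk's kernel virtual range with
 * its FPM address standing in as the "physical" address, so genpool
 * allocations can later be translated back to device-side addresses. */
if (gen_pool_add_virt(pble_rsrc->pinfo.pool, (unsigned long)chunk->vaddr,
                      (phys_addr_t)chunk->fpm_addr, chunk->size, -1)) {
        status = I40IW_ERR_NO_MEMORY;   /* assumed error code */
        goto error;
}
pble_rsrc->next_fpm_addr += chunk->size;            /* advance the FPM cursor */
pble_rsrc->unallocated_pble -= (chunk->size >> 3);  /* 8 bytes per PBLE */
list_add(&chunk->list, &pble_rsrc->pinfo.clist);    /* visible to i40iw_destroy_pble_pool() */
return 0;

error:
        kfree(chunk);
        return status;
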