
Searched refs:nents (Results 1 – 25 of 276) sorted by relevance


/OK3568_Linux_fs/kernel/lib/
scatterlist.c
47 int nents; in sg_nents() local
48 for (nents = 0; sg; sg = sg_next(sg)) in sg_nents()
49 nents++; in sg_nents()
50 return nents; in sg_nents()
70 int nents; in sg_nents_for_len() local
76 for (nents = 0, total = 0; sg; sg = sg_next(sg)) { in sg_nents_for_len()
77 nents++; in sg_nents_for_len()
80 return nents; in sg_nents_for_len()
101 struct scatterlist *sg_last(struct scatterlist *sgl, unsigned int nents) in sg_last() argument
106 for_each_sg(sgl, sg, nents, i) in sg_last()
[all …]
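
These lib/scatterlist.c helpers simply walk the chain with sg_next() and count entries until the end marker. A minimal usage sketch, not taken from the tree above and with hypothetical buffer parameters, might look like this:

#include <linux/printk.h>
#include <linux/scatterlist.h>

/* Illustration only: build a three-entry list, then count and iterate it. */
static int nents_demo(void *buf_a, void *buf_b, void *buf_c, size_t len)
{
	struct scatterlist sgl[3];
	struct scatterlist *sg;
	int nents, i;

	sg_init_table(sgl, 3);            /* zero the array, mark sgl[2] as the end */
	sg_set_buf(&sgl[0], buf_a, len);
	sg_set_buf(&sgl[1], buf_b, len);
	sg_set_buf(&sgl[2], buf_c, len);

	nents = sg_nents(sgl);            /* walks sg_next() to the end marker: 3 */

	for_each_sg(sgl, sg, nents, i)    /* same walk, one entry per iteration */
		pr_debug("entry %d: %u bytes\n", i, sg->length);

	return nents;
}
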
sg_pool.c
40 static inline unsigned int sg_pool_index(unsigned short nents) in sg_pool_index() argument
44 BUG_ON(nents > SG_CHUNK_SIZE); in sg_pool_index()
46 if (nents <= 8) in sg_pool_index()
49 index = get_count_order(nents) - 3; in sg_pool_index()
54 static void sg_pool_free(struct scatterlist *sgl, unsigned int nents) in sg_pool_free() argument
58 sgp = sg_pools + sg_pool_index(nents); in sg_pool_free()
62 static struct scatterlist *sg_pool_alloc(unsigned int nents, gfp_t gfp_mask) in sg_pool_alloc() argument
66 sgp = sg_pools + sg_pool_index(nents); in sg_pool_alloc()
111 int sg_alloc_table_chained(struct sg_table *table, int nents, in sg_alloc_table_chained() argument
116 BUG_ON(!nents); in sg_alloc_table_chained()
[all …]
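
sg_pool_index() above folds the requested entry count into one of a few fixed pool sizes. A rough restatement of that arithmetic, assuming the usual pools of 8, 16, 32, 64 and SG_CHUNK_SIZE (128) entries; the function name here is made up:

#include <linux/bitops.h>	/* get_count_order() */

/*
 * Hypothetical restatement of the mapping:
 *   nents  1..8   -> index 0  (8-entry pool)
 *   nents  9..16  -> index 1  (16-entry pool)
 *   nents 17..32  -> index 2  (32-entry pool)
 *   nents 33..64  -> index 3  (64-entry pool)
 *   nents 65..128 -> index 4  (SG_CHUNK_SIZE pool)
 */
static unsigned int demo_pool_index(unsigned short nents)
{
	if (nents <= 8)
		return 0;
	/* get_count_order() rounds nents up to the next power of two's order */
	return get_count_order(nents) - 3;
}
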
sg_split.c
13 int nents; member
20 static int sg_calculate_split(struct scatterlist *in, int nents, int nb_splits, in sg_calculate_split() argument
32 splitters[i].nents = 0; in sg_calculate_split()
35 for_each_sg(in, sg, nents, i) { in sg_calculate_split()
48 curr->nents++; in sg_calculate_split()
59 curr->nents = 1; in sg_calculate_split()
86 for (j = 0; j < split->nents; j++, out_sg++) { in sg_split_phys()
112 for (j = 0; j < split->nents; j++, out_sg++) { in sg_split_mapped()
168 splitters[i].out_sg = kmalloc_array(splitters[i].nents, in sg_split()
190 out_mapped_nents[i] = splitters[i].nents; in sg_split()
kfifo.c
296 int nents, unsigned int len) in setup_sgl_buf() argument
303 if (!nents) in setup_sgl_buf()
323 if (++n == nents || sgl == NULL) in setup_sgl_buf()
335 int nents, unsigned int len, unsigned int off) in setup_sgl() argument
350 n = setup_sgl_buf(sgl, fifo->data + off, nents, l); in setup_sgl()
351 n += setup_sgl_buf(sgl + n, fifo->data, nents - n, len - l); in setup_sgl()
357 struct scatterlist *sgl, int nents, unsigned int len) in __kfifo_dma_in_prepare() argument
365 return setup_sgl(fifo, sgl, nents, len, fifo->in); in __kfifo_dma_in_prepare()
370 struct scatterlist *sgl, int nents, unsigned int len) in __kfifo_dma_out_prepare() argument
378 return setup_sgl(fifo, sgl, nents, len, fifo->out); in __kfifo_dma_out_prepare()
[all …]
/OK3568_Linux_fs/kernel/drivers/gpu/drm/virtio/
virtgpu_gem.c
156 struct virtio_gpu_object_array *virtio_gpu_array_alloc(u32 nents) in virtio_gpu_array_alloc() argument
160 objs = kmalloc(struct_size(objs, objs, nents), GFP_KERNEL); in virtio_gpu_array_alloc()
164 objs->nents = 0; in virtio_gpu_array_alloc()
165 objs->total = nents; in virtio_gpu_array_alloc()
175 virtio_gpu_array_from_handles(struct drm_file *drm_file, u32 *handles, u32 nents) in virtio_gpu_array_from_handles() argument
180 objs = virtio_gpu_array_alloc(nents); in virtio_gpu_array_from_handles()
184 for (i = 0; i < nents; i++) { in virtio_gpu_array_from_handles()
187 objs->nents = i; in virtio_gpu_array_from_handles()
192 objs->nents = i; in virtio_gpu_array_from_handles()
199 if (WARN_ON_ONCE(objs->nents == objs->total)) in virtio_gpu_array_add_obj()
[all …]
virtgpu_object.c
142 unsigned int *nents) in virtio_gpu_object_shmem_init() argument
170 *nents = shmem->mapped = shmem->pages->nents; in virtio_gpu_object_shmem_init()
172 *nents = shmem->pages->orig_nents; in virtio_gpu_object_shmem_init()
175 *ents = kvmalloc_array(*nents, in virtio_gpu_object_shmem_init()
209 unsigned int nents; in virtio_gpu_object_create() local
246 ret = virtio_gpu_object_shmem_init(vgdev, bo, &ents, &nents); in virtio_gpu_object_create()
252 virtio_gpu_object_attach(vgdev, bo, ents, nents); in virtio_gpu_object_create()
/OK3568_Linux_fs/kernel/include/linux/
scatterlist.h
44 unsigned int nents; /* number of mapped entries */ member
166 for_each_sg((sgt)->sgl, sg, (sgt)->nents, i)
268 unsigned int nents) in sg_init_marker() argument
270 sg_mark_end(&sgl[nents - 1]); in sg_init_marker()
309 void sgl_free_n_order(struct scatterlist *sgl, int nents, int order);
314 size_t sg_copy_buffer(struct scatterlist *sgl, unsigned int nents, void *buf,
317 size_t sg_copy_from_buffer(struct scatterlist *sgl, unsigned int nents,
319 size_t sg_copy_to_buffer(struct scatterlist *sgl, unsigned int nents,
322 size_t sg_pcopy_from_buffer(struct scatterlist *sgl, unsigned int nents,
324 size_t sg_pcopy_to_buffer(struct scatterlist *sgl, unsigned int nents,
[all …]
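
In struct sg_table (line 44 above) `nents` is the number of entries that are currently DMA-mapped, while `orig_nents` keeps the number of entries allocated; an IOMMU may merge segments, so the two can differ. A minimal sketch of the usual pattern, assuming the caller supplies the device and a page count and that the entries are populated elsewhere:

#include <linux/dma-mapping.h>
#include <linux/printk.h>
#include <linux/scatterlist.h>

static int map_table_demo(struct device *dev, struct sg_table *sgt,
			  unsigned int npages)
{
	struct scatterlist *sg;
	int mapped, i, ret;

	ret = sg_alloc_table(sgt, npages, GFP_KERNEL);  /* orig_nents = npages */
	if (ret)
		return ret;

	/* ... populate the entries with sg_set_page() here ... */

	mapped = dma_map_sg(dev, sgt->sgl, sgt->orig_nents, DMA_TO_DEVICE);
	if (!mapped) {
		sg_free_table(sgt);
		return -ENOMEM;
	}
	sgt->nents = mapped;                    /* may be smaller than orig_nents */

	/* device-facing loops use the mapped count and the dma_* accessors */
	for_each_sg(sgt->sgl, sg, sgt->nents, i)
		pr_debug("seg %d: %pad + %u\n", i,
			 &sg_dma_address(sg), sg_dma_len(sg));

	/* unmapping and freeing always use the original count */
	dma_unmap_sg(dev, sgt->sgl, sgt->orig_nents, DMA_TO_DEVICE);
	sg_free_table(sgt);
	return 0;
}
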
pci-p2pdma.h
30 unsigned int *nents, u32 length);
34 int nents, enum dma_data_direction dir, unsigned long attrs);
36 int nents, enum dma_data_direction dir, unsigned long attrs);
75 unsigned int *nents, u32 length) in pci_p2pmem_alloc_sgl() argument
87 struct scatterlist *sg, int nents, enum dma_data_direction dir, in pci_p2pdma_map_sg_attrs() argument
93 struct scatterlist *sg, int nents, enum dma_data_direction dir, in pci_p2pdma_unmap_sg_attrs() argument
123 int nents, enum dma_data_direction dir) in pci_p2pdma_map_sg() argument
125 return pci_p2pdma_map_sg_attrs(dev, sg, nents, dir, 0); in pci_p2pdma_map_sg()
129 struct scatterlist *sg, int nents, enum dma_data_direction dir) in pci_p2pdma_unmap_sg() argument
131 pci_p2pdma_unmap_sg_attrs(dev, sg, nents, dir, 0); in pci_p2pdma_unmap_sg()
/OK3568_Linux_fs/kernel/samples/kfifo/
dma-example.c
25 unsigned int nents; in example_init() local
63 nents = kfifo_dma_in_prepare(&fifo, sg, ARRAY_SIZE(sg), FIFO_SIZE); in example_init()
64 printk(KERN_INFO "DMA sgl entries: %d\n", nents); in example_init()
65 if (!nents) { in example_init()
73 for (i = 0; i < nents; i++) { in example_init()
93 nents = kfifo_dma_out_prepare(&fifo, sg, ARRAY_SIZE(sg), 8); in example_init()
94 printk(KERN_INFO "DMA sgl entries: %d\n", nents); in example_init()
95 if (!nents) { in example_init()
102 for (i = 0; i < nents; i++) { in example_init()
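
The sample above checks the `nents` returned by kfifo_dma_in_prepare()/kfifo_dma_out_prepare() because the reserved region can span the ring-buffer wrap and therefore needs one or two scatterlist entries (or zero when no space is available). A compressed sketch of the same sequence, omitting the actual DMA engine calls:

#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kfifo.h>
#include <linux/scatterlist.h>

#define FIFO_SIZE 32	/* must be a power of two */

static DECLARE_KFIFO(fifo, unsigned char, FIFO_SIZE);

static int kfifo_dma_demo(void)
{
	struct scatterlist sg[2];	/* at most two entries: before and after the wrap */
	unsigned int nents;

	INIT_KFIFO(fifo);

	/* reserve room for an incoming DMA transfer of up to FIFO_SIZE bytes */
	nents = kfifo_dma_in_prepare(&fifo, sg, ARRAY_SIZE(sg), FIFO_SIZE);
	if (!nents)
		return -ENOSPC;	/* fifo full or sg[] too small */

	/* ... hand sg[0..nents-1] to the DMA engine, wait for completion ... */

	kfifo_dma_in_finish(&fifo, FIFO_SIZE);	/* commit the bytes actually written */

	/* the outgoing direction mirrors this, here for an 8-byte transfer */
	nents = kfifo_dma_out_prepare(&fifo, sg, ARRAY_SIZE(sg), 8);
	if (nents) {
		/* ... transmit sg[0..nents-1], then ... */
		kfifo_dma_out_finish(&fifo, 8);
	}
	return 0;
}
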
/OK3568_Linux_fs/kernel/arch/arm64/mm/
dma-mapping-noalias.c
217 static void arm64_noalias_unmap_sg(struct device *dev, struct scatterlist *sgl, int nents, in arm64_noalias_unmap_sg() argument
225 for_each_sg(sgl, sg, nents, i) in arm64_noalias_unmap_sg()
230 static int arm64_noalias_map_sg(struct device *dev, struct scatterlist *sgl, int nents, in arm64_noalias_map_sg() argument
236 for_each_sg(sgl, sg, nents, i) { in arm64_noalias_map_sg()
244 return nents; in arm64_noalias_map_sg()
264 int nents, enum dma_data_direction dir) in arm64_noalias_sync_sg_for_device() argument
269 for_each_sg(sgl, sg, nents, i) in arm64_noalias_sync_sg_for_device()
274 int nents, enum dma_data_direction dir) in arm64_noalias_sync_sg_for_cpu() argument
279 for_each_sg(sgl, sg, nents, i) in arm64_noalias_sync_sg_for_cpu()
385 static int arm64_iommu_map_sg(struct device *dev, struct scatterlist *sgl, int nents, in arm64_iommu_map_sg() argument
[all …]
/OK3568_Linux_fs/kernel/arch/arm/mm/
dma-mapping-nommu.c
119 int nents, enum dma_data_direction dir, in arm_nommu_dma_map_sg() argument
125 for_each_sg(sgl, sg, nents, i) { in arm_nommu_dma_map_sg()
131 return nents; in arm_nommu_dma_map_sg()
135 int nents, enum dma_data_direction dir, in arm_nommu_dma_unmap_sg() argument
141 for_each_sg(sgl, sg, nents, i) in arm_nommu_dma_unmap_sg()
158 int nents, enum dma_data_direction dir) in arm_nommu_dma_sync_sg_for_device() argument
163 for_each_sg(sgl, sg, nents, i) in arm_nommu_dma_sync_sg_for_device()
168 int nents, enum dma_data_direction dir) in arm_nommu_dma_sync_sg_for_cpu() argument
173 for_each_sg(sgl, sg, nents, i) in arm_nommu_dma_sync_sg_for_cpu()
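
Both the arm64 and arm nommu variants above follow the same .map_sg contract: walk the caller's list, fill in each entry's DMA address and length, and return the number of entries mapped (or 0 on failure). A schematic, identity-mapped version for illustration only, with no bounce buffering or cache maintenance:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int demo_map_sg(struct device *dev, struct scatterlist *sgl, int nents,
		       enum dma_data_direction dir, unsigned long attrs)
{
	struct scatterlist *sg;
	int i;

	for_each_sg(sgl, sg, nents, i) {
		sg->dma_address = (dma_addr_t)sg_phys(sg);	/* 1:1 mapping assumed */
		sg_dma_len(sg) = sg->length;
		/* a real implementation would do per-direction cache maintenance here */
	}
	return nents;	/* every entry mapped, none merged or rejected */
}
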
/OK3568_Linux_fs/kernel/drivers/crypto/ccree/
cc_buffer_mgr.c
25 int nents[MAX_NUM_OF_BUFFERS_IN_MLLI]; member
77 unsigned int nents = 0; in cc_get_sgl_nents() local
82 nents++; in cc_get_sgl_nents()
90 dev_dbg(dev, "nents %d last bytes %d\n", nents, *lbytes); in cc_get_sgl_nents()
91 return nents; in cc_get_sgl_nents()
109 u32 nents; in cc_copy_sg_portion() local
111 nents = sg_nents_for_len(sg, end); in cc_copy_sg_portion()
112 sg_copy_buffer(sg, nents, dest, (end - to_skip + 1), to_skip, in cc_copy_sg_portion()
236 unsigned int nents, struct scatterlist *sgl, in cc_add_sg_entry() argument
243 index, nents, sgl, data_len, is_last_table); in cc_add_sg_entry()
[all …]
/OK3568_Linux_fs/kernel/drivers/spi/
spi-dw-dma.c
240 u32 nents; in dw_spi_dma_wait_tx_done() local
242 nents = dw_readl(dws, DW_SPI_TXFLR); in dw_spi_dma_wait_tx_done()
244 delay.value = nents * dws->n_bytes * BITS_PER_BYTE; in dw_spi_dma_wait_tx_done()
288 unsigned int nents) in dw_spi_dma_submit_tx() argument
294 txdesc = dmaengine_prep_slave_sg(dws->txchan, sgl, nents, in dw_spi_dma_submit_tx()
325 u32 nents; in dw_spi_dma_wait_rx_done() local
336 nents = dw_readl(dws, DW_SPI_RXFLR); in dw_spi_dma_wait_rx_done()
337 ns = 4U * NSEC_PER_SEC / dws->max_freq * nents; in dw_spi_dma_wait_rx_done()
389 unsigned int nents) in dw_spi_dma_submit_rx() argument
395 rxdesc = dmaengine_prep_slave_sg(dws->rxchan, sgl, nents, in dw_spi_dma_submit_rx()
[all …]
spi-ep93xx.c
285 int i, ret, nents; in ep93xx_spi_dma_prepare() local
325 nents = DIV_ROUND_UP(len, PAGE_SIZE); in ep93xx_spi_dma_prepare()
326 if (nents != sgt->nents) { in ep93xx_spi_dma_prepare()
329 ret = sg_alloc_table(sgt, nents, GFP_KERNEL); in ep93xx_spi_dma_prepare()
335 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in ep93xx_spi_dma_prepare()
355 nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_prepare()
356 if (!nents) in ep93xx_spi_dma_prepare()
359 txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, conf.direction, in ep93xx_spi_dma_prepare()
362 dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_prepare()
391 dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_finish()
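
spi-ep93xx.c above shows the canonical dmaengine flow: map the table, pass the mapped entry count to dmaengine_prep_slave_sg(), and unmap again when the transfer finishes or preparation fails. A stripped-down sketch of that pattern, assuming the channel, table and direction come from the caller:

#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

static int demo_submit_sg(struct dma_chan *chan, struct sg_table *sgt,
			  enum dma_transfer_direction dir)
{
	enum dma_data_direction map_dir =
		(dir == DMA_MEM_TO_DEV) ? DMA_TO_DEVICE : DMA_FROM_DEVICE;
	struct dma_async_tx_descriptor *txd;
	dma_cookie_t cookie;
	int nents;

	nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->orig_nents, map_dir);
	if (!nents)
		return -ENOMEM;

	/* the hardware descriptor is built from the mapped entries only */
	txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir,
				      DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!txd)
		goto unmap;

	cookie = dmaengine_submit(txd);
	if (dma_submit_error(cookie))
		goto unmap;

	dma_async_issue_pending(chan);
	/* the completion callback (not shown) performs the matching unmap */
	return 0;

unmap:
	dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->orig_nents, map_dir);
	return -ENOMEM;
}
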
/OK3568_Linux_fs/kernel/drivers/hwtracing/intel_th/
msu-sink.c
54 unsigned int nents; in msu_sink_alloc_window() local
62 nents = DIV_ROUND_UP(size, PAGE_SIZE); in msu_sink_alloc_window()
64 ret = sg_alloc_table(*sgt, nents, GFP_KERNEL); in msu_sink_alloc_window()
70 for_each_sg((*sgt)->sgl, sg_ptr, nents, i) { in msu_sink_alloc_window()
80 return nents; in msu_sink_alloc_window()
90 for_each_sg(sgt->sgl, sg_ptr, sgt->nents, i) { in msu_sink_free_window()
/OK3568_Linux_fs/kernel/kernel/dma/
direct.h
20 int dma_direct_map_sg(struct device *dev, struct scatterlist *sgl, int nents,
27 int nents, enum dma_data_direction dir);
30 struct scatterlist *sgl, int nents, enum dma_data_direction dir) in dma_direct_sync_sg_for_device() argument
39 int nents, enum dma_data_direction dir, unsigned long attrs);
41 struct scatterlist *sgl, int nents, enum dma_data_direction dir);
44 struct scatterlist *sgl, int nents, enum dma_data_direction dir, in dma_direct_unmap_sg() argument
49 struct scatterlist *sgl, int nents, enum dma_data_direction dir) in dma_direct_sync_sg_for_cpu() argument
/OK3568_Linux_fs/kernel/drivers/parisc/
iommu-helpers.h
15 iommu_fill_pdir(struct ioc *ioc, struct scatterlist *startsg, int nents, in iommu_fill_pdir() argument
30 while (nents-- > 0) { in iommu_fill_pdir()
34 DBG_RUN_SG(" %d : %08lx/%05x %p/%05x\n", nents, in iommu_fill_pdir()
102 struct scatterlist *startsg, int nents, in iommu_coalesce_chunks() argument
114 while (nents > 0) { in iommu_coalesce_chunks()
131 while(--nents > 0) { in iommu_coalesce_chunks()
/OK3568_Linux_fs/kernel/drivers/target/iscsi/cxgbit/
cxgbit_ddp.c
153 unsigned int nents) in cxgbit_ddp_sgl_check() argument
155 unsigned int last_sgidx = nents - 1; in cxgbit_ddp_sgl_check()
158 for (i = 0; i < nents; i++, sg = sg_next(sg)) { in cxgbit_ddp_sgl_check()
177 unsigned int sgcnt = ttinfo->nents; in cxgbit_ddp_reserve()
184 xferlen, ttinfo->nents); in cxgbit_ddp_reserve()
246 ttinfo->nents = cmd->se_cmd.t_data_nents; in cxgbit_get_r2t_ttt()
251 csk, cmd, cmd->se_cmd.data_length, ttinfo->nents); in cxgbit_get_r2t_ttt()
254 ttinfo->nents = 0; in cxgbit_get_r2t_ttt()
286 ttinfo->nents, DMA_FROM_DEVICE); in cxgbit_unmap_cmd()
287 ttinfo->nents = 0; in cxgbit_unmap_cmd()
/OK3568_Linux_fs/kernel/drivers/crypto/mediatek/
mtk-aes.c
193 int nents; in mtk_aes_check_aligned() local
198 for (nents = 0; sg; sg = sg_next(sg), ++nents) { in mtk_aes_check_aligned()
206 dma->nents = nents + 1; in mtk_aes_check_aligned()
231 int nents = dma->nents; in mtk_aes_restore_sg() local
236 while (--nents > 0 && sg) in mtk_aes_restore_sg()
266 int nents; in mtk_aes_xmit() local
269 for (nents = 0; nents < slen; ++nents, ssg = sg_next(ssg)) { in mtk_aes_xmit()
274 if (nents == 0) { in mtk_aes_xmit()
289 for (nents = 0; nents < dlen; ++nents, dsg = sg_next(dsg)) { in mtk_aes_xmit()
294 if (nents == 0) in mtk_aes_xmit()
[all …]
/OK3568_Linux_fs/kernel/drivers/crypto/cavium/nitrox/
nitrox_req.h
551 static inline void *alloc_req_buf(int nents, int extralen, gfp_t gfp) in alloc_req_buf() argument
555 size = sizeof(struct scatterlist) * nents; in alloc_req_buf()
617 int nents, int ivsize) in alloc_src_req_buf() argument
621 nkreq->src = alloc_req_buf(nents, ivsize, creq->gfp); in alloc_src_req_buf()
639 int nents, int ivsize, in nitrox_creq_set_src_sg() argument
648 sg_init_table(sg, nents); in nitrox_creq_set_src_sg()
663 int nents) in alloc_dst_req_buf() argument
668 nkreq->dst = alloc_req_buf(nents, extralen, creq->gfp); in alloc_dst_req_buf()
697 int nents, int ivsize, in nitrox_creq_set_dst_sg() argument
706 sg_init_table(sg, nents); in nitrox_creq_set_dst_sg()
nitrox_reqmgr.c
161 int i, nents, ret = 0; in dma_map_inbufs() local
163 nents = dma_map_sg(dev, req->src, sg_nents(req->src), in dma_map_inbufs()
165 if (!nents) in dma_map_inbufs()
168 for_each_sg(req->src, sg, nents, i) in dma_map_inbufs()
172 sr->in.sgmap_cnt = nents; in dma_map_inbufs()
180 dma_unmap_sg(dev, req->src, nents, DMA_BIDIRECTIONAL); in dma_map_inbufs()
189 int nents, ret = 0; in dma_map_outbufs() local
191 nents = dma_map_sg(dev, req->dst, sg_nents(req->dst), in dma_map_outbufs()
193 if (!nents) in dma_map_outbufs()
197 sr->out.sgmap_cnt = nents; in dma_map_outbufs()
[all …]
nitrox_aead.c
98 int nents = sg_nents_for_len(src, buflen); in alloc_src_sglist() local
101 if (nents < 0) in alloc_src_sglist()
102 return nents; in alloc_src_sglist()
105 nents += 1; in alloc_src_sglist()
107 ret = alloc_src_req_buf(nkreq, nents, ivsize); in alloc_src_sglist()
112 nitrox_creq_set_src_sg(nkreq, nents, ivsize, src, buflen); in alloc_src_sglist()
120 int nents = sg_nents_for_len(dst, buflen); in alloc_dst_sglist() local
123 if (nents < 0) in alloc_dst_sglist()
124 return nents; in alloc_dst_sglist()
127 nents += 3; in alloc_dst_sglist()
[all …]
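
nitrox_aead.c above relies on sg_nents_for_len() returning a negative errno when the list does not cover the requested length, then adds slots for the extra entries it plans to append. A small sketch of that checking pattern; the +1 padding is purely illustrative:

#include <linux/scatterlist.h>

static int count_src_entries(struct scatterlist *src, unsigned int buflen)
{
	int nents = sg_nents_for_len(src, buflen);

	if (nents < 0)		/* -EINVAL: the list is shorter than buflen bytes */
		return nents;

	return nents + 1;	/* leave room for one extra entry, e.g. an IV */
}
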
/OK3568_Linux_fs/kernel/drivers/mmc/core/
sdio_ops.c
122 unsigned int nents, left_size, i; in mmc_io_rw_extended() local
152 nents = DIV_ROUND_UP(left_size, seg_size); in mmc_io_rw_extended()
153 if (nents > 1) { in mmc_io_rw_extended()
154 if (sg_alloc_table(&sgtable, nents, GFP_KERNEL)) in mmc_io_rw_extended()
158 data.sg_len = nents; in mmc_io_rw_extended()
196 if (nents > 1) in mmc_io_rw_extended()
/OK3568_Linux_fs/kernel/arch/ia64/hp/common/
sba_iommu.c
387 sba_dump_sg( struct ioc *ioc, struct scatterlist *startsg, int nents) in sba_dump_sg() argument
389 while (nents-- > 0) { in sba_dump_sg()
390 printk(KERN_DEBUG " %d : DMA %08lx/%05x CPU %p\n", nents, in sba_dump_sg()
398 sba_check_sg( struct ioc *ioc, struct scatterlist *startsg, int nents) in sba_check_sg() argument
401 int the_nents = nents; in sba_check_sg()
405 sba_dump_sg(NULL, startsg, nents); in sba_check_sg()
1200 int nents) in sba_fill_pdir() argument
1207 while (nents-- > 0) { in sba_fill_pdir()
1214 nents, startsg->dma_address, cnt, in sba_fill_pdir()
1218 nents, startsg->dma_address, cnt, in sba_fill_pdir()
[all …]
/OK3568_Linux_fs/kernel/drivers/gpu/arm/bifrost/
mali_kbase_mem_linux.c
584 for (i = 0; i < reg->cpu_alloc->imported.alias.nents; i++) in kbase_mem_query()
746 0, alloc->nents); in kbase_mem_evictable_reclaim_scan_objects()
757 alloc->evicted = alloc->nents; in kbase_mem_evictable_reclaim_scan_objects()
817 kbase_process_page_usage_dec(kctx, alloc->nents); in kbase_mem_evictable_mark_reclaim()
818 new_page_count = atomic_sub_return(alloc->nents, in kbase_mem_evictable_mark_reclaim()
820 atomic_sub(alloc->nents, &kctx->kbdev->memdev.used_pages); in kbase_mem_evictable_mark_reclaim()
826 kbase_trace_gpu_mem_usage_dec(kbdev, kctx, alloc->nents); in kbase_mem_evictable_mark_reclaim()
840 new_page_count = atomic_add_return(alloc->nents, in kbase_mem_evictable_unmark_reclaim()
842 atomic_add(alloc->nents, &kctx->kbdev->memdev.used_pages); in kbase_mem_evictable_unmark_reclaim()
847 kbase_process_page_usage_inc(kctx, alloc->nents); in kbase_mem_evictable_unmark_reclaim()
[all …]
