Lines Matching refs:hwdesc (one match per line: source line number, matched text, enclosing function)
178 struct udma_hwdesc hwdesc[]; member
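These references are consistent with the TI K3 UDMA dmaengine driver; from the fields used throughout this listing, the per-hardware-descriptor bookkeeping can be reconstructed roughly as below. This is a hedged sketch: only the member names that actually appear in the listing are taken as given, while the field ordering, the hwdesc_count counter and the rest of struct udma_desc are assumptions.

	/* one entry per CPPI5 descriptor backing a transfer */
	struct udma_hwdesc {
		size_t cppi5_desc_size;
		void *cppi5_desc_vaddr;
		dma_addr_t cppi5_desc_paddr;

		/* TR-mode descriptors only: TR records and their responses */
		void *tr_req_base;
		void *tr_resp_base;
	};

	struct udma_desc {
		/* ... channel, direction and residue bookkeeping elided ... */
		unsigned int hwdesc_count;	/* assumed counter for the array below */
		struct udma_hwdesc hwdesc[];	/* flexible array, sized per transfer */
	};
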
391 return d->hwdesc[idx].cppi5_desc_paddr; in udma_curr_cppi5_desc_paddr()
396 return d->hwdesc[idx].cppi5_desc_vaddr; in udma_curr_cppi5_desc_vaddr()
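The two accessors at lines 391/396 are plain index lookups into that array; a sketch consistent with the return statements above (parameter names are assumptions):

	static inline dma_addr_t udma_curr_cppi5_desc_paddr(struct udma_desc *d,
							    int idx)
	{
		return d->hwdesc[idx].cppi5_desc_paddr;
	}

	static inline void *udma_curr_cppi5_desc_vaddr(struct udma_desc *d, int idx)
	{
		return d->hwdesc[idx].cppi5_desc_vaddr;
	}
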
432 if (!d->hwdesc[i].cppi5_desc_vaddr) in udma_free_hwdesc()
436 d->hwdesc[i].cppi5_desc_vaddr, in udma_free_hwdesc()
437 d->hwdesc[i].cppi5_desc_paddr); in udma_free_hwdesc()
439 d->hwdesc[i].cppi5_desc_vaddr = NULL; in udma_free_hwdesc()
441 } else if (d->hwdesc[0].cppi5_desc_vaddr) { in udma_free_hwdesc()
444 dma_free_coherent(ud->dev, d->hwdesc[0].cppi5_desc_size, in udma_free_hwdesc()
445 d->hwdesc[0].cppi5_desc_vaddr, in udma_free_hwdesc()
446 d->hwdesc[0].cppi5_desc_paddr); in udma_free_hwdesc()
448 d->hwdesc[0].cppi5_desc_vaddr = NULL; in udma_free_hwdesc()
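udma_free_hwdesc() has two teardown paths: descriptors carved out of the channel's dma_pool are returned entry by entry, while a single coherent allocation (the TR-mode case, which only uses hwdesc[0]) is freed with dma_free_coherent(). A hedged sketch of that structure; the use_dma_pool flag, hwdesc_count and the udma_chan/udma_dev pointer names are assumptions:

	static void udma_free_hwdesc(struct udma_chan *uc, struct udma_desc *d)
	{
		if (uc->use_dma_pool) {
			int i;

			/* every populated slot came from the dma_pool */
			for (i = 0; i < d->hwdesc_count; i++) {
				if (!d->hwdesc[i].cppi5_desc_vaddr)
					continue;

				dma_pool_free(uc->hdesc_pool,
					      d->hwdesc[i].cppi5_desc_vaddr,
					      d->hwdesc[i].cppi5_desc_paddr);
				d->hwdesc[i].cppi5_desc_vaddr = NULL;
			}
		} else if (d->hwdesc[0].cppi5_desc_vaddr) {
			struct udma_dev *ud = uc->ud;

			/* TR mode: one coherent buffer holds the whole descriptor */
			dma_free_coherent(ud->dev, d->hwdesc[0].cppi5_desc_size,
					  d->hwdesc[0].cppi5_desc_vaddr,
					  d->hwdesc[0].cppi5_desc_paddr);
			d->hwdesc[0].cppi5_desc_vaddr = NULL;
		}
	}
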
922 h_desc = d->hwdesc[d->desc_idx].cppi5_desc_vaddr; in udma_cyclic_packet_elapsed()
930 struct cppi5_host_desc_t *h_desc = d->hwdesc[0].cppi5_desc_vaddr; in udma_fetch_epib()
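Line 922 reads back the host descriptor of the in-flight period (indexed by d->desc_idx), while udma_fetch_epib() at line 930 always looks at the head descriptor, hwdesc[0], to copy its Extended Packet Info Block out for the client. A hedged sketch of the latter; the d->metadata destination and the epib[] member of struct cppi5_host_desc_t are assumptions based on the metadata handling below:

	static void udma_fetch_epib(struct udma_chan *uc, struct udma_desc *d)
	{
		struct cppi5_host_desc_t *h_desc = d->hwdesc[0].cppi5_desc_vaddr;

		/* preserve the EPIB words before the descriptor is recycled */
		memcpy(d->metadata, h_desc->epib, CPPI5_INFO0_HDESC_EPIB_SIZE);
	}
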
1901 struct udma_hwdesc *hwdesc; in udma_alloc_tr_desc() local
1919 d = kzalloc(sizeof(*d) + sizeof(d->hwdesc[0]), GFP_NOWAIT); in udma_alloc_tr_desc()
1926 hwdesc = &d->hwdesc[0]; in udma_alloc_tr_desc()
1930 hwdesc->cppi5_desc_size = uc->config.hdesc_size; in udma_alloc_tr_desc()
1931 hwdesc->cppi5_desc_vaddr = dma_pool_zalloc(uc->hdesc_pool, in udma_alloc_tr_desc()
1933 &hwdesc->cppi5_desc_paddr); in udma_alloc_tr_desc()
1935 hwdesc->cppi5_desc_size = cppi5_trdesc_calc_size(tr_size, in udma_alloc_tr_desc()
1937 hwdesc->cppi5_desc_size = ALIGN(hwdesc->cppi5_desc_size, in udma_alloc_tr_desc()
1939 hwdesc->cppi5_desc_vaddr = dma_alloc_coherent(uc->ud->dev, in udma_alloc_tr_desc()
1940 hwdesc->cppi5_desc_size, in udma_alloc_tr_desc()
1941 &hwdesc->cppi5_desc_paddr, in udma_alloc_tr_desc()
1945 if (!hwdesc->cppi5_desc_vaddr) { in udma_alloc_tr_desc()
1951 hwdesc->tr_req_base = hwdesc->cppi5_desc_vaddr + tr_size; in udma_alloc_tr_desc()
1953 hwdesc->tr_resp_base = hwdesc->tr_req_base + tr_size * tr_count; in udma_alloc_tr_desc()
1955 tr_desc = hwdesc->cppi5_desc_vaddr; in udma_alloc_tr_desc()
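udma_alloc_tr_desc() (lines 1901-1955) lays a CPPI5 TR descriptor out as a header followed by tr_count request records and tr_count response records, all tr_size bytes each, taken either from the channel's dma_pool or from a fresh coherent allocation. A hedged sketch of the sizing and layout arithmetic; the use_dma_pool flag, uc->ud->dev and the desc_align field are assumptions, and header initialisation is trimmed:

	struct udma_hwdesc *hwdesc = &d->hwdesc[0];

	if (uc->use_dma_pool) {
		/* fixed-size slot from the channel's descriptor pool */
		hwdesc->cppi5_desc_size = uc->config.hdesc_size;
		hwdesc->cppi5_desc_vaddr = dma_pool_zalloc(uc->hdesc_pool,
							   GFP_NOWAIT,
							   &hwdesc->cppi5_desc_paddr);
	} else {
		/* header + tr_count TR records + tr_count response records */
		hwdesc->cppi5_desc_size = cppi5_trdesc_calc_size(tr_size,
								 tr_count);
		hwdesc->cppi5_desc_size = ALIGN(hwdesc->cppi5_desc_size,
						uc->ud->desc_align);
		hwdesc->cppi5_desc_vaddr = dma_alloc_coherent(uc->ud->dev,
							      hwdesc->cppi5_desc_size,
							      &hwdesc->cppi5_desc_paddr,
							      GFP_NOWAIT);
	}

	if (!hwdesc->cppi5_desc_vaddr) {
		kfree(d);
		return NULL;
	}

	/* the TR records start one tr_size past the descriptor header ... */
	hwdesc->tr_req_base = hwdesc->cppi5_desc_vaddr + tr_size;
	/* ... and the response records follow the last TR record */
	hwdesc->tr_resp_base = hwdesc->tr_req_base + tr_size * tr_count;
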
2048 tr_req = d->hwdesc[0].tr_req_base; in udma_prep_slave_sg_tr()
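The TR-mode slave prep at line 2048 then fills the tr_req_base array with one type-1 TR record per chunk of each scatterlist entry. A rough sketch of a single record, assuming the addr/icnt0/icnt1/dim1 layout of struct cppi5_tr_type1_t; tr_idx and burst_bytes are illustrative names, and the flag setup normally done through cppi5_tr_init() is omitted:

	struct cppi5_tr_type1_t *tr_req = d->hwdesc[0].tr_req_base;
	dma_addr_t sg_addr = sg_dma_address(sgent);
	u32 sg_len = sg_dma_len(sgent);

	/* icnt0 * icnt1 covers sg_len; dim1 is the stride between icnt0 blocks */
	tr_req[tr_idx].addr = sg_addr;
	tr_req[tr_idx].icnt0 = burst_bytes;
	tr_req[tr_idx].icnt1 = sg_len / burst_bytes;
	tr_req[tr_idx].dim1 = burst_bytes;
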
2160 d = kzalloc(struct_size(d, hwdesc, sglen), GFP_NOWAIT); in udma_prep_slave_sg_pkt()
2173 struct udma_hwdesc *hwdesc = &d->hwdesc[i]; in udma_prep_slave_sg_pkt() local
2178 hwdesc->cppi5_desc_vaddr = dma_pool_zalloc(uc->hdesc_pool, in udma_prep_slave_sg_pkt()
2180 &hwdesc->cppi5_desc_paddr); in udma_prep_slave_sg_pkt()
2181 if (!hwdesc->cppi5_desc_vaddr) { in udma_prep_slave_sg_pkt()
2191 hwdesc->cppi5_desc_size = uc->config.hdesc_size; in udma_prep_slave_sg_pkt()
2192 desc = hwdesc->cppi5_desc_vaddr; in udma_prep_slave_sg_pkt()
2211 hwdesc->cppi5_desc_paddr); in udma_prep_slave_sg_pkt()
2226 h_desc = d->hwdesc[0].cppi5_desc_vaddr; in udma_prep_slave_sg_pkt()
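In packet mode (lines 2160-2226) the software descriptor gets one hwdesc[] slot per scatterlist entry, each backed by a host descriptor from the channel's dma_pool; the first one is initialised as the packet head and the rest are chained to it as host buffer descriptors. A hedged sketch of the per-entry loop using the generic CPPI5 helpers (cppi5_hdesc_init(), cppi5_hdesc_reset_hbdesc(), cppi5_hdesc_attach_buf(), cppi5_hdesc_link_hbdesc(), cppi5_hdesc_set_pktlen()); packet/flow ID setup, return-queue policy and address-select bits are omitted, and hwdesc_count is assumed:

	struct cppi5_host_desc_t *h_desc = NULL;
	struct scatterlist *sgent;
	unsigned int i;
	u32 pkt_len = 0;

	d = kzalloc(struct_size(d, hwdesc, sglen), GFP_NOWAIT);
	if (!d)
		return NULL;

	d->hwdesc_count = sglen;

	for_each_sg(sgl, sgent, sglen, i) {
		struct udma_hwdesc *hwdesc = &d->hwdesc[i];
		dma_addr_t sg_addr = sg_dma_address(sgent);
		u32 sg_len = sg_dma_len(sgent);
		struct cppi5_host_desc_t *desc;

		hwdesc->cppi5_desc_vaddr = dma_pool_zalloc(uc->hdesc_pool,
							   GFP_NOWAIT,
							   &hwdesc->cppi5_desc_paddr);
		if (!hwdesc->cppi5_desc_vaddr) {
			udma_free_hwdesc(uc, d);	/* releases the slots filled so far */
			kfree(d);
			return NULL;
		}

		hwdesc->cppi5_desc_size = uc->config.hdesc_size;
		desc = hwdesc->cppi5_desc_vaddr;

		if (i == 0)
			cppi5_hdesc_init(desc, 0, 0);	/* packet head */
		else
			cppi5_hdesc_reset_hbdesc(desc);	/* chained buffer descriptor */

		cppi5_hdesc_attach_buf(desc, sg_addr, sg_len, sg_addr, sg_len);

		/* link this buffer descriptor behind the previous one */
		if (h_desc)
			cppi5_hdesc_link_hbdesc(h_desc, hwdesc->cppi5_desc_paddr);

		pkt_len += sg_len;
		h_desc = desc;
	}

	/* the total packet length is recorded in the head descriptor, hwdesc[0] */
	cppi5_hdesc_set_pktlen(d->hwdesc[0].cppi5_desc_vaddr, pkt_len);
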
2250 h_desc = d->hwdesc[0].cppi5_desc_vaddr; in udma_attach_metadata()
2278 h_desc = d->hwdesc[0].cppi5_desc_vaddr; in udma_get_metadata_ptr()
2307 h_desc = d->hwdesc[0].cppi5_desc_vaddr; in udma_set_metadata_len()
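The three metadata callbacks above (lines 2250, 2278, 2307) all work on the EPIB/psdata area of the head host descriptor, hwdesc[0]. From a client driver they are reached through the generic dmaengine metadata helpers; a hedged usage fragment, where the desc pointer, meta_buf and meta_buf_len are placeholders:

	struct dma_async_tx_descriptor *desc;	/* returned by a prep_slave_* call */
	size_t payload_len, max_len;
	void *meta;
	int ret;

	/* DESC_METADATA_CLIENT: hand the driver a buffer it copies to/from the EPIB */
	ret = dmaengine_desc_attach_metadata(desc, meta_buf, meta_buf_len);

	/* DESC_METADATA_ENGINE: write straight into the descriptor's own metadata area */
	meta = dmaengine_desc_get_metadata_ptr(desc, &payload_len, &max_len);
	if (!IS_ERR(meta)) {
		/* fill at most max_len bytes of metadata here */
		ret = dmaengine_desc_set_metadata_len(desc, payload_len);
	}
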
2417 tr_req = d->hwdesc[0].tr_req_base; in udma_prep_dma_cyclic_tr()
2471 d = kzalloc(struct_size(d, hwdesc, periods), GFP_NOWAIT); in udma_prep_dma_cyclic_pkt()
2484 struct udma_hwdesc *hwdesc = &d->hwdesc[i]; in udma_prep_dma_cyclic_pkt() local
2488 hwdesc->cppi5_desc_vaddr = dma_pool_zalloc(uc->hdesc_pool, in udma_prep_dma_cyclic_pkt()
2490 &hwdesc->cppi5_desc_paddr); in udma_prep_dma_cyclic_pkt()
2491 if (!hwdesc->cppi5_desc_vaddr) { in udma_prep_dma_cyclic_pkt()
2500 hwdesc->cppi5_desc_size = uc->config.hdesc_size; in udma_prep_dma_cyclic_pkt()
2501 h_desc = hwdesc->cppi5_desc_vaddr; in udma_prep_dma_cyclic_pkt()
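The cyclic packet-mode prep (lines 2471-2501) uses the same pool-backed host descriptors, with one hwdesc[] slot per period, each pointing period_len bytes further into the ring buffer. A trimmed sketch; header initialisation as in the scatter-gather case above is omitted and hwdesc_count is an assumed field:

	d = kzalloc(struct_size(d, hwdesc, periods), GFP_NOWAIT);
	if (!d)
		return NULL;

	d->hwdesc_count = periods;

	for (i = 0; i < periods; i++) {
		struct udma_hwdesc *hwdesc = &d->hwdesc[i];
		/* each period covers the next period_len bytes of the ring */
		dma_addr_t period_addr = buf_addr + (i * period_len);

		hwdesc->cppi5_desc_vaddr = dma_pool_zalloc(uc->hdesc_pool,
							   GFP_NOWAIT,
							   &hwdesc->cppi5_desc_paddr);
		if (!hwdesc->cppi5_desc_vaddr) {
			udma_free_hwdesc(uc, d);
			kfree(d);
			return NULL;
		}

		hwdesc->cppi5_desc_size = uc->config.hdesc_size;
		cppi5_hdesc_attach_buf(hwdesc->cppi5_desc_vaddr, period_addr,
				       period_len, period_addr, period_len);
	}
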
2624 tr_req = d->hwdesc[0].tr_req_base; in udma_prep_dma_memcpy()
3349 struct udma_hwdesc *hwdesc; in udma_setup_rx_flush() local
3366 hwdesc = &rx_flush->hwdescs[0]; in udma_setup_rx_flush()
3368 hwdesc->cppi5_desc_size = cppi5_trdesc_calc_size(tr_size, 1); in udma_setup_rx_flush()
3369 hwdesc->cppi5_desc_size = ALIGN(hwdesc->cppi5_desc_size, in udma_setup_rx_flush()
3372 hwdesc->cppi5_desc_vaddr = devm_kzalloc(dev, hwdesc->cppi5_desc_size, in udma_setup_rx_flush()
3374 if (!hwdesc->cppi5_desc_vaddr) in udma_setup_rx_flush()
3377 hwdesc->cppi5_desc_paddr = dma_map_single(dev, hwdesc->cppi5_desc_vaddr, in udma_setup_rx_flush()
3378 hwdesc->cppi5_desc_size, in udma_setup_rx_flush()
3380 if (dma_mapping_error(dev, hwdesc->cppi5_desc_paddr)) in udma_setup_rx_flush()
3384 hwdesc->tr_req_base = hwdesc->cppi5_desc_vaddr + tr_size; in udma_setup_rx_flush()
3386 hwdesc->tr_resp_base = hwdesc->tr_req_base + tr_size; in udma_setup_rx_flush()
3388 tr_desc = hwdesc->cppi5_desc_vaddr; in udma_setup_rx_flush()
3393 tr_req = hwdesc->tr_req_base; in udma_setup_rx_flush()
3402 dma_sync_single_for_device(dev, hwdesc->cppi5_desc_paddr, in udma_setup_rx_flush()
3403 hwdesc->cppi5_desc_size, DMA_TO_DEVICE); in udma_setup_rx_flush()
3406 hwdesc = &rx_flush->hwdescs[1]; in udma_setup_rx_flush()
3407 hwdesc->cppi5_desc_size = ALIGN(sizeof(struct cppi5_host_desc_t) + in udma_setup_rx_flush()
3412 hwdesc->cppi5_desc_vaddr = devm_kzalloc(dev, hwdesc->cppi5_desc_size, in udma_setup_rx_flush()
3414 if (!hwdesc->cppi5_desc_vaddr) in udma_setup_rx_flush()
3417 hwdesc->cppi5_desc_paddr = dma_map_single(dev, hwdesc->cppi5_desc_vaddr, in udma_setup_rx_flush()
3418 hwdesc->cppi5_desc_size, in udma_setup_rx_flush()
3420 if (dma_mapping_error(dev, hwdesc->cppi5_desc_paddr)) in udma_setup_rx_flush()
3423 desc = hwdesc->cppi5_desc_vaddr; in udma_setup_rx_flush()
3432 dma_sync_single_for_device(dev, hwdesc->cppi5_desc_paddr, in udma_setup_rx_flush()
3433 hwdesc->cppi5_desc_size, DMA_TO_DEVICE); in udma_setup_rx_flush()
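Unlike the per-transfer descriptors, the two rx-flush descriptors (lines 3349-3433) live in ordinary devm-allocated memory and are handed to the hardware through the streaming DMA API, so they are mapped once and explicitly synced after the CPU fills them in. A hedged sketch of that pattern for the TR-mode flush descriptor; ud->desc_align is an assumption and the TR contents themselves are trimmed:

	hwdesc->cppi5_desc_size = cppi5_trdesc_calc_size(tr_size, 1);
	hwdesc->cppi5_desc_size = ALIGN(hwdesc->cppi5_desc_size,
					ud->desc_align);

	hwdesc->cppi5_desc_vaddr = devm_kzalloc(dev, hwdesc->cppi5_desc_size,
						GFP_KERNEL);
	if (!hwdesc->cppi5_desc_vaddr)
		return -ENOMEM;

	/* streaming mapping: the CPU owns the buffer until it is synced */
	hwdesc->cppi5_desc_paddr = dma_map_single(dev, hwdesc->cppi5_desc_vaddr,
						  hwdesc->cppi5_desc_size,
						  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, hwdesc->cppi5_desc_paddr))
		return -ENOMEM;

	/* one header, one TR record, one response record */
	hwdesc->tr_req_base = hwdesc->cppi5_desc_vaddr + tr_size;
	hwdesc->tr_resp_base = hwdesc->tr_req_base + tr_size;

	/* fill the descriptor header and the single flush TR here, then
	 * hand ownership to the device before it is ever pushed to a ring */
	dma_sync_single_for_device(dev, hwdesc->cppi5_desc_paddr,
				   hwdesc->cppi5_desc_size, DMA_TO_DEVICE);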