Lines matching refs: umem

Cross-reference listing for libbpf's AF_XDP helpers (xsk.c): each entry gives the source line number, the matching line, and the enclosing function, with member/argument/local noting how umem is used at that site.

65 	struct xsk_umem *umem;  member
104 int xsk_umem__fd(const struct xsk_umem *umem) in xsk_umem__fd() argument
106 return umem ? umem->fd : -EINVAL; in xsk_umem__fd()
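xsk_umem__fd() is NULL-safe: it hands back the UMEM's owning AF_XDP socket descriptor, or -EINVAL instead of dereferencing a NULL pointer. A minimal caller-side sketch (the helper name is hypothetical; the header is libbpf's <bpf/xsk.h>, <xdp/xsk.h> under libxdp):

    #include <stdio.h>
    #include <string.h>
    #include <bpf/xsk.h>

    /* Hypothetical helper: fetch the UMEM fd or report why it is missing. */
    static int report_umem_fd(const struct xsk_umem *umem)
    {
        int fd = xsk_umem__fd(umem);    /* -EINVAL when umem is NULL */

        if (fd < 0)
            fprintf(stderr, "no UMEM fd: %s\n", strerror(-fd));
        return fd;
    }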
216 static int xsk_create_umem_rings(struct xsk_umem *umem, int fd, in xsk_create_umem_rings() argument
225 &umem->config.fill_size, in xsk_create_umem_rings()
226 sizeof(umem->config.fill_size)); in xsk_create_umem_rings()
231 &umem->config.comp_size, in xsk_create_umem_rings()
232 sizeof(umem->config.comp_size)); in xsk_create_umem_rings()
240 map = mmap(NULL, off.fr.desc + umem->config.fill_size * sizeof(__u64), in xsk_create_umem_rings()
246 fill->mask = umem->config.fill_size - 1; in xsk_create_umem_rings()
247 fill->size = umem->config.fill_size; in xsk_create_umem_rings()
252 fill->cached_cons = umem->config.fill_size; in xsk_create_umem_rings()
254 map = mmap(NULL, off.cr.desc + umem->config.comp_size * sizeof(__u64), in xsk_create_umem_rings()
262 comp->mask = umem->config.comp_size - 1; in xsk_create_umem_rings()
263 comp->size = umem->config.comp_size; in xsk_create_umem_rings()
272 munmap(map, off.fr.desc + umem->config.fill_size * sizeof(__u64)); in xsk_create_umem_rings()
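xsk_create_umem_rings() follows the standard AF_XDP ring dance: a setsockopt() sizes each ring, the kernel's mmap offsets are fetched (xsk_get_mmap_offsets() wraps the XDP_MMAP_OFFSETS getsockopt), and each ring is mapped at its fixed page offset. Ring sizes are powers of two, which is what lets mask be size - 1, and the fill ring's cached_cons starts at fill_size so the whole ring counts as free. A condensed restatement of the fill-ring half, error handling elided:

    /* 1) Size the fill ring. */
    setsockopt(fd, SOL_XDP, XDP_UMEM_FILL_RING,
               &umem->config.fill_size, sizeof(umem->config.fill_size));

    /* 2) Map the ring header (producer/consumer indices) plus the
     *    descriptor array that follows it. */
    map = mmap(NULL, off.fr.desc + umem->config.fill_size * sizeof(__u64),
               PROT_READ | PROT_WRITE, MAP_SHARED | MAP_POPULATE,
               fd, XDP_UMEM_PGOFF_FILL_RING);

    /* 3) Power-of-two size makes index wrapping a single AND with the mask. */
    fill->mask = umem->config.fill_size - 1;
    fill->size = umem->config.fill_size;
    fill->cached_cons = umem->config.fill_size;   /* ring starts fully free */

The completion ring at lines 254-263 repeats the same steps with XDP_UMEM_COMPLETION_RING and XDP_UMEM_PGOFF_COMPLETION_RING.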
282 struct xsk_umem *umem; in xsk_umem__create_v0_0_4() local
290 umem = calloc(1, sizeof(*umem)); in xsk_umem__create_v0_0_4()
291 if (!umem) in xsk_umem__create_v0_0_4()
294 umem->fd = socket(AF_XDP, SOCK_RAW, 0); in xsk_umem__create_v0_0_4()
295 if (umem->fd < 0) { in xsk_umem__create_v0_0_4()
300 umem->umem_area = umem_area; in xsk_umem__create_v0_0_4()
301 INIT_LIST_HEAD(&umem->ctx_list); in xsk_umem__create_v0_0_4()
302 xsk_set_umem_config(&umem->config, usr_config); in xsk_umem__create_v0_0_4()
307 mr.chunk_size = umem->config.frame_size; in xsk_umem__create_v0_0_4()
308 mr.headroom = umem->config.frame_headroom; in xsk_umem__create_v0_0_4()
309 mr.flags = umem->config.flags; in xsk_umem__create_v0_0_4()
311 err = setsockopt(umem->fd, SOL_XDP, XDP_UMEM_REG, &mr, sizeof(mr)); in xsk_umem__create_v0_0_4()
317 err = xsk_create_umem_rings(umem, umem->fd, fill, comp); in xsk_umem__create_v0_0_4()
321 umem->fill_save = fill; in xsk_umem__create_v0_0_4()
322 umem->comp_save = comp; in xsk_umem__create_v0_0_4()
323 *umem_ptr = umem; in xsk_umem__create_v0_0_4()
327 close(umem->fd); in xsk_umem__create_v0_0_4()
329 free(umem); in xsk_umem__create_v0_0_4()
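xsk_umem__create_v0_0_4() is the versioned symbol behind the public xsk_umem__create(): open an AF_XDP socket, register the caller's buffer via the XDP_UMEM_REG setsockopt, create the fill/completion rings, and park them in fill_save/comp_save until the first socket claims them. A minimal sketch of driving it through the public API, assuming the default config (NULL) and a hypothetical make_umem() helper:

    #include <stdlib.h>
    #include <unistd.h>
    #include <bpf/xsk.h>

    #define NUM_FRAMES 4096

    /* Hypothetical helper: allocate a page-aligned buffer and register it. */
    static struct xsk_umem *make_umem(struct xsk_ring_prod *fill,
                                      struct xsk_ring_cons *comp)
    {
        __u64 size = (__u64)NUM_FRAMES * XSK_UMEM__DEFAULT_FRAME_SIZE;
        struct xsk_umem *umem;
        void *buf;

        if (posix_memalign(&buf, getpagesize(), size))
            return NULL;
        /* NULL config selects the XSK_UMEM__DEFAULT_* sizes. */
        if (xsk_umem__create(&umem, buf, size, fill, comp, NULL)) {
            free(buf);
            return NULL;
        }
        return umem;
    }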
615 static struct xsk_ctx *xsk_get_ctx(struct xsk_umem *umem, int ifindex, in xsk_get_ctx() argument
620 if (list_empty(&umem->ctx_list)) in xsk_get_ctx()
623 list_for_each_entry(ctx, &umem->ctx_list, list) { in xsk_get_ctx()
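A struct xsk_ctx represents one (ifindex, queue_id) binding of the UMEM, and xsk_get_ctx() is a refcounted lookup over ctx_list. A hedged sketch of the loop body, with the refcount bump assumed from the paired xsk_put_ctx():

    list_for_each_entry(ctx, &umem->ctx_list, list) {
        if (ctx->ifindex == ifindex && ctx->queue_id == queue_id) {
            ctx->refcount++;    /* released later by xsk_put_ctx() */
            return ctx;
        }
    }
    return NULL;    /* no socket bound to this queue yet */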
635 struct xsk_umem *umem = ctx->umem; in xsk_put_ctx() local
645 err = xsk_get_mmap_offsets(umem->fd, &off); in xsk_put_ctx()
649 munmap(ctx->fill->ring - off.fr.desc, off.fr.desc + umem->config.fill_size * in xsk_put_ctx()
651 munmap(ctx->comp->ring - off.cr.desc, off.cr.desc + umem->config.comp_size * in xsk_put_ctx()
660 struct xsk_umem *umem, int ifindex, in xsk_create_ctx() argument
672 if (!umem->fill_save) { in xsk_create_ctx()
673 err = xsk_create_umem_rings(umem, xsk->fd, fill, comp); in xsk_create_ctx()
678 } else if (umem->fill_save != fill || umem->comp_save != comp) { in xsk_create_ctx()
680 memcpy(fill, umem->fill_save, sizeof(*fill)); in xsk_create_ctx()
681 memcpy(comp, umem->comp_save, sizeof(*comp)); in xsk_create_ctx()
686 ctx->umem = umem; in xsk_create_ctx()
693 list_add(&ctx->list, &umem->ctx_list); in xsk_create_ctx()
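The branch at lines 672-681 decides where the fill/completion rings live. The same lines, restated with comments:

    if (!umem->fill_save) {
        /* The saved rings were already claimed: create a fresh pair,
         * mapped against this socket's fd rather than the UMEM's. */
        err = xsk_create_umem_rings(umem, xsk->fd, fill, comp);
    } else if (umem->fill_save != fill || umem->comp_save != comp) {
        /* First binding, but the caller passed its own ring structs:
         * hand over the rings mapped back at UMEM creation time. */
        memcpy(fill, umem->fill_save, sizeof(*fill));
        memcpy(comp, umem->comp_save, sizeof(*comp));
    }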
699 __u32 queue_id, struct xsk_umem *umem, in xsk_socket__create_shared() argument
714 if (!umem || !xsk_ptr || !(rx || tx)) in xsk_socket__create_shared()
717 unmap = umem->fill_save != fill; in xsk_socket__create_shared()
734 if (umem->refcount++ > 0) { in xsk_socket__create_shared()
741 xsk->fd = umem->fd; in xsk_socket__create_shared()
742 rx_setup_done = umem->rx_ring_setup_done; in xsk_socket__create_shared()
743 tx_setup_done = umem->tx_ring_setup_done; in xsk_socket__create_shared()
746 ctx = xsk_get_ctx(umem, ifindex, queue_id); in xsk_socket__create_shared()
753 ctx = xsk_create_ctx(xsk, umem, ifindex, ifname, queue_id, in xsk_socket__create_shared()
770 if (xsk->fd == umem->fd) in xsk_socket__create_shared()
771 umem->rx_ring_setup_done = true; in xsk_socket__create_shared()
781 if (xsk->fd == umem->fd) in xsk_socket__create_shared()
782 umem->tx_ring_setup_done = true; in xsk_socket__create_shared()
839 if (umem->refcount > 1) { in xsk_socket__create_shared()
841 sxdp.sxdp_shared_umem_fd = umem->fd; in xsk_socket__create_shared()
859 umem->fill_save = NULL; in xsk_socket__create_shared()
860 umem->comp_save = NULL; in xsk_socket__create_shared()
874 if (--umem->refcount) in xsk_socket__create_shared()
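xsk_socket__create_shared() is the multi-socket entry point: the first socket simply reuses the UMEM's fd (hence the xsk->fd == umem->fd checks guarding rx/tx_ring_setup_done), while any socket beyond refcount 1 binds with sxdp_shared_umem_fd set to the UMEM's fd. A caller-side sketch attaching a second queue, names illustrative:

    /* Each extra binding needs its own fill/completion pair. */
    struct xsk_ring_cons rx2, comp2;
    struct xsk_ring_prod tx2, fill2;
    struct xsk_socket *xsk2;
    int err;

    err = xsk_socket__create_shared(&xsk2, "eth0", /* queue_id */ 1, umem,
                                    &rx2, &tx2, &fill2, &comp2, NULL);
    if (err)
        fprintf(stderr, "shared bind: %s\n", strerror(-err));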
882 __u32 queue_id, struct xsk_umem *umem, in xsk_socket__create() argument
886 if (!umem) in xsk_socket__create()
889 return xsk_socket__create_shared(xsk_ptr, ifname, queue_id, umem, in xsk_socket__create()
890 rx, tx, umem->fill_save, in xsk_socket__create()
891 umem->comp_save, usr_config); in xsk_socket__create()
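The non-shared wrapper forwards to the shared variant with the rings parked in fill_save/comp_save, so the single-socket case consumes exactly the pair created by xsk_umem__create(). Typical usage, with a hypothetical helper and interface name:

    /* Hypothetical: bind one socket to queue 0 of eth0 with defaults. */
    static struct xsk_socket *make_socket(struct xsk_umem *umem,
                                          struct xsk_ring_cons *rx,
                                          struct xsk_ring_prod *tx)
    {
        struct xsk_socket *xsk;

        if (xsk_socket__create(&xsk, "eth0", /* queue_id */ 0, umem,
                               rx, tx, NULL /* default config */))
            return NULL;
        return xsk;
    }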
894 int xsk_umem__delete(struct xsk_umem *umem) in xsk_umem__delete() argument
899 if (!umem) in xsk_umem__delete()
902 if (umem->refcount) in xsk_umem__delete()
905 err = xsk_get_mmap_offsets(umem->fd, &off); in xsk_umem__delete()
906 if (!err && umem->fill_save && umem->comp_save) { in xsk_umem__delete()
907 munmap(umem->fill_save->ring - off.fr.desc, in xsk_umem__delete()
908 off.fr.desc + umem->config.fill_size * sizeof(__u64)); in xsk_umem__delete()
909 munmap(umem->comp_save->ring - off.cr.desc, in xsk_umem__delete()
910 off.cr.desc + umem->config.comp_size * sizeof(__u64)); in xsk_umem__delete()
913 close(umem->fd); in xsk_umem__delete()
914 free(umem); in xsk_umem__delete()
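xsk_umem__delete() refuses to tear down a UMEM that sockets still hold (the listing elides the return at line 902; upstream it is -EBUSY), unmaps any unclaimed saved rings, then closes the fd. Teardown therefore runs sockets first, UMEM second:

    /* Delete every socket before the UMEM, then release the buffer. */
    xsk_socket__delete(xsk);
    if (xsk_umem__delete(umem))
        fprintf(stderr, "UMEM still referenced\n");
    free(buf);    /* the buffer from the make_umem() sketch above */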
923 struct xsk_umem *umem; in xsk_socket__delete() local
931 umem = ctx->umem; in xsk_socket__delete()
952 umem->refcount--; in xsk_socket__delete()
956 if (xsk->fd != umem->fd) in xsk_socket__delete()
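The closing check mirrors the ownership rule from creation: a socket that borrowed the UMEM's fd must not close it, since that descriptor stays alive until xsk_umem__delete(). A hedged sketch of the tail (the free(xsk) is an assumption consistent with the allocation pattern above):

    umem->refcount--;
    /* Only close descriptors we own; the UMEM's fd outlives its sockets. */
    if (xsk->fd != umem->fd)
        close(xsk->fd);
    free(xsk);    /* assumed: mirrors the calloc()/free() pattern above */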