Lines matching refs: xsks

Cross-reference listing for the xsks socket array in the AF_XDP xdpsock sample application (xdpsock_user.c); each entry is <source line> <matching code> in <enclosing function>(). Only lines that reference xsks appear, so multi-line statements show up truncated.

155 struct xsk_socket_info *xsks[MAX_SOCKS];  variable
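
For orientation, a sketch of the per-socket bookkeeping behind this array, reconstructed purely from the member accesses visible in the listing below; field types (unsigned long, __u32), the MAX_SOCKS value, and any field not dereferenced in the listing are assumptions, not the sample's exact definitions:

#include <bpf/xsk.h>    /* xsk_ring_cons/prod, xsk_socket; libxdp ships it as <xdp/xsk.h> */

#define MAX_SOCKS 4     /* placeholder; the sample defines its own limit */

struct xsk_app_stats {          /* userspace-side poll/sendto counters */
        unsigned long rx_empty_polls, prev_rx_empty_polls;
        unsigned long fill_fail_polls, prev_fill_fail_polls;
        unsigned long copy_tx_sendtos, prev_copy_tx_sendtos;
        unsigned long tx_wakeup_sendtos, prev_tx_wakeup_sendtos;
        unsigned long opt_polls, prev_opt_polls;
};

struct xsk_drv_stats {          /* driver interrupts since startup */
        unsigned long intrs, prev_intrs;
};

struct xsk_ring_stats {         /* per-ring packet and error counters */
        unsigned long rx_npkts, prev_rx_npkts;
        unsigned long tx_npkts, prev_tx_npkts;
        unsigned long rx_dropped_npkts, prev_rx_dropped_npkts;
        unsigned long rx_invalid_npkts, prev_rx_invalid_npkts;
        unsigned long tx_invalid_npkts, prev_tx_invalid_npkts;
        unsigned long rx_full_npkts, prev_rx_full_npkts;
        unsigned long rx_fill_empty_npkts, prev_rx_fill_empty_npkts;
        unsigned long tx_empty_npkts, prev_tx_empty_npkts;
};

struct xsk_umem_info {
        struct xsk_umem *umem;  /* only member visible in the listing (line 483) */
};

struct xsk_socket_info {
        struct xsk_ring_cons rx;            /* assumed: RX descriptor ring */
        struct xsk_ring_prod tx;            /* assumed: TX descriptor ring */
        struct xsk_umem_info *umem;         /* ->umem->umem is dereferenced at line 483 */
        struct xsk_socket *xsk;             /* handle for xsk_socket__fd()/__delete() */
        struct xsk_ring_stats ring_stats;
        struct xsk_app_stats app_stats;
        struct xsk_drv_stats drv_stats;
        __u32 outstanding_tx;               /* TX descriptors submitted, not yet completed */
};

struct xsk_socket_info *xsks[MAX_SOCKS];
static int num_socks;
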
221 for (i = 0; i < num_socks && xsks[i]; i++) { in dump_app_stats()
226 rx_empty_polls_ps = (xsks[i]->app_stats.rx_empty_polls - in dump_app_stats()
227 xsks[i]->app_stats.prev_rx_empty_polls) * 1000000000. / dt; in dump_app_stats()
228 fill_fail_polls_ps = (xsks[i]->app_stats.fill_fail_polls - in dump_app_stats()
229 xsks[i]->app_stats.prev_fill_fail_polls) * 1000000000. / dt; in dump_app_stats()
230 copy_tx_sendtos_ps = (xsks[i]->app_stats.copy_tx_sendtos - in dump_app_stats()
231 xsks[i]->app_stats.prev_copy_tx_sendtos) * 1000000000. / dt; in dump_app_stats()
232 tx_wakeup_sendtos_ps = (xsks[i]->app_stats.tx_wakeup_sendtos - in dump_app_stats()
233 xsks[i]->app_stats.prev_tx_wakeup_sendtos) in dump_app_stats()
235 opt_polls_ps = (xsks[i]->app_stats.opt_polls - in dump_app_stats()
236 xsks[i]->app_stats.prev_opt_polls) * 1000000000. / dt; in dump_app_stats()
239 printf(fmt, "rx empty polls", rx_empty_polls_ps, xsks[i]->app_stats.rx_empty_polls); in dump_app_stats()
241 xsks[i]->app_stats.fill_fail_polls); in dump_app_stats()
243 xsks[i]->app_stats.copy_tx_sendtos); in dump_app_stats()
245 xsks[i]->app_stats.tx_wakeup_sendtos); in dump_app_stats()
246 printf(fmt, "opt polls", opt_polls_ps, xsks[i]->app_stats.opt_polls); in dump_app_stats()
248 xsks[i]->app_stats.prev_rx_empty_polls = xsks[i]->app_stats.rx_empty_polls; in dump_app_stats()
249 xsks[i]->app_stats.prev_fill_fail_polls = xsks[i]->app_stats.fill_fail_polls; in dump_app_stats()
250 xsks[i]->app_stats.prev_copy_tx_sendtos = xsks[i]->app_stats.copy_tx_sendtos; in dump_app_stats()
251 xsks[i]->app_stats.prev_tx_wakeup_sendtos = xsks[i]->app_stats.tx_wakeup_sendtos; in dump_app_stats()
252 xsks[i]->app_stats.prev_opt_polls = xsks[i]->app_stats.opt_polls; in dump_app_stats()
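
The block at source lines 221-252 prints application-level counters as per-second rates: each counter is differenced against its prev_* snapshot, scaled by the elapsed time dt (in nanoseconds, given the 1000000000. factor), and the snapshot is then refreshed. A minimal sketch of that pattern; the helper names get_nsecs and dump_rate are placeholders, not necessarily the sample's own:

#include <stdio.h>
#include <time.h>

/* Monotonic time in nanoseconds, the unit the "* 1000000000. / dt"
 * scaling in the listing implies for dt. */
static unsigned long get_nsecs(void)
{
        struct timespec ts;

        clock_gettime(CLOCK_MONOTONIC, &ts);
        return ts.tv_sec * 1000000000UL + ts.tv_nsec;
}

/* One counter of the dump: rate over the last interval, cumulative total,
 * then refresh the prev_* snapshot, as lines 226-252 do field by field. */
static void dump_rate(const char *name, unsigned long cur,
                      unsigned long *prev, long dt)
{
        double per_sec = (cur - *prev) * 1000000000. / dt;

        printf("%-18s %-14.0f %-14lu\n", name, per_sec, cur);
        *prev = cur;
}
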
329 for (i = 0; i < num_socks && xsks[i]; i++) { in dump_driver_stats()
338 xsks[i]->drv_stats.intrs = n_ints - irqs_at_init; in dump_driver_stats()
340 intrs_ps = (xsks[i]->drv_stats.intrs - xsks[i]->drv_stats.prev_intrs) * in dump_driver_stats()
344 printf(fmt, "irqs", intrs_ps, xsks[i]->drv_stats.intrs); in dump_driver_stats()
346 xsks[i]->drv_stats.prev_intrs = xsks[i]->drv_stats.intrs; in dump_driver_stats()
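
dump_driver_stats() (source lines 329-346) applies the same delta-and-snapshot scheme to interrupt counts, with one extra step: the running total n_ints is first rebased against irqs_at_init, the value sampled at startup, so only interrupts fired since the program began are reported. A sketch of that step, reusing the xsk_socket_info sketch above; n_ints is taken as a parameter because how the sample obtains it (presumably from /proc/interrupts) is not visible in this listing:

#include <stdio.h>

static void dump_driver_stats_one(struct xsk_socket_info *xsk,
                                  unsigned long n_ints,
                                  unsigned long irqs_at_init, long dt)
{
        double intrs_ps;

        /* Interrupts since startup, then rate over the last interval. */
        xsk->drv_stats.intrs = n_ints - irqs_at_init;
        intrs_ps = (xsk->drv_stats.intrs - xsk->drv_stats.prev_intrs) *
                   1000000000. / dt;
        printf("%-18s %-14.0f %-14lu\n", "irqs", intrs_ps, xsk->drv_stats.intrs);
        xsk->drv_stats.prev_intrs = xsk->drv_stats.intrs;
}
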
358 for (i = 0; i < num_socks && xsks[i]; i++) { in dump_stats()
363 rx_pps = (xsks[i]->ring_stats.rx_npkts - xsks[i]->ring_stats.prev_rx_npkts) * in dump_stats()
365 tx_pps = (xsks[i]->ring_stats.tx_npkts - xsks[i]->ring_stats.prev_tx_npkts) * in dump_stats()
374 printf(fmt, "rx", rx_pps, xsks[i]->ring_stats.rx_npkts); in dump_stats()
375 printf(fmt, "tx", tx_pps, xsks[i]->ring_stats.tx_npkts); in dump_stats()
377 xsks[i]->ring_stats.prev_rx_npkts = xsks[i]->ring_stats.rx_npkts; in dump_stats()
378 xsks[i]->ring_stats.prev_tx_npkts = xsks[i]->ring_stats.tx_npkts; in dump_stats()
381 if (!xsk_get_xdp_stats(xsk_socket__fd(xsks[i]->xsk), xsks[i])) { in dump_stats()
382 dropped_pps = (xsks[i]->ring_stats.rx_dropped_npkts - in dump_stats()
383 xsks[i]->ring_stats.prev_rx_dropped_npkts) * in dump_stats()
385 rx_invalid_pps = (xsks[i]->ring_stats.rx_invalid_npkts - in dump_stats()
386 xsks[i]->ring_stats.prev_rx_invalid_npkts) * in dump_stats()
388 tx_invalid_pps = (xsks[i]->ring_stats.tx_invalid_npkts - in dump_stats()
389 xsks[i]->ring_stats.prev_tx_invalid_npkts) * in dump_stats()
391 full_pps = (xsks[i]->ring_stats.rx_full_npkts - in dump_stats()
392 xsks[i]->ring_stats.prev_rx_full_npkts) * in dump_stats()
394 fill_empty_pps = (xsks[i]->ring_stats.rx_fill_empty_npkts - in dump_stats()
395 xsks[i]->ring_stats.prev_rx_fill_empty_npkts) * in dump_stats()
397 tx_empty_pps = (xsks[i]->ring_stats.tx_empty_npkts - in dump_stats()
398 xsks[i]->ring_stats.prev_tx_empty_npkts) * in dump_stats()
402 xsks[i]->ring_stats.rx_dropped_npkts); in dump_stats()
404 xsks[i]->ring_stats.rx_invalid_npkts); in dump_stats()
406 xsks[i]->ring_stats.tx_invalid_npkts); in dump_stats()
408 xsks[i]->ring_stats.rx_full_npkts); in dump_stats()
410 xsks[i]->ring_stats.rx_fill_empty_npkts); in dump_stats()
412 xsks[i]->ring_stats.tx_empty_npkts); in dump_stats()
414 xsks[i]->ring_stats.prev_rx_dropped_npkts = in dump_stats()
415 xsks[i]->ring_stats.rx_dropped_npkts; in dump_stats()
416 xsks[i]->ring_stats.prev_rx_invalid_npkts = in dump_stats()
417 xsks[i]->ring_stats.rx_invalid_npkts; in dump_stats()
418 xsks[i]->ring_stats.prev_tx_invalid_npkts = in dump_stats()
419 xsks[i]->ring_stats.tx_invalid_npkts; in dump_stats()
420 xsks[i]->ring_stats.prev_rx_full_npkts = in dump_stats()
421 xsks[i]->ring_stats.rx_full_npkts; in dump_stats()
422 xsks[i]->ring_stats.prev_rx_fill_empty_npkts = in dump_stats()
423 xsks[i]->ring_stats.rx_fill_empty_npkts; in dump_stats()
424 xsks[i]->ring_stats.prev_tx_empty_npkts = in dump_stats()
425 xsks[i]->ring_stats.tx_empty_npkts; in dump_stats()
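
dump_stats() (source lines 358-425) prints RX/TX packet rates from the sample's own ring_stats counters and, when xsk_get_xdp_stats() succeeds at line 381, the kernel-side drop/invalid/ring-full counters using the same delta-and-snapshot scheme. xsk_get_xdp_stats() is a helper local to the sample; a hedged sketch of how such a helper can wrap the XDP_STATISTICS getsockopt (the function name and error convention here are assumptions):

#include <string.h>
#include <sys/socket.h>
#include <linux/if_xdp.h>

/* Fetch the kernel-side AF_XDP counters for one socket. The rx_ring_full,
 * rx_fill_ring_empty_descs and tx_ring_empty_descs fields only exist on
 * newer kernels and may not be filled on older ones. */
static int get_xdp_stats(int fd, struct xdp_statistics *stats)
{
        socklen_t optlen = sizeof(*stats);

        memset(stats, 0, sizeof(*stats));
        if (getsockopt(fd, SOL_XDP, XDP_STATISTICS, stats, &optlen))
                return -1;
        return 0;
}
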
483 struct xsk_umem *umem = xsks[0]->umem->umem; in xdpsock_cleanup()
488 xsk_socket__delete(xsks[i]->xsk); in xdpsock_cleanup()
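
xdpsock_cleanup() (source lines 483-488) tears the sockets down before the UMEM they share, since the UMEM is still referenced while any socket is bound to it. A condensed sketch, assuming the libbpf/libxdp xsk API and the xsks/num_socks sketch above:

static void cleanup_sockets(void)
{
        struct xsk_umem *umem = xsks[0]->umem->umem;
        int i;

        /* Delete every socket first, then the shared UMEM. */
        for (i = 0; i < num_socks; i++)
                xsk_socket__delete(xsks[i]->xsk);
        xsk_umem__delete(umem);
}
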
1224 fds[i].fd = xsk_socket__fd(xsks[i]->xsk); in rx_drop_all()
1231 xsks[i]->app_stats.opt_polls++; in rx_drop_all()
1238 rx_drop(xsks[i], fds); in rx_drop_all()
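
rx_drop_all() (source lines 1224-1238) builds one pollfd per socket, optionally blocks in poll() (counting each call in app_stats.opt_polls), and then runs the per-socket rx_drop() step from line 1238. A sketch of that outer loop; opt_poll and the 1000 ms timeout are assumptions standing in for the sample's option handling, and the loop runs forever here instead of checking a done flag:

#include <poll.h>

static void rx_drop_all_sketch(int opt_poll)
{
        struct pollfd fds[MAX_SOCKS] = { 0 };
        int i, ret;

        for (i = 0; i < num_socks; i++) {
                fds[i].fd = xsk_socket__fd(xsks[i]->xsk);
                fds[i].events = POLLIN;
        }

        for (;;) {
                if (opt_poll) {
                        for (i = 0; i < num_socks; i++)
                                xsks[i]->app_stats.opt_polls++;
                        ret = poll(fds, num_socks, 1000);
                        if (ret <= 0)
                                continue;
                }
                for (i = 0; i < num_socks; i++)
                        rx_drop(xsks[i], fds);   /* consume RX, recycle buffers to the fill ring */
        }
}
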
1290 if (xsks[i]->outstanding_tx) { in complete_tx_only_all()
1291 complete_tx_only(xsks[i], opt_batch_size); in complete_tx_only_all()
1292 pending = !!xsks[i]->outstanding_tx; in complete_tx_only_all()
1306 fds[0].fd = xsk_socket__fd(xsks[i]->xsk); in tx_only_all()
1315 xsks[i]->app_stats.opt_polls++; in tx_only_all()
1325 tx_only(xsks[i], &frame_nb[i], batch_size); in tx_only_all()
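
tx_only_all() (source lines 1306-1325) drives the TX generator: it polls the first socket's fd when polling is enabled, counts opt_polls, and pushes batches via tx_only(). Before exiting, complete_tx_only_all() (lines 1290-1292) keeps reaping completions until no socket still reports outstanding_tx. A sketch of that drain loop; complete_tx_only() and opt_batch_size are the sample's own names from the listing, the rest is scaffolding:

#include <stdbool.h>

static void drain_tx_completions(void)
{
        bool pending;

        do {
                pending = false;
                for (int i = 0; i < num_socks; i++) {
                        if (xsks[i]->outstanding_tx) {
                                /* Reap up to one batch of TX completions. */
                                complete_tx_only(xsks[i], opt_batch_size);
                                pending |= !!xsks[i]->outstanding_tx;
                        }
                }
        } while (pending);
}
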
1394 fds[i].fd = xsk_socket__fd(xsks[i]->xsk); in l2fwd_all()
1401 xsks[i]->app_stats.opt_polls++; in l2fwd_all()
1408 l2fwd(xsks[i], fds); in l2fwd_all()
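
l2fwd_all() (source lines 1394-1408) repeats the same pollfd pattern as rx_drop_all() above, with l2fwd() as the per-socket step. The TX side of such a step is where a counter like app_stats.tx_wakeup_sendtos (printed by dump_app_stats()) gets incremented: when the kernel sets the ring's need-wakeup flag, a zero-byte sendto() kicks it. A sketch of that kick, assuming libbpf/libxdp's xsk_ring_prod__needs_wakeup() and the struct sketch above; the error handling is illustrative only:

#include <errno.h>
#include <stdio.h>
#include <sys/socket.h>

static void kick_tx_sketch(struct xsk_socket_info *xsk)
{
        if (!xsk_ring_prod__needs_wakeup(&xsk->tx))
                return;

        xsk->app_stats.tx_wakeup_sendtos++;
        /* A zero-length sendto() is the AF_XDP TX wakeup call. */
        if (sendto(xsk_socket__fd(xsk->xsk), NULL, 0, MSG_DONTWAIT, NULL, 0) < 0 &&
            errno != EAGAIN && errno != EBUSY && errno != ENETDOWN)
                perror("sendto");
}
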
1454 int fd = xsk_socket__fd(xsks[i]->xsk); in enter_xsks_into_map()
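
enter_xsks_into_map() (source line 1454) registers each socket fd in the XSKMAP that the XDP program redirects packets into. A sketch of that loop; how the map fd is obtained is left as a parameter (xsks_map_fd) rather than guessed:

#include <bpf/bpf.h>

static int enter_xsks_into_map_sketch(int xsks_map_fd)
{
        for (int i = 0; i < num_socks; i++) {
                int fd = xsk_socket__fd(xsks[i]->xsk);
                int key = i;

                /* XSKMAP entry i -> socket fd, so bpf_redirect_map(i) hits this socket. */
                if (bpf_map_update_elem(xsks_map_fd, &key, &fd, 0))
                        return -1;
        }
        return 0;
}
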
1505 xsks[num_socks++] = xsk_configure_socket(umem, rx, tx); in main()
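
Finally, main() (source line 1505) fills xsks[] by creating one AF_XDP socket per configured queue. A sketch of what a helper with the shape xsk_configure_socket(umem, rx, tx) can look like around xsk_socket__create(); the config values, the bind flag, and the extra ifname/queue parameters are assumptions, not the sample's exact signature:

#include <stdbool.h>
#include <stdlib.h>
#include <linux/if_xdp.h>

static struct xsk_socket_info *configure_socket(struct xsk_umem_info *umem,
                                                bool rx, bool tx,
                                                const char *ifname, __u32 queue)
{
        struct xsk_socket_config cfg = {
                .rx_size = XSK_RING_CONS__DEFAULT_NUM_DESCS,
                .tx_size = XSK_RING_PROD__DEFAULT_NUM_DESCS,
                .bind_flags = XDP_USE_NEED_WAKEUP,   /* assumed bind flag */
        };
        struct xsk_socket_info *xsk = calloc(1, sizeof(*xsk));

        if (!xsk)
                return NULL;
        xsk->umem = umem;
        /* Only request the rings the caller asked for, as the rx/tx flags suggest. */
        if (xsk_socket__create(&xsk->xsk, ifname, queue, umem->umem,
                               rx ? &xsk->rx : NULL, tx ? &xsk->tx : NULL, &cfg)) {
                free(xsk);
                return NULL;
        }
        return xsk;
}

/* Usage mirroring line 1505: xsks[num_socks++] = configure_socket(umem, rx, tx, ifname, queue); */
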