Lines Matching refs:bat_v (net/batman-adv/bat_v_ogm.c)
93 queue_delayed_work(batadv_event_workqueue, &hard_iface->bat_v.aggr_wq, in batadv_v_ogm_start_queue_timer()
107 if (delayed_work_pending(&bat_priv->bat_v.ogm_wq)) in batadv_v_ogm_start_timer()
112 queue_delayed_work(batadv_event_workqueue, &bat_priv->bat_v.ogm_wq, in batadv_v_ogm_start_timer()
167 lockdep_assert_held(&hard_iface->bat_v.aggr_list.lock); in batadv_v_ogm_queue_left()
169 return hard_iface->bat_v.aggr_len + ogm_len <= max; in batadv_v_ogm_queue_left()
182 lockdep_assert_held(&hard_iface->bat_v.aggr_list.lock); in batadv_v_ogm_aggr_list_free()
184 __skb_queue_purge(&hard_iface->bat_v.aggr_list); in batadv_v_ogm_aggr_list_free()
185 hard_iface->bat_v.aggr_len = 0; in batadv_v_ogm_aggr_list_free()
201 unsigned int aggr_len = hard_iface->bat_v.aggr_len; in batadv_v_ogm_aggr_send()
206 lockdep_assert_held(&hard_iface->bat_v.aggr_list.lock); in batadv_v_ogm_aggr_send()
220 while ((skb = __skb_dequeue(&hard_iface->bat_v.aggr_list))) { in batadv_v_ogm_aggr_send()
221 hard_iface->bat_v.aggr_len -= batadv_v_ogm_len(skb); in batadv_v_ogm_aggr_send()
247 spin_lock_bh(&hard_iface->bat_v.aggr_list.lock); in batadv_v_ogm_queue_on_if()
251 hard_iface->bat_v.aggr_len += batadv_v_ogm_len(skb); in batadv_v_ogm_queue_on_if()
252 __skb_queue_tail(&hard_iface->bat_v.aggr_list, skb); in batadv_v_ogm_queue_on_if()
253 spin_unlock_bh(&hard_iface->bat_v.aggr_list.lock); in batadv_v_ogm_queue_on_if()
270 lockdep_assert_held(&bat_priv->bat_v.ogm_buff_mutex); in batadv_v_ogm_send_softif()
275 ogm_buff = bat_priv->bat_v.ogm_buff; in batadv_v_ogm_send_softif()
276 ogm_buff_len = bat_priv->bat_v.ogm_buff_len; in batadv_v_ogm_send_softif()
285 bat_priv->bat_v.ogm_buff = ogm_buff; in batadv_v_ogm_send_softif()
286 bat_priv->bat_v.ogm_buff_len = ogm_buff_len; in batadv_v_ogm_send_softif()
296 ogm_packet->seqno = htonl(atomic_read(&bat_priv->bat_v.ogm_seqno)); in batadv_v_ogm_send_softif()
297 atomic_inc(&bat_priv->bat_v.ogm_seqno); in batadv_v_ogm_send_softif()
367 struct batadv_priv_bat_v *bat_v; in batadv_v_ogm_send() local
370 bat_v = container_of(work, struct batadv_priv_bat_v, ogm_wq.work); in batadv_v_ogm_send()
371 bat_priv = container_of(bat_v, struct batadv_priv, bat_v); in batadv_v_ogm_send()
373 mutex_lock(&bat_priv->bat_v.ogm_buff_mutex); in batadv_v_ogm_send()
375 mutex_unlock(&bat_priv->bat_v.ogm_buff_mutex); in batadv_v_ogm_send()
390 hard_iface = container_of(batv, struct batadv_hard_iface, bat_v); in batadv_v_ogm_aggr_work()
392 spin_lock_bh(&hard_iface->bat_v.aggr_list.lock); in batadv_v_ogm_aggr_work()
394 spin_unlock_bh(&hard_iface->bat_v.aggr_list.lock); in batadv_v_ogm_aggr_work()
423 cancel_delayed_work_sync(&hard_iface->bat_v.aggr_wq); in batadv_v_ogm_iface_disable()
425 spin_lock_bh(&hard_iface->bat_v.aggr_list.lock); in batadv_v_ogm_iface_disable()
427 spin_unlock_bh(&hard_iface->bat_v.aggr_list.lock); in batadv_v_ogm_iface_disable()
439 mutex_lock(&bat_priv->bat_v.ogm_buff_mutex); in batadv_v_ogm_primary_iface_set()
440 if (!bat_priv->bat_v.ogm_buff) in batadv_v_ogm_primary_iface_set()
443 ogm_packet = (struct batadv_ogm2_packet *)bat_priv->bat_v.ogm_buff; in batadv_v_ogm_primary_iface_set()
447 mutex_unlock(&bat_priv->bat_v.ogm_buff_mutex); in batadv_v_ogm_primary_iface_set()
496 !(if_incoming->bat_v.flags & BATADV_FULL_DUPLEX)) in batadv_v_forward_penalty()
575 ogm_forward->throughput = htonl(neigh_ifinfo->bat_v.throughput); in batadv_v_ogm_forward()
662 neigh_ifinfo->bat_v.throughput = path_throughput; in batadv_v_ogm_metric_update()
663 neigh_ifinfo->bat_v.last_seqno = ntohl(ogm2->seqno); in batadv_v_ogm_metric_update()
752 neigh_last_seqno = neigh_ifinfo->bat_v.last_seqno; in batadv_v_ogm_route_update()
753 router_last_seqno = router_ifinfo->bat_v.last_seqno; in batadv_v_ogm_route_update()
755 router_throughput = router_ifinfo->bat_v.throughput; in batadv_v_ogm_route_update()
756 neigh_throughput = neigh_ifinfo->bat_v.throughput; in batadv_v_ogm_route_update()
924 link_throughput = ewma_throughput_read(&hardif_neigh->bat_v.throughput); in batadv_v_ogm_process()
1059 bat_priv->bat_v.ogm_buff_len = BATADV_OGM2_HLEN; in batadv_v_ogm_init()
1060 ogm_buff = kzalloc(bat_priv->bat_v.ogm_buff_len, GFP_ATOMIC); in batadv_v_ogm_init()
1064 bat_priv->bat_v.ogm_buff = ogm_buff; in batadv_v_ogm_init()
1074 atomic_set(&bat_priv->bat_v.ogm_seqno, random_seqno); in batadv_v_ogm_init()
1075 INIT_DELAYED_WORK(&bat_priv->bat_v.ogm_wq, batadv_v_ogm_send); in batadv_v_ogm_init()
1077 mutex_init(&bat_priv->bat_v.ogm_buff_mutex); in batadv_v_ogm_init()
1088 cancel_delayed_work_sync(&bat_priv->bat_v.ogm_wq); in batadv_v_ogm_free()
1090 mutex_lock(&bat_priv->bat_v.ogm_buff_mutex); in batadv_v_ogm_free()
1092 kfree(bat_priv->bat_v.ogm_buff); in batadv_v_ogm_free()
1093 bat_priv->bat_v.ogm_buff = NULL; in batadv_v_ogm_free()
1094 bat_priv->bat_v.ogm_buff_len = 0; in batadv_v_ogm_free()
1096 mutex_unlock(&bat_priv->bat_v.ogm_buff_mutex); in batadv_v_ogm_free()
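
Most of the hard_iface->bat_v references above belong to the per-interface OGMv2 aggregation queue: aggr_list is an skb queue whose embedded lock protects both the list and the aggr_len byte counter, and the lockdep_assert_held() calls (lines 167, 182, 206) document that the helpers expect the caller to already hold it. Below is a minimal sketch of that enqueue pattern, reconstructed from the lines above; the helper signatures are assumed from how they are used in this listing, so it is not the verbatim kernel function.

/*
 * Hypothetical sketch of the enqueue pattern implied by lines 244-253.
 * Struct layout from net/batman-adv/types.h; batadv_v_ogm_queue_left(),
 * batadv_v_ogm_aggr_send() and batadv_v_ogm_len() are the helpers named
 * in the listing, with their argument order assumed from their use there.
 */
#include <linux/skbuff.h>
#include <linux/spinlock.h>

#include "types.h"

static void sketch_v_ogm_queue_on_if(struct sk_buff *skb,
				     struct batadv_hard_iface *hard_iface)
{
	spin_lock_bh(&hard_iface->bat_v.aggr_list.lock);

	/* flush the pending aggregate first if this OGM2 would not fit */
	if (!batadv_v_ogm_queue_left(skb, hard_iface))
		batadv_v_ogm_aggr_send(hard_iface);	/* expects the lock held */

	/* account the OGM2 bytes and append the skb to the aggregation list */
	hard_iface->bat_v.aggr_len += batadv_v_ogm_len(skb);
	__skb_queue_tail(&hard_iface->bat_v.aggr_list, skb);

	spin_unlock_bh(&hard_iface->bat_v.aggr_list.lock);
}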
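
The bat_priv->bat_v references follow a second pattern: the mesh-wide OGM buffer (ogm_buff, ogm_buff_len) is only touched under ogm_buff_mutex (lines 270-297, 439-447, 1088-1096), while the sequence number lives in its own atomic_t. The following sketch shows the seqno stamping visible in batadv_v_ogm_send_softif(); it assumes the struct layout from net/batman-adv/types.h and the OGM2 header from batadv_packet.h rather than quoting the kernel code.

/*
 * Hypothetical sketch of the ogm_buff/ogm_seqno handling implied by
 * lines 270-297.  Access to ogm_buff and ogm_buff_len is serialized by
 * ogm_buff_mutex; the sequence number is an atomic_t and needs no
 * additional locking.
 */
#include <linux/atomic.h>
#include <linux/byteorder/generic.h>
#include <linux/lockdep.h>
#include <linux/mutex.h>
#include <uapi/linux/batadv_packet.h>

#include "types.h"

static void sketch_v_ogm_stamp_seqno(struct batadv_priv *bat_priv)
{
	struct batadv_ogm2_packet *ogm_packet;

	/* callers are expected to hold the buffer mutex, mirroring the
	 * lockdep_assert_held() in batadv_v_ogm_send_softif()
	 */
	lockdep_assert_held(&bat_priv->bat_v.ogm_buff_mutex);

	if (!bat_priv->bat_v.ogm_buff)
		return;

	ogm_packet = (struct batadv_ogm2_packet *)bat_priv->bat_v.ogm_buff;

	/* stamp the current sequence number into the OGM2 header, then
	 * advance it for the next originator interval
	 */
	ogm_packet->seqno = htonl(atomic_read(&bat_priv->bat_v.ogm_seqno));
	atomic_inc(&bat_priv->bat_v.ogm_seqno);
}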