Lines Matching refs:bat_v
94 queue_delayed_work(batadv_event_workqueue, &hard_iface->bat_v.aggr_wq, in batadv_v_ogm_start_queue_timer()
108 if (delayed_work_pending(&bat_priv->bat_v.ogm_wq)) in batadv_v_ogm_start_timer()
113 queue_delayed_work(batadv_event_workqueue, &bat_priv->bat_v.ogm_wq, in batadv_v_ogm_start_timer()
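These matches appear to come from the B.A.T.M.A.N. V OGM code in the Linux kernel (net/batman-adv/bat_v_ogm.c); each entry is a source line number, the matched line, and the enclosing function. The entries for batadv_v_ogm_start_queue_timer() and batadv_v_ogm_start_timer() show the usual delayed-work re-arm pattern: only queue the work if it is not already pending, so repeated callers do not shorten the interval. A minimal sketch of that pattern follows; the my_* names, the workqueue pointer and the 100 ms interval are illustrative stand-ins, not taken from the source.

#include <linux/workqueue.h>
#include <linux/jiffies.h>

struct my_priv {
	struct delayed_work ogm_wq;
};

static struct workqueue_struct *my_wq;	/* stand-in for batadv_event_workqueue */

static void my_ogm_start_timer(struct my_priv *priv)
{
	unsigned int msecs = 100;	/* illustrative interval */

	/* do not shorten an interval that is already running */
	if (delayed_work_pending(&priv->ogm_wq))
		return;

	queue_delayed_work(my_wq, &priv->ogm_wq, msecs_to_jiffies(msecs));
}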
168 lockdep_assert_held(&hard_iface->bat_v.aggr_list.lock); in batadv_v_ogm_queue_left()
170 return hard_iface->bat_v.aggr_len + ogm_len <= max; in batadv_v_ogm_queue_left()
183 lockdep_assert_held(&hard_iface->bat_v.aggr_list.lock); in batadv_v_ogm_aggr_list_free()
185 __skb_queue_purge(&hard_iface->bat_v.aggr_list); in batadv_v_ogm_aggr_list_free()
186 hard_iface->bat_v.aggr_len = 0; in batadv_v_ogm_aggr_list_free()
202 unsigned int aggr_len = hard_iface->bat_v.aggr_len; in batadv_v_ogm_aggr_send()
207 lockdep_assert_held(&hard_iface->bat_v.aggr_list.lock); in batadv_v_ogm_aggr_send()
221 while ((skb = __skb_dequeue(&hard_iface->bat_v.aggr_list))) { in batadv_v_ogm_aggr_send()
222 hard_iface->bat_v.aggr_len -= batadv_v_ogm_len(skb); in batadv_v_ogm_aggr_send()
248 spin_lock_bh(&hard_iface->bat_v.aggr_list.lock); in batadv_v_ogm_queue_on_if()
252 hard_iface->bat_v.aggr_len += batadv_v_ogm_len(skb); in batadv_v_ogm_queue_on_if()
253 __skb_queue_tail(&hard_iface->bat_v.aggr_list, skb); in batadv_v_ogm_queue_on_if()
254 spin_unlock_bh(&hard_iface->bat_v.aggr_list.lock); in batadv_v_ogm_queue_on_if()
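The aggregation matches (batadv_v_ogm_queue_left(), batadv_v_ogm_aggr_list_free(), batadv_v_ogm_aggr_send(), batadv_v_ogm_queue_on_if()) all revolve around one structure: an sk_buff_head whose own ->lock serializes the unlocked __skb_* helpers, plus a byte counter (aggr_len) kept in sync on every enqueue, dequeue and purge. Below is a minimal sketch of that pattern under assumed names (my_iface, my_ogm_len, MY_MAX_AGGR_LEN); it is not the batman-adv implementation, and it drops the packet where the real code would flush the aggregate first.

#include <linux/skbuff.h>
#include <linux/spinlock.h>
#include <linux/workqueue.h>
#include <linux/lockdep.h>

#define MY_MAX_AGGR_LEN 512	/* illustrative cap, not the real batman-adv limit */

struct my_iface {
	struct sk_buff_head aggr_list;	/* assumed init'ed with skb_queue_head_init() */
	unsigned int aggr_len;		/* bytes currently queued */
	struct delayed_work aggr_wq;	/* periodic flush worker */
};

static unsigned int my_ogm_len(const struct sk_buff *skb)
{
	return skb->len;		/* placeholder for batadv_v_ogm_len() */
}

/* true if ogm_len more bytes still fit; caller must hold aggr_list.lock */
static bool my_queue_left(struct my_iface *iface, unsigned int ogm_len)
{
	lockdep_assert_held(&iface->aggr_list.lock);

	return iface->aggr_len + ogm_len <= MY_MAX_AGGR_LEN;
}

static void my_queue_on_if(struct my_iface *iface, struct sk_buff *skb)
{
	spin_lock_bh(&iface->aggr_list.lock);

	if (!my_queue_left(iface, my_ogm_len(skb))) {
		/* the real code flushes the aggregate here; the sketch just drops */
		kfree_skb(skb);
		goto unlock;
	}

	iface->aggr_len += my_ogm_len(skb);
	__skb_queue_tail(&iface->aggr_list, skb);

unlock:
	spin_unlock_bh(&iface->aggr_list.lock);
}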
271 lockdep_assert_held(&bat_priv->bat_v.ogm_buff_mutex); in batadv_v_ogm_send_softif()
276 ogm_buff = bat_priv->bat_v.ogm_buff; in batadv_v_ogm_send_softif()
277 ogm_buff_len = bat_priv->bat_v.ogm_buff_len; in batadv_v_ogm_send_softif()
286 bat_priv->bat_v.ogm_buff = ogm_buff; in batadv_v_ogm_send_softif()
287 bat_priv->bat_v.ogm_buff_len = ogm_buff_len; in batadv_v_ogm_send_softif()
297 ogm_packet->seqno = htonl(atomic_read(&bat_priv->bat_v.ogm_seqno)); in batadv_v_ogm_send_softif()
298 atomic_inc(&bat_priv->bat_v.ogm_seqno); in batadv_v_ogm_send_softif()
368 struct batadv_priv_bat_v *bat_v; in batadv_v_ogm_send() local
371 bat_v = container_of(work, struct batadv_priv_bat_v, ogm_wq.work); in batadv_v_ogm_send()
372 bat_priv = container_of(bat_v, struct batadv_priv, bat_v); in batadv_v_ogm_send()
374 mutex_lock(&bat_priv->bat_v.ogm_buff_mutex); in batadv_v_ogm_send()
376 mutex_unlock(&bat_priv->bat_v.ogm_buff_mutex); in batadv_v_ogm_send()
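The batadv_v_ogm_send_softif() and batadv_v_ogm_send() matches show a second protection domain: the pre-built OGM buffer and its length live behind a single mutex, the sequence number is an atomic bumped once per send, and the worker recovers its private data via container_of() from the embedded delayed_work. A minimal sketch of that shape follows, under an assumed my_ogm_state type; the real send path additionally stamps the header and clones the buffer per interface.

#include <linux/types.h>
#include <linux/mutex.h>
#include <linux/workqueue.h>
#include <linux/atomic.h>
#include <linux/lockdep.h>

struct my_ogm_state {
	struct mutex ogm_buff_mutex;	/* protects ogm_buff and ogm_buff_len */
	u8 *ogm_buff;			/* pre-built OGM packet */
	unsigned int ogm_buff_len;
	atomic_t ogm_seqno;
	struct delayed_work ogm_wq;
};

/* caller must hold ogm_buff_mutex, mirroring batadv_v_ogm_send_softif() */
static void my_ogm_send_locked(struct my_ogm_state *st)
{
	u32 seqno;

	lockdep_assert_held(&st->ogm_buff_mutex);

	if (!st->ogm_buff)
		return;

	/* read and advance the sequence number; a real implementation would
	 * write it (in network order) into the OGM header and then queue the
	 * buffer on each interface's aggregation list */
	seqno = atomic_read(&st->ogm_seqno);
	atomic_inc(&st->ogm_seqno);
	(void)seqno;
}

static void my_ogm_send_work(struct work_struct *work)
{
	struct my_ogm_state *st;

	/* recover the enclosing state from the embedded delayed_work */
	st = container_of(work, struct my_ogm_state, ogm_wq.work);

	mutex_lock(&st->ogm_buff_mutex);
	my_ogm_send_locked(st);
	mutex_unlock(&st->ogm_buff_mutex);
}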
391 hard_iface = container_of(batv, struct batadv_hard_iface, bat_v); in batadv_v_ogm_aggr_work()
393 spin_lock_bh(&hard_iface->bat_v.aggr_list.lock); in batadv_v_ogm_aggr_work()
395 spin_unlock_bh(&hard_iface->bat_v.aggr_list.lock); in batadv_v_ogm_aggr_work()
424 cancel_delayed_work_sync(&hard_iface->bat_v.aggr_wq); in batadv_v_ogm_iface_disable()
426 spin_lock_bh(&hard_iface->bat_v.aggr_list.lock); in batadv_v_ogm_iface_disable()
428 spin_unlock_bh(&hard_iface->bat_v.aggr_list.lock); in batadv_v_ogm_iface_disable()
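The batadv_v_ogm_iface_disable() matches show the teardown order for the per-interface aggregation state: stop the flush worker first, then empty the queue under the list lock. A sketch of that order, reusing the illustrative my_iface type from the aggregation sketch above:

static void my_iface_disable(struct my_iface *iface)
{
	/* stop the periodic flush first so it cannot run concurrently */
	cancel_delayed_work_sync(&iface->aggr_wq);

	/* then drop whatever is still queued, under the list lock */
	spin_lock_bh(&iface->aggr_list.lock);
	__skb_queue_purge(&iface->aggr_list);
	iface->aggr_len = 0;
	spin_unlock_bh(&iface->aggr_list.lock);
}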
440 mutex_lock(&bat_priv->bat_v.ogm_buff_mutex); in batadv_v_ogm_primary_iface_set()
441 if (!bat_priv->bat_v.ogm_buff) in batadv_v_ogm_primary_iface_set()
444 ogm_packet = (struct batadv_ogm2_packet *)bat_priv->bat_v.ogm_buff; in batadv_v_ogm_primary_iface_set()
448 mutex_unlock(&bat_priv->bat_v.ogm_buff_mutex); in batadv_v_ogm_primary_iface_set()
497 !(if_incoming->bat_v.flags & BATADV_FULL_DUPLEX)) in batadv_v_forward_penalty()
576 ogm_forward->throughput = htonl(neigh_ifinfo->bat_v.throughput); in batadv_v_ogm_forward()
660 neigh_ifinfo->bat_v.throughput = path_throughput; in batadv_v_ogm_metric_update()
661 neigh_ifinfo->bat_v.last_seqno = ntohl(ogm2->seqno); in batadv_v_ogm_metric_update()
748 neigh_last_seqno = neigh_ifinfo->bat_v.last_seqno; in batadv_v_ogm_route_update()
749 router_last_seqno = router_ifinfo->bat_v.last_seqno; in batadv_v_ogm_route_update()
751 router_throughput = router_ifinfo->bat_v.throughput; in batadv_v_ogm_route_update()
752 neigh_throughput = neigh_ifinfo->bat_v.throughput; in batadv_v_ogm_route_update()
915 link_throughput = ewma_throughput_read(&hardif_neigh->bat_v.throughput); in batadv_v_ogm_process()
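The batadv_v_ogm_process() match reads the per-neighbor link throughput from a kernel EWMA (ewma_throughput_read()), i.e. a smoothed average generated by DECLARE_EWMA() elsewhere in batman-adv. The sketch below shows that generic EWMA API under assumed names; the precision/weight parameters and the sample value are illustrative, not the batman-adv settings.

#include <linux/average.h>
#include <linux/types.h>

/* generates ewma_my_throughput_init/add/read helpers */
DECLARE_EWMA(my_throughput, 10, 8)

struct my_hardif_neigh {
	struct ewma_my_throughput throughput;	/* assumed init'ed at neigh creation */
};

static u32 my_sample_link_throughput(struct my_hardif_neigh *neigh)
{
	/* feed one measurement, then read the smoothed value, analogous to
	 * the ewma_throughput_read() call in batadv_v_ogm_process() */
	ewma_my_throughput_add(&neigh->throughput, 100);

	return ewma_my_throughput_read(&neigh->throughput);
}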
1047 bat_priv->bat_v.ogm_buff_len = BATADV_OGM2_HLEN; in batadv_v_ogm_init()
1048 ogm_buff = kzalloc(bat_priv->bat_v.ogm_buff_len, GFP_ATOMIC); in batadv_v_ogm_init()
1052 bat_priv->bat_v.ogm_buff = ogm_buff; in batadv_v_ogm_init()
1062 atomic_set(&bat_priv->bat_v.ogm_seqno, random_seqno); in batadv_v_ogm_init()
1063 INIT_DELAYED_WORK(&bat_priv->bat_v.ogm_wq, batadv_v_ogm_send); in batadv_v_ogm_init()
1065 mutex_init(&bat_priv->bat_v.ogm_buff_mutex); in batadv_v_ogm_init()
1076 cancel_delayed_work_sync(&bat_priv->bat_v.ogm_wq); in batadv_v_ogm_free()
1078 mutex_lock(&bat_priv->bat_v.ogm_buff_mutex); in batadv_v_ogm_free()
1080 kfree(bat_priv->bat_v.ogm_buff); in batadv_v_ogm_free()
1081 bat_priv->bat_v.ogm_buff = NULL; in batadv_v_ogm_free()
1082 bat_priv->bat_v.ogm_buff_len = 0; in batadv_v_ogm_free()
1084 mutex_unlock(&bat_priv->bat_v.ogm_buff_mutex); in batadv_v_ogm_free()
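Finally, the batadv_v_ogm_init()/batadv_v_ogm_free() matches outline the lifecycle: allocate the OGM buffer, seed the sequence number randomly, set up the delayed work and the mutex; on teardown, cancel the work before freeing the buffer under the mutex so the worker can never observe a stale pointer. A sketch of that lifecycle, building on the my_ogm_state type and my_ogm_send_work() from the earlier sketch; MY_OGM_HLEN is an illustrative stand-in for BATADV_OGM2_HLEN.

#include <linux/slab.h>
#include <linux/random.h>
#include <linux/errno.h>

#define MY_OGM_HLEN 20	/* illustrative header length */

static int my_ogm_init(struct my_ogm_state *st)
{
	u32 random_seqno;

	st->ogm_buff_len = MY_OGM_HLEN;
	st->ogm_buff = kzalloc(st->ogm_buff_len, GFP_ATOMIC);
	if (!st->ogm_buff)
		return -ENOMEM;

	/* start from a random sequence number, as the matches above do */
	get_random_bytes(&random_seqno, sizeof(random_seqno));
	atomic_set(&st->ogm_seqno, random_seqno);

	INIT_DELAYED_WORK(&st->ogm_wq, my_ogm_send_work);
	mutex_init(&st->ogm_buff_mutex);

	return 0;
}

static void my_ogm_free(struct my_ogm_state *st)
{
	/* cancel the worker before touching the buffer it might still use */
	cancel_delayed_work_sync(&st->ogm_wq);

	mutex_lock(&st->ogm_buff_mutex);
	kfree(st->ogm_buff);
	st->ogm_buff = NULL;
	st->ogm_buff_len = 0;
	mutex_unlock(&st->ogm_buff_mutex);
}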