Lines matching refs: tunnel
42 #define __TB_TUNNEL_PRINT(level, tunnel, fmt, arg...) \ argument
44 struct tb_tunnel *__tunnel = (tunnel); \
54 #define tb_tunnel_WARN(tunnel, fmt, arg...) \ argument
55 __TB_TUNNEL_PRINT(tb_WARN, tunnel, fmt, ##arg)
56 #define tb_tunnel_warn(tunnel, fmt, arg...) \ argument
57 __TB_TUNNEL_PRINT(tb_warn, tunnel, fmt, ##arg)
58 #define tb_tunnel_info(tunnel, fmt, arg...) \ argument
59 __TB_TUNNEL_PRINT(tb_info, tunnel, fmt, ##arg)
60 #define tb_tunnel_dbg(tunnel, fmt, arg...) \ argument
61 __TB_TUNNEL_PRINT(tb_dbg, tunnel, fmt, ##arg)
66 struct tb_tunnel *tunnel; in tb_tunnel_alloc() local
68 tunnel = kzalloc(sizeof(*tunnel), GFP_KERNEL); in tb_tunnel_alloc()
69 if (!tunnel) in tb_tunnel_alloc()
72 tunnel->paths = kcalloc(npaths, sizeof(tunnel->paths[0]), GFP_KERNEL); in tb_tunnel_alloc()
73 if (!tunnel->paths) { in tb_tunnel_alloc()
74 tb_tunnel_free(tunnel); in tb_tunnel_alloc()
78 INIT_LIST_HEAD(&tunnel->list); in tb_tunnel_alloc()
79 tunnel->tb = tb; in tb_tunnel_alloc()
80 tunnel->npaths = npaths; in tb_tunnel_alloc()
81 tunnel->type = type; in tb_tunnel_alloc()
83 return tunnel; in tb_tunnel_alloc()
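
Put together in order, the tb_tunnel_alloc() fragments above amount to the following allocator. This is a sketch assembled from the listing; the parameter names in the signature and the bare NULL returns are assumed:

    static struct tb_tunnel *tb_tunnel_alloc(struct tb *tb, size_t npaths,
                                             enum tb_tunnel_type type)
    {
            struct tb_tunnel *tunnel;

            tunnel = kzalloc(sizeof(*tunnel), GFP_KERNEL);
            if (!tunnel)
                    return NULL;

            /* Array of npaths path pointers, filled in by the per-type callers */
            tunnel->paths = kcalloc(npaths, sizeof(tunnel->paths[0]), GFP_KERNEL);
            if (!tunnel->paths) {
                    tb_tunnel_free(tunnel);
                    return NULL;
            }

            INIT_LIST_HEAD(&tunnel->list);
            tunnel->tb = tb;
            tunnel->npaths = npaths;
            tunnel->type = type;

            return tunnel;
    }
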
86 static int tb_pci_activate(struct tb_tunnel *tunnel, bool activate) in tb_pci_activate() argument
90 res = tb_pci_port_enable(tunnel->src_port, activate); in tb_pci_activate()
94 if (tb_port_is_pcie_up(tunnel->dst_port)) in tb_pci_activate()
95 return tb_pci_port_enable(tunnel->dst_port, activate); in tb_pci_activate()
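
The tb_pci_activate() fragments describe a simple two-port enable: the source adapter first, then the destination only if it really is a PCIe upstream adapter. A sketch of the whole function, filling in the error handling and final return that the listing omits:

    static int tb_pci_activate(struct tb_tunnel *tunnel, bool activate)
    {
            int res;

            res = tb_pci_port_enable(tunnel->src_port, activate);
            if (res)
                    return res;

            /* Only touch the far end if it is a PCIe upstream adapter */
            if (tb_port_is_pcie_up(tunnel->dst_port))
                    return tb_pci_port_enable(tunnel->dst_port, activate);

            return 0;
    }
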
143 struct tb_tunnel *tunnel; in tb_tunnel_discover_pci() local
149 tunnel = tb_tunnel_alloc(tb, 2, TB_TUNNEL_PCI); in tb_tunnel_discover_pci()
150 if (!tunnel) in tb_tunnel_discover_pci()
153 tunnel->activate = tb_pci_activate; in tb_tunnel_discover_pci()
154 tunnel->src_port = down; in tb_tunnel_discover_pci()
162 &tunnel->dst_port, "PCIe Up"); in tb_tunnel_discover_pci()
168 tunnel->paths[TB_PCI_PATH_UP] = path; in tb_tunnel_discover_pci()
169 tb_pci_init_path(tunnel->paths[TB_PCI_PATH_UP]); in tb_tunnel_discover_pci()
171 path = tb_path_discover(tunnel->dst_port, -1, down, TB_PCI_HOPID, NULL, in tb_tunnel_discover_pci()
175 tunnel->paths[TB_PCI_PATH_DOWN] = path; in tb_tunnel_discover_pci()
176 tb_pci_init_path(tunnel->paths[TB_PCI_PATH_DOWN]); in tb_tunnel_discover_pci()
179 if (!tb_port_is_pcie_up(tunnel->dst_port)) { in tb_tunnel_discover_pci()
180 tb_port_warn(tunnel->dst_port, in tb_tunnel_discover_pci()
185 if (down != tunnel->src_port) { in tb_tunnel_discover_pci()
186 tb_tunnel_warn(tunnel, "path is not complete, cleaning up\n"); in tb_tunnel_discover_pci()
190 if (!tb_pci_port_is_enabled(tunnel->dst_port)) { in tb_tunnel_discover_pci()
191 tb_tunnel_warn(tunnel, in tb_tunnel_discover_pci()
196 tb_tunnel_dbg(tunnel, "discovered\n"); in tb_tunnel_discover_pci()
197 return tunnel; in tb_tunnel_discover_pci()
200 tb_tunnel_deactivate(tunnel); in tb_tunnel_discover_pci()
202 tb_tunnel_free(tunnel); in tb_tunnel_discover_pci()
221 struct tb_tunnel *tunnel; in tb_tunnel_alloc_pci() local
224 tunnel = tb_tunnel_alloc(tb, 2, TB_TUNNEL_PCI); in tb_tunnel_alloc_pci()
225 if (!tunnel) in tb_tunnel_alloc_pci()
228 tunnel->activate = tb_pci_activate; in tb_tunnel_alloc_pci()
229 tunnel->src_port = down; in tb_tunnel_alloc_pci()
230 tunnel->dst_port = up; in tb_tunnel_alloc_pci()
235 tb_tunnel_free(tunnel); in tb_tunnel_alloc_pci()
239 tunnel->paths[TB_PCI_PATH_DOWN] = path; in tb_tunnel_alloc_pci()
244 tb_tunnel_free(tunnel); in tb_tunnel_alloc_pci()
248 tunnel->paths[TB_PCI_PATH_UP] = path; in tb_tunnel_alloc_pci()
250 return tunnel; in tb_tunnel_alloc_pci()
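
For context, this is roughly how a caller would be expected to combine tb_tunnel_alloc_pci() with tb_tunnel_activate() and tb_tunnel_free(). The surrounding code, the tcm->tunnel_list name and the error values are assumptions modeled on the software connection manager, not part of this listing:

    tunnel = tb_tunnel_alloc_pci(tb, up, down);
    if (!tunnel)
            return -ENOMEM;

    if (tb_tunnel_activate(tunnel)) {
            tb_port_info(up, "PCIe tunnel activation failed, aborting\n");
            tb_tunnel_free(tunnel);
            return -EIO;
    }

    list_add_tail(&tunnel->list, &tcm->tunnel_list);
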
421 static int tb_dp_xchg_caps(struct tb_tunnel *tunnel) in tb_dp_xchg_caps() argument
424 struct tb_port *out = tunnel->dst_port; in tb_dp_xchg_caps()
425 struct tb_port *in = tunnel->src_port; in tb_dp_xchg_caps()
476 max_bw = tunnel->max_down; in tb_dp_xchg_caps()
478 max_bw = tunnel->max_up; in tb_dp_xchg_caps()
507 static int tb_dp_activate(struct tb_tunnel *tunnel, bool active) in tb_dp_activate() argument
515 paths = tunnel->paths; in tb_dp_activate()
518 tb_dp_port_set_hops(tunnel->src_port, in tb_dp_activate()
523 tb_dp_port_set_hops(tunnel->dst_port, in tb_dp_activate()
528 tb_dp_port_hpd_clear(tunnel->src_port); in tb_dp_activate()
529 tb_dp_port_set_hops(tunnel->src_port, 0, 0, 0); in tb_dp_activate()
530 if (tb_port_is_dpout(tunnel->dst_port)) in tb_dp_activate()
531 tb_dp_port_set_hops(tunnel->dst_port, 0, 0, 0); in tb_dp_activate()
534 ret = tb_dp_port_enable(tunnel->src_port, active); in tb_dp_activate()
538 if (tb_port_is_dpout(tunnel->dst_port)) in tb_dp_activate()
539 return tb_dp_port_enable(tunnel->dst_port, active); in tb_dp_activate()
544 static int tb_dp_consumed_bandwidth(struct tb_tunnel *tunnel, int *consumed_up, in tb_dp_consumed_bandwidth() argument
547 struct tb_port *in = tunnel->src_port; in tb_dp_consumed_bandwidth()
594 if (in->sw->config.depth < tunnel->dst_port->sw->config.depth) { in tb_dp_consumed_bandwidth()
656 struct tb_tunnel *tunnel; in tb_tunnel_discover_dp() local
663 tunnel = tb_tunnel_alloc(tb, 3, TB_TUNNEL_DP); in tb_tunnel_discover_dp()
664 if (!tunnel) in tb_tunnel_discover_dp()
667 tunnel->init = tb_dp_xchg_caps; in tb_tunnel_discover_dp()
668 tunnel->activate = tb_dp_activate; in tb_tunnel_discover_dp()
669 tunnel->consumed_bandwidth = tb_dp_consumed_bandwidth; in tb_tunnel_discover_dp()
670 tunnel->src_port = in; in tb_tunnel_discover_dp()
673 &tunnel->dst_port, "Video"); in tb_tunnel_discover_dp()
679 tunnel->paths[TB_DP_VIDEO_PATH_OUT] = path; in tb_tunnel_discover_dp()
680 tb_dp_init_video_path(tunnel->paths[TB_DP_VIDEO_PATH_OUT], true); in tb_tunnel_discover_dp()
685 tunnel->paths[TB_DP_AUX_PATH_OUT] = path; in tb_tunnel_discover_dp()
686 tb_dp_init_aux_path(tunnel->paths[TB_DP_AUX_PATH_OUT]); in tb_tunnel_discover_dp()
688 path = tb_path_discover(tunnel->dst_port, -1, in, TB_DP_AUX_RX_HOPID, in tb_tunnel_discover_dp()
692 tunnel->paths[TB_DP_AUX_PATH_IN] = path; in tb_tunnel_discover_dp()
693 tb_dp_init_aux_path(tunnel->paths[TB_DP_AUX_PATH_IN]); in tb_tunnel_discover_dp()
696 if (!tb_port_is_dpout(tunnel->dst_port)) { in tb_tunnel_discover_dp()
701 if (!tb_dp_port_is_enabled(tunnel->dst_port)) in tb_tunnel_discover_dp()
704 if (!tb_dp_port_hpd_is_active(tunnel->dst_port)) in tb_tunnel_discover_dp()
707 if (port != tunnel->src_port) { in tb_tunnel_discover_dp()
708 tb_tunnel_warn(tunnel, "path is not complete, cleaning up\n"); in tb_tunnel_discover_dp()
712 tb_tunnel_dbg(tunnel, "discovered\n"); in tb_tunnel_discover_dp()
713 return tunnel; in tb_tunnel_discover_dp()
716 tb_tunnel_deactivate(tunnel); in tb_tunnel_discover_dp()
718 tb_tunnel_free(tunnel); in tb_tunnel_discover_dp()
742 struct tb_tunnel *tunnel; in tb_tunnel_alloc_dp() local
749 tunnel = tb_tunnel_alloc(tb, 3, TB_TUNNEL_DP); in tb_tunnel_alloc_dp()
750 if (!tunnel) in tb_tunnel_alloc_dp()
753 tunnel->init = tb_dp_xchg_caps; in tb_tunnel_alloc_dp()
754 tunnel->activate = tb_dp_activate; in tb_tunnel_alloc_dp()
755 tunnel->consumed_bandwidth = tb_dp_consumed_bandwidth; in tb_tunnel_alloc_dp()
756 tunnel->src_port = in; in tb_tunnel_alloc_dp()
757 tunnel->dst_port = out; in tb_tunnel_alloc_dp()
758 tunnel->max_up = max_up; in tb_tunnel_alloc_dp()
759 tunnel->max_down = max_down; in tb_tunnel_alloc_dp()
761 paths = tunnel->paths; in tb_tunnel_alloc_dp()
784 return tunnel; in tb_tunnel_alloc_dp()
787 tb_tunnel_free(tunnel); in tb_tunnel_alloc_dp()
800 static int tb_dma_activate(struct tb_tunnel *tunnel, bool active) in tb_dma_activate() argument
802 struct tb_port *nhi = tunnel->src_port; in tb_dma_activate()
845 struct tb_tunnel *tunnel; in tb_tunnel_alloc_dma() local
849 tunnel = tb_tunnel_alloc(tb, 2, TB_TUNNEL_DMA); in tb_tunnel_alloc_dma()
850 if (!tunnel) in tb_tunnel_alloc_dma()
853 tunnel->activate = tb_dma_activate; in tb_tunnel_alloc_dma()
854 tunnel->src_port = nhi; in tb_tunnel_alloc_dma()
855 tunnel->dst_port = dst; in tb_tunnel_alloc_dma()
861 tb_tunnel_free(tunnel); in tb_tunnel_alloc_dma()
866 tunnel->paths[TB_DMA_PATH_IN] = path; in tb_tunnel_alloc_dma()
870 tb_tunnel_free(tunnel); in tb_tunnel_alloc_dma()
874 tunnel->paths[TB_DMA_PATH_OUT] = path; in tb_tunnel_alloc_dma()
876 return tunnel; in tb_tunnel_alloc_dma()
896 static int tb_usb3_init(struct tb_tunnel *tunnel) in tb_usb3_init() argument
898 tb_tunnel_dbg(tunnel, "allocating initial bandwidth %d/%d Mb/s\n", in tb_usb3_init()
899 tunnel->allocated_up, tunnel->allocated_down); in tb_usb3_init()
901 return usb4_usb3_port_allocate_bandwidth(tunnel->src_port, in tb_usb3_init()
902 &tunnel->allocated_up, in tb_usb3_init()
903 &tunnel->allocated_down); in tb_usb3_init()
906 static int tb_usb3_activate(struct tb_tunnel *tunnel, bool activate) in tb_usb3_activate() argument
910 res = tb_usb3_port_enable(tunnel->src_port, activate); in tb_usb3_activate()
914 if (tb_port_is_usb3_up(tunnel->dst_port)) in tb_usb3_activate()
915 return tb_usb3_port_enable(tunnel->dst_port, activate); in tb_usb3_activate()
920 static int tb_usb3_consumed_bandwidth(struct tb_tunnel *tunnel, in tb_usb3_consumed_bandwidth() argument
927 *consumed_up = tunnel->allocated_up * (3 + 1) / 3; in tb_usb3_consumed_bandwidth()
928 *consumed_down = tunnel->allocated_down * (3 + 1) / 3; in tb_usb3_consumed_bandwidth()
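
The (3 + 1) / 3 factor in tb_usb3_consumed_bandwidth() reports one third more than the tunnel's allocated figure, so with allocated_up = 900 Mb/s the reported consumption is 900 * 4 / 3 = 1200 Mb/s. The extra third is presumably headroom for other traffic (such as PCIe tunneling) sharing the same link; the listing itself does not state the rationale.
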
932 static int tb_usb3_release_unused_bandwidth(struct tb_tunnel *tunnel) in tb_usb3_release_unused_bandwidth() argument
936 ret = usb4_usb3_port_release_bandwidth(tunnel->src_port, in tb_usb3_release_unused_bandwidth()
937 &tunnel->allocated_up, in tb_usb3_release_unused_bandwidth()
938 &tunnel->allocated_down); in tb_usb3_release_unused_bandwidth()
942 tb_tunnel_dbg(tunnel, "decreased bandwidth allocation to %d/%d Mb/s\n", in tb_usb3_release_unused_bandwidth()
943 tunnel->allocated_up, tunnel->allocated_down); in tb_usb3_release_unused_bandwidth()
947 static void tb_usb3_reclaim_available_bandwidth(struct tb_tunnel *tunnel, in tb_usb3_reclaim_available_bandwidth() argument
953 ret = usb4_usb3_port_actual_link_rate(tunnel->src_port); in tb_usb3_reclaim_available_bandwidth()
955 tb_tunnel_warn(tunnel, "failed to read actual link rate\n"); in tb_usb3_reclaim_available_bandwidth()
959 ret = usb4_usb3_port_max_link_rate(tunnel->src_port); in tb_usb3_reclaim_available_bandwidth()
961 tb_tunnel_warn(tunnel, "failed to read maximum link rate\n"); in tb_usb3_reclaim_available_bandwidth()
973 if (tunnel->allocated_up >= max_rate && in tb_usb3_reclaim_available_bandwidth()
974 tunnel->allocated_down >= max_rate) in tb_usb3_reclaim_available_bandwidth()
979 if (allocate_up < tunnel->allocated_up) in tb_usb3_reclaim_available_bandwidth()
980 allocate_up = tunnel->allocated_up; in tb_usb3_reclaim_available_bandwidth()
983 if (allocate_down < tunnel->allocated_down) in tb_usb3_reclaim_available_bandwidth()
984 allocate_down = tunnel->allocated_down; in tb_usb3_reclaim_available_bandwidth()
987 if (allocate_up == tunnel->allocated_up && in tb_usb3_reclaim_available_bandwidth()
988 allocate_down == tunnel->allocated_down) in tb_usb3_reclaim_available_bandwidth()
991 ret = usb4_usb3_port_allocate_bandwidth(tunnel->src_port, &allocate_up, in tb_usb3_reclaim_available_bandwidth()
994 tb_tunnel_info(tunnel, "failed to allocate bandwidth\n"); in tb_usb3_reclaim_available_bandwidth()
998 tunnel->allocated_up = allocate_up; in tb_usb3_reclaim_available_bandwidth()
999 *available_up -= tunnel->allocated_up; in tb_usb3_reclaim_available_bandwidth()
1001 tunnel->allocated_down = allocate_down; in tb_usb3_reclaim_available_bandwidth()
1002 *available_down -= tunnel->allocated_down; in tb_usb3_reclaim_available_bandwidth()
1004 tb_tunnel_dbg(tunnel, "increased bandwidth allocation to %d/%d Mb/s\n", in tb_usb3_reclaim_available_bandwidth()
1005 tunnel->allocated_up, tunnel->allocated_down); in tb_usb3_reclaim_available_bandwidth()
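
Read as a whole, the tb_usb3_reclaim_available_bandwidth() fragments describe the control flow below. The sketch is condensed from them; the 90% ceiling and the min() clamping against *available_up/*available_down are assumptions filled in where the listing only shows the comparisons:

    static void tb_usb3_reclaim_available_bandwidth(struct tb_tunnel *tunnel,
                                                    int *available_up,
                                                    int *available_down)
    {
            int ret, max_rate, allocate_up, allocate_down;

            /* Prefer the negotiated rate, fall back to the maximum rate */
            ret = usb4_usb3_port_actual_link_rate(tunnel->src_port);
            if (ret < 0) {
                    tb_tunnel_warn(tunnel, "failed to read actual link rate\n");
                    return;
            } else if (!ret) {
                    ret = usb4_usb3_port_max_link_rate(tunnel->src_port);
                    if (ret < 0) {
                            tb_tunnel_warn(tunnel, "failed to read maximum link rate\n");
                            return;
                    }
            }

            /* Assumed: only about 90% of the link is usable for USB3 payload */
            max_rate = ret * 90 / 100;

            /* Already at the ceiling, nothing to reclaim */
            if (tunnel->allocated_up >= max_rate &&
                tunnel->allocated_down >= max_rate)
                    return;

            /* Grow toward what is available, but never shrink */
            allocate_up = min(max_rate, *available_up);
            if (allocate_up < tunnel->allocated_up)
                    allocate_up = tunnel->allocated_up;

            allocate_down = min(max_rate, *available_down);
            if (allocate_down < tunnel->allocated_down)
                    allocate_down = tunnel->allocated_down;

            /* No change, so no need to reprogram the adapter */
            if (allocate_up == tunnel->allocated_up &&
                allocate_down == tunnel->allocated_down)
                    return;

            ret = usb4_usb3_port_allocate_bandwidth(tunnel->src_port,
                                                    &allocate_up, &allocate_down);
            if (ret) {
                    tb_tunnel_info(tunnel, "failed to allocate bandwidth\n");
                    return;
            }

            tunnel->allocated_up = allocate_up;
            *available_up -= tunnel->allocated_up;
            tunnel->allocated_down = allocate_down;
            *available_down -= tunnel->allocated_down;

            tb_tunnel_dbg(tunnel, "increased bandwidth allocation to %d/%d Mb/s\n",
                          tunnel->allocated_up, tunnel->allocated_down);
    }
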
1035 struct tb_tunnel *tunnel; in tb_tunnel_discover_usb3() local
1041 tunnel = tb_tunnel_alloc(tb, 2, TB_TUNNEL_USB3); in tb_tunnel_discover_usb3()
1042 if (!tunnel) in tb_tunnel_discover_usb3()
1045 tunnel->activate = tb_usb3_activate; in tb_tunnel_discover_usb3()
1046 tunnel->src_port = down; in tb_tunnel_discover_usb3()
1054 &tunnel->dst_port, "USB3 Down"); in tb_tunnel_discover_usb3()
1060 tunnel->paths[TB_USB3_PATH_DOWN] = path; in tb_tunnel_discover_usb3()
1061 tb_usb3_init_path(tunnel->paths[TB_USB3_PATH_DOWN]); in tb_tunnel_discover_usb3()
1063 path = tb_path_discover(tunnel->dst_port, -1, down, TB_USB3_HOPID, NULL, in tb_tunnel_discover_usb3()
1067 tunnel->paths[TB_USB3_PATH_UP] = path; in tb_tunnel_discover_usb3()
1068 tb_usb3_init_path(tunnel->paths[TB_USB3_PATH_UP]); in tb_tunnel_discover_usb3()
1071 if (!tb_port_is_usb3_up(tunnel->dst_port)) { in tb_tunnel_discover_usb3()
1072 tb_port_warn(tunnel->dst_port, in tb_tunnel_discover_usb3()
1077 if (down != tunnel->src_port) { in tb_tunnel_discover_usb3()
1078 tb_tunnel_warn(tunnel, "path is not complete, cleaning up\n"); in tb_tunnel_discover_usb3()
1082 if (!tb_usb3_port_is_enabled(tunnel->dst_port)) { in tb_tunnel_discover_usb3()
1083 tb_tunnel_warn(tunnel, in tb_tunnel_discover_usb3()
1096 &tunnel->allocated_up, &tunnel->allocated_down); in tb_tunnel_discover_usb3()
1100 tb_tunnel_dbg(tunnel, "currently allocated bandwidth %d/%d Mb/s\n", in tb_tunnel_discover_usb3()
1101 tunnel->allocated_up, tunnel->allocated_down); in tb_tunnel_discover_usb3()
1103 tunnel->init = tb_usb3_init; in tb_tunnel_discover_usb3()
1104 tunnel->consumed_bandwidth = tb_usb3_consumed_bandwidth; in tb_tunnel_discover_usb3()
1105 tunnel->release_unused_bandwidth = in tb_tunnel_discover_usb3()
1107 tunnel->reclaim_available_bandwidth = in tb_tunnel_discover_usb3()
1111 tb_tunnel_dbg(tunnel, "discovered\n"); in tb_tunnel_discover_usb3()
1112 return tunnel; in tb_tunnel_discover_usb3()
1115 tb_tunnel_deactivate(tunnel); in tb_tunnel_discover_usb3()
1117 tb_tunnel_free(tunnel); in tb_tunnel_discover_usb3()
1141 struct tb_tunnel *tunnel; in tb_tunnel_alloc_usb3() local
1165 tunnel = tb_tunnel_alloc(tb, 2, TB_TUNNEL_USB3); in tb_tunnel_alloc_usb3()
1166 if (!tunnel) in tb_tunnel_alloc_usb3()
1169 tunnel->activate = tb_usb3_activate; in tb_tunnel_alloc_usb3()
1170 tunnel->src_port = down; in tb_tunnel_alloc_usb3()
1171 tunnel->dst_port = up; in tb_tunnel_alloc_usb3()
1172 tunnel->max_up = max_up; in tb_tunnel_alloc_usb3()
1173 tunnel->max_down = max_down; in tb_tunnel_alloc_usb3()
1178 tb_tunnel_free(tunnel); in tb_tunnel_alloc_usb3()
1182 tunnel->paths[TB_USB3_PATH_DOWN] = path; in tb_tunnel_alloc_usb3()
1187 tb_tunnel_free(tunnel); in tb_tunnel_alloc_usb3()
1191 tunnel->paths[TB_USB3_PATH_UP] = path; in tb_tunnel_alloc_usb3()
1194 tunnel->allocated_up = max_rate; in tb_tunnel_alloc_usb3()
1195 tunnel->allocated_down = max_rate; in tb_tunnel_alloc_usb3()
1197 tunnel->init = tb_usb3_init; in tb_tunnel_alloc_usb3()
1198 tunnel->consumed_bandwidth = tb_usb3_consumed_bandwidth; in tb_tunnel_alloc_usb3()
1199 tunnel->release_unused_bandwidth = in tb_tunnel_alloc_usb3()
1201 tunnel->reclaim_available_bandwidth = in tb_tunnel_alloc_usb3()
1205 return tunnel; in tb_tunnel_alloc_usb3()
1214 void tb_tunnel_free(struct tb_tunnel *tunnel) in tb_tunnel_free() argument
1218 if (!tunnel) in tb_tunnel_free()
1221 for (i = 0; i < tunnel->npaths; i++) { in tb_tunnel_free()
1222 if (tunnel->paths[i]) in tb_tunnel_free()
1223 tb_path_free(tunnel->paths[i]); in tb_tunnel_free()
1226 kfree(tunnel->paths); in tb_tunnel_free()
1227 kfree(tunnel); in tb_tunnel_free()
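
The tb_tunnel_free() fragments assemble into a straightforward teardown; a sketch, with the early return on NULL implied by the if (!tunnel) fragment:

    void tb_tunnel_free(struct tb_tunnel *tunnel)
    {
            int i;

            if (!tunnel)
                    return;

            /* Release every path first, then the path array and the tunnel */
            for (i = 0; i < tunnel->npaths; i++) {
                    if (tunnel->paths[i])
                            tb_path_free(tunnel->paths[i]);
            }

            kfree(tunnel->paths);
            kfree(tunnel);
    }
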
1234 bool tb_tunnel_is_invalid(struct tb_tunnel *tunnel) in tb_tunnel_is_invalid() argument
1238 for (i = 0; i < tunnel->npaths; i++) { in tb_tunnel_is_invalid()
1239 WARN_ON(!tunnel->paths[i]->activated); in tb_tunnel_is_invalid()
1240 if (tb_path_is_invalid(tunnel->paths[i])) in tb_tunnel_is_invalid()
1253 int tb_tunnel_restart(struct tb_tunnel *tunnel) in tb_tunnel_restart() argument
1257 tb_tunnel_dbg(tunnel, "activating\n"); in tb_tunnel_restart()
1263 for (i = 0; i < tunnel->npaths; i++) { in tb_tunnel_restart()
1264 if (tunnel->paths[i]->activated) { in tb_tunnel_restart()
1265 tb_path_deactivate(tunnel->paths[i]); in tb_tunnel_restart()
1266 tunnel->paths[i]->activated = false; in tb_tunnel_restart()
1270 if (tunnel->init) { in tb_tunnel_restart()
1271 res = tunnel->init(tunnel); in tb_tunnel_restart()
1276 for (i = 0; i < tunnel->npaths; i++) { in tb_tunnel_restart()
1277 res = tb_path_activate(tunnel->paths[i]); in tb_tunnel_restart()
1282 if (tunnel->activate) { in tb_tunnel_restart()
1283 res = tunnel->activate(tunnel, true); in tb_tunnel_restart()
1291 tb_tunnel_warn(tunnel, "activation failed\n"); in tb_tunnel_restart()
1292 tb_tunnel_deactivate(tunnel); in tb_tunnel_restart()
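
The tb_tunnel_restart() fragments spell out the activation order: deactivate any stale paths, run the per-type init hook, program every path, run the per-type activate hook, and tear everything back down on failure. Condensed into a sketch; the error label and the exact flow between steps are assumptions:

    int tb_tunnel_restart(struct tb_tunnel *tunnel)
    {
            int res, i;

            tb_tunnel_dbg(tunnel, "activating\n");

            /* Make sure stale paths are disabled before programming them again */
            for (i = 0; i < tunnel->npaths; i++) {
                    if (tunnel->paths[i]->activated) {
                            tb_path_deactivate(tunnel->paths[i]);
                            tunnel->paths[i]->activated = false;
                    }
            }

            if (tunnel->init) {
                    res = tunnel->init(tunnel);
                    if (res)
                            return res;
            }

            for (i = 0; i < tunnel->npaths; i++) {
                    res = tb_path_activate(tunnel->paths[i]);
                    if (res)
                            goto err;
            }

            if (tunnel->activate) {
                    res = tunnel->activate(tunnel, true);
                    if (res)
                            goto err;
            }

            return 0;

    err:
            tb_tunnel_warn(tunnel, "activation failed\n");
            tb_tunnel_deactivate(tunnel);
            return res;
    }
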
1302 int tb_tunnel_activate(struct tb_tunnel *tunnel) in tb_tunnel_activate() argument
1306 for (i = 0; i < tunnel->npaths; i++) { in tb_tunnel_activate()
1307 if (tunnel->paths[i]->activated) { in tb_tunnel_activate()
1308 tb_tunnel_WARN(tunnel, in tb_tunnel_activate()
1314 return tb_tunnel_restart(tunnel); in tb_tunnel_activate()
1321 void tb_tunnel_deactivate(struct tb_tunnel *tunnel) in tb_tunnel_deactivate() argument
1325 tb_tunnel_dbg(tunnel, "deactivating\n"); in tb_tunnel_deactivate()
1327 if (tunnel->activate) in tb_tunnel_deactivate()
1328 tunnel->activate(tunnel, false); in tb_tunnel_deactivate()
1330 for (i = 0; i < tunnel->npaths; i++) { in tb_tunnel_deactivate()
1331 if (tunnel->paths[i] && tunnel->paths[i]->activated) in tb_tunnel_deactivate()
1332 tb_path_deactivate(tunnel->paths[i]); in tb_tunnel_deactivate()
1344 bool tb_tunnel_port_on_path(const struct tb_tunnel *tunnel, in tb_tunnel_port_on_path() argument
1349 for (i = 0; i < tunnel->npaths; i++) { in tb_tunnel_port_on_path()
1350 if (!tunnel->paths[i]) in tb_tunnel_port_on_path()
1353 if (tb_path_port_on_path(tunnel->paths[i], port)) in tb_tunnel_port_on_path()
1360 static bool tb_tunnel_is_active(const struct tb_tunnel *tunnel) in tb_tunnel_is_active() argument
1364 for (i = 0; i < tunnel->npaths; i++) { in tb_tunnel_is_active()
1365 if (!tunnel->paths[i]) in tb_tunnel_is_active()
1367 if (!tunnel->paths[i]->activated) in tb_tunnel_is_active()
1386 int tb_tunnel_consumed_bandwidth(struct tb_tunnel *tunnel, int *consumed_up, in tb_tunnel_consumed_bandwidth() argument
1391 if (!tb_tunnel_is_active(tunnel)) in tb_tunnel_consumed_bandwidth()
1394 if (tunnel->consumed_bandwidth) { in tb_tunnel_consumed_bandwidth()
1397 ret = tunnel->consumed_bandwidth(tunnel, &up_bw, &down_bw); in tb_tunnel_consumed_bandwidth()
1401 tb_tunnel_dbg(tunnel, "consumed bandwidth %d/%d Mb/s\n", up_bw, in tb_tunnel_consumed_bandwidth()
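
The tb_tunnel_consumed_bandwidth() fragments show the wrapper pattern used for the per-type hooks: report zero for an inactive tunnel, otherwise delegate to the type-specific callback. A sketch of how the pieces likely fit together; the out label and the NULL checks on the output pointers are assumptions:

    int tb_tunnel_consumed_bandwidth(struct tb_tunnel *tunnel, int *consumed_up,
                                     int *consumed_down)
    {
            int up_bw = 0, down_bw = 0;

            if (!tb_tunnel_is_active(tunnel))
                    goto out;

            if (tunnel->consumed_bandwidth) {
                    int ret;

                    ret = tunnel->consumed_bandwidth(tunnel, &up_bw, &down_bw);
                    if (ret)
                            return ret;

                    tb_tunnel_dbg(tunnel, "consumed bandwidth %d/%d Mb/s\n",
                                  up_bw, down_bw);
            }

    out:
            /* Callers may pass NULL for a direction they do not care about */
            if (consumed_up)
                    *consumed_up = up_bw;
            if (consumed_down)
                    *consumed_down = down_bw;

            return 0;
    }
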
1423 int tb_tunnel_release_unused_bandwidth(struct tb_tunnel *tunnel) in tb_tunnel_release_unused_bandwidth() argument
1425 if (!tb_tunnel_is_active(tunnel)) in tb_tunnel_release_unused_bandwidth()
1428 if (tunnel->release_unused_bandwidth) { in tb_tunnel_release_unused_bandwidth()
1431 ret = tunnel->release_unused_bandwidth(tunnel); in tb_tunnel_release_unused_bandwidth()
1450 void tb_tunnel_reclaim_available_bandwidth(struct tb_tunnel *tunnel, in tb_tunnel_reclaim_available_bandwidth() argument
1454 if (!tb_tunnel_is_active(tunnel)) in tb_tunnel_reclaim_available_bandwidth()
1457 if (tunnel->reclaim_available_bandwidth) in tb_tunnel_reclaim_available_bandwidth()
1458 tunnel->reclaim_available_bandwidth(tunnel, available_up, in tb_tunnel_reclaim_available_bandwidth()