Lines matching refs: dhd

252 typedef uint8 (* d2h_sync_cb_t)(dhd_pub_t *dhd, struct msgbuf_ring *ring,
263 typedef int (* d2h_edl_sync_cb_t)(dhd_pub_t *dhd, struct msgbuf_ring *ring,
538 static void dhd_msgbuf_send_msg_tx_ts(dhd_pub_t *dhd, void *pkt,
540 static void dhd_msgbuf_send_msg_rx_ts(dhd_pub_t *dhd, void *pkt,
582 void *dhd; member
774 int dhd_ring_write(dhd_pub_t *dhd, msgbuf_ring_t *ring, void *file,
778 int dhd_edl_ring_hdr_write(dhd_pub_t *dhd, msgbuf_ring_t *ring, void *file, const void *user_buf,
788 static int dhd_dma_buf_audit(dhd_pub_t *dhd, dhd_dma_buf_t *dma_buf);
789 static void dhd_dma_buf_reset(dhd_pub_t *dhd, dhd_dma_buf_t *dma_buf);
792 static int dhd_prot_allocate_bufs(dhd_pub_t *dhd, dhd_prot_t *prot);
793 static int dhd_prot_ring_attach(dhd_pub_t *dhd, msgbuf_ring_t *ring,
795 static void dhd_prot_ring_init(dhd_pub_t *dhd, msgbuf_ring_t *ring);
796 static void dhd_prot_ring_reset(dhd_pub_t *dhd, msgbuf_ring_t *ring);
797 static void dhd_prot_ring_detach(dhd_pub_t *dhd, msgbuf_ring_t *ring);
798 static void dhd_prot_process_fw_timestamp(dhd_pub_t *dhd, void* buf);
801 static int dhd_prot_flowrings_pool_attach(dhd_pub_t *dhd);
802 static void dhd_prot_flowrings_pool_reset(dhd_pub_t *dhd);
803 static void dhd_prot_flowrings_pool_detach(dhd_pub_t *dhd);
806 static msgbuf_ring_t *dhd_prot_flowrings_pool_fetch(dhd_pub_t *dhd,
811 static void* dhd_prot_alloc_ring_space(dhd_pub_t *dhd, msgbuf_ring_t *ring,
817 static uint8* dhd_prot_get_read_addr(dhd_pub_t *dhd, msgbuf_ring_t *ring,
821 static void dhd_prot_ring_write_complete(dhd_pub_t *dhd, msgbuf_ring_t *ring,
825 static void dhd_prot_agg_db_ring_write(dhd_pub_t *dhd, msgbuf_ring_t * ring,
827 static void dhd_prot_aggregate_db_ring_door_bell(dhd_pub_t *dhd, uint16 flowid, bool ring_db);
828 static void dhd_prot_txdata_aggr_db_write_flush(dhd_pub_t *dhd, uint16 flowid);
830 static void dhd_prot_ring_doorbell(dhd_pub_t *dhd, uint32 value);
831 static void dhd_prot_upd_read_idx(dhd_pub_t *dhd, msgbuf_ring_t *ring);
833 static INLINE int dhd_prot_dma_indx_alloc(dhd_pub_t *dhd, uint8 type,
838 void dhd_prot_dma_indx_set(dhd_pub_t *dhd, uint16 new_index, uint8 type,
840 static uint16 dhd_prot_dma_indx_get(dhd_pub_t *dhd, uint8 type, uint16 ringid);
843 static INLINE void *dhd_prot_packet_get(dhd_pub_t *dhd, uint32 pktid, uint8 pkttype,
846 static INLINE void dhd_prot_packet_free(dhd_pub_t *dhd, void *pkt, uint8 pkttype, bool send);
848 static int dhd_msgbuf_query_ioctl(dhd_pub_t *dhd, int ifidx, uint cmd,
850 static int dhd_msgbuf_set_ioctl(dhd_pub_t *dhd, int ifidx, uint cmd,
852 static int dhd_msgbuf_wait_ioctl_cmplt(dhd_pub_t *dhd, uint32 len, void *buf);
853 static int dhd_fillup_ioct_reqst(dhd_pub_t *dhd, uint16 len, uint cmd,
857 static uint16 dhd_msgbuf_rxbuf_post_ctrlpath(dhd_pub_t *dhd, uint8 msgid, uint32 max_to_post);
860 static void dhd_msgbuf_rxbuf_post(dhd_pub_t *dhd, bool use_rsv_pktid);
861 static int dhd_prot_rxbuf_post(dhd_pub_t *dhd, uint16 count, bool use_rsv_pktid);
864 static void dhd_prot_return_rxbuf(dhd_pub_t *dhd, msgbuf_ring_t *ring, uint32 pktid, uint32 rxcnt);
867 static void dhd_prot_ioctl_trace(dhd_pub_t *dhd, ioctl_req_msg_t *ioct_rqst, uchar *buf, int len);
872 static int dhd_prot_process_msgtype(dhd_pub_t *dhd, msgbuf_ring_t *ring, uint8 *buf, uint32 len);
875 static void dhd_prot_noop(dhd_pub_t *dhd, void *msg);
876 static void dhd_prot_txstatus_process(dhd_pub_t *dhd, void *msg);
877 static void dhd_prot_ioctcmplt_process(dhd_pub_t *dhd, void *msg);
878 static void dhd_prot_ioctack_process(dhd_pub_t *dhd, void *msg);
879 static void dhd_prot_ringstatus_process(dhd_pub_t *dhd, void *msg);
880 static void dhd_prot_genstatus_process(dhd_pub_t *dhd, void *msg);
881 static void dhd_prot_event_process(dhd_pub_t *dhd, void *msg);
884 static void dmaxfer_free_dmaaddr(dhd_pub_t *dhd, dhd_dmaxfer_t *dma);
885 static int dmaxfer_prepare_dmaaddr(dhd_pub_t *dhd, uint len, uint srcdelay,
887 static void dhd_msgbuf_dmaxfer_process(dhd_pub_t *dhd, void *msg);
890 static void dhd_prot_flow_ring_create_response_process(dhd_pub_t *dhd, void *msg);
891 static void dhd_prot_flow_ring_delete_response_process(dhd_pub_t *dhd, void *msg);
892 static void dhd_prot_flow_ring_flush_response_process(dhd_pub_t *dhd, void *msg);
893 static void dhd_prot_process_flow_ring_resume_response(dhd_pub_t *dhd, void* msg);
894 static void dhd_prot_process_flow_ring_suspend_response(dhd_pub_t *dhd, void* msg);
898 extern bool dhd_monitor_enabled(dhd_pub_t *dhd, int ifidx);
903 static void dhd_msgbuf_ring_config_d2h_soft_doorbell(dhd_pub_t *dhd);
904 static void dhd_prot_process_d2h_ring_config_complete(dhd_pub_t *dhd, void *msg);
905 static void dhd_prot_process_d2h_ring_create_complete(dhd_pub_t *dhd, void *buf);
907 static void dhd_prot_process_h2d_ring_create_complete(dhd_pub_t *dhd, void *buf);
908 static void dhd_prot_process_infobuf_complete(dhd_pub_t *dhd, void* buf);
910 static void dhd_prot_process_d2h_mb_data(dhd_pub_t *dhd, void* buf);
911 static void dhd_prot_detach_info_rings(dhd_pub_t *dhd);
913 static void dhd_prot_process_btlog_complete(dhd_pub_t *dhd, void* buf);
914 static void dhd_prot_detach_btlog_rings(dhd_pub_t *dhd);
917 static void dhd_prot_detach_hp2p_rings(dhd_pub_t *dhd);
920 static void dhd_prot_detach_edl_rings(dhd_pub_t *dhd);
922 static void dhd_prot_process_d2h_host_ts_complete(dhd_pub_t *dhd, void* buf);
923 static void dhd_prot_process_snapshot_complete(dhd_pub_t *dhd, void *buf);
940 static void dhd_update_hp2p_rxstats(dhd_pub_t *dhd, host_rxbuf_cmpl_t *rxstatus);
941 static void dhd_update_hp2p_txstats(dhd_pub_t *dhd, host_txbuf_cmpl_t *txstatus);
942 static void dhd_calc_hp2p_burst(dhd_pub_t *dhd, msgbuf_ring_t *ring, uint16 flowid);
943 static void dhd_update_hp2p_txdesc(dhd_pub_t *dhd, host_txbuf_post_t *txdesc);
945 typedef void (*dhd_msgbuf_func_t)(dhd_pub_t *dhd, void *msg);
1011 #define PKT_CTF_CHAINABLE(dhd, ifidx, evh, prio, h_sa, h_da, h_prio) \ argument
1012 (dhd_wet_chainable(dhd) && \
1013 dhd_rx_pkt_chainable((dhd), (ifidx)) && \
1018 ((h_prio) == (prio)) && (dhd_ctf_hotbrc_check((dhd), (evh), (ifidx))) && \
1023 static void dhd_rxchain_frame(dhd_pub_t *dhd, void *pkt, uint ifidx);
1024 static void dhd_rxchain_commit(dhd_pub_t *dhd);
1036 static void dhd_prot_h2d_sync_init(dhd_pub_t *dhd);
1040 dhd_prot_get_minidump_buf(dhd_pub_t *dhd) in dhd_prot_get_minidump_buf() argument
1042 return &dhd->prot->fw_trap_buf; in dhd_prot_get_minidump_buf()
1047 dhd_prot_get_rxbufpost_sz(dhd_pub_t *dhd) in dhd_prot_get_rxbufpost_sz() argument
1049 return dhd->prot->rxbufpost_sz; in dhd_prot_get_rxbufpost_sz()
1053 dhd_prot_get_h2d_rx_post_active(dhd_pub_t *dhd) in dhd_prot_get_h2d_rx_post_active() argument
1055 dhd_prot_t *prot = dhd->prot; in dhd_prot_get_h2d_rx_post_active()
1062 if (dhd->dma_d2h_ring_upd_support) { in dhd_prot_get_h2d_rx_post_active()
1063 rd = dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_RD_UPD, flow_ring->idx); in dhd_prot_get_h2d_rx_post_active()
1065 dhd_bus_cmn_readshared(dhd->bus, &rd, RING_RD_UPD, flow_ring->idx); in dhd_prot_get_h2d_rx_post_active()
1071 dhd_prot_get_d2h_rx_cpln_active(dhd_pub_t *dhd) in dhd_prot_get_d2h_rx_cpln_active() argument
1073 dhd_prot_t *prot = dhd->prot; in dhd_prot_get_d2h_rx_cpln_active()
1077 if (dhd->dma_d2h_ring_upd_support) { in dhd_prot_get_d2h_rx_cpln_active()
1078 wr = dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_WR_UPD, flow_ring->idx); in dhd_prot_get_d2h_rx_cpln_active()
1080 dhd_bus_cmn_readshared(dhd->bus, &wr, RING_WR_UPD, flow_ring->idx); in dhd_prot_get_d2h_rx_cpln_active()
1090 dhd_prot_is_cmpl_ring_empty(dhd_pub_t *dhd, void *prot_info) in dhd_prot_is_cmpl_ring_empty() argument
1096 if (dhd->dma_d2h_ring_upd_support) { in dhd_prot_is_cmpl_ring_empty()
1099 dhd_bus_cmn_readshared(dhd->bus, &wr, RING_WR_UPD, flow_ring->idx); in dhd_prot_is_cmpl_ring_empty()
1101 if (dhd->dma_h2d_ring_upd_support) { in dhd_prot_is_cmpl_ring_empty()
1104 dhd_bus_cmn_readshared(dhd->bus, &rd, RING_RD_UPD, flow_ring->idx); in dhd_prot_is_cmpl_ring_empty()
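The fragments around lines 1053-1104 all use the same idiom for fetching a ring's read/write index: when the dongle DMAs index updates into host memory (dhd->dma_d2h_ring_upd_support / dma_h2d_ring_upd_support), the index comes from the host-side index buffer via dhd_prot_dma_indx_get(); otherwise it is read from the PCIe shared area via dhd_bus_cmn_readshared(). A minimal sketch of that idiom, using only names visible in the fragments above (illustrative, not the driver's verbatim code):

    static uint16
    example_ring_read_index(dhd_pub_t *dhd, msgbuf_ring_t *ring)
    {
        uint16 rd = 0;

        if (dhd->dma_d2h_ring_upd_support) {
            /* dongle DMAs the updated RD index into a host-memory buffer */
            rd = dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_RD_UPD, ring->idx);
        } else {
            /* otherwise read the index from the PCIe shared area */
            dhd_bus_cmn_readshared(dhd->bus, &rd, RING_RD_UPD, ring->idx);
        }
        return rd;
    }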
1119 dhd_prot_get_h2d_max_txpost(dhd_pub_t *dhd) in dhd_prot_get_h2d_max_txpost() argument
1124 dhd_prot_set_h2d_max_txpost(dhd_pub_t *dhd, uint16 max_txpost) in dhd_prot_set_h2d_max_txpost() argument
1130 dhd_prot_get_h2d_htput_max_txpost(dhd_pub_t *dhd) in dhd_prot_get_h2d_htput_max_txpost() argument
1135 dhd_prot_set_h2d_htput_max_txpost(dhd_pub_t *dhd, uint16 htput_max_txpost) in dhd_prot_set_h2d_htput_max_txpost() argument
1149 static void dhd_prot_d2h_sync_livelock(dhd_pub_t *dhd, uint32 msg_seqnum, msgbuf_ring_t *ring,
1151 static uint8 dhd_prot_d2h_sync_seqnum(dhd_pub_t *dhd, msgbuf_ring_t *ring,
1153 static uint8 dhd_prot_d2h_sync_xorcsum(dhd_pub_t *dhd, msgbuf_ring_t *ring,
1155 static uint8 dhd_prot_d2h_sync_none(dhd_pub_t *dhd, msgbuf_ring_t *ring,
1157 static void dhd_prot_d2h_sync_init(dhd_pub_t *dhd);
1158 static int dhd_send_d2h_ringcreate(dhd_pub_t *dhd, msgbuf_ring_t *ring_to_create,
1160 static int dhd_send_h2d_ringcreate(dhd_pub_t *dhd, msgbuf_ring_t *ring_to_create,
1173 dhd_prot_d2h_sync_livelock(dhd_pub_t *dhd, uint32 msg_seqnum, msgbuf_ring_t *ring, uint32 tries, in dhd_prot_d2h_sync_livelock() argument
1178 if (dhd_query_bus_erros(dhd)) { in dhd_prot_d2h_sync_livelock()
1185 dhd, ring->name, msg_seqnum, ring_seqnum, ring_seqnum% D2H_EPOCH_MODULO, tries, in dhd_prot_d2h_sync_livelock()
1186 dhd->prot->d2h_sync_wait_max, dhd->prot->d2h_sync_wait_tot, in dhd_prot_d2h_sync_livelock()
1193 dhdpcie_runtime_bus_wake(dhd, CAN_SLEEP(), __builtin_return_address(0)); in dhd_prot_d2h_sync_livelock()
1197 if (DHD_BUS_CHECK_SUSPEND_OR_ANY_SUSPEND_IN_PROGRESS(dhd)) { in dhd_prot_d2h_sync_livelock()
1199 __FUNCTION__, dhd->busstate, dhd->dhd_bus_busy_state)); in dhd_prot_d2h_sync_livelock()
1203 dhd_bus_dump_console_buffer(dhd->bus); in dhd_prot_d2h_sync_livelock()
1204 dhd_prot_debug_info_print(dhd); in dhd_prot_d2h_sync_livelock()
1207 if (dhd->memdump_enabled) { in dhd_prot_d2h_sync_livelock()
1209 dhd->memdump_type = DUMP_TYPE_BY_LIVELOCK; in dhd_prot_d2h_sync_livelock()
1210 dhd_bus_mem_dump(dhd); in dhd_prot_d2h_sync_livelock()
1215 dhd_schedule_reset(dhd); in dhd_prot_d2h_sync_livelock()
1220 dhd->bus->no_cfg_restore = 1; in dhd_prot_d2h_sync_livelock()
1223 dhd->hang_reason = HANG_REASON_MSGBUF_LIVELOCK; in dhd_prot_d2h_sync_livelock()
1224 dhd_os_send_hang_message(dhd); in dhd_prot_d2h_sync_livelock()
1227 dhd->livelock_occured = TRUE; in dhd_prot_d2h_sync_livelock()
1235 BCMFASTPATH(dhd_prot_d2h_sync_seqnum)(dhd_pub_t *dhd, msgbuf_ring_t *ring, in BCMFASTPATH()
1242 dhd_prot_t *prot = dhd->prot; in BCMFASTPATH()
1279 if (dhd->dhd_induce_error != DHD_INDUCE_LIVELOCK) { in BCMFASTPATH()
1296 dhd_prot_d2h_sync_livelock(dhd, msg_seqnum, ring, total_tries, in BCMFASTPATH()
1314 BCMFASTPATH(dhd_prot_d2h_sync_xorcsum)(dhd_pub_t *dhd, msgbuf_ring_t *ring, in BCMFASTPATH()
1321 dhd_prot_t *prot = dhd->prot; in BCMFASTPATH()
1365 if (dhd->dhd_induce_error != DHD_INDUCE_LIVELOCK) { in BCMFASTPATH()
1384 dhd_prot_d2h_sync_livelock(dhd, msg->epoch, ring, total_tries, in BCMFASTPATH()
1402 BCMFASTPATH(dhd_prot_d2h_sync_none)(dhd_pub_t *dhd, msgbuf_ring_t *ring, in BCMFASTPATH()
1409 if (dhd->dhd_induce_error == DHD_INDUCE_LIVELOCK) { in BCMFASTPATH()
1432 BCMFASTPATH(dhd_prot_d2h_sync_edl)(dhd_pub_t *dhd, msgbuf_ring_t *ring, in BCMFASTPATH()
1438 dhd_prot_t *prot = dhd->prot; in BCMFASTPATH()
1501 if (dhd->dhd_induce_error != DHD_INDUCE_LIVELOCK) { in BCMFASTPATH()
1550 dhd_prot_d2h_sync_livelock(dhd, msg->epoch, ring, total_tries, in BCMFASTPATH()
1570 (dhd_prot_d2h_sync_edl_none)(dhd_pub_t *dhd, msgbuf_ring_t *ring, in BCMFASTPATH()
1577 if (dhd->dhd_induce_error == DHD_INDUCE_LIVELOCK) { in BCMFASTPATH()
1590 dhd_wakeup_ioctl_event(dhd_pub_t *dhd, dhd_ioctl_recieved_status_t reason) in dhd_wakeup_ioctl_event() argument
1594 dhd->prot->ioctl_received = reason; in dhd_wakeup_ioctl_event()
1597 dhd_os_ioctl_resp_wake(dhd); in dhd_wakeup_ioctl_event()
1605 dhd_prot_d2h_sync_init(dhd_pub_t *dhd) in dhd_prot_d2h_sync_init() argument
1607 dhd_prot_t *prot = dhd->prot; in dhd_prot_d2h_sync_init()
1620 if (dhd->d2h_sync_mode & PCIE_SHARED_D2H_SYNC_SEQNUM) { in dhd_prot_d2h_sync_init()
1626 } else if (dhd->d2h_sync_mode & PCIE_SHARED_D2H_SYNC_XORCSUM) { in dhd_prot_d2h_sync_init()
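Lines 1605-1626 show dhd_prot_d2h_sync_init() choosing the D2H completion-sync strategy from dhd->d2h_sync_mode; the candidate callbacks (sequence-number check, XOR checksum, or none) are the ones declared at lines 1151-1155 and typed by d2h_sync_cb_t at line 252. A hedged sketch of that selection; the prot->d2h_sync_cb member name is an assumption for illustration:

    static void
    example_d2h_sync_select(dhd_pub_t *dhd)
    {
        dhd_prot_t *prot = dhd->prot;

        if (dhd->d2h_sync_mode & PCIE_SHARED_D2H_SYNC_SEQNUM) {
            /* dongle stamps each completion with an epoch sequence number */
            prot->d2h_sync_cb = dhd_prot_d2h_sync_seqnum;
        } else if (dhd->d2h_sync_mode & PCIE_SHARED_D2H_SYNC_XORCSUM) {
            /* dongle XOR-checksums each completion work item */
            prot->d2h_sync_cb = dhd_prot_d2h_sync_xorcsum;
        } else {
            /* no marker: consume items as soon as the WR index moves */
            prot->d2h_sync_cb = dhd_prot_d2h_sync_none;
        }
    }

If a work item keeps failing the check, the sync callbacks escalate to dhd_prot_d2h_sync_livelock() (line 1173), which dumps the console buffer and debug state and can trigger a memory dump, bus reset, or hang report.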
1645 dhd_prot_h2d_sync_init(dhd_pub_t *dhd) in dhd_prot_h2d_sync_init() argument
1647 dhd_prot_t *prot = dhd->prot; in dhd_prot_h2d_sync_init()
1677 dhd_dma_buf_audit(dhd_pub_t *dhd, dhd_dma_buf_t *dma_buf) in dhd_dma_buf_audit() argument
1704 dhd_dma_buf_alloc(dhd_pub_t *dhd, dhd_dma_buf_t *dma_buf, uint32 buf_len) in dhd_dma_buf_alloc() argument
1707 osl_t *osh = dhd->osh; in dhd_dma_buf_alloc()
1730 if (dhd_dma_buf_audit(dhd, dma_buf) != BCME_OK) { /* audit dma buf */ in dhd_dma_buf_alloc()
1731 dhd_dma_buf_free(dhd, dma_buf); in dhd_dma_buf_alloc()
1735 dhd_dma_buf_reset(dhd, dma_buf); /* zero out and cache flush */ in dhd_dma_buf_alloc()
1744 dhd_dma_buf_reset(dhd_pub_t *dhd, dhd_dma_buf_t *dma_buf) in dhd_dma_buf_reset() argument
1749 (void)dhd_dma_buf_audit(dhd, dma_buf); in dhd_dma_buf_reset()
1769 dhd_dma_buf_free(dhd_pub_t *dhd, dhd_dma_buf_t *dma_buf) in dhd_dma_buf_free() argument
1771 osl_t *osh = dhd->osh; in dhd_dma_buf_free()
1779 (void)dhd_dma_buf_audit(dhd, dma_buf); in dhd_dma_buf_free()
1793 dhd_dma_buf_init(dhd_pub_t *dhd, void *dhd_dma_buf, in dhd_dma_buf_init() argument
1806 (void)dhd_dma_buf_audit(dhd, dma_buf); in dhd_dma_buf_init()
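Lines 1677-1806 list the dhd_dma_buf_* lifecycle helpers: dhd_dma_buf_alloc() audits the freshly allocated buffer, frees it again if the audit fails (lines 1730-1731), and otherwise zeroes and cache-flushes it (line 1735); dhd_dma_buf_free() audits before releasing. A minimal consumer sketch, modelled on the retbuf allocation in dhd_prot_allocate_bufs() (line 3324) and its release in dhd_prot_detach() (line 4235); only names from those fragments are used:

    static int
    example_attach_retbuf(dhd_pub_t *dhd, dhd_prot_t *prot)
    {
        /* dhd_dma_buf_alloc() audits the new buffer and, on success,
         * zeroes and cache-flushes it before returning */
        if (dhd_dma_buf_alloc(dhd, &prot->retbuf, IOCT_RETBUF_SIZE) != BCME_OK) {
            return BCME_NOMEM;
        }
        return BCME_OK;
    }

    static void
    example_detach_retbuf(dhd_pub_t *dhd, dhd_prot_t *prot)
    {
        dhd_dma_buf_free(dhd, &prot->retbuf); /* audits, then releases */
    }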
1842 #define DHD_PKTID_LOG_INIT(dhd, hdl) dhd_pktid_logging_init((dhd), (hdl)) argument
1843 #define DHD_PKTID_LOG_FINI(dhd, hdl) dhd_pktid_logging_fini((dhd), (hdl)) argument
1844 #define DHD_PKTID_LOG(dhd, hdl, pa, pktid, len, pkttype) \ argument
1845 dhd_pktid_logging((dhd), (hdl), (pa), (pktid), (len), (pkttype))
1846 #define DHD_PKTID_LOG_DUMP(dhd) dhd_pktid_logging_dump((dhd)) argument
1849 dhd_pktid_logging_init(dhd_pub_t *dhd, uint32 num_items) in dhd_pktid_logging_init() argument
1855 log = (dhd_pktid_log_t *)MALLOCZ(dhd->osh, log_size); in dhd_pktid_logging_init()
1869 dhd_pktid_logging_fini(dhd_pub_t *dhd, dhd_pktid_log_handle_t *handle) in dhd_pktid_logging_fini() argument
1881 MFREE(dhd->osh, handle, log_size); in dhd_pktid_logging_fini()
1885 dhd_pktid_logging(dhd_pub_t *dhd, dhd_pktid_log_handle_t *handle, dmaaddr_t pa, in dhd_pktid_logging() argument
1907 dhd_pktid_logging_dump(dhd_pub_t *dhd) in dhd_pktid_logging_dump() argument
1909 dhd_prot_t *prot = dhd->prot; in dhd_pktid_logging_dump()
2002 static dhd_pktid_map_handle_t *dhd_pktid_map_init(dhd_pub_t *dhd, uint32 num_items);
2005 static void dhd_pktid_map_fini(dhd_pub_t *dhd, dhd_pktid_map_handle_t *map);
2007 #define DHD_NATIVE_TO_PKTID_INIT(dhd, items) dhd_pktid_map_init((dhd), (items)) argument
2008 #define DHD_NATIVE_TO_PKTID_RESET(dhd, map) dhd_pktid_map_reset((dhd), (map)) argument
2009 #define DHD_NATIVE_TO_PKTID_FINI(dhd, map) dhd_pktid_map_fini((dhd), (map)) argument
2018 static void free_ioctl_return_buffer(dhd_pub_t *dhd, dhd_dma_buf_t *retbuf);
2019 static int alloc_ioctl_return_buffer(dhd_pub_t *dhd, dhd_dma_buf_t *retbuf);
2026 static INLINE uint32 dhd_pktid_map_reserve(dhd_pub_t *dhd, dhd_pktid_map_handle_t *handle,
2028 static INLINE void dhd_pktid_map_save(dhd_pub_t *dhd, dhd_pktid_map_handle_t *handle,
2031 static uint32 dhd_pktid_map_alloc(dhd_pub_t *dhd, dhd_pktid_map_handle_t *map,
2036 static void *dhd_pktid_map_free(dhd_pub_t *dhd, dhd_pktid_map_handle_t *map,
2043 dhd_pktid_map_save_metadata(dhd_pub_t *dhd, dhd_pktid_map_handle_t *handle, void *mpkt,
2050 static void * dhd_pktid_map_retreive_metadata(dhd_pub_t *dhd, dhd_pktid_map_handle_t *handle,
2182 #define DHD_NATIVE_TO_PKTID_RESET_IOCTL(dhd, map) dhd_pktid_map_reset_ioctl((dhd), (map)) argument
2185 #define DHD_NATIVE_TO_PKTID_RSV(dhd, map, pkt, pkttype) \ argument
2186 dhd_pktid_map_reserve((dhd), (map), (pkt), (pkttype))
2188 #define DHD_NATIVE_TO_PKTID_SAVE(dhd, map, pkt, nkey, pa, len, dir, dmah, secdma, pkttype) \ argument
2189 dhd_pktid_map_save((dhd), (map), (void *)(pkt), (nkey), (pa), (uint32)(len), \
2193 #define DHD_NATIVE_TO_PKTID(dhd, map, pkt, pa, len, dir, dmah, secdma, pkttype) \ argument
2194 dhd_pktid_map_alloc((dhd), (map), (void *)(pkt), (pa), (uint32)(len), \
2199 #define DHD_PKTID_TO_NATIVE(dhd, map, pktid, pa, len, dmah, secdma, pkttype) \ argument
2200 dhd_pktid_map_free((dhd), (map), (uint32)(pktid), \
2205 #define DHD_PKTID_TO_NATIVE_RSV(dhd, map, pktid, pa, len, dmah, secdma, pkttype) \ argument
2206 dhd_pktid_map_free((dhd), (map), (uint32)(pktid), \
2211 #define DHD_PKTID_SAVE_METADATA(dhd, map, mpkt, mpkt_pa, mpkt_len, dmah, nkey) \ argument
2212 dhd_pktid_map_save_metadata(dhd, map, mpkt, mpkt_pa, mpkt_len, dmah, nkey)
2214 #define DHD_PKTID_RETREIVE_METADATA(dhd, map, mpkt_pa, mpkt_len, dmah, nkey) \ argument
2215 dhd_pktid_map_retreive_metadata(dhd, map, (dmaaddr_t *)&mpkt_pa, (uint32 *)&mpkt_len, \
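The macros at lines 2185-2215 wrap the pktid map: DHD_NATIVE_TO_PKTID() pairs a native packet and its DMA mapping with a 32-bit id that can travel in a host-to-dongle message, and DHD_PKTID_TO_NATIVE() returns the packet and the saved DMA fields when a completion carries that id back. A hedged round-trip sketch; the argument order follows the macro bodies above, while PKTTYPE_DATA_TX and the NULL dmah/secdma handles are assumptions for the example:

    static void
    example_pktid_roundtrip(dhd_pub_t *dhd, void *pkt, dmaaddr_t pa, uint32 len)
    {
        uint32 pktid;
        void *pkt_back;
        void *dmah = NULL;      /* DMA handle, simplified */
        void *secdma = NULL;    /* secure-DMA context, unused here */

        /* submit side: allocate an id and record packet + DMA mapping */
        pktid = DHD_NATIVE_TO_PKTID(dhd, dhd->prot->pktid_tx_map, pkt, pa,
                len, DMA_TX, dmah, secdma, PKTTYPE_DATA_TX);

        /* completion side: the dongle echoes the id back; recover the
         * packet and the saved pa/len so the buffer can be unmapped */
        pkt_back = DHD_PKTID_TO_NATIVE(dhd, dhd->prot->pktid_tx_map, pktid,
                pa, len, dmah, secdma, PKTTYPE_DATA_TX);
        BCM_REFERENCE(pkt_back);
    }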
2224 dhd_get_pktid_map_type(dhd_pub_t *dhd, dhd_pktid_map_t *pktid_map) in dhd_get_pktid_map_type() argument
2226 dhd_prot_t *prot = dhd->prot; in dhd_get_pktid_map_type()
2246 __dhd_pktid_audit(dhd_pub_t *dhd, dhd_pktid_map_t *pktid_map, uint32 pktid, in __dhd_pktid_audit() argument
2328 dhd->pktid_audit_failed = TRUE; in __dhd_pktid_audit()
2335 dhd_pktid_audit(dhd_pub_t *dhd, dhd_pktid_map_t *pktid_map, uint32 pktid, in dhd_pktid_audit() argument
2339 ret = __dhd_pktid_audit(dhd, pktid_map, pktid, test_for, errmsg); in dhd_pktid_audit()
2342 __FUNCTION__, pktid, dhd_get_pktid_map_type(dhd, pktid_map))); in dhd_pktid_audit()
2343 dhd_pktid_error_handler(dhd); in dhd_pktid_audit()
2345 DHD_PKTID_LOG_DUMP(dhd); in dhd_pktid_audit()
2406 dhd_pktid_map_init(dhd_pub_t *dhd, uint32 num_items) in dhd_pktid_map_init() argument
2414 osh = dhd->osh; in dhd_pktid_map_init()
2508 dhd_pktid_map_reset(dhd_pub_t *dhd, dhd_pktid_map_handle_t *handle) in dhd_pktid_map_reset() argument
2520 osh = dhd->osh; in dhd_pktid_map_reset()
2531 OSL_ATOMIC_DEC(dhd->osh, &dhd->prot->active_tx_count); in dhd_pktid_map_reset()
2535 DHD_PKTID_AUDIT(dhd, map, nkey, DHD_DUPLICATE_FREE); /* duplicate frees */ in dhd_pktid_map_reset()
2538 DHD_PKTID_LOG(dhd, dhd->prot->pktid_dma_unmap, in dhd_pktid_map_reset()
2544 dhd_prot_packet_free(dhd, (ulong*)locker->pkt, in dhd_pktid_map_reset()
2549 DHD_PKTID_AUDIT(dhd, map, nkey, DHD_TEST_IS_FREE); in dhd_pktid_map_reset()
2563 dhd_pktid_map_reset_ioctl(dhd_pub_t *dhd, dhd_pktid_map_handle_t *handle) in dhd_pktid_map_reset_ioctl() argument
2581 DHD_PKTID_AUDIT(dhd, map, nkey, DHD_DUPLICATE_FREE); /* duplicate frees */ in dhd_pktid_map_reset_ioctl()
2591 free_ioctl_return_buffer(dhd, &retbuf); in dhd_pktid_map_reset_ioctl()
2595 DHD_PKTID_AUDIT(dhd, map, nkey, DHD_TEST_IS_FREE); in dhd_pktid_map_reset_ioctl()
2611 dhd_pktid_map_fini(dhd_pub_t *dhd, dhd_pktid_map_handle_t *handle) in dhd_pktid_map_fini() argument
2621 dhd_pktid_map_reset(dhd, handle); in dhd_pktid_map_fini()
2627 DHD_PKTID_LOCK_DEINIT(dhd->osh, map->pktid_lock); in dhd_pktid_map_fini()
2631 bcm_mwbmap_fini(dhd->osh, map->pktid_audit); /* Destruct pktid_audit */ in dhd_pktid_map_fini()
2634 DHD_PKTID_AUDIT_LOCK_DEINIT(dhd->osh, map->pktid_audit_lock); in dhd_pktid_map_fini()
2638 MFREE(dhd->osh, map->keys, map_keys_sz); in dhd_pktid_map_fini()
2639 VMFREE(dhd->osh, handle, dhd_pktid_map_sz); in dhd_pktid_map_fini()
2644 dhd_pktid_map_fini_ioctl(dhd_pub_t *dhd, dhd_pktid_map_handle_t *handle) in dhd_pktid_map_fini_ioctl() argument
2654 dhd_pktid_map_reset_ioctl(dhd, handle); in dhd_pktid_map_fini_ioctl()
2660 DHD_PKTID_LOCK_DEINIT(dhd->osh, map->pktid_lock); in dhd_pktid_map_fini_ioctl()
2664 bcm_mwbmap_fini(dhd->osh, map->pktid_audit); /* Destruct pktid_audit */ in dhd_pktid_map_fini_ioctl()
2667 DHD_PKTID_AUDIT_LOCK_DEINIT(dhd->osh, map->pktid_audit_lock); in dhd_pktid_map_fini_ioctl()
2672 MFREE(dhd->osh, map->keys, map_keys_sz); in dhd_pktid_map_fini_ioctl()
2673 VMFREE(dhd->osh, handle, dhd_pktid_map_sz); in dhd_pktid_map_fini_ioctl()
2703 dhd_pktid_map_reserve(dhd_pub_t *dhd, dhd_pktid_map_handle_t *handle, in dhd_pktid_map_reserve() argument
2755 dhd_pktid_map_save_metadata(dhd_pub_t *dhd, dhd_pktid_map_handle_t *handle, void *mpkt, in dhd_pktid_map_save_metadata() argument
2775 if (dhd->memdump_enabled) { in dhd_pktid_map_save_metadata()
2777 dhd->memdump_type = DUMP_TYPE_PKTID_INVALID; in dhd_pktid_map_save_metadata()
2778 dhd_bus_mem_dump(dhd); in dhd_pktid_map_save_metadata()
2808 dhd_pktid_map_save(dhd_pub_t *dhd, dhd_pktid_map_handle_t *handle, void *pkt, in dhd_pktid_map_save() argument
2826 if (dhd->memdump_enabled) { in dhd_pktid_map_save()
2828 dhd->memdump_type = DUMP_TYPE_PKTID_INVALID; in dhd_pktid_map_save()
2829 dhd_bus_mem_dump(dhd); in dhd_pktid_map_save()
2852 DHD_PKTID_LOG(dhd, dhd->prot->pktid_dma_map, pa, nkey, len, pkttype); in dhd_pktid_map_save()
2862 BCMFASTPATH(dhd_pktid_map_alloc)(dhd_pub_t *dhd, dhd_pktid_map_handle_t *handle, void *pkt, in BCMFASTPATH()
2868 nkey = dhd_pktid_map_reserve(dhd, handle, pkt, pkttype); in BCMFASTPATH()
2870 dhd_pktid_map_save(dhd, handle, pkt, nkey, pa, in BCMFASTPATH()
2879 BCMFASTPATH(dhd_pktid_map_retreive_metadata)(dhd_pub_t *dhd, dhd_pktid_map_handle_t *handle, in BCMFASTPATH()
2902 if (dhd->memdump_enabled) { in BCMFASTPATH()
2904 dhd->memdump_type = DUMP_TYPE_PKTID_INVALID; in BCMFASTPATH()
2905 dhd_bus_mem_dump(dhd); in BCMFASTPATH()
2935 BCMFASTPATH(dhd_pktid_map_free)(dhd_pub_t *dhd, dhd_pktid_map_handle_t *handle, uint32 nkey, in BCMFASTPATH()
2957 if (dhd->memdump_enabled) { in BCMFASTPATH()
2959 dhd->memdump_type = DUMP_TYPE_PKTID_INVALID; in BCMFASTPATH()
2960 dhd_bus_mem_dump(dhd); in BCMFASTPATH()
2971 DHD_PKTID_AUDIT(dhd, map, nkey, DHD_DUPLICATE_FREE); /* Audit duplicate FREE */ in BCMFASTPATH()
2981 if (dhd->memdump_enabled) { in BCMFASTPATH()
2983 dhd->memdump_type = DUMP_TYPE_PKTID_INVALID; in BCMFASTPATH()
2984 dhd_bus_mem_dump(dhd); in BCMFASTPATH()
3011 if (dhd->memdump_enabled) { in BCMFASTPATH()
3013 dhd->memdump_type = DUMP_TYPE_PKTID_INVALID; in BCMFASTPATH()
3014 dhd_bus_mem_dump(dhd); in BCMFASTPATH()
3032 DHD_PKTID_AUDIT(dhd, map, nkey, DHD_TEST_IS_FREE); in BCMFASTPATH()
3035 DHD_PKTID_LOG(dhd, dhd->prot->pktid_dma_unmap, locker->pa, nkey, in BCMFASTPATH()
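One error-handling pattern repeats throughout the pktid map fragments above (lines 2775-3014): whenever a save, free, or metadata lookup sees an invalid pktid, the handler forces a socram dump tagged DUMP_TYPE_PKTID_INVALID so the bad id can be analysed post-mortem. The recurring snippet, as a sketch built only from those fragments:

    static void
    example_on_invalid_pktid(dhd_pub_t *dhd)
    {
        if (dhd->memdump_enabled) {
            /* take a memory dump tagged with the pktid failure type */
            dhd->memdump_type = DUMP_TYPE_PKTID_INVALID;
            dhd_bus_mem_dump(dhd);
        }
    }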
3085 dhd_pktid_map_init(dhd_pub_t *dhd, uint32 num_items) in dhd_pktid_map_init() argument
3087 osl_t *osh = dhd->osh; in dhd_pktid_map_init()
3142 dhd_pktid_map_reset(dhd_pub_t *dhd, pktlists_t *handle) in dhd_pktid_map_reset() argument
3144 osl_t *osh = dhd->osh; in dhd_pktid_map_reset()
3163 dhd_pktid_map_fini(dhd_pub_t *dhd, dhd_pktid_map_handle_t *map) in dhd_pktid_map_fini() argument
3165 osl_t *osh = dhd->osh; in dhd_pktid_map_fini()
3173 dhd_pktid_map_reset(dhd, handle); in dhd_pktid_map_fini()
3233 #define DHD_NATIVE_TO_PKTID_RSV(dhd, map, pkt, pkttype) DHD_PKTID32(pkt) argument
3235 #define DHD_NATIVE_TO_PKTID_SAVE(dhd, map, pkt, nkey, pa, len, dma_dir, dmah, secdma, pkttype) \ argument
3236 ({ BCM_REFERENCE(dhd); BCM_REFERENCE(nkey); BCM_REFERENCE(dma_dir); \
3241 #define DHD_NATIVE_TO_PKTID(dhd, map, pkt, pa, len, dma_dir, dmah, secdma, pkttype) \ argument
3242 ({ BCM_REFERENCE(dhd); BCM_REFERENCE(dma_dir); \
3247 #define DHD_PKTID_TO_NATIVE(dhd, map, pktid, pa, len, dmah, secdma, pkttype) \ argument
3248 ({ BCM_REFERENCE(dhd); BCM_REFERENCE(pkttype); \
3266 dhd_prot_allocate_bufs(dhd_pub_t *dhd, dhd_prot_t *prot) in dhd_prot_allocate_bufs() argument
3272 if (dhd_prot_ring_attach(dhd, &prot->h2dring_ctrl_subn, "h2dctrl", in dhd_prot_allocate_bufs()
3281 if (dhd_prot_ring_attach(dhd, &prot->h2dring_rxp_subn, "h2drxp", in dhd_prot_allocate_bufs()
3290 if (dhd_prot_ring_attach(dhd, &prot->d2hring_ctrl_cpln, "d2hctrl", in dhd_prot_allocate_bufs()
3299 if (dhd_prot_ring_attach(dhd, &prot->d2hring_tx_cpln, "d2htxcpl", in dhd_prot_allocate_bufs()
3309 if (dhd_prot_ring_attach(dhd, &prot->d2hring_rx_cpln, "d2hrxcpl", in dhd_prot_allocate_bufs()
3324 if (dhd_dma_buf_alloc(dhd, &prot->retbuf, IOCT_RETBUF_SIZE)) { in dhd_prot_allocate_bufs()
3329 if (dhd_dma_buf_alloc(dhd, &prot->ioctbuf, IOCT_RETBUF_SIZE)) { in dhd_prot_allocate_bufs()
3334 if (dhd_dma_buf_alloc(dhd, &prot->hostts_req_buf, CTRLSUB_HOSTTS_MEESAGE_SIZE)) { in dhd_prot_allocate_bufs()
3341 if (dhd_dma_buf_alloc(dhd, &prot->d2h_dma_scratch_buf, in dhd_prot_allocate_bufs()
3344 if (dhd_dma_buf_alloc(dhd, &prot->d2h_dma_scratch_buf, DMA_D2H_SCRATCH_BUF_LEN)) in dhd_prot_allocate_bufs()
3354 if (dhd_dma_buf_alloc(dhd, &prot->hmaptest.mem, HMAP_SANDBOX_BUFFER_LEN)) { in dhd_prot_allocate_bufs()
3373 if (dhd_dma_buf_alloc(dhd, &prot->host_bus_throughput_buf, DHD_BUS_TPUT_BUF_LEN)) { in dhd_prot_allocate_bufs()
3379 if (dhd_dma_buf_alloc(dhd, &prot->snapshot_upload_buf, SNAPSHOT_UPLOAD_BUF_SIZE)) { in dhd_prot_allocate_bufs()
3412 dhd_prot_attach(dhd_pub_t *dhd) in dhd_prot_attach() argument
3414 osl_t *osh = dhd->osh; in dhd_prot_attach()
3419 if (!(prot = (dhd_prot_t *)DHD_OS_PREALLOC(dhd, DHD_PREALLOC_PROT, in dhd_prot_attach()
3427 dhd->prot = prot; in dhd_prot_attach()
3430 dhd->dma_d2h_ring_upd_support = FALSE; in dhd_prot_attach()
3431 dhd->dma_h2d_ring_upd_support = FALSE; in dhd_prot_attach()
3432 dhd->dma_ring_upd_overwrite = FALSE; in dhd_prot_attach()
3434 dhd->idma_inited = 0; in dhd_prot_attach()
3435 dhd->ifrm_inited = 0; in dhd_prot_attach()
3436 dhd->dar_inited = 0; in dhd_prot_attach()
3438 if (dhd_prot_allocate_bufs(dhd, prot) != BCME_OK) { in dhd_prot_attach()
3446 prot->pktid_ctrl_map = DHD_NATIVE_TO_PKTID_INIT(dhd, MAX_PKTID_CTRL); in dhd_prot_attach()
3451 prot->pktid_rx_map = DHD_NATIVE_TO_PKTID_INIT(dhd, MAX_PKTID_RX); in dhd_prot_attach()
3455 prot->pktid_tx_map = DHD_NATIVE_TO_PKTID_INIT(dhd, MAX_PKTID_TX); in dhd_prot_attach()
3460 prot->pktid_map_handle_ioctl = DHD_NATIVE_TO_PKTID_INIT(dhd, in dhd_prot_attach()
3468 prot->pktid_dma_map = DHD_PKTID_LOG_INIT(dhd, MAX_PKTID_LOG); in dhd_prot_attach()
3474 prot->pktid_dma_unmap = DHD_PKTID_LOG_INIT(dhd, MAX_PKTID_LOG); in dhd_prot_attach()
3482 if (dhd->bus->sih->buscorerev < 71) { in dhd_prot_attach()
3496 if (dhd_dma_buf_alloc(dhd, &dhd->prot->fw_trap_buf, trap_buf_len)) { in dhd_prot_attach()
3507 dhd_prot_detach(dhd); in dhd_prot_attach()
3514 dhd_alloc_host_scbs(dhd_pub_t *dhd) in dhd_alloc_host_scbs() argument
3518 dhd_prot_t *prot = dhd->prot; in dhd_alloc_host_scbs()
3521 if (dhd->hscb_enable) { in dhd_alloc_host_scbs()
3523 dhd_bus_cmn_readshared(dhd->bus, &host_scb_size, HOST_SCB_ADDR, 0); in dhd_alloc_host_scbs()
3533 dhd_dma_buf_free(dhd, &prot->host_scb_buf); in dhd_alloc_host_scbs()
3538 ret = dhd_dma_buf_alloc(dhd, &prot->host_scb_buf, host_scb_size); in dhd_alloc_host_scbs()
3543 dhd_bus_cmn_writeshared(dhd->bus, &base_addr, sizeof(base_addr), in dhd_alloc_host_scbs()
3559 dhd_set_host_cap(dhd_pub_t *dhd) in dhd_set_host_cap() argument
3562 dhd_prot_t *prot = dhd->prot; in dhd_set_host_cap()
3567 if (dhd->bus->api.fw_rev >= PCIE_SHARED_VERSION_6) { in dhd_set_host_cap()
3568 if (dhd->h2d_phase_supported) { in dhd_set_host_cap()
3570 if (dhd->force_dongletrap_on_bad_h2d_phase) in dhd_set_host_cap()
3580 if (dhdpcie_bus_get_pcie_hostready_supported(dhd->bus)) { in dhd_set_host_cap()
3585 if (dhdpcie_bus_get_pcie_inband_dw_supported(dhd->bus)) { in dhd_set_host_cap()
3589 dhdpcie_bus_enab_pcie_dw(dhd->bus, DEVICE_WAKE_INB); in dhd_set_host_cap()
3590 if (!dhd->dma_h2d_ring_upd_support || !dhd->dma_d2h_ring_upd_support) { in dhd_set_host_cap()
3591 dhd_init_dongle_ds_lock(dhd->bus); in dhd_set_host_cap()
3592 dhdpcie_set_dongle_deepsleep(dhd->bus, FALSE); in dhd_set_host_cap()
3597 if (dhdpcie_bus_get_pcie_oob_dw_supported(dhd->bus)) { in dhd_set_host_cap()
3598 dhdpcie_bus_enab_pcie_dw(dhd->bus, DEVICE_WAKE_OOB); in dhd_set_host_cap()
3604 dhdpcie_bus_enab_pcie_dw(dhd->bus, DEVICE_WAKE_NONE); in dhd_set_host_cap()
3611 if (dhd->pcie_txs_metadata_enable != 0) in dhd_set_host_cap()
3616 if (dhd->bt_logging) { in dhd_set_host_cap()
3617 if (dhd->bt_logging_enabled) { in dhd_set_host_cap()
3631 if (dhd->fast_delete_ring_support) { in dhd_set_host_cap()
3635 if (dhdpcie_bus_get_pcie_idma_supported(dhd->bus)) { in dhd_set_host_cap()
3638 dhd->idma_inited = TRUE; in dhd_set_host_cap()
3641 dhd->idma_inited = FALSE; in dhd_set_host_cap()
3644 if (dhdpcie_bus_get_pcie_ifrm_supported(dhd->bus)) { in dhd_set_host_cap()
3647 dhd->ifrm_inited = TRUE; in dhd_set_host_cap()
3648 dhd->dma_h2d_ring_upd_support = FALSE; in dhd_set_host_cap()
3649 dhd_prot_dma_indx_free(dhd); in dhd_set_host_cap()
3652 dhd->ifrm_inited = FALSE; in dhd_set_host_cap()
3655 if (dhdpcie_bus_get_pcie_dar_supported(dhd->bus)) { in dhd_set_host_cap()
3658 dhd->dar_inited = TRUE; in dhd_set_host_cap()
3661 dhd->dar_inited = FALSE; in dhd_set_host_cap()
3671 if (dhd->snapshot_upload) { in dhd_set_host_cap()
3677 if (dhd->hscb_enable) { in dhd_set_host_cap()
3682 if (dhd->dongle_edl_support) { in dhd_set_host_cap()
3691 if (dhd_bus_is_minidump_enabled(dhd)) { in dhd_set_host_cap()
3697 if (dhdpcie_bus_get_hp2p_supported(dhd->bus)) { in dhd_set_host_cap()
3707 if (dhd->db0ts_capable) { in dhd_set_host_cap()
3714 if (dhd->extdtxs_in_txcpl) { in dhd_set_host_cap()
3727 dhd_bus_cmn_writeshared(dhd->bus, &data, sizeof(uint32), HOST_API_VERSION, 0); in dhd_set_host_cap()
3728 dhd_bus_cmn_writeshared(dhd->bus, &prot->fw_trap_buf.pa, in dhd_set_host_cap()
3731 if (dhd_bus_is_minidump_enabled(dhd)) { in dhd_set_host_cap()
3734 dhd_bus_cmn_writeshared(dhd->bus, &host_trap_addr_len, in dhd_set_host_cap()
3741 dhd_timesync_notify_ipc_rev(dhd->ts, prot->active_ipc_version); in dhd_set_host_cap()
3746 void dhd_agg_inflight_stats_dump(dhd_pub_t *dhd, struct bcmstrbuf *strbuf) in dhd_agg_inflight_stats_dump() argument
3748 uint64 *inflight_histo = dhd->prot->agg_h2d_db_info.inflight_histo; in dhd_agg_inflight_stats_dump()
3760 void dhd_agg_inflights_stats_update(dhd_pub_t *dhd, uint32 inflight) in dhd_agg_inflights_stats_update() argument
3762 uint64 *bin = dhd->prot->agg_h2d_db_info.inflight_histo; in dhd_agg_inflights_stats_update()
3800 dhd_pub_t *dhd; in dhd_msgbuf_agg_h2d_db_timer_fn() local
3809 dhd = agg_db_info->dhd; in dhd_msgbuf_agg_h2d_db_timer_fn()
3810 prot = dhd->prot; in dhd_msgbuf_agg_h2d_db_timer_fn()
3813 if (IDMA_ACTIVE(dhd)) { in dhd_msgbuf_agg_h2d_db_timer_fn()
3815 if (dhd->bus->sih) { in dhd_msgbuf_agg_h2d_db_timer_fn()
3816 corerev = dhd->bus->sih->buscorerev; in dhd_msgbuf_agg_h2d_db_timer_fn()
3821 prot->mb_2_ring_fn(dhd->bus, db_index, TRUE); in dhd_msgbuf_agg_h2d_db_timer_fn()
3823 prot->mb_ring_fn(dhd->bus, DHD_AGGR_H2D_DB_MAGIC); in dhd_msgbuf_agg_h2d_db_timer_fn()
3842 dhd_msgbuf_agg_h2d_db_timer_init(dhd_pub_t *dhd) in dhd_msgbuf_agg_h2d_db_timer_init() argument
3844 dhd_prot_t *prot = dhd->prot; in dhd_msgbuf_agg_h2d_db_timer_init()
3847 agg_db_info->dhd = dhd; in dhd_msgbuf_agg_h2d_db_timer_init()
3854 agg_db_info->inflight_histo = (uint64 *)MALLOCZ(dhd->osh, DHD_INFLIGHT_HISTO_SIZE); in dhd_msgbuf_agg_h2d_db_timer_init()
3858 dhd_msgbuf_agg_h2d_db_timer_reset(dhd_pub_t *dhd) in dhd_msgbuf_agg_h2d_db_timer_reset() argument
3860 dhd_prot_t *prot = dhd->prot; in dhd_msgbuf_agg_h2d_db_timer_reset()
3864 MFREE(dhd->osh, agg_db_info->inflight_histo, DHD_INFLIGHT_HISTO_SIZE); in dhd_msgbuf_agg_h2d_db_timer_reset()
3872 dhd_msgbuf_agg_h2d_db_timer_cancel(dhd_pub_t *dhd) in dhd_msgbuf_agg_h2d_db_timer_cancel() argument
3874 dhd_prot_t *prot = dhd->prot; in dhd_msgbuf_agg_h2d_db_timer_cancel()
3881 dhd_prot_clearcounts(dhd_pub_t *dhd) in dhd_prot_clearcounts() argument
3883 dhd_prot_t *prot = dhd->prot; in dhd_prot_clearcounts()
3904 dhd_prot_init(dhd_pub_t *dhd) in dhd_prot_init() argument
3907 dhd_prot_t *prot = dhd->prot; in dhd_prot_init()
3930 dhd_bus_cmn_readshared(dhd->bus, &prot->max_rxbufpost, MAX_HOST_RXBUFS, 0); in dhd_prot_init()
3939 max_eventbufpost = (uint16)dhdpcie_get_max_eventbufpost(dhd->bus); in dhd_prot_init()
3950 OSL_ATOMIC_INIT(dhd->osh, &prot->active_tx_count); in dhd_prot_init()
3986 prot->device_ipc_version = dhd->bus->api.fw_rev; in dhd_prot_init()
3991 dhd_set_host_cap(dhd); in dhd_prot_init()
3994 if ((ret = dhd_alloc_host_scbs(dhd))) { in dhd_prot_init()
4001 prot->mb_ring_fn = dhd_bus_get_mbintr_fn(dhd->bus); in dhd_prot_init()
4002 prot->mb_2_ring_fn = dhd_bus_get_mbintr_2_fn(dhd->bus); in dhd_prot_init()
4006 dhd_msgbuf_agg_h2d_db_timer_init(dhd); in dhd_prot_init()
4009 dhd->bus->_dar_war = (dhd->bus->sih->buscorerev < 64) ? TRUE : FALSE; in dhd_prot_init()
4014 if (dhd->dma_d2h_ring_upd_support) { in dhd_prot_init()
4016 dhd_bus_cmn_writeshared(dhd->bus, &base_addr, sizeof(base_addr), in dhd_prot_init()
4019 dhd_bus_cmn_writeshared(dhd->bus, &base_addr, sizeof(base_addr), in dhd_prot_init()
4023 if (dhd->dma_h2d_ring_upd_support || IDMA_ENAB(dhd)) { in dhd_prot_init()
4025 dhd_bus_cmn_writeshared(dhd->bus, &base_addr, sizeof(base_addr), in dhd_prot_init()
4028 dhd_bus_cmn_writeshared(dhd->bus, &base_addr, sizeof(base_addr), in dhd_prot_init()
4032 dhd_prot_ring_init(dhd, &prot->h2dring_ctrl_subn); in dhd_prot_init()
4033 dhd_prot_ring_init(dhd, &prot->h2dring_rxp_subn); in dhd_prot_init()
4034 dhd_prot_ring_init(dhd, &prot->d2hring_ctrl_cpln); in dhd_prot_init()
4043 dhd_prot_ring_init(dhd, &prot->d2hring_tx_cpln); in dhd_prot_init()
4044 dhd_prot_ring_init(dhd, &prot->d2hring_rx_cpln); in dhd_prot_init()
4046 dhd_prot_d2h_sync_init(dhd); in dhd_prot_init()
4048 dhd_prot_h2d_sync_init(dhd); in dhd_prot_init()
4052 if (INBAND_DW_ENAB(dhd->bus)) { in dhd_prot_init()
4053 dhdpcie_bus_set_pcie_inband_dw_state(dhd->bus, in dhd_prot_init()
4060 dhd_bus_cmn_writeshared(dhd->bus, &base_addr, sizeof(base_addr), in dhd_prot_init()
4062 dhd_bus_cmn_writeshared(dhd->bus, &prot->d2h_dma_scratch_buf.len, in dhd_prot_init()
4068 if (dhd->hostrdy_after_init) in dhd_prot_init()
4069 dhd_bus_hostready(dhd->bus); in dhd_prot_init()
4079 if (dhd_prot_flowrings_pool_attach(dhd) != BCME_OK) { in dhd_prot_init()
4083 dhd->ring_attached = TRUE; in dhd_prot_init()
4086 if (IFRM_ENAB(dhd)) { in dhd_prot_init()
4088 dhd_bus_cmn_writeshared(dhd->bus, &base_addr, sizeof(base_addr), in dhd_prot_init()
4100 if (IDMA_ACTIVE(dhd)) { in dhd_prot_init()
4103 uint buscorerev = dhd->bus->sih->buscorerev; in dhd_prot_init()
4104 idmacontrol = si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, in dhd_prot_init()
4112 idmacontrol = si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, in dhd_prot_init()
4130 dhd_msgbuf_ring_config_d2h_soft_doorbell(dhd); in dhd_prot_init()
4132 dhd_msgbuf_rxbuf_post_ioctlresp_bufs(dhd); in dhd_prot_init()
4133 dhd_msgbuf_rxbuf_post_event_bufs(dhd); in dhd_prot_init()
4155 if (dhd->bus->api.fw_rev >= PCIE_SHARED_VERSION_6 && !dhd->dongle_edl_support) in dhd_prot_init()
4157 if (dhd->bus->api.fw_rev >= PCIE_SHARED_VERSION_6) in dhd_prot_init()
4160 if ((ret = dhd_prot_init_info_rings(dhd)) != BCME_OK) { in dhd_prot_init()
4171 if (dhd->dongle_edl_support) { in dhd_prot_init()
4172 if ((ret = dhd_prot_init_edl_rings(dhd)) != BCME_OK) { in dhd_prot_init()
4181 if (dhd->bus->api.fw_rev >= PCIE_SHARED_VERSION_7 && dhd->bt_logging) { in dhd_prot_init()
4182 if ((ret = dhd_prot_init_btlog_rings(dhd)) != BCME_OK) { in dhd_prot_init()
4194 if (dhd->bus->api.fw_rev >= PCIE_SHARED_VERSION_7 && dhd->hp2p_capable) { in dhd_prot_init()
4195 if ((ret = dhd_prot_init_hp2p_rings(dhd)) != BCME_OK) { in dhd_prot_init()
4207 dhd->lb_rxp_stop_thr = (D2HRING_RXCMPLT_MAX_ITEM * LB_RXP_STOP_THR); in dhd_prot_init()
4208 dhd->lb_rxp_strt_thr = (D2HRING_RXCMPLT_MAX_ITEM * LB_RXP_STRT_THR); in dhd_prot_init()
4209 atomic_set(&dhd->lb_rxp_flow_ctrl, FALSE); in dhd_prot_init()
4218 void dhd_prot_detach(dhd_pub_t *dhd) in dhd_prot_detach() argument
4220 dhd_prot_t *prot = dhd->prot; in dhd_prot_detach()
4227 dhd_prot_reset(dhd); in dhd_prot_detach()
4231 dhd_dma_buf_free(dhd, &prot->d2h_dma_scratch_buf); in dhd_prot_detach()
4233 dhd_dma_buf_free(dhd, &prot->hmaptest.mem); in dhd_prot_detach()
4235 dhd_dma_buf_free(dhd, &prot->retbuf); in dhd_prot_detach()
4236 dhd_dma_buf_free(dhd, &prot->ioctbuf); in dhd_prot_detach()
4237 dhd_dma_buf_free(dhd, &prot->host_bus_throughput_buf); in dhd_prot_detach()
4238 dhd_dma_buf_free(dhd, &prot->hostts_req_buf); in dhd_prot_detach()
4239 dhd_dma_buf_free(dhd, &prot->fw_trap_buf); in dhd_prot_detach()
4240 dhd_dma_buf_free(dhd, &prot->host_scb_buf); in dhd_prot_detach()
4242 dhd_dma_buf_free(dhd, &prot->snapshot_upload_buf); in dhd_prot_detach()
4246 dhd_dma_buf_free(dhd, &prot->h2d_dma_indx_wr_buf); in dhd_prot_detach()
4247 dhd_dma_buf_free(dhd, &prot->h2d_dma_indx_rd_buf); in dhd_prot_detach()
4248 dhd_dma_buf_free(dhd, &prot->d2h_dma_indx_wr_buf); in dhd_prot_detach()
4249 dhd_dma_buf_free(dhd, &prot->d2h_dma_indx_rd_buf); in dhd_prot_detach()
4251 dhd_dma_buf_free(dhd, &prot->h2d_ifrm_indx_wr_buf); in dhd_prot_detach()
4254 dhd_prot_ring_detach(dhd, &prot->h2dring_ctrl_subn); in dhd_prot_detach()
4255 dhd_prot_ring_detach(dhd, &prot->h2dring_rxp_subn); in dhd_prot_detach()
4256 dhd_prot_ring_detach(dhd, &prot->d2hring_ctrl_cpln); in dhd_prot_detach()
4257 dhd_prot_ring_detach(dhd, &prot->d2hring_tx_cpln); in dhd_prot_detach()
4258 dhd_prot_ring_detach(dhd, &prot->d2hring_rx_cpln); in dhd_prot_detach()
4261 dhd_prot_flowrings_pool_detach(dhd); in dhd_prot_detach()
4264 dhd_prot_detach_info_rings(dhd); in dhd_prot_detach()
4268 dhd_prot_detach_btlog_rings(dhd); in dhd_prot_detach()
4272 dhd_prot_detach_edl_rings(dhd); in dhd_prot_detach()
4276 dhd_prot_detach_hp2p_rings(dhd); in dhd_prot_detach()
4288 DHD_NATIVE_TO_PKTID_FINI(dhd, prot->pktid_ctrl_map); in dhd_prot_detach()
4289 DHD_NATIVE_TO_PKTID_FINI(dhd, prot->pktid_rx_map); in dhd_prot_detach()
4290 DHD_NATIVE_TO_PKTID_FINI(dhd, prot->pktid_tx_map); in dhd_prot_detach()
4292 DHD_NATIVE_TO_PKTID_FINI_IOCTL(dhd, prot->pktid_map_handle_ioctl); in dhd_prot_detach()
4295 DHD_PKTID_LOG_FINI(dhd, prot->pktid_dma_map); in dhd_prot_detach()
4296 DHD_PKTID_LOG_FINI(dhd, prot->pktid_dma_unmap); in dhd_prot_detach()
4300 MFREE(dhd->osh, prot->h2d_dma_indx_rd_copy_buf, in dhd_prot_detach()
4304 MFREE(dhd->osh, prot->d2h_dma_indx_wr_copy_buf, in dhd_prot_detach()
4308 DHD_OS_PREFREE(dhd, dhd->prot, sizeof(dhd_prot_t)); in dhd_prot_detach()
4310 dhd->prot = NULL; in dhd_prot_detach()
4323 dhd_prot_reset(dhd_pub_t *dhd) in dhd_prot_reset() argument
4325 struct dhd_prot *prot = dhd->prot; in dhd_prot_reset()
4333 dhd->ring_attached = FALSE; in dhd_prot_reset()
4335 dhd_prot_flowrings_pool_reset(dhd); in dhd_prot_reset()
4338 dhd_prot_ring_reset(dhd, &prot->h2dring_ctrl_subn); in dhd_prot_reset()
4339 dhd_prot_ring_reset(dhd, &prot->h2dring_rxp_subn); in dhd_prot_reset()
4340 dhd_prot_ring_reset(dhd, &prot->d2hring_ctrl_cpln); in dhd_prot_reset()
4341 dhd_prot_ring_reset(dhd, &prot->d2hring_tx_cpln); in dhd_prot_reset()
4342 dhd_prot_ring_reset(dhd, &prot->d2hring_rx_cpln); in dhd_prot_reset()
4346 dhd_prot_ring_reset(dhd, prot->h2dring_info_subn); in dhd_prot_reset()
4350 dhd_prot_ring_reset(dhd, prot->d2hring_info_cpln); in dhd_prot_reset()
4355 dhd_prot_ring_reset(dhd, prot->d2hring_edl); in dhd_prot_reset()
4360 dhd_dma_buf_reset(dhd, &prot->d2h_dma_scratch_buf); in dhd_prot_reset()
4362 dhd_dma_buf_reset(dhd, &prot->hmaptest.mem); in dhd_prot_reset()
4364 dhd_dma_buf_reset(dhd, &prot->retbuf); in dhd_prot_reset()
4365 dhd_dma_buf_reset(dhd, &prot->ioctbuf); in dhd_prot_reset()
4366 dhd_dma_buf_reset(dhd, &prot->host_bus_throughput_buf); in dhd_prot_reset()
4367 dhd_dma_buf_reset(dhd, &prot->hostts_req_buf); in dhd_prot_reset()
4368 dhd_dma_buf_reset(dhd, &prot->fw_trap_buf); in dhd_prot_reset()
4369 dhd_dma_buf_reset(dhd, &prot->host_scb_buf); in dhd_prot_reset()
4371 dhd_dma_buf_reset(dhd, &prot->snapshot_upload_buf); in dhd_prot_reset()
4374 dhd_dma_buf_reset(dhd, &prot->h2d_ifrm_indx_wr_buf); in dhd_prot_reset()
4377 dhd_dma_buf_reset(dhd, &prot->h2d_dma_indx_rd_buf); in dhd_prot_reset()
4378 dhd_dma_buf_reset(dhd, &prot->h2d_dma_indx_wr_buf); in dhd_prot_reset()
4379 dhd_dma_buf_reset(dhd, &prot->d2h_dma_indx_rd_buf); in dhd_prot_reset()
4380 dhd_dma_buf_reset(dhd, &prot->d2h_dma_indx_wr_buf); in dhd_prot_reset()
4400 OSL_ATOMIC_INIT(dhd->osh, &prot->active_tx_count); in dhd_prot_reset()
4413 if (dhd->flow_rings_inited) { in dhd_prot_reset()
4414 dhd_flow_rings_deinit(dhd); in dhd_prot_reset()
4420 dhd_prot_ring_reset(dhd, prot->h2dring_btlog_subn); in dhd_prot_reset()
4424 dhd_prot_ring_reset(dhd, prot->d2hring_btlog_cpln); in dhd_prot_reset()
4429 dhd_prot_ring_reset(dhd, prot->d2hring_hp2p_txcpl); in dhd_prot_reset()
4432 dhd_prot_ring_reset(dhd, prot->d2hring_hp2p_rxcpl); in dhd_prot_reset()
4437 DHD_NATIVE_TO_PKTID_RESET(dhd, prot->pktid_ctrl_map); in dhd_prot_reset()
4438 DHD_NATIVE_TO_PKTID_RESET(dhd, prot->pktid_rx_map); in dhd_prot_reset()
4439 DHD_NATIVE_TO_PKTID_RESET(dhd, prot->pktid_tx_map); in dhd_prot_reset()
4441 DHD_NATIVE_TO_PKTID_RESET_IOCTL(dhd, prot->pktid_map_handle_ioctl); in dhd_prot_reset()
4444 dhd->dma_stats.txdata = dhd->dma_stats.txdata_sz = 0; in dhd_prot_reset()
4445 dhd->dma_stats.rxdata = dhd->dma_stats.rxdata_sz = 0; in dhd_prot_reset()
4447 dhd->dma_stats.ioctl_rx = dhd->dma_stats.ioctl_rx_sz = 0; in dhd_prot_reset()
4449 dhd->dma_stats.event_rx = dhd->dma_stats.event_rx_sz = 0; in dhd_prot_reset()
4450 dhd->dma_stats.info_rx = dhd->dma_stats.info_rx_sz = 0; in dhd_prot_reset()
4451 dhd->dma_stats.tsbuf_rx = dhd->dma_stats.tsbuf_rx_sz = 0; in dhd_prot_reset()
4455 dhd_msgbuf_agg_h2d_db_timer_reset(dhd); in dhd_prot_reset()
4487 dhd_prot_rx_dataoffset(dhd_pub_t *dhd, uint32 rx_offset) in dhd_prot_rx_dataoffset() argument
4489 dhd_prot_t *prot = dhd->prot; in dhd_prot_rx_dataoffset()
4494 dhd_check_create_info_rings(dhd_pub_t *dhd) in dhd_check_create_info_rings() argument
4496 dhd_prot_t *prot = dhd->prot; in dhd_check_create_info_rings()
4501 if (dhd->submit_count_WAR) { in dhd_check_create_info_rings()
4502 ringid = dhd->bus->max_tx_flowrings + BCMPCIE_COMMON_MSGRINGS; in dhd_check_create_info_rings()
4509 ringid = dhd->bus->max_tx_flowrings + in dhd_check_create_info_rings()
4510 (dhd->bus->max_submission_rings - dhd->bus->max_tx_flowrings) + in dhd_check_create_info_rings()
4533 ret = dhd_prot_ring_attach(dhd, prot->h2dring_info_subn, "h2dinfo", in dhd_check_create_info_rings()
4558 ret = dhd_prot_ring_attach(dhd, prot->d2hring_info_cpln, "d2hinfo", in dhd_check_create_info_rings()
4564 dhd_prot_ring_detach(dhd, prot->h2dring_info_subn); in dhd_check_create_info_rings()
4580 dhd_prot_init_info_rings(dhd_pub_t *dhd) in dhd_prot_init_info_rings() argument
4582 dhd_prot_t *prot = dhd->prot; in dhd_prot_init_info_rings()
4585 if ((ret = dhd_check_create_info_rings(dhd)) != BCME_OK) { in dhd_prot_init_info_rings()
4597 ret = dhd_send_d2h_ringcreate(dhd, prot->d2hring_info_cpln, in dhd_prot_init_info_rings()
4611 ret = dhd_send_h2d_ringcreate(dhd, prot->h2dring_info_subn, in dhd_prot_init_info_rings()
4621 dhd_prot_detach_info_rings(dhd_pub_t *dhd) in dhd_prot_detach_info_rings() argument
4623 if (dhd->prot->h2dring_info_subn) { in dhd_prot_detach_info_rings()
4624 dhd_prot_ring_detach(dhd, dhd->prot->h2dring_info_subn); in dhd_prot_detach_info_rings()
4625 MFREE(dhd->prot->osh, dhd->prot->h2dring_info_subn, sizeof(msgbuf_ring_t)); in dhd_prot_detach_info_rings()
4627 if (dhd->prot->d2hring_info_cpln) { in dhd_prot_detach_info_rings()
4628 dhd_prot_ring_detach(dhd, dhd->prot->d2hring_info_cpln); in dhd_prot_detach_info_rings()
4629 MFREE(dhd->prot->osh, dhd->prot->d2hring_info_cpln, sizeof(msgbuf_ring_t)); in dhd_prot_detach_info_rings()
4635 dhd_check_create_hp2p_rings(dhd_pub_t *dhd) in dhd_check_create_hp2p_rings() argument
4637 dhd_prot_t *prot = dhd->prot; in dhd_check_create_hp2p_rings()
4642 ringid = dhd->bus->max_submission_rings + dhd->bus->max_completion_rings - 2; in dhd_check_create_hp2p_rings()
4654 ret = dhd_prot_ring_attach(dhd, prot->d2hring_hp2p_txcpl, "d2hhp2p_txcpl", in dhd_check_create_hp2p_rings()
4655 dhd_bus_get_hp2p_ring_max_size(dhd->bus, TRUE), D2HRING_TXCMPLT_ITEMSIZE, in dhd_check_create_hp2p_rings()
4681 ret = dhd_prot_ring_attach(dhd, prot->d2hring_hp2p_rxcpl, "d2hhp2p_rxcpl", in dhd_check_create_hp2p_rings()
4682 dhd_bus_get_hp2p_ring_max_size(dhd->bus, FALSE), D2HRING_RXCMPLT_ITEMSIZE, in dhd_check_create_hp2p_rings()
4712 dhd_prot_init_hp2p_rings(dhd_pub_t *dhd) in dhd_prot_init_hp2p_rings() argument
4714 dhd_prot_t *prot = dhd->prot; in dhd_prot_init_hp2p_rings()
4717 dhd->hp2p_ring_more = TRUE; in dhd_prot_init_hp2p_rings()
4719 dhd->hp2p_mf_enable = FALSE; in dhd_prot_init_hp2p_rings()
4721 if ((ret = dhd_check_create_hp2p_rings(dhd)) != BCME_OK) { in dhd_prot_init_hp2p_rings()
4734 ret = dhd_send_d2h_ringcreate(dhd, prot->d2hring_hp2p_txcpl, in dhd_prot_init_hp2p_rings()
4749 ret = dhd_send_d2h_ringcreate(dhd, prot->d2hring_hp2p_rxcpl, in dhd_prot_init_hp2p_rings()
4764 dhd_prot_detach_hp2p_rings(dhd_pub_t *dhd) in dhd_prot_detach_hp2p_rings() argument
4766 if (dhd->prot->d2hring_hp2p_txcpl) { in dhd_prot_detach_hp2p_rings()
4767 dhd_prot_ring_detach(dhd, dhd->prot->d2hring_hp2p_txcpl); in dhd_prot_detach_hp2p_rings()
4768 MFREE(dhd->prot->osh, dhd->prot->d2hring_hp2p_txcpl, sizeof(msgbuf_ring_t)); in dhd_prot_detach_hp2p_rings()
4769 dhd->prot->d2hring_hp2p_txcpl = NULL; in dhd_prot_detach_hp2p_rings()
4771 if (dhd->prot->d2hring_hp2p_rxcpl) { in dhd_prot_detach_hp2p_rings()
4772 dhd_prot_ring_detach(dhd, dhd->prot->d2hring_hp2p_rxcpl); in dhd_prot_detach_hp2p_rings()
4773 MFREE(dhd->prot->osh, dhd->prot->d2hring_hp2p_rxcpl, sizeof(msgbuf_ring_t)); in dhd_prot_detach_hp2p_rings()
4774 dhd->prot->d2hring_hp2p_rxcpl = NULL; in dhd_prot_detach_hp2p_rings()
4781 dhd_check_create_btlog_rings(dhd_pub_t *dhd) in dhd_check_create_btlog_rings() argument
4783 dhd_prot_t *prot = dhd->prot; in dhd_check_create_btlog_rings()
4787 if (dhd->submit_count_WAR) { in dhd_check_create_btlog_rings()
4788 ringid = dhd->bus->max_tx_flowrings + BCMPCIE_COMMON_MSGRINGS + 2; in dhd_check_create_btlog_rings()
4791 ringid = dhd->bus->max_tx_flowrings + in dhd_check_create_btlog_rings()
4792 (dhd->bus->max_submission_rings - dhd->bus->max_tx_flowrings) + in dhd_check_create_btlog_rings()
4815 ret = dhd_prot_ring_attach(dhd, prot->h2dring_btlog_subn, "h2dbtlog", in dhd_check_create_btlog_rings()
4834 if (dhd->submit_count_WAR) { in dhd_check_create_btlog_rings()
4842 ret = dhd_prot_ring_attach(dhd, prot->d2hring_btlog_cpln, "d2hbtlog", in dhd_check_create_btlog_rings()
4848 dhd_prot_ring_detach(dhd, prot->h2dring_btlog_subn); in dhd_check_create_btlog_rings()
4864 dhd_prot_init_btlog_rings(dhd_pub_t *dhd) in dhd_prot_init_btlog_rings() argument
4866 dhd_prot_t *prot = dhd->prot; in dhd_prot_init_btlog_rings()
4869 if ((ret = dhd_check_create_btlog_rings(dhd)) != BCME_OK) { in dhd_prot_init_btlog_rings()
4881 ret = dhd_send_d2h_ringcreate(dhd, prot->d2hring_btlog_cpln, in dhd_prot_init_btlog_rings()
4895 ret = dhd_send_h2d_ringcreate(dhd, prot->h2dring_btlog_subn, in dhd_prot_init_btlog_rings()
4905 dhd_prot_detach_btlog_rings(dhd_pub_t *dhd) in dhd_prot_detach_btlog_rings() argument
4907 if (dhd->prot->h2dring_btlog_subn) { in dhd_prot_detach_btlog_rings()
4908 dhd_prot_ring_detach(dhd, dhd->prot->h2dring_btlog_subn); in dhd_prot_detach_btlog_rings()
4909 MFREE(dhd->prot->osh, dhd->prot->h2dring_btlog_subn, sizeof(msgbuf_ring_t)); in dhd_prot_detach_btlog_rings()
4911 if (dhd->prot->d2hring_btlog_cpln) { in dhd_prot_detach_btlog_rings()
4912 dhd_prot_ring_detach(dhd, dhd->prot->d2hring_btlog_cpln); in dhd_prot_detach_btlog_rings()
4913 MFREE(dhd->prot->osh, dhd->prot->d2hring_btlog_cpln, sizeof(msgbuf_ring_t)); in dhd_prot_detach_btlog_rings()
4920 dhd_check_create_edl_rings(dhd_pub_t *dhd) in dhd_check_create_edl_rings() argument
4922 dhd_prot_t *prot = dhd->prot; in dhd_check_create_edl_rings()
4927 if (dhd->submit_count_WAR) { in dhd_check_create_edl_rings()
4928 ringid = dhd->bus->max_tx_flowrings + BCMPCIE_COMMON_MSGRINGS; in dhd_check_create_edl_rings()
4935 ringid = dhd->bus->max_tx_flowrings + in dhd_check_create_edl_rings()
4936 (dhd->bus->max_submission_rings - dhd->bus->max_tx_flowrings) + in dhd_check_create_edl_rings()
4956 ret = dhd_prot_ring_attach(dhd, prot->d2hring_edl, "d2hring_edl", in dhd_check_create_edl_rings()
4975 dhd_prot_init_edl_rings(dhd_pub_t *dhd) in dhd_prot_init_edl_rings() argument
4977 dhd_prot_t *prot = dhd->prot; in dhd_prot_init_edl_rings()
4980 if ((ret = dhd_check_create_edl_rings(dhd)) != BCME_OK) { in dhd_prot_init_edl_rings()
4992 ret = dhd_send_d2h_ringcreate(dhd, prot->d2hring_edl, in dhd_prot_init_edl_rings()
5004 dhd_prot_detach_edl_rings(dhd_pub_t *dhd) in dhd_prot_detach_edl_rings() argument
5006 if (dhd->prot->d2hring_edl) { in dhd_prot_detach_edl_rings()
5007 dhd_prot_ring_detach(dhd, dhd->prot->d2hring_edl); in dhd_prot_detach_edl_rings()
5008 MFREE(dhd->prot->osh, dhd->prot->d2hring_edl, sizeof(msgbuf_ring_t)); in dhd_prot_detach_edl_rings()
5009 dhd->prot->d2hring_edl = NULL; in dhd_prot_detach_edl_rings()
5018 int dhd_sync_with_dongle(dhd_pub_t *dhd) in dhd_sync_with_dongle() argument
5024 dhd_prot_t *prot = dhd->prot; in dhd_sync_with_dongle()
5031 ret = dhd_msgbuf_rxbuf_post_ts_bufs(dhd); in dhd_sync_with_dongle()
5042 ret = dhd_wl_ioctl_cmd(dhd, WLC_GET_VAR, buf, sizeof(buf), FALSE, 0); in dhd_sync_with_dongle()
5046 dhd->wlc_ver_major = ((wl_wlc_version_t*)buf)->wlc_ver_major; in dhd_sync_with_dongle()
5047 dhd->wlc_ver_minor = ((wl_wlc_version_t*)buf)->wlc_ver_minor; in dhd_sync_with_dongle()
5050 DHD_ERROR(("wlc_ver_major %d, wlc_ver_minor %d\n", dhd->wlc_ver_major, dhd->wlc_ver_minor)); in dhd_sync_with_dongle()
5055 ret = dhd_wl_ioctl_cmd(dhd, WLC_GET_VAR, buf, sizeof(buf), FALSE, 0); in dhd_sync_with_dongle()
5060 memcpy(dhd->mac.octet, buf, ETHER_ADDR_LEN); in dhd_sync_with_dongle()
5068 dhd_get_memdump_info(dhd); in dhd_sync_with_dongle()
5071 dhd_get_assert_info(dhd); in dhd_sync_with_dongle()
5076 ret = dhd_wl_ioctl_cmd(dhd, WLC_GET_REVINFO, &revinfo, sizeof(revinfo), FALSE, 0); in dhd_sync_with_dongle()
5092 ret = dhd_wl_ioctl_cmd(dhd, WLC_GET_VAR, buf, sizeof(buf), FALSE, 0); in dhd_sync_with_dongle()
5115 dhd_msgbuf_rxbuf_post(dhd, FALSE); /* alloc pkt ids */ in dhd_sync_with_dongle()
5117 DHD_SSSR_DUMP_INIT(dhd); in dhd_sync_with_dongle()
5119 dhd_process_cid_mac(dhd, TRUE); in dhd_sync_with_dongle()
5120 ret = dhd_preinit_ioctls(dhd); in dhd_sync_with_dongle()
5121 dhd_process_cid_mac(dhd, FALSE); in dhd_sync_with_dongle()
5123 dhd_sdtc_etb_init(dhd); in dhd_sync_with_dongle()
5127 if (FW_SUPPORTED(dhd, h2dlogts) || dhd->hp2p_capable) in dhd_sync_with_dongle()
5129 if (FW_SUPPORTED(dhd, h2dlogts)) in dhd_sync_with_dongle()
5133 if (dhd->hp2p_enable) { in dhd_sync_with_dongle()
5134 dhd->dhd_rte_time_sync_ms = DHD_H2D_LOG_TIME_STAMP_MATCH / 40; in dhd_sync_with_dongle()
5136 dhd->dhd_rte_time_sync_ms = DHD_H2D_LOG_TIME_STAMP_MATCH; in dhd_sync_with_dongle()
5139 dhd->dhd_rte_time_sync_ms = DHD_H2D_LOG_TIME_STAMP_MATCH; in dhd_sync_with_dongle()
5141 dhd->bus->dhd_rte_time_sync_count = OSL_SYSUPTIME_US(); in dhd_sync_with_dongle()
5143 dhd_h2d_log_time_sync(dhd); in dhd_sync_with_dongle()
5145 dhd->dhd_rte_time_sync_ms = 0; in dhd_sync_with_dongle()
5150 if (FW_SUPPORTED(dhd, host_sfhllc)) { in dhd_sync_with_dongle()
5151 dhd->host_sfhllc_supported = TRUE; in dhd_sync_with_dongle()
5153 dhd->host_sfhllc_supported = FALSE; in dhd_sync_with_dongle()
5158 dhd->iswl = TRUE; in dhd_sync_with_dongle()
5167 BCMFASTPATH(dhd_prot_print_metadata)(dhd_pub_t *dhd, void *ptr, int len) in BCMFASTPATH()
5257 BCMFASTPATH(dhd_prot_packet_free)(dhd_pub_t *dhd, void *pkt, uint8 pkttype, bool send) in BCMFASTPATH()
5265 PKTFREE_STATIC(dhd->osh, pkt, send); in BCMFASTPATH()
5267 PKTFREE(dhd->osh, pkt, send); in BCMFASTPATH()
5270 PKTFREE(dhd->osh, pkt, send); in BCMFASTPATH()
5281 BCMFASTPATH(dhd_prot_packet_get)(dhd_pub_t *dhd, uint32 pktid, uint8 pkttype, bool free_pktid) in BCMFASTPATH()
5291 PKTBUF = DHD_PKTID_TO_NATIVE(dhd, dhd->prot->pktid_ctrl_map, in BCMFASTPATH()
5294 PKTBUF = DHD_PKTID_TO_NATIVE_RSV(dhd, dhd->prot->pktid_ctrl_map, in BCMFASTPATH()
5298 PKTBUF = DHD_PKTID_TO_NATIVE(dhd, dhd->prot->pktid_ctrl_map, pktid, pa, in BCMFASTPATH()
5302 DMA_UNMAP(dhd->osh, pa, (uint) len, DMA_RX, 0, dmah); in BCMFASTPATH()
5307 dhd->dma_stats.ioctl_rx--; in BCMFASTPATH()
5308 dhd->dma_stats.ioctl_rx_sz -= len; in BCMFASTPATH()
5312 dhd->dma_stats.event_rx--; in BCMFASTPATH()
5313 dhd->dma_stats.event_rx_sz -= len; in BCMFASTPATH()
5316 dhd->dma_stats.info_rx--; in BCMFASTPATH()
5317 dhd->dma_stats.info_rx_sz -= len; in BCMFASTPATH()
5320 dhd->dma_stats.tsbuf_rx--; in BCMFASTPATH()
5321 dhd->dma_stats.tsbuf_rx_sz -= len; in BCMFASTPATH()
5332 BCMFASTPATH(dhd_prot_ioctl_ret_buffer_get)(dhd_pub_t *dhd, uint32 pktid, dhd_dma_buf_t *retbuf) in BCMFASTPATH()
5335 retbuf->va = DHD_PKTID_TO_NATIVE(dhd, dhd->prot->pktid_map_handle_ioctl, pktid, in BCMFASTPATH()
5378 BCMFASTPATH(dhd_msgbuf_rxbuf_post)(dhd_pub_t *dhd, bool use_rsv_pktid) in BCMFASTPATH()
5380 dhd_prot_t *prot = dhd->prot; in BCMFASTPATH()
5390 retcount = dhd_prot_rxbuf_post(dhd, fillbufs, use_rsv_pktid); in BCMFASTPATH()
5405 BCMFASTPATH(dhd_prot_rxbuf_post)(dhd_pub_t *dhd, uint16 count, bool use_rsv_pktid) in BCMFASTPATH()
5416 dhd_prot_t *prot = dhd->prot; in BCMFASTPATH()
5427 if (dhd_prot_inc_hostactive_devwake_assert(dhd->bus) != BCME_OK) in BCMFASTPATH()
5433 lcl_buf = MALLOC(dhd->osh, lcl_buf_size); in BCMFASTPATH()
5437 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus); in BCMFASTPATH()
5446 if ((p = PKTGET(dhd->osh, pktsz, FALSE)) == NULL) { in BCMFASTPATH()
5448 dhd->rx_pktgetfail++; in BCMFASTPATH()
5454 PKTPULL(dhd->osh, p, BCMEXTRAHDROOM); in BCMFASTPATH()
5456 pktlen[i] = PKTLEN(dhd->osh, p); in BCMFASTPATH()
5457 pa = DMA_MAP(dhd->osh, PKTDATA(dhd->osh, p), pktlen[i], DMA_RX, p, 0); in BCMFASTPATH()
5460 PKTFREE(dhd->osh, p, FALSE); in BCMFASTPATH()
5466 dhd->dma_stats.rxdata++; in BCMFASTPATH()
5467 dhd->dma_stats.rxdata_sz += pktlen[i]; in BCMFASTPATH()
5470 PKTPULL(dhd->osh, p, prot->rx_metadata_offset); in BCMFASTPATH()
5471 pktlen[i] = PKTLEN(dhd->osh, p); in BCMFASTPATH()
5484 dhd_prot_alloc_ring_space(dhd, ring, count, &alloced, TRUE); in BCMFASTPATH()
5500 pktid = DHD_NATIVE_TO_PKTID(dhd, dhd->prot->pktid_rx_map, p, pa, in BCMFASTPATH()
5509 if (dhd->prot->hmaptest_rx_active == HMAPTEST_D11_RX_ACTIVE) { in BCMFASTPATH()
5511 dhd->prot->hmap_rx_buf_va = (char *)dhd->prot->hmaptest.mem.va in BCMFASTPATH()
5512 + dhd->prot->hmaptest.offset; in BCMFASTPATH()
5514 dhd->prot->hmap_rx_buf_len = pktlen[i] + prot->rx_metadata_offset; in BCMFASTPATH()
5515 if ((dhd->prot->hmap_rx_buf_va + dhd->prot->hmap_rx_buf_len) > in BCMFASTPATH()
5516 ((char *)dhd->prot->hmaptest.mem.va + dhd->prot->hmaptest.mem.len)) { in BCMFASTPATH()
5519 dhd->prot->hmaptest_rx_active = HMAPTEST_D11_RX_INACTIVE; in BCMFASTPATH()
5520 dhd->prot->hmaptest.in_progress = FALSE; in BCMFASTPATH()
5522 pa = DMA_MAP(dhd->osh, dhd->prot->hmap_rx_buf_va, in BCMFASTPATH()
5523 dhd->prot->hmap_rx_buf_len, DMA_RX, p, 0); in BCMFASTPATH()
5525 dhd->prot->hmap_rx_buf_pa = pa; in BCMFASTPATH()
5526 dhd->prot->hmaptest_rx_pktid = pktid; in BCMFASTPATH()
5527 dhd->prot->hmaptest_rx_active = HMAPTEST_D11_RX_POSTED; in BCMFASTPATH()
5531 dhd->prot->hmap_rx_buf_va, (uint32)PHYSADDRLO(pa))); in BCMFASTPATH()
5533 PKTDATA(dhd->osh, p), (uint32)PHYSADDRLO(pktbuf_pa[i]))); in BCMFASTPATH()
5537 dhd->prot->tot_rxbufpost++; in BCMFASTPATH()
5560 DHD_PKTID_AUDIT(dhd, prot->pktid_rx_map, pktid, DHD_DUPLICATE_ALLOC); in BCMFASTPATH()
5568 PKTAUDIT(dhd->osh, p); in BCMFASTPATH()
5589 dhd_prot_ring_write_complete(dhd, ring, msg_start, alloced); in BCMFASTPATH()
5599 DMA_UNMAP(dhd->osh, pa, pktlen[i], DMA_RX, 0, DHD_DMAH_NULL); in BCMFASTPATH()
5600 PKTFREE(dhd->osh, p, FALSE); in BCMFASTPATH()
5603 MFREE(dhd->osh, lcl_buf, lcl_buf_size); in BCMFASTPATH()
5605 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus); in BCMFASTPATH()
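Lines 5405-5605 show dhd_prot_rxbuf_post() refilling the H2D rx-post ring. A condensed, per-buffer sketch of that flow based on the fragments above (error handling simplified; the work-item fill is elided; PKTTYPE_DATA_RX and the NULL dmah/secdma arguments are assumptions):

    static int
    example_post_one_rxbuf(dhd_pub_t *dhd, msgbuf_ring_t *ring, uint16 pktsz)
    {
        dhd_prot_t *prot = dhd->prot;
        void *p, *msg_start;
        dmaaddr_t pa;
        uint32 pktlen, pktid;
        uint16 alloced = 0;

        /* allocate and DMA-map one receive buffer */
        if ((p = PKTGET(dhd->osh, pktsz, FALSE)) == NULL) {
            dhd->rx_pktgetfail++;
            return BCME_NOMEM;
        }
        pktlen = PKTLEN(dhd->osh, p);
        pa = DMA_MAP(dhd->osh, PKTDATA(dhd->osh, p), pktlen, DMA_RX, p, 0);

        /* reserve one work item in the rx-post submission ring */
        msg_start = dhd_prot_alloc_ring_space(dhd, ring, 1, &alloced, TRUE);
        if (msg_start == NULL) {
            DMA_UNMAP(dhd->osh, pa, pktlen, DMA_RX, 0, DHD_DMAH_NULL);
            PKTFREE(dhd->osh, p, FALSE);
            return BCME_ERROR;
        }

        /* pair packet + mapping with a pktid the dongle will echo back */
        pktid = DHD_NATIVE_TO_PKTID(dhd, prot->pktid_rx_map, p, pa,
                pktlen, DMA_RX, NULL, NULL, PKTTYPE_DATA_RX);
        BCM_REFERENCE(pktid);

        /* ... fill the rx-post work item at msg_start with pktid/pa/pktlen ... */

        /* update the WR index and ring the doorbell */
        dhd_prot_ring_write_complete(dhd, ring, msg_start, alloced);
        return BCME_OK;
    }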
5613 dhd_prot_infobufpost(dhd_pub_t *dhd, msgbuf_ring_t *ring) in dhd_prot_infobufpost() argument
5617 dhd_prot_t *prot = dhd->prot; in dhd_prot_infobufpost()
5634 if (ring == dhd->prot->h2dring_info_subn) { in dhd_prot_infobufpost()
5641 else if (ring == dhd->prot->h2dring_btlog_subn) { in dhd_prot_infobufpost()
5661 if (dhd_prot_inc_hostactive_devwake_assert(dhd->bus) != BCME_OK) in dhd_prot_infobufpost()
5669 msg_start = (void *) dhd_prot_alloc_ring_space(dhd, ring, count, &alloced, FALSE); in dhd_prot_infobufpost()
5675 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus); in dhd_prot_infobufpost()
5690 p = PKTGET_STATIC(dhd->osh, pktsz, FALSE); in dhd_prot_infobufpost()
5692 p = PKTGET(dhd->osh, pktsz, FALSE); in dhd_prot_infobufpost()
5696 dhd->rx_pktgetfail++; in dhd_prot_infobufpost()
5699 pktlen = PKTLEN(dhd->osh, p); in dhd_prot_infobufpost()
5700 pa = DMA_MAP(dhd->osh, PKTDATA(dhd->osh, p), pktlen, DMA_RX, p, 0); in dhd_prot_infobufpost()
5702 DMA_UNMAP(dhd->osh, pa, pktlen, DMA_RX, 0, DHD_DMAH_NULL); in dhd_prot_infobufpost()
5704 PKTFREE_STATIC(dhd->osh, p, FALSE); in dhd_prot_infobufpost()
5706 PKTFREE(dhd->osh, p, FALSE); in dhd_prot_infobufpost()
5713 dhd->dma_stats.info_rx++; in dhd_prot_infobufpost()
5714 dhd->dma_stats.info_rx_sz += pktlen; in dhd_prot_infobufpost()
5716 pktlen = PKTLEN(dhd->osh, p); in dhd_prot_infobufpost()
5725 pktid = DHD_NATIVE_TO_PKTID(dhd, dhd->prot->pktid_ctrl_map, p, pa, in dhd_prot_infobufpost()
5730 DMA_UNMAP(dhd->osh, pa, pktlen, DMA_RX, 0, 0); in dhd_prot_infobufpost()
5733 PKTFREE_STATIC(dhd->osh, p, FALSE); in dhd_prot_infobufpost()
5735 PKTFREE(dhd->osh, p, FALSE); in dhd_prot_infobufpost()
5747 DHD_PKTID_AUDIT(dhd, prot->pktid_ctrl_map, pktid, DHD_DUPLICATE_ALLOC); in dhd_prot_infobufpost()
5758 PKTAUDIT(dhd->osh, p); in dhd_prot_infobufpost()
5778 if (ring == dhd->prot->h2dring_info_subn) { in dhd_prot_infobufpost()
5782 if (ring == dhd->prot->h2dring_btlog_subn) { in dhd_prot_infobufpost()
5786 dhd_prot_ring_write_complete(dhd, ring, msg_start, alloced); in dhd_prot_infobufpost()
5792 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus); in dhd_prot_infobufpost()
5800 alloc_ioctl_return_buffer(dhd_pub_t *dhd, dhd_dma_buf_t *retbuf) in alloc_ioctl_return_buffer() argument
5805 if ((err = dhd_dma_buf_alloc(dhd, retbuf, IOCT_RETBUF_SIZE)) != BCME_OK) { in alloc_ioctl_return_buffer()
5815 free_ioctl_return_buffer(dhd_pub_t *dhd, dhd_dma_buf_t *retbuf) in free_ioctl_return_buffer() argument
5825 dhd_dma_buf_free(dhd, retbuf); in free_ioctl_return_buffer()
5831 dhd_prot_rxbufpost_ctrl(dhd_pub_t *dhd, uint8 msg_type) in dhd_prot_rxbufpost_ctrl() argument
5838 dhd_prot_t *prot = dhd->prot; in dhd_prot_rxbufpost_ctrl()
5849 if (dhd->busstate == DHD_BUS_DOWN) { in dhd_prot_rxbufpost_ctrl()
5867 if (dhd_prot_inc_hostactive_devwake_assert(dhd->bus) != BCME_OK) { in dhd_prot_rxbufpost_ctrl()
5887 if (alloc_ioctl_return_buffer(dhd, &retbuf) != BCME_OK) { in dhd_prot_rxbufpost_ctrl()
5900 p = PKTGET_STATIC(dhd->osh, pktsz, FALSE); in dhd_prot_rxbufpost_ctrl()
5902 p = PKTGET(dhd->osh, pktsz, FALSE); in dhd_prot_rxbufpost_ctrl()
5908 dhd->rx_pktgetfail++; in dhd_prot_rxbufpost_ctrl()
5912 pktlen = PKTLEN(dhd->osh, p); in dhd_prot_rxbufpost_ctrl()
5913 pa = DMA_MAP(dhd->osh, PKTDATA(dhd->osh, p), pktlen, DMA_RX, p, 0); in dhd_prot_rxbufpost_ctrl()
5925 dhd->dma_stats.ioctl_rx++; in dhd_prot_rxbufpost_ctrl()
5926 dhd->dma_stats.ioctl_rx_sz += pktlen; in dhd_prot_rxbufpost_ctrl()
5930 dhd->dma_stats.event_rx++; in dhd_prot_rxbufpost_ctrl()
5931 dhd->dma_stats.event_rx_sz += pktlen; in dhd_prot_rxbufpost_ctrl()
5934 dhd->dma_stats.tsbuf_rx++; in dhd_prot_rxbufpost_ctrl()
5935 dhd->dma_stats.tsbuf_rx_sz += pktlen; in dhd_prot_rxbufpost_ctrl()
5948 dhd_prot_alloc_ring_space(dhd, ring, 1, &alloced, FALSE); in dhd_prot_rxbufpost_ctrl()
5959 DMA_UNMAP(dhd->osh, pa, pktlen, DMA_RX, 0, DHD_DMAH_NULL); in dhd_prot_rxbufpost_ctrl()
5969 map_handle = dhd->prot->pktid_map_handle_ioctl; in dhd_prot_rxbufpost_ctrl()
5970 pktid = DHD_NATIVE_TO_PKTID(dhd, map_handle, p, pa, pktlen, DMA_RX, dmah, in dhd_prot_rxbufpost_ctrl()
5975 map_handle = dhd->prot->pktid_ctrl_map; in dhd_prot_rxbufpost_ctrl()
5976 pktid = DHD_NATIVE_TO_PKTID(dhd, map_handle, in dhd_prot_rxbufpost_ctrl()
5992 DMA_UNMAP(dhd->osh, pa, pktlen, DMA_RX, 0, DHD_DMAH_NULL); in dhd_prot_rxbufpost_ctrl()
5998 DHD_PKTID_AUDIT(dhd, map_handle, pktid, DHD_DUPLICATE_ALLOC); in dhd_prot_rxbufpost_ctrl()
6022 DMA_UNMAP(dhd->osh, pa, pktlen, DMA_RX, 0, DHD_DMAH_NULL); in dhd_prot_rxbufpost_ctrl()
6029 rxbuf_post->host_buf_len = htol16((uint16)PKTLEN(dhd->osh, p)); in dhd_prot_rxbufpost_ctrl()
6037 PKTAUDIT(dhd->osh, p); in dhd_prot_rxbufpost_ctrl()
6040 dhd_prot_ring_write_complete(dhd, ring, rxbuf_post, 1); in dhd_prot_rxbufpost_ctrl()
6045 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus); in dhd_prot_rxbufpost_ctrl()
6052 free_ioctl_return_buffer(dhd, &retbuf); in dhd_prot_rxbufpost_ctrl()
6054 dhd_prot_packet_free(dhd, p, buf_type, FALSE); in dhd_prot_rxbufpost_ctrl()
6057 dhd_prot_packet_free(dhd, p, buf_type, FALSE); in dhd_prot_rxbufpost_ctrl()
6062 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus); in dhd_prot_rxbufpost_ctrl()
6068 dhd_msgbuf_rxbuf_post_ctrlpath(dhd_pub_t *dhd, uint8 msg_type, uint32 max_to_post) in dhd_msgbuf_rxbuf_post_ctrlpath() argument
6075 if (dhd->busstate == DHD_BUS_DOWN) { in dhd_msgbuf_rxbuf_post_ctrlpath()
6081 ret_val = dhd_prot_rxbufpost_ctrl(dhd, msg_type); in dhd_msgbuf_rxbuf_post_ctrlpath()
6091 dhd_msgbuf_rxbuf_post_ioctlresp_bufs(dhd_pub_t *dhd) in dhd_msgbuf_rxbuf_post_ioctlresp_bufs() argument
6093 dhd_prot_t *prot = dhd->prot; in dhd_msgbuf_rxbuf_post_ioctlresp_bufs()
6103 prot->cur_ioctlresp_bufs_posted += dhd_msgbuf_rxbuf_post_ctrlpath(dhd, in dhd_msgbuf_rxbuf_post_ioctlresp_bufs()
6108 dhd_msgbuf_rxbuf_post_event_bufs(dhd_pub_t *dhd) in dhd_msgbuf_rxbuf_post_event_bufs() argument
6110 dhd_prot_t *prot = dhd->prot; in dhd_msgbuf_rxbuf_post_event_bufs()
6119 prot->cur_event_bufs_posted += dhd_msgbuf_rxbuf_post_ctrlpath(dhd, in dhd_msgbuf_rxbuf_post_event_bufs()
6124 dhd_msgbuf_rxbuf_post_ts_bufs(dhd_pub_t *dhd) in dhd_msgbuf_rxbuf_post_ts_bufs() argument
6127 dhd_prot_t *prot = dhd->prot; in dhd_msgbuf_rxbuf_post_ts_bufs()
6143 prot->cur_ts_bufs_posted += dhd_msgbuf_rxbuf_post_ctrlpath(dhd, in dhd_msgbuf_rxbuf_post_ts_bufs()
6150 BCMFASTPATH(dhd_prot_process_msgbuf_infocpl)(dhd_pub_t *dhd, uint bound) in BCMFASTPATH()
6152 dhd_prot_t *prot = dhd->prot; in BCMFASTPATH()
6164 while (!dhd_is_device_removed(dhd)) { in BCMFASTPATH()
6168 if (dhd->hang_was_sent) { in BCMFASTPATH()
6173 if (dhd->smmu_fault_occurred) { in BCMFASTPATH()
6180 msg_addr = dhd_prot_get_read_addr(dhd, ring, &msg_len); in BCMFASTPATH()
6190 if (dhd_prot_process_msgtype(dhd, ring, msg_addr, msg_len) != BCME_OK) { in BCMFASTPATH()
6196 dhd_prot_upd_read_idx(dhd, ring); in BCMFASTPATH()
6210 BCMFASTPATH(dhd_prot_process_msgbuf_btlogcpl)(dhd_pub_t *dhd, uint bound) in BCMFASTPATH()
6212 dhd_prot_t *prot = dhd->prot; in BCMFASTPATH()
6223 while (!dhd_is_device_removed(dhd)) { in BCMFASTPATH()
6227 if (dhd_query_bus_erros(dhd)) { in BCMFASTPATH()
6232 if (dhd->hang_was_sent) { in BCMFASTPATH()
6237 if (dhd->smmu_fault_occurred) { in BCMFASTPATH()
6243 msg_addr = dhd_prot_get_read_addr(dhd, ring, &msg_len); in BCMFASTPATH()
6252 if (dhd_prot_process_msgtype(dhd, ring, msg_addr, msg_len) != BCME_OK) { in BCMFASTPATH()
6258 dhd_prot_upd_read_idx(dhd, ring); in BCMFASTPATH()
6273 dhd_prot_process_msgbuf_edl(dhd_pub_t *dhd) in dhd_prot_process_msgbuf_edl() argument
6275 dhd_prot_t *prot = dhd->prot; in dhd_prot_process_msgbuf_edl()
6292 if (dhd_query_bus_erros(dhd)) { in dhd_prot_process_msgbuf_edl()
6296 if (dhd->hang_was_sent) { in dhd_prot_process_msgbuf_edl()
6307 if (dhd->dma_d2h_ring_upd_support) { in dhd_prot_process_msgbuf_edl()
6309 ring->wr = dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_WR_UPD, ring->idx); in dhd_prot_process_msgbuf_edl()
6311 dhd_bus_cmn_readshared(dhd->bus, &ring->wr, RING_WR_UPD, ring->idx); in dhd_prot_process_msgbuf_edl()
6330 dhd->busstate, dhd->bus->wait_for_d3_ack)); in dhd_prot_process_msgbuf_edl()
6334 dhd->bus->read_shm_fail = TRUE; in dhd_prot_process_msgbuf_edl()
6338 if (dhd->memdump_enabled) { in dhd_prot_process_msgbuf_edl()
6340 dhd->memdump_type = DUMP_TYPE_RESUMED_ON_INVALID_RING_RDWR; in dhd_prot_process_msgbuf_edl()
6341 dhd_bus_mem_dump(dhd); in dhd_prot_process_msgbuf_edl()
6346 dhd_schedule_reset(dhd); in dhd_prot_process_msgbuf_edl()
6357 dhd_schedule_logtrace(dhd->info); in dhd_prot_process_msgbuf_edl()
6367 dhd_prot_process_edl_complete(dhd_pub_t *dhd, void *evt_decode_data) in dhd_prot_process_edl_complete() argument
6380 if (!dhd || !dhd->prot) in dhd_prot_process_edl_complete()
6383 prot = dhd->prot; in dhd_prot_process_edl_complete()
6390 if (dhd->hang_was_sent) { in dhd_prot_process_edl_complete()
6432 if ((err = dhd->prot->d2h_edl_sync_cb(dhd, ring, msg)) != BCME_OK) { in dhd_prot_process_edl_complete()
6453 err = dhd_event_logtrace_process_edl(dhd, msg_addr, evt_decode_data); in dhd_prot_process_edl_complete()
6478 DHD_GENERAL_LOCK(dhd, flags); in dhd_prot_process_edl_complete()
6479 if (DHD_BUS_CHECK_SUSPEND_OR_ANY_SUSPEND_IN_PROGRESS(dhd)) { in dhd_prot_process_edl_complete()
6481 __FUNCTION__, dhd->busstate, dhd->dhd_bus_busy_state)); in dhd_prot_process_edl_complete()
6482 DHD_GENERAL_UNLOCK(dhd, flags); in dhd_prot_process_edl_complete()
6484 DHD_GENERAL_UNLOCK(dhd, flags); in dhd_prot_process_edl_complete()
6485 DHD_EDL_RING_TCM_RD_UPDATE(dhd); in dhd_prot_process_edl_complete()
6527 dhd_prot_edl_ring_tcm_rd_update(dhd_pub_t *dhd) in dhd_prot_edl_ring_tcm_rd_update() argument
6533 if (!dhd) in dhd_prot_edl_ring_tcm_rd_update()
6536 prot = dhd->prot; in dhd_prot_edl_ring_tcm_rd_update()
6542 dhd_prot_upd_read_idx(dhd, ring); in dhd_prot_edl_ring_tcm_rd_update()
6544 if (dhd->dma_h2d_ring_upd_support && in dhd_prot_edl_ring_tcm_rd_update()
6545 !IDMA_ACTIVE(dhd)) { in dhd_prot_edl_ring_tcm_rd_update()
6546 dhd_prot_ring_doorbell(dhd, DHD_RDPTR_UPDATE_H2D_DB_MAGIC(ring)); in dhd_prot_edl_ring_tcm_rd_update()
6552 dhd_prot_rx_frame(dhd_pub_t *dhd, void *pkt, int ifidx, uint pkt_count) in dhd_prot_rx_frame() argument
6556 if (dhd_read_lb_rxp(dhd) == 1) { in dhd_prot_rx_frame()
6557 dhd_lb_rx_pkt_enqueue(dhd, pkt, ifidx); in dhd_prot_rx_frame()
6561 dhd_bus_rx_frame(dhd->bus, pkt, ifidx, pkt_count); in dhd_prot_rx_frame()
6565 static int dhd_prot_lb_rxp_flow_ctrl(dhd_pub_t *dhd) in dhd_prot_lb_rxp_flow_ctrl() argument
6567 if ((dhd->lb_rxp_stop_thr == 0) || (dhd->lb_rxp_strt_thr == 0)) { in dhd_prot_lb_rxp_flow_ctrl()
6572 if ((dhd_lb_rxp_process_qlen(dhd) >= dhd->lb_rxp_stop_thr) && in dhd_prot_lb_rxp_flow_ctrl()
6573 (!atomic_read(&dhd->lb_rxp_flow_ctrl))) { in dhd_prot_lb_rxp_flow_ctrl()
6574 atomic_set(&dhd->lb_rxp_flow_ctrl, TRUE); in dhd_prot_lb_rxp_flow_ctrl()
6576 dhd->lb_rxp_stop_thr_hitcnt++; in dhd_prot_lb_rxp_flow_ctrl()
6579 dhd_lb_rxp_process_qlen(dhd), dhd->lb_rxp_stop_thr)); in dhd_prot_lb_rxp_flow_ctrl()
6580 } else if ((dhd_lb_rxp_process_qlen(dhd) <= dhd->lb_rxp_strt_thr) && in dhd_prot_lb_rxp_flow_ctrl()
6581 (atomic_read(&dhd->lb_rxp_flow_ctrl))) { in dhd_prot_lb_rxp_flow_ctrl()
6582 atomic_set(&dhd->lb_rxp_flow_ctrl, FALSE); in dhd_prot_lb_rxp_flow_ctrl()
6584 dhd->lb_rxp_strt_thr_hitcnt++; in dhd_prot_lb_rxp_flow_ctrl()
6587 dhd_lb_rxp_process_qlen(dhd), dhd->lb_rxp_strt_thr)); in dhd_prot_lb_rxp_flow_ctrl()
6590 return atomic_read(&dhd->lb_rxp_flow_ctrl); in dhd_prot_lb_rxp_flow_ctrl()
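
The dhd_prot_lb_rxp_flow_ctrl() references above implement hysteresis-style flow control: rx processing is throttled once the load-balanced queue reaches lb_rxp_stop_thr and resumed only after it drains to lb_rxp_strt_thr. A self-contained sketch of that logic, with queue_len() and the struct below as illustrative stand-ins:

    #include <stdbool.h>
    #include <stdatomic.h>

    typedef struct {
        unsigned    stop_thr;         /* throttle at/above this queue depth */
        unsigned    start_thr;        /* resume at/below this queue depth   */
        atomic_bool flow_ctrl_on;
    } rxp_fc_t;

    extern unsigned queue_len(void);  /* cf. dhd_lb_rxp_process_qlen() */

    static bool rxp_flow_ctrl(rxp_fc_t *fc)
    {
        if (fc->stop_thr == 0 || fc->start_thr == 0)
            return false;                              /* feature disabled */

        unsigned qlen = queue_len();
        if (qlen >= fc->stop_thr && !atomic_load(&fc->flow_ctrl_on))
            atomic_store(&fc->flow_ctrl_on, true);     /* queue too deep: throttle */
        else if (qlen <= fc->start_thr && atomic_load(&fc->flow_ctrl_on))
            atomic_store(&fc->flow_ctrl_on, false);    /* queue drained: resume */

        return atomic_load(&fc->flow_ctrl_on);
    }
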
6596 BCMFASTPATH(dhd_prot_process_msgbuf_rxcpl)(dhd_pub_t *dhd, uint bound, int ringtype) in BCMFASTPATH()
6600 dhd_prot_t *prot = dhd->prot; in BCMFASTPATH()
6620 if (dhd_prot_lb_rxp_flow_ctrl(dhd)) { in BCMFASTPATH()
6629 if (dhdpcie_runtime_bus_wake(dhd, FALSE, dhd_prot_process_msgbuf_rxcpl)) { in BCMFASTPATH()
6630 dhd->rx_pending_due_to_rpm = TRUE; in BCMFASTPATH()
6633 dhd->rx_pending_due_to_rpm = FALSE; in BCMFASTPATH()
6644 if (dhd_is_device_removed(dhd)) in BCMFASTPATH()
6647 if (dhd_query_bus_erros(dhd)) in BCMFASTPATH()
6650 if (dhd->hang_was_sent) in BCMFASTPATH()
6653 if (dhd->smmu_fault_occurred) { in BCMFASTPATH()
6664 msg_addr = dhd_prot_get_read_addr(dhd, ring, &msg_len); in BCMFASTPATH()
6674 sync = prot->d2h_sync_cb(dhd, ring, &msg->cmn_hdr, item_len); in BCMFASTPATH()
6696 DHD_PKTID_AUDIT_RING_DEBUG(dhd, dhd->prot->pktid_rx_map, pktid, in BCMFASTPATH()
6700 pkt = DHD_PKTID_TO_NATIVE(dhd, prot->pktid_rx_map, pktid, pa, in BCMFASTPATH()
6703 if (!pkt || (dhd_check_shinfo_nrfrags(dhd, pkt, &pa, pktid) != BCME_OK)) { in BCMFASTPATH()
6708 dhd->prot->tot_rxcpl++; in BCMFASTPATH()
6710 DMA_UNMAP(dhd->osh, pa, (uint) len, DMA_RX, 0, dmah); in BCMFASTPATH()
6713 dhd->dma_stats.rxdata--; in BCMFASTPATH()
6714 dhd->dma_stats.rxdata_sz -= len; in BCMFASTPATH()
6717 if ((dhd->prot->hmaptest_rx_active == HMAPTEST_D11_RX_POSTED) && in BCMFASTPATH()
6718 (pktid == dhd->prot->hmaptest_rx_pktid)) { in BCMFASTPATH()
6721 ptr = PKTDATA(dhd->osh, pkt) - (prot->rx_metadata_offset); in BCMFASTPATH()
6722 DMA_UNMAP(dhd->osh, dhd->prot->hmap_rx_buf_pa, in BCMFASTPATH()
6723 (uint)dhd->prot->hmap_rx_buf_len, DMA_RX, 0, dmah); in BCMFASTPATH()
6729 dhd->prot->hmap_rx_buf_va, in BCMFASTPATH()
6730 (uint32)PHYSADDRLO(dhd->prot->hmap_rx_buf_pa))); in BCMFASTPATH()
6732 PKTDATA(dhd->osh, pkt), (uint32)PHYSADDRLO(pa))); in BCMFASTPATH()
6733 memcpy(ptr, dhd->prot->hmap_rx_buf_va, dhd->prot->hmap_rx_buf_len); in BCMFASTPATH()
6734 dhd->prot->hmaptest_rx_active = HMAPTEST_D11_RX_INACTIVE; in BCMFASTPATH()
6735 dhd->prot->hmap_rx_buf_va = NULL; in BCMFASTPATH()
6736 dhd->prot->hmap_rx_buf_len = 0; in BCMFASTPATH()
6737 PHYSADDRHISET(dhd->prot->hmap_rx_buf_pa, 0); in BCMFASTPATH()
6738 PHYSADDRLOSET(dhd->prot->hmap_rx_buf_pa, 0); in BCMFASTPATH()
6747 msg->cmn_hdr.flags, PKTDATA(dhd->osh, pkt), in BCMFASTPATH()
6759 ptr = PKTDATA(dhd->osh, pkt) - (prot->rx_metadata_offset); in BCMFASTPATH()
6762 dhd_prot_print_metadata(dhd, ptr, msg->metadata_len); in BCMFASTPATH()
6770 PKTPULL(dhd->osh, pkt, ltoh16(msg->data_offset)); in BCMFASTPATH()
6774 PKTPULL(dhd->osh, pkt, prot->rx_dataoffset); in BCMFASTPATH()
6777 PKTSETLEN(dhd->osh, pkt, ltoh16(msg->data_len)); in BCMFASTPATH()
6779 if (dhd_get_pktts_enab(dhd) == TRUE) { in BCMFASTPATH()
6791 dhd_msgbuf_send_msg_rx_ts(dhd, pkt, fwr1, fwr2); in BCMFASTPATH()
6798 if (dhd_monitor_enabled(dhd, ifidx)) { in BCMFASTPATH()
6800 dhd_rx_mon_pkt(dhd, msg, pkt, ifidx); in BCMFASTPATH()
6820 PKTSETNEXT(dhd->osh, prevpkt, pkt); in BCMFASTPATH()
6826 if (dhd->hp2p_capable && ring == prot->d2hring_hp2p_rxcpl) { in BCMFASTPATH()
6830 dhd_update_hp2p_rxstats(dhd, msg); in BCMFASTPATH()
6835 if (dhd->prot->rx_ts_log_enabled) { in BCMFASTPATH()
6840 dhd_parse_proto(PKTDATA(dhd->osh, pkt), &parse); in BCMFASTPATH()
6843 dhd_timesync_log_rx_timestamp(dhd->ts, ifidx, in BCMFASTPATH()
6849 PKTAUDIT(dhd->osh, pkt); in BCMFASTPATH()
6862 dhd_prot_upd_read_idx(dhd, ring); in BCMFASTPATH()
6868 nextpkt = PKTNEXT(dhd->osh, pkt); in BCMFASTPATH()
6869 PKTSETNEXT(dhd->osh, pkt, NULL); in BCMFASTPATH()
6871 dhd_rxchain_frame(dhd, pkt, ifidx); in BCMFASTPATH()
6873 dhd_prot_rx_frame(dhd, pkt, ifidx, 1); in BCMFASTPATH()
6879 dhd_rxchain_frame(dhd, pkt_newidx, if_newidx); in BCMFASTPATH()
6881 dhd_prot_rx_frame(dhd, pkt_newidx, if_newidx, 1); in BCMFASTPATH()
6888 dhd_prot_return_rxbuf(dhd, ring, 0, pkt_cnt); in BCMFASTPATH()
6891 dhd_rxchain_commit(dhd); in BCMFASTPATH()
6905 !(dhd_monitor_enabled(dhd, ifidx)) && in BCMFASTPATH()
6908 DHD_LB_DISPATCH_RX_PROCESS(dhd); in BCMFASTPATH()
6919 dhd_prot_update_txflowring(dhd_pub_t *dhd, uint16 flowid, void *msgring) in dhd_prot_update_txflowring() argument
6928 if (dhd->dma_d2h_ring_upd_support) { in dhd_prot_update_txflowring()
6929 ring->rd = dhd_prot_dma_indx_get(dhd, H2D_DMA_INDX_RD_UPD, ring->idx); in dhd_prot_update_txflowring()
6936 dhd_bus_schedule_queue(dhd->bus, flowid, TRUE); /* from queue to flowring */ in dhd_prot_update_txflowring()
6941 BCMFASTPATH(dhd_prot_process_msgbuf_txcpl)(dhd_pub_t *dhd, uint bound, int ringtype) in BCMFASTPATH()
6949 if (ringtype == DHD_HP2P_RING && dhd->prot->d2hring_hp2p_txcpl) in BCMFASTPATH()
6950 ring = dhd->prot->d2hring_hp2p_txcpl; in BCMFASTPATH()
6953 ring = &dhd->prot->d2hring_tx_cpln; in BCMFASTPATH()
6956 while (!dhd_is_device_removed(dhd)) { in BCMFASTPATH()
6960 if (dhd_query_bus_erros(dhd)) { in BCMFASTPATH()
6965 if (dhd->hang_was_sent) { in BCMFASTPATH()
6970 if (dhd->smmu_fault_occurred) { in BCMFASTPATH()
6977 msg_addr = dhd_prot_get_read_addr(dhd, ring, &msg_len); in BCMFASTPATH()
6988 if (dhd_prot_process_msgtype(dhd, ring, msg_addr, msg_len) != BCME_OK) { in BCMFASTPATH()
6994 dhd_prot_upd_read_idx(dhd, ring); in BCMFASTPATH()
7007 if (dhd->dma_h2d_ring_upd_support && !IDMA_ACTIVE(dhd)) { in BCMFASTPATH()
7008 dhd_prot_ring_doorbell(dhd, DHD_RDPTR_UPDATE_H2D_DB_MAGIC(ring)); in BCMFASTPATH()
7009 dhd->prot->txcpl_db_cnt++; in BCMFASTPATH()
7016 BCMFASTPATH(dhd_prot_process_trapbuf)(dhd_pub_t *dhd) in BCMFASTPATH()
7019 dhd_dma_buf_t *trap_addr = &dhd->prot->fw_trap_buf; in BCMFASTPATH()
7033 if (dhd->db7_trap.fw_db7w_trap_inprogress) { in BCMFASTPATH()
7041 if (dhd->extended_trap_data) { in BCMFASTPATH()
7044 memcpy(dhd->extended_trap_data, (uint32 *)trap_addr->va, in BCMFASTPATH()
7047 if (dhd->db7_trap.fw_db7w_trap_inprogress == FALSE) { in BCMFASTPATH()
7054 dhd->dongle_trap_due_to_bt = TRUE; in BCMFASTPATH()
7064 BCMFASTPATH(dhd_prot_process_ctrlbuf)(dhd_pub_t *dhd) in BCMFASTPATH()
7066 dhd_prot_t *prot = dhd->prot; in BCMFASTPATH()
7071 while (!dhd_is_device_removed(dhd)) { in BCMFASTPATH()
7075 if (dhd_query_bus_erros(dhd)) { in BCMFASTPATH()
7079 if (dhd->hang_was_sent) { in BCMFASTPATH()
7083 if (dhd->smmu_fault_occurred) { in BCMFASTPATH()
7089 msg_addr = dhd_prot_get_read_addr(dhd, ring, &msg_len); in BCMFASTPATH()
7098 if (dhd_prot_process_msgtype(dhd, ring, msg_addr, msg_len) != BCME_OK) { in BCMFASTPATH()
7104 dhd_prot_upd_read_idx(dhd, ring); in BCMFASTPATH()
7116 BCMFASTPATH(dhd_prot_process_msgtype)(dhd_pub_t *dhd, msgbuf_ring_t *ring, uint8 *buf, uint32 len) in BCMFASTPATH()
7133 if (dhd->hang_was_sent) { in BCMFASTPATH()
7138 if (dhd->smmu_fault_occurred) { in BCMFASTPATH()
7146 msg_type = dhd->prot->d2h_sync_cb(dhd, ring, msg, item_len); in BCMFASTPATH()
7181 if (ring == dhd->prot->d2hring_info_cpln) { in BCMFASTPATH()
7182 if (!dhd->prot->infobufpost) { in BCMFASTPATH()
7187 dhd->prot->infobufpost--; in BCMFASTPATH()
7188 dhd_prot_infobufpost(dhd, dhd->prot->h2dring_info_subn); in BCMFASTPATH()
7189 dhd_prot_process_infobuf_complete(dhd, buf); in BCMFASTPATH()
7192 else if (ring == dhd->prot->d2hring_btlog_cpln) { in BCMFASTPATH()
7195 if (!dhd->prot->btlogbufpost) { in BCMFASTPATH()
7201 dhd->prot->btlogbufpost--; in BCMFASTPATH()
7203 dhd_prot_infobufpost(dhd, dhd->prot->h2dring_btlog_subn); in BCMFASTPATH()
7205 dhd_prot_process_btlog_complete(dhd, buf); in BCMFASTPATH()
7211 table_lookup[msg_type](dhd, buf); in BCMFASTPATH()
7225 dhd_rxchain_commit(dhd); in BCMFASTPATH()
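
The dhd_prot_process_msgtype() references above walk a D2H completion buffer item by item: each item's common header is validated via the d2h sync callback and its msg_type indexes a handler table (table_lookup[msg_type](dhd, buf)). A compressed sketch of that dispatch loop, where msg_hdr_t, handler_t and handle_noop are illustrative stand-ins:

    #include <stdint.h>
    #include <stddef.h>

    typedef struct { uint8_t msg_type; uint8_t if_id; uint16_t flags; } msg_hdr_t;
    typedef void (*handler_t)(void *ctx, const msg_hdr_t *msg);

    static void handle_noop(void *ctx, const msg_hdr_t *msg) { (void)ctx; (void)msg; }

    #define MSG_TYPE_MAX 4
    static handler_t handler_table[MSG_TYPE_MAX] = {
        handle_noop, handle_noop, handle_noop, handle_noop,
    };

    /* Walk 'len' bytes of completion items, each 'item_len' bytes long,
     * dispatching on the per-item message type. */
    static int process_msgtype(void *ctx, uint8_t *buf, uint32_t len, uint16_t item_len)
    {
        while (len >= item_len) {
            const msg_hdr_t *msg = (const msg_hdr_t *)buf;
            if (msg->msg_type >= MSG_TYPE_MAX)
                return -1;                   /* unknown type: stop processing */
            handler_table[msg->msg_type](ctx, msg);
            buf += item_len;
            len -= item_len;
        }
        return 0;
    }
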
7232 dhd_prot_noop(dhd_pub_t *dhd, void *msg) in dhd_prot_noop() argument
7239 dhd_prot_ringstatus_process(dhd_pub_t *dhd, void *msg) in dhd_prot_ringstatus_process() argument
7261 if (dhd->prot->h2dring_info_subn != NULL) { in dhd_prot_ringstatus_process()
7262 if (dhd->prot->h2dring_info_subn->create_pending == TRUE) { in dhd_prot_ringstatus_process()
7264 dhd->prot->h2dring_info_subn->create_pending = FALSE; in dhd_prot_ringstatus_process()
7274 if (dhd->prot->d2hring_info_cpln != NULL) { in dhd_prot_ringstatus_process()
7275 if (dhd->prot->d2hring_info_cpln->create_pending == TRUE) { in dhd_prot_ringstatus_process()
7277 dhd->prot->d2hring_info_cpln->create_pending = FALSE; in dhd_prot_ringstatus_process()
7288 if (dhd->prot->h2dring_btlog_subn != NULL) { in dhd_prot_ringstatus_process()
7289 if (dhd->prot->h2dring_btlog_subn->create_pending == TRUE) { in dhd_prot_ringstatus_process()
7291 dhd->prot->h2dring_btlog_subn->create_pending = FALSE; in dhd_prot_ringstatus_process()
7301 if (dhd->prot->d2hring_btlog_cpln != NULL) { in dhd_prot_ringstatus_process()
7302 if (dhd->prot->d2hring_btlog_cpln->create_pending == TRUE) { in dhd_prot_ringstatus_process()
7304 dhd->prot->d2hring_btlog_cpln->create_pending = FALSE; in dhd_prot_ringstatus_process()
7316 if (dhd->prot->d2hring_hp2p_txcpl != NULL) { in dhd_prot_ringstatus_process()
7317 if (dhd->prot->d2hring_hp2p_txcpl->create_pending == TRUE) { in dhd_prot_ringstatus_process()
7319 dhd->prot->d2hring_hp2p_txcpl->create_pending = FALSE; in dhd_prot_ringstatus_process()
7329 if (dhd->prot->d2hring_hp2p_rxcpl != NULL) { in dhd_prot_ringstatus_process()
7330 if (dhd->prot->d2hring_hp2p_rxcpl->create_pending == TRUE) { in dhd_prot_ringstatus_process()
7332 dhd->prot->d2hring_hp2p_rxcpl->create_pending = FALSE; in dhd_prot_ringstatus_process()
7350 dhd_prot_genstatus_process(dhd_pub_t *dhd, void *msg) in dhd_prot_genstatus_process() argument
7366 dhd_prot_ioctack_process(dhd_pub_t *dhd, void *msg) in dhd_prot_ioctack_process() argument
7378 DHD_PKTID_AUDIT_RING_DEBUG(dhd, dhd->prot->pktid_ctrl_map, pktid, in dhd_prot_ioctack_process()
7381 DHD_PKTID_AUDIT_RING_DEBUG(dhd, dhd->prot->pktid_map_handle_ioctl, pktid, in dhd_prot_ioctack_process()
7387 dhd->prot->ioctl_ack_time = OSL_LOCALTIME_NS(); in dhd_prot_ioctack_process()
7389 DHD_GENERAL_LOCK(dhd, flags); in dhd_prot_ioctack_process()
7390 if ((dhd->prot->ioctl_state & MSGBUF_IOCTL_ACK_PENDING) && in dhd_prot_ioctack_process()
7391 (dhd->prot->ioctl_state & MSGBUF_IOCTL_RESP_PENDING)) { in dhd_prot_ioctack_process()
7392 dhd->prot->ioctl_state &= ~MSGBUF_IOCTL_ACK_PENDING; in dhd_prot_ioctack_process()
7395 __FUNCTION__, dhd->prot->ioctl_state, dhd->prot->ioctl_trans_id)); in dhd_prot_ioctack_process()
7399 DHD_GENERAL_UNLOCK(dhd, flags); in dhd_prot_ioctack_process()
7410 dhd_stop_bus_timer(dhd); in dhd_prot_ioctack_process()
7417 dhd_prot_ioctcmplt_process(dhd_pub_t *dhd, void *msg) in dhd_prot_ioctcmplt_process() argument
7419 dhd_prot_t *prot = dhd->prot; in dhd_prot_ioctcmplt_process()
7433 if (dhd->dhd_induce_error == DHD_INDUCE_IOCTL_TIMEOUT) { in dhd_prot_ioctcmplt_process()
7444 DHD_PKTID_AUDIT_RING_DEBUG(dhd, prot->pktid_ctrl_map, pkt_id, in dhd_prot_ioctcmplt_process()
7447 DHD_PKTID_AUDIT_RING_DEBUG(dhd, prot->pktid_map_handle_ioctl, pkt_id, in dhd_prot_ioctcmplt_process()
7452 DHD_GENERAL_LOCK(dhd, flags); in dhd_prot_ioctcmplt_process()
7456 __FUNCTION__, dhd->prot->ioctl_state, dhd->prot->ioctl_trans_id)); in dhd_prot_ioctcmplt_process()
7459 DHD_GENERAL_UNLOCK(dhd, flags); in dhd_prot_ioctcmplt_process()
7463 dhd->prot->ioctl_cmplt_time = OSL_LOCALTIME_NS(); in dhd_prot_ioctcmplt_process()
7467 DHD_GENERAL_UNLOCK(dhd, flags); in dhd_prot_ioctcmplt_process()
7470 pkt = dhd_prot_packet_get(dhd, pkt_id, PKTTYPE_IOCTL_RX, TRUE); in dhd_prot_ioctcmplt_process()
7472 dhd_prot_ioctl_ret_buffer_get(dhd, pkt_id, &retbuf); in dhd_prot_ioctcmplt_process()
7491 dhd_stop_cmd_timer(dhd); in dhd_prot_ioctcmplt_process()
7493 dhd_wakeup_ioctl_event(dhd, IOCTL_RETURN_ON_ERROR); in dhd_prot_ioctcmplt_process()
7494 dhd_prot_debug_info_print(dhd); in dhd_prot_ioctcmplt_process()
7496 if (dhd->memdump_enabled) { in dhd_prot_ioctcmplt_process()
7498 dhd->memdump_type = DUMP_TYPE_TRANS_ID_MISMATCH; in dhd_prot_ioctcmplt_process()
7499 dhd_bus_mem_dump(dhd); in dhd_prot_ioctcmplt_process()
7504 dhd_schedule_reset(dhd); in dhd_prot_ioctcmplt_process()
7508 dhd_xt_id = dhd_get_request_id(dhd); in dhd_prot_ioctcmplt_process()
7510 dhd_stop_cmd_timer(dhd); in dhd_prot_ioctcmplt_process()
7521 bcopy(PKTDATA(dhd->osh, pkt), prot->retbuf.va, prot->ioctl_resplen); in dhd_prot_ioctcmplt_process()
7528 dhd_wakeup_ioctl_event(dhd, IOCTL_RETURN_ON_SUCCESS); in dhd_prot_ioctcmplt_process()
7532 dhd_prot_packet_free(dhd, pkt, in dhd_prot_ioctcmplt_process()
7535 free_ioctl_return_buffer(dhd, &retbuf); in dhd_prot_ioctcmplt_process()
7543 dhd_msgbuf_rxbuf_post_ioctlresp_bufs(dhd); in dhd_prot_ioctcmplt_process()
7547 dhd_prot_check_tx_resource(dhd_pub_t *dhd) in dhd_prot_check_tx_resource() argument
7549 return dhd->prot->no_tx_resource; in dhd_prot_check_tx_resource()
7850 BCMFASTPATH(dhd_prot_txstatus_process)(dhd_pub_t *dhd, void *msg) in BCMFASTPATH()
7852 dhd_prot_t *prot = dhd->prot; in BCMFASTPATH()
7862 msgbuf_ring_t *ring = &dhd->prot->d2hring_tx_cpln; in BCMFASTPATH()
7890 if ((dhd->memdump_type == DUMP_TYPE_PKTID_AUDIT_FAILURE) || in BCMFASTPATH()
7891 (dhd->memdump_type == DUMP_TYPE_PKTID_INVALID)) { in BCMFASTPATH()
7903 flow_ring_node = DHD_FLOW_RING(dhd, flowid); in BCMFASTPATH()
7906 OSL_ATOMIC_DEC(dhd->osh, &flow_ring->inflight); in BCMFASTPATH()
7919 if (dhd->prot->d2hring_hp2p_txcpl && in BCMFASTPATH()
7921 ring = dhd->prot->d2hring_hp2p_txcpl; in BCMFASTPATH()
7934 if (dhd->pcie_txs_metadata_enable > 1) { in BCMFASTPATH()
7955 DHD_PKTID_AUDIT_RING_DEBUG(dhd, dhd->prot->pktid_tx_map, pktid, in BCMFASTPATH()
7960 if (OSL_ATOMIC_DEC_RETURN(dhd->osh, &prot->active_tx_count) < 0) { in BCMFASTPATH()
7967 if ((dhd->prot->hmaptest_tx_active == HMAPTEST_D11_TX_POSTED) && in BCMFASTPATH()
7968 (pktid == dhd->prot->hmaptest_tx_pktid)) { in BCMFASTPATH()
7972 dhd->prot->hmap_tx_buf_va, (uint32)PHYSADDRLO(dhd->prot->hmap_tx_buf_pa))); in BCMFASTPATH()
7973 dhd->prot->hmaptest_tx_active = HMAPTEST_D11_TX_INACTIVE; in BCMFASTPATH()
7974 dhd->prot->hmap_tx_buf_va = NULL; in BCMFASTPATH()
7975 dhd->prot->hmap_tx_buf_len = 0; in BCMFASTPATH()
7976 PHYSADDRHISET(dhd->prot->hmap_tx_buf_pa, 0); in BCMFASTPATH()
7977 PHYSADDRLOSET(dhd->prot->hmap_tx_buf_pa, 0); in BCMFASTPATH()
7984 if (dhd_get_pktts_enab(dhd) && in BCMFASTPATH()
7985 dhd->pkt_metadata_buflen) { in BCMFASTPATH()
7987 meta_data_buf.va = DHD_PKTID_RETREIVE_METADATA(dhd, dhd->prot->pktid_tx_map, in BCMFASTPATH()
7990 if (dhd->pkt_metadata_version == METADATA_VER_1) { in BCMFASTPATH()
7992 } else if (dhd->pkt_metadata_version == METADATA_VER_2) { in BCMFASTPATH()
8004 pkt = DHD_PKTID_TO_NATIVE(dhd, dhd->prot->pktid_tx_map, pktid, in BCMFASTPATH()
8015 DMA_FREE_CONSISTENT(dhd->osh, meta_data_buf.va, meta_data_buf._alloced, in BCMFASTPATH()
8022 if (dhd->memdump_enabled) { in BCMFASTPATH()
8024 dhd->memdump_type = DUMP_TYPE_PKTID_INVALID; in BCMFASTPATH()
8025 dhd_bus_mem_dump(dhd); in BCMFASTPATH()
8033 if (DHD_PKTID_AVAIL(dhd->prot->pktid_tx_map) == DHD_PKTID_MIN_AVAIL_COUNT) { in BCMFASTPATH()
8037 dhd->prot->no_tx_resource = FALSE; in BCMFASTPATH()
8038 dhd_bus_start_queue(dhd->bus); in BCMFASTPATH()
8041 DMA_UNMAP(dhd->osh, pa, (uint) len, DMA_RX, 0, dmah); in BCMFASTPATH()
8048 if (dhd->pkt_latency > 0 && in BCMFASTPATH()
8049 tx_status_latency > (dhd->pkt_latency)) { in BCMFASTPATH()
8051 tx_status_latency, dhd->pkt_latency, in BCMFASTPATH()
8052 dhd->awdl_aw_counter)); in BCMFASTPATH()
8060 if_flow_lkup = (if_flow_lkup_t *)dhd->if_flow_lkup; in BCMFASTPATH()
8064 awdl_stats = &dhd->awdl_stats[dhd->awdl_tx_status_slot]; in BCMFASTPATH()
8065 DHD_AWDL_STATS_LOCK(dhd->awdl_stats_lock, awdl_stats_lock_flags); in BCMFASTPATH()
8068 DHD_AWDL_STATS_UNLOCK(dhd->awdl_stats_lock, awdl_stats_lock_flags); in BCMFASTPATH()
8073 if (dhd->host_sfhllc_supported) { in BCMFASTPATH()
8076 PKTDATA(dhd->osh, pkt), sizeof(eth))) { in BCMFASTPATH()
8077 if (dhd_8023_llc_to_ether_hdr(dhd->osh, in BCMFASTPATH()
8088 dhd->dma_stats.txdata--; in BCMFASTPATH()
8089 dhd->dma_stats.txdata_sz -= len; in BCMFASTPATH()
8091 pkt_fate = dhd_dbg_process_tx_status(dhd, pkt, pktid, in BCMFASTPATH()
8094 if (dhd->d11_tx_status) { in BCMFASTPATH()
8097 dhd_handle_pktdata(dhd, ltoh32(txstatus->cmn_hdr.if_id), in BCMFASTPATH()
8098 pkt, (uint8 *)PKTDATA(dhd->osh, pkt), pktid, len, in BCMFASTPATH()
8103 dhd_txcomplete(dhd, pkt, pkt_fate); in BCMFASTPATH()
8105 dhd_eap_txcomplete(dhd, pkt, pkt_fate, txstatus->cmn_hdr.if_id); in BCMFASTPATH()
8110 if (dhd_get_pktts_enab(dhd) == TRUE) { in BCMFASTPATH()
8111 if (dhd->pkt_metadata_buflen) { in BCMFASTPATH()
8113 if ((dhd->pkt_metadata_version == METADATA_VER_1) && in BCMFASTPATH()
8126 dhd_msgbuf_send_msg_tx_ts(dhd, pkt, &fwts, in BCMFASTPATH()
8127 dhd->pkt_metadata_version); in BCMFASTPATH()
8129 } else if ((dhd->pkt_metadata_version == METADATA_VER_2) && in BCMFASTPATH()
8168 dhd_msgbuf_send_msg_tx_ts(dhd, pkt, &fwts, in BCMFASTPATH()
8169 dhd->pkt_metadata_version); in BCMFASTPATH()
8188 dhd_msgbuf_send_msg_tx_ts(dhd, pkt, &fwts, METADATA_VER_1); in BCMFASTPATH()
8196 if (dhd->prot->metadata_dbg && in BCMFASTPATH()
8197 dhd->prot->tx_metadata_offset && txstatus->metadata_len) { in BCMFASTPATH()
8202 PKTPULL(dhd->osh, pkt, ETHER_HDR_LEN); in BCMFASTPATH()
8203 ptr = PKTDATA(dhd->osh, pkt) - (dhd->prot->tx_metadata_offset); in BCMFASTPATH()
8205 dhd_prot_print_metadata(dhd, ptr, txstatus->metadata_len); in BCMFASTPATH()
8210 if (dhd->hp2p_capable && flow_ring_node->flow_info.tid == HP2P_PRIO) { in BCMFASTPATH()
8214 dhd_update_hp2p_txstats(dhd, txstatus); in BCMFASTPATH()
8219 if (dhd->prot->tx_ts_log_enabled) { in BCMFASTPATH()
8224 dhd_parse_proto(PKTDATA(dhd->osh, pkt), &parse); in BCMFASTPATH()
8227 dhd_timesync_log_tx_timestamp(dhd->ts, in BCMFASTPATH()
8235 PKTAUDIT(dhd->osh, pkt); in BCMFASTPATH()
8237 DHD_FLOWRING_TXSTATUS_CNT_UPDATE(dhd->bus, txstatus->compl_hdr.flow_ring_id, in BCMFASTPATH()
8242 DMA_FREE_CONSISTENT(dhd->osh, meta_data_buf.va, meta_data_buf._alloced, in BCMFASTPATH()
8247 DHD_MEM_STATS_LOCK(dhd->mem_stats_lock, flags); in BCMFASTPATH()
8249 __FUNCTION__, dhd->txpath_mem, PKTLEN(dhd->osh, pkt))); in BCMFASTPATH()
8250 dhd->txpath_mem -= PKTLEN(dhd->osh, pkt); in BCMFASTPATH()
8251 DHD_MEM_STATS_UNLOCK(dhd->mem_stats_lock, flags); in BCMFASTPATH()
8253 PKTFREE(dhd->osh, pkt, TRUE); in BCMFASTPATH()
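
The tx-status references above show the completion side of the pktid map: the 32-bit pktid carried in the message is converted back to the native packet (DHD_PKTID_TO_NATIVE), the DMA mapping is torn down (DMA_UNMAP) and the packet is freed (PKTFREE). A minimal sketch, assuming pktid_to_native(), dma_unmap() and pkt_free() as hypothetical stand-ins:

    #include <stdint.h>
    #include <stddef.h>

    typedef struct { void *va; uint64_t pa; uint16_t len; } host_pkt_t;

    extern host_pkt_t *pktid_to_native(uint32_t pktid);  /* cf. DHD_PKTID_TO_NATIVE */
    extern void        dma_unmap(uint64_t pa, uint16_t len);
    extern void        pkt_free(host_pkt_t *p);

    static int tx_status_complete(uint32_t pktid)
    {
        host_pkt_t *pkt = pktid_to_native(pktid);  /* releases the id back to the map */
        if (pkt == NULL)
            return -1;              /* invalid or duplicate id; the real driver
                                       treats this as a pktid audit failure */
        dma_unmap(pkt->pa, pkt->len);              /* cf. DMA_UNMAP before host access */
        pkt_free(pkt);                             /* cf. PKTFREE on the tx path */
        return 0;
    }
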
8261 dhd_prot_event_process(dhd_pub_t *dhd, void *msg) in dhd_prot_event_process() argument
8268 dhd_prot_t *prot = dhd->prot; in dhd_prot_event_process()
8275 DHD_PKTID_AUDIT_RING_DEBUG(dhd, dhd->prot->pktid_ctrl_map, bufid, in dhd_prot_event_process()
8287 dhd_msgbuf_rxbuf_post_event_bufs(dhd); in dhd_prot_event_process()
8289 pkt = dhd_prot_packet_get(dhd, bufid, PKTTYPE_EVENT_RX, TRUE); in dhd_prot_event_process()
8299 if (dhd->prot->rx_dataoffset) in dhd_prot_event_process()
8300 PKTPULL(dhd->osh, pkt, dhd->prot->rx_dataoffset); in dhd_prot_event_process()
8303 PKTSETLEN(dhd->osh, pkt, buflen); in dhd_prot_event_process()
8305 PKTAUDIT(dhd->osh, pkt); in dhd_prot_event_process()
8307 dhd_bus_rx_frame(dhd->bus, pkt, ifidx, 1); in dhd_prot_event_process()
8313 BCMFASTPATH(dhd_prot_process_infobuf_complete)(dhd_pub_t *dhd, void* buf) in BCMFASTPATH()
8325 DHD_PKTID_AUDIT_RING_DEBUG(dhd, dhd->prot->pktid_ctrl_map, pktid, in BCMFASTPATH()
8331 dhd->prot->rx_dataoffset)); in BCMFASTPATH()
8333 if (dhd->debug_buf_dest_support) { in BCMFASTPATH()
8335 dhd->debug_buf_dest_stat[resp->dest]++; in BCMFASTPATH()
8339 pkt = dhd_prot_packet_get(dhd, pktid, PKTTYPE_INFO_RX, TRUE); in BCMFASTPATH()
8346 if (dhd->prot->rx_dataoffset) in BCMFASTPATH()
8347 PKTPULL(dhd->osh, pkt, dhd->prot->rx_dataoffset); in BCMFASTPATH()
8350 PKTSETLEN(dhd->osh, pkt, buflen); in BCMFASTPATH()
8352 PKTAUDIT(dhd->osh, pkt); in BCMFASTPATH()
8358 dhd_bus_rx_frame(dhd->bus, pkt, DHD_DUMMY_INFO_IF /* ifidx HACK */, 1); in BCMFASTPATH()
8364 BCMFASTPATH(dhd_prot_process_snapshot_complete)(dhd_pub_t *dhd, void *buf) in BCMFASTPATH()
8367 dhd_prot_t *prot = dhd->prot; in BCMFASTPATH()
8404 BCMFASTPATH(dhd_prot_process_btlog_complete)(dhd_pub_t *dhd, void* buf) in BCMFASTPATH()
8423 DHD_PKTID_AUDIT_RING_DEBUG(dhd, dhd->prot->pktid_ctrl_map, pktid, in BCMFASTPATH()
8429 dhd->prot->rx_dataoffset)); in BCMFASTPATH()
8431 pkt = dhd_prot_packet_get(dhd, pktid, PKTTYPE_INFO_RX, TRUE); in BCMFASTPATH()
8439 if (dhd->prot->rx_dataoffset) in BCMFASTPATH()
8440 PKTPULL(dhd->osh, pkt, dhd->prot->rx_dataoffset); in BCMFASTPATH()
8443 PKTSETLEN(dhd->osh, pkt, buflen); in BCMFASTPATH()
8444 PKTSETNEXT(dhd->osh, pkt, NULL); in BCMFASTPATH()
8446 dhd_bus_rx_bt_log(dhd->bus, pkt); in BCMFASTPATH()
8451 void dhd_prot_stop(dhd_pub_t *dhd) in dhd_prot_stop() argument
8453 ASSERT(dhd); in dhd_prot_stop()
8457 if (dhd->prot) { in dhd_prot_stop()
8458 DHD_NATIVE_TO_PKTID_RESET(dhd, dhd->prot->pktid_ctrl_map); in dhd_prot_stop()
8459 DHD_NATIVE_TO_PKTID_RESET(dhd, dhd->prot->pktid_rx_map); in dhd_prot_stop()
8460 DHD_NATIVE_TO_PKTID_RESET(dhd, dhd->prot->pktid_tx_map); in dhd_prot_stop()
8462 DHD_NATIVE_TO_PKTID_RESET_IOCTL(dhd, dhd->prot->pktid_map_handle_ioctl); in dhd_prot_stop()
8472 BCMFASTPATH(dhd_prot_hdrpush)(dhd_pub_t *dhd, int ifidx, void *PKTBUF) in BCMFASTPATH()
8478 dhd_prot_hdrlen(dhd_pub_t *dhd, void *PKTBUF) in dhd_prot_hdrlen() argument
8490 BCMFASTPATH(dhd_prot_txdata)(dhd_pub_t *dhd, void *PKTBUF, uint8 ifidx)
8493 dhd_prot_t *prot = dhd->prot;
8511 uint16 meta_data_buf_len = dhd->pkt_metadata_buflen;
8516 bool host_sfh_llc_reqd = dhd->host_sfhllc_supported;
8521 if (dhd_prot_inc_hostactive_devwake_assert(dhd->bus) != BCME_OK) {
8527 if (dhd->flow_ring_table == NULL) {
8533 if (!DHD_PKTID_AVAIL(dhd->prot->pktid_tx_map)) {
8534 if (dhd->prot->pktid_depleted_cnt == DHD_PKTID_DEPLETED_MAX_COUNT) {
8538 dhd_bus_stop_queue(dhd->bus);
8539 dhd->prot->no_tx_resource = TRUE;
8541 dhd->prot->pktid_depleted_cnt++;
8544 dhd->prot->pktid_depleted_cnt = 0;
8548 if (dhd->dhd_induce_error == DHD_INDUCE_TX_BIG_PKT) {
8549 if ((big_pktbuf = PKTGET(dhd->osh, DHD_FLOWRING_TX_BIG_PKT_SIZE, TRUE)) == NULL) {
8554 memset(PKTDATA(dhd->osh, big_pktbuf), 0xff, DHD_FLOWRING_TX_BIG_PKT_SIZE);
8555 DHD_ERROR(("PKTBUF len = %d big_pktbuf len = %d\n", PKTLEN(dhd->osh, PKTBUF),
8556 PKTLEN(dhd->osh, big_pktbuf)));
8557 if (memcpy_s(PKTDATA(dhd->osh, big_pktbuf), DHD_FLOWRING_TX_BIG_PKT_SIZE,
8558 PKTDATA(dhd->osh, PKTBUF), PKTLEN(dhd->osh, PKTBUF)) != BCME_OK) {
8565 flow_ring_table = (flow_ring_table_t *)dhd->flow_ring_table;
8584 if (osl_is_flag_set(dhd->osh, OSL_PHYS_MEM_LESS_THAN_16MB)) {
8613 if (dhd->dhd_induce_error == DHD_INDUCE_TX_BIG_PKT && big_pktbuf) {
8614 PKTFREE(dhd->osh, PKTBUF, TRUE);
8621 pktid = DHD_NATIVE_TO_PKTID_RSV(dhd, dhd->prot->pktid_tx_map,
8638 dhd_prot_alloc_ring_space(dhd, ring, 1, &alloced, FALSE);
8641 __FUNCTION__, __LINE__, OSL_ATOMIC_READ(dhd->osh, &prot->active_tx_count)));
8647 pktdata = PKTDATA(dhd->osh, PKTBUF);
8648 pktlen = PKTLEN(dhd->osh, PKTBUF);
8651 DHD_DBG_PKT_MON_TX(dhd, PKTBUF, pktid);
8653 dhd_handle_pktdata(dhd, ifidx, PKTBUF, pktdata, pktid,
8661 if (osl_is_flag_set(dhd->osh, OSL_PHYS_MEM_LESS_THAN_16MB))
8662 PKTCFREE(dhd->osh, pkt_to_free, FALSE);
8677 if (dhd->awdl_llc_enabled &&
8678 dhd->awdl_ifidx && ifidx == dhd->awdl_ifidx) {
8692 if (dhd_ether_to_awdl_llc_hdr(dhd, (struct ether_header *)pktdata,
8698 memcpy_s(txdesc->txhdr, ETHER_HDR_LEN, PKTDATA(dhd->osh, PKTBUF),
8709 if (dhd_ether_to_8023_hdr(dhd->osh, (struct ether_header *)pktdata,
8712 pktdata = PKTDATA(dhd->osh, PKTBUF);
8713 pktlen = PKTLEN(dhd->osh, PKTBUF);
8722 pktlen = PKTLEN(dhd->osh, PKTBUF) - ETHER_HDR_LEN;
8723 pktdata = PKTPULL(dhd->osh, PKTBUF, ETHER_HDR_LEN);
8727 pa = DMA_MAP(dhd->osh, PKTDATA(dhd->osh, PKTBUF), pktlen, DMA_TX, PKTBUF, 0);
8740 dhd->dma_stats.txdata++;
8741 dhd->dma_stats.txdata_sz += pktlen;
8744 DHD_NATIVE_TO_PKTID_SAVE(dhd, dhd->prot->pktid_tx_map, PKTBUF, pktid,
8753 if (dhd->prot->hmaptest_tx_active == HMAPTEST_D11_TX_ACTIVE) {
8755 dhd->prot->hmap_tx_buf_va = (char *)dhd->prot->hmaptest.mem.va
8756 + dhd->prot->hmaptest.offset;
8758 dhd->prot->hmap_tx_buf_len = pktlen;
8759 if ((dhd->prot->hmap_tx_buf_va + dhd->prot->hmap_tx_buf_len) >
8760 ((char *)dhd->prot->hmaptest.mem.va + dhd->prot->hmaptest.mem.len)) {
8763 dhd->prot->hmaptest_tx_active = HMAPTEST_D11_TX_INACTIVE;
8764 dhd->prot->hmaptest.in_progress = FALSE;
8767 memcpy(dhd->prot->hmap_tx_buf_va, PKTDATA(dhd->osh, PKTBUF), pktlen);
8768 pa = DMA_MAP(dhd->osh, dhd->prot->hmap_tx_buf_va,
8769 dhd->prot->hmap_tx_buf_len, DMA_TX, PKTBUF, 0);
8771 dhd->prot->hmap_tx_buf_pa = pa;
8773 dhd->prot->hmaptest_tx_pktid = pktid;
8774 dhd->prot->hmaptest_tx_active = HMAPTEST_D11_TX_POSTED;
8777 dhd->prot->hmap_tx_buf_va, (uint32)PHYSADDRLO(pa), pktlen));
8784 if (dhd_get_pktts_enab(dhd) &&
8785 dhd->pkt_metadata_buflen) {
8787 meta_data_buf.va = DMA_ALLOC_CONSISTENT(dhd->osh, meta_data_buf_len,
8795 DHD_PKTID_SAVE_METADATA(dhd, dhd->prot->pktid_tx_map,
8840 PKTPUSH(dhd->osh, PKTBUF, ETHER_HDR_LEN);
8848 if (!llc_inserted && dhd->prot->tx_ts_log_enabled) {
8851 dhd_parse_proto(PKTDATA(dhd->osh, PKTBUF), &parse);
8854 if (dhd->prot->no_retry)
8856 if (dhd->prot->no_aggr)
8858 if (dhd->prot->fixed_rate)
8872 dhd->tx_profile_enab && dhd->num_profiles > 0)
8876 for (offset = 0; offset < dhd->num_profiles; offset++) {
8877 if (dhd_protocol_matches_profile((uint8 *)PKTDATA(dhd->osh, PKTBUF),
8878 PKTLEN(dhd->osh, PKTBUF), &(dhd->protocol_filters[offset]),
8882 (((uint8)dhd->protocol_filters[offset].profile_index) &
8895 headroom = (uint16)PKTHEADROOM(dhd->osh, PKTBUF);
8904 PKTPUSH(dhd->osh, PKTBUF, prot->tx_metadata_offset);
8906 meta_pa = DMA_MAP(dhd->osh, PKTDATA(dhd->osh, PKTBUF),
8911 DMA_UNMAP(dhd->osh, pa, pktlen, DMA_TX, 0, DHD_DMAH_NULL);
8927 PKTPULL(dhd->osh, PKTBUF, prot->tx_metadata_offset);
8934 if (dhd->hp2p_capable && flow_ring_node->flow_info.tid == HP2P_PRIO) {
8935 dhd_update_hp2p_txdesc(dhd, txdesc);
8939 if (!dhd_get_pktts_enab(dhd) || !dhd->pkt_metadata_buflen) {
8950 OSL_ATOMIC_INC(dhd->osh, &ring->inflight);
8954 DHD_PKTID_AUDIT(dhd, prot->pktid_tx_map, pktid, DHD_DUPLICATE_ALLOC);
8963 PKTAUDIT(dhd->osh, PKTBUF);
8969 if (dhd->hp2p_capable && flow_ring_node->flow_info.tid == HP2P_PRIO) {
8970 dhd_calc_hp2p_burst(dhd, ring, flowid);
8978 dhd_prot_txdata_aggr_db_write_flush(dhd, flowid);
8980 dhd_prot_aggregate_db_ring_door_bell(dhd, flowid, TRUE);
8985 dhd_prot_txdata_write_flush(dhd, flowid);
8992 dhd_prot_ring_write_complete(dhd, ring, txdesc, 1);
8999 if (dhd_get_pktts_enab(dhd) == TRUE) {
9007 OSL_ATOMIC_INC(dhd->osh, &prot->active_tx_count);
9013 DHD_TXFL_WAKE_LOCK_TIMEOUT(dhd, MAX_TX_TIMEOUT);
9016 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
9042 DHD_PKTID_TO_NATIVE(dhd, dhd->prot->pktid_tx_map, pktid,
9050 if (osl_is_flag_set(dhd->osh, OSL_PHYS_MEM_LESS_THAN_16MB))
9051 PKTCFREE(dhd->osh, PKTBUF, FALSE);
9058 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
9065 dhd_prot_txdata_aggr_db_write_flush(dhd_pub_t *dhd, uint16 flowid) argument
9071 if (dhd->flow_ring_table == NULL) {
9075 flow_ring_table = (flow_ring_table_t *)dhd->flow_ring_table;
9080 dhd_prot_agg_db_ring_write(dhd, ring, ring->start_addr,
9092 BCMFASTPATH(dhd_prot_txdata_write_flush)(dhd_pub_t *dhd, uint16 flowid)
9099 if (dhd->flow_ring_table == NULL) {
9103 flow_ring_table = (flow_ring_table_t *)dhd->flow_ring_table;
9109 dhd_prot_ring_write_complete(dhd, ring, ring->start_addr,
9113 dhd->prot->tx_h2d_db_cnt++;
9121 BCMFASTPATH(dhd_prot_hdrpull)(dhd_pub_t *dhd, int *ifidx, void *pkt, uchar *buf, uint *len)
9128 BCMFASTPATH(dhd_prot_return_rxbuf)(dhd_pub_t *dhd, msgbuf_ring_t *ring, uint32 pktid,
9132 dhd_prot_t *prot = dhd->prot;
9145 dhd_msgbuf_rxbuf_post(dhd, FALSE); /* alloc pkt ids */
9146 } else if (dhd->dma_h2d_ring_upd_support && !IDMA_ACTIVE(dhd)) {
9150 dhd_prot_ring_doorbell(dhd, DHD_RDPTR_UPDATE_H2D_DB_MAGIC(ring));
9159 dhd_msgbuf_hmaptest_cmplt(dhd_pub_t *dhd) argument
9161 dhd_prot_t *prot = dhd->prot;
9164 uint32 len = dhd->prot->hmaptest.len;
9180 readbuf = (char *)dhd->prot->hmaptest.mem.va + dhd->prot->hmaptest.offset;
9181 OSL_CACHE_FLUSH(dhd->prot->hmaptest.mem.va,
9182 dhd->prot->hmaptest.mem.len);
9201 dhdmsgbuf_set_hmaptest_windows(dhd_pub_t *dhd) argument
9208 dhd_prot_t *prot = dhd->prot;
9209 uint corerev = dhd->bus->sih->buscorerev;
9247 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9249 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9251 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9256 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9259 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9262 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9266 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9268 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9270 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9276 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9284 dhdmsgbuf_hmaptest_stop(dhd_pub_t *dhd) argument
9288 uint corerev = dhd->bus->sih->buscorerev;
9291 dhd->prot->hmaptest.in_progress = FALSE;
9299 window_config = si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9302 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9306 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9308 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9310 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9319 dhdmsgbuf_hmap(dhd_pub_t *dhd, pcie_hmap_t *hmap_params, bool set) argument
9328 dhd_prot_t *prot = dhd->prot;
9329 dhd_bus_t *bus = dhd->bus;
9344 dhdmsgbuf_set_hmaptest_windows(dhd);
9346 dhdmsgbuf_hmaptest_stop(dhd); /* stop will clear all programmed windows */
9350 OSL_CACHE_FLUSH(dhd->prot->hmaptest.mem.va,
9351 dhd->prot->hmaptest.mem.len);
9353 window_config = si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9369 addr_lo = si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9371 addr_hi = si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9373 window_length = si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9379 addr_hi = si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9381 addr_lo = si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9383 window_length = si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx,
9401 dhdmsgbuf_hmaptest(dhd_pub_t *dhd, pcie_hmaptest_t *hmaptest_params) argument
9404 dhd_prot_t *prot = dhd->prot;
9408 dhd_bus_t *bus = dhd->bus;
9439 dhd->prot->hmaptest_rx_active = HMAPTEST_D11_RX_ACTIVE;
9442 dhd->prot->hmaptest_tx_active = HMAPTEST_D11_TX_ACTIVE;
9449 char *fillbuf = (char *)dhd->prot->hmaptest.mem.va
9452 ((char *)dhd->prot->hmaptest.mem.va + dhd->prot->hmaptest.mem.len)) {
9454 dhd->prot->hmaptest.in_progress = FALSE;
9478 OSL_CACHE_FLUSH(dhd->prot->hmaptest.mem.va,
9479 dhd->prot->hmaptest.mem.len);
9511 dhd_prot_wlioctl_intercept(dhd_pub_t *dhd, wl_ioctl_t * ioc, void * buf) argument
9513 dhd_prot_t *prot = dhd->prot;
9531 dhdmsgbuf_hmap(dhd, hmap_params, (ioc->cmd == WLC_SET_VAR));
9539 dhdmsgbuf_hmaptest(dhd, hmaptest_params);
9546 dhd_prot_wl_ioctl_ret_intercept(dhd_pub_t *dhd, wl_ioctl_t * ioc, void * buf, argument
9552 dhd_msgbuf_hmaptest_cmplt(dhd);
9564 dhd->wme_dp = (uint8) ltoh32(val);
9571 dhd_awdl_peer_op(dhd, (uint8)ifidx, ((char *)buf + slen), len - slen);
9589 dhd_update_interface_link_status(dhd, (uint8)ifidx, TRUE);
9592 dhd_clear_awdl_stats(dhd);
9595 dhd_update_interface_link_status(dhd, (uint8)ifidx, FALSE);
9596 dhd_del_all_sta(dhd, (uint8)ifidx);
9597 dhd_awdl_peer_op(dhd, (uint8)ifidx, NULL, 0);
9614 dhd->awdl_minext = extcnt->minExt;
9622 dhd->awdl_presmode = *((uint8 *)((char *)buf + slen));
9635 int dhd_prot_ioctl(dhd_pub_t *dhd, int ifidx, wl_ioctl_t * ioc, void * buf, int len) argument
9640 if (dhd->bus->is_linkdown) {
9645 if (dhd_query_bus_erros(dhd)) {
9650 if ((dhd->busstate == DHD_BUS_DOWN) || dhd->hang_was_sent) {
9653 dhd->busstate, dhd->hang_was_sent));
9657 if (dhd->busstate == DHD_BUS_SUSPEND) {
9670 if (dhd->bus->sih->buscorerev == 72) {
9671 if (dhd_get_pcie_linkspeed(dhd) == PCIE_LNK_SPEED_GEN1) {
9707 dhd_prot_wlioctl_intercept(dhd, ioc, buf);
9713 ret = dhd_msgbuf_set_ioctl(dhd, ifidx, ioc->cmd, buf, len, action);
9715 ret = dhd_msgbuf_query_ioctl(dhd, ifidx, ioc->cmd, buf, len, action);
9727 dhd->dongle_error = ret;
9730 dhd_prot_wl_ioctl_ret_intercept(dhd, ioc, buf, ifidx, ret, len);
9746 dhdmsgbuf_lpbk_req(dhd_pub_t *dhd, uint len) argument
9749 dhd_prot_t *prot = dhd->prot;
9762 if (dhd_prot_inc_hostactive_devwake_assert(dhd->bus) != BCME_OK)
9769 dhd_prot_alloc_ring_space(dhd, ring, 1, &alloced, FALSE);
9774 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
9800 dhd_prot_ring_write_complete(dhd, ring, ioct_rqst, 1);
9805 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
9812 void dmaxfer_free_dmaaddr(dhd_pub_t *dhd, dhd_dmaxfer_t *dmaxfer) argument
9817 dhd_dma_buf_free(dhd, &dmaxfer->srcmem);
9818 dhd_dma_buf_free(dhd, &dmaxfer->dstmem);
9862 int dmaxfer_prepare_dmaaddr(dhd_pub_t *dhd, uint len, argument
9870 dmaxfer_free_dmaaddr(dhd, dmaxfer);
9872 if (dhd_dma_buf_alloc(dhd, &dmaxfer->srcmem, len)) {
9876 if (dhd_dma_buf_alloc(dhd, &dmaxfer->dstmem, len + 8)) {
9877 dhd_dma_buf_free(dhd, &dmaxfer->srcmem);
9910 dhd_msgbuf_dmaxfer_process(dhd_pub_t *dhd, void *msg) argument
9912 dhd_prot_t *prot = dhd->prot;
9923 dhd_os_set_intr_poll_period(dhd->bus, dhd->cur_intr_poll_period);
10002 buf_free_scheduled = dhd_prepare_schedule_dmaxfer_free(dhd);
10010 dhd->prot->dmaxfer.in_progress = FALSE;
10013 dhd->bus->dmaxfer_complete = TRUE;
10014 dhd_os_dmaxfer_wake(dhd);
10024 dhdmsgbuf_dmaxfer_req(dhd_pub_t *dhd, uint len, uint srcdelay, uint destdelay, argument
10029 dhd_prot_t *prot = dhd->prot;
10048 if (dhd_prot_inc_hostactive_devwake_assert(dhd->bus) != BCME_OK) {
10054 if ((ret = dmaxfer_prepare_dmaaddr(dhd, xferlen, srcdelay, destdelay,
10058 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
10064 dhd_prot_alloc_ring_space(dhd, ring, 1, &alloced, FALSE);
10067 dmaxfer_free_dmaaddr(dhd, &prot->dmaxfer);
10071 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
10111 dhd_prot_ring_write_complete(dhd, ring, dmap, 1);
10117 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
10124 dhdmsgbuf_dmaxfer_status(dhd_pub_t *dhd, dma_xfer_info_t *result) argument
10126 dhd_prot_t *prot = dhd->prot;
10151 dhd_msgbuf_query_ioctl(dhd_pub_t *dhd, int ifidx, uint cmd, void *buf, uint len, uint8 action) argument
10158 if (dhd->bus->is_linkdown) {
10164 if (dhd->busstate == DHD_BUS_DOWN) {
10170 if (dhd->hang_was_sent) {
10189 strlcpy((char *)buf, bcmerrorstr(dhd->dongle_error), copylen);
10193 *(uint32 *)(uint32 *)buf = dhd->dongle_error;
10211 OSL_DISABLE_PREEMPTION(dhd->osh);
10212 dhd_set_request_id(dhd, dhd->prot->ioctl_trans_id+1, cmd);
10213 dhd_start_cmd_timer(dhd);
10214 dhd_start_bus_timer(dhd);
10217 ret = dhd_fillup_ioct_reqst(dhd, (uint16)len, cmd, buf, ifidx);
10223 dhd_stop_cmd_timer(dhd);
10224 dhd_stop_bus_timer(dhd);
10225 OSL_ENABLE_PREEMPTION(dhd->osh);
10228 OSL_ENABLE_PREEMPTION(dhd->osh);
10237 ret = dhd_msgbuf_wait_ioctl_cmplt(dhd, len, buf);
10244 dhd_msgbuf_iovar_timeout_dump(dhd_pub_t *dhd) argument
10247 dhd_prot_t *prot = dhd->prot;
10248 dhd->rxcnt_timeout++;
10249 dhd->rx_ctlerrs++;
10252 dhd->is_sched_error ? " due to scheduling problem" : "",
10253 dhd->rxcnt_timeout, prot->curr_ioctl_cmd, prot->ioctl_trans_id,
10254 prot->ioctl_state, dhd->busstate, prot->ioctl_received));
10265 if (dhd->is_sched_error && dhd->memdump_enabled == DUMP_MEMFILE_BUGON) {
10290 intstatus = si_corereg(dhd->bus->sih,
10291 dhd->bus->sih->buscoreidx, dhd->bus->pcie_mailbox_int, 0, 0);
10294 dhd->bus->is_linkdown = TRUE;
10297 dhd_bus_dump_console_buffer(dhd->bus);
10298 dhd_prot_debug_info_print(dhd);
10306 dhd_msgbuf_wait_ioctl_cmplt(dhd_pub_t *dhd, uint32 len, void *buf) argument
10308 dhd_prot_t *prot = dhd->prot;
10316 if (dhd_query_bus_erros(dhd)) {
10325 prev_stop_count = dhd->gdb_proxy_stop_count;
10326 timeleft = dhd_os_ioctl_resp_wait(dhd, (uint *)&prot->ioctl_received);
10327 } while ((timeleft == 0) && ((dhd->gdb_proxy_stop_count != prev_stop_count) ||
10328 (dhd->gdb_proxy_stop_count & GDB_PROXY_STOP_MASK)));
10331 timeleft = dhd_os_ioctl_resp_wait(dhd, (uint *)&prot->ioctl_received);
10336 uint32 intstatus = si_corereg(dhd->bus->sih,
10337 dhd->bus->sih->buscoreidx, dhd->bus->pcie_mailbox_int, 0, 0);
10338 int host_irq_disbled = dhdpcie_irq_disabled(dhd->bus);
10340 (timeleft == 0) && (!dhd_query_bus_erros(dhd))) {
10344 dhd_pcie_intr_count_dump(dhd);
10345 dhd_print_tasklet_status(dhd);
10346 dhd_prot_process_ctrlbuf(dhd);
10347 timeleft = dhd_os_ioctl_resp_wait(dhd, (uint *)&prot->ioctl_received);
10349 dhdpcie_bus_clear_intstatus(dhd->bus);
10354 if (dhd->conf->ctrl_resched > 0 && timeleft == 0 && (!dhd_query_bus_erros(dhd))) {
10356 if (cnt <= dhd->conf->ctrl_resched) {
10357 uint buscorerev = dhd->bus->sih->buscorerev;
10359 … intstatus = si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, PCIMailBoxInt(buscorerev), 0, 0);
10360 intmask = si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, PCIMailBoxMask(buscorerev), 0, 0);
10364 dhd->bus->intstatus = intstatus;
10365 dhd->bus->ipend = TRUE;
10366 dhd->bus->dpc_sched = TRUE;
10367 dhd_sched_dpc(dhd);
10368 timeleft = dhd_os_ioctl_resp_wait(dhd, &prot->ioctl_received);
10375 if (timeleft == 0 && (!dhd_query_bus_erros(dhd))) {
10376 if (dhd->check_trap_rot) {
10379 dhd_bus_checkdied(dhd->bus, NULL, 0);
10381 if (dhd->dongle_trap_occured) {
10384 dhd->bus->no_cfg_restore = 1;
10392 dhd->is_sched_error = dhd_bus_query_dpc_sched_errors(dhd);
10394 dhd->iovar_timeout_occured = TRUE;
10395 dhd_msgbuf_iovar_timeout_dump(dhd);
10399 if (dhd->memdump_enabled) {
10401 dhd->memdump_type = DUMP_TYPE_RESUMED_ON_TIMEOUT;
10402 dhd_bus_mem_dump(dhd);
10413 dhd_schedule_reset(dhd);
10418 dhd->bus->no_cfg_restore = 1;
10430 dhd->rxcnt_timeout = 0;
10431 dhd->rx_ctlpkts++;
10436 if (dhd->prot->ioctl_resplen > len)
10437 dhd->prot->ioctl_resplen = (uint16)len;
10439 bcopy(dhd->prot->retbuf.va, buf, dhd->prot->ioctl_resplen);
10441 ret = (int)(dhd->prot->ioctl_status);
10444 DHD_GENERAL_LOCK(dhd, flags);
10445 dhd->prot->ioctl_state = 0;
10446 dhd->prot->ioctl_resplen = 0;
10447 dhd->prot->ioctl_received = IOCTL_WAIT;
10448 dhd->prot->curr_ioctl_cmd = 0;
10449 DHD_GENERAL_UNLOCK(dhd, flags);
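
The dhd_fillup_ioct_reqst()/dhd_msgbuf_wait_ioctl_cmplt() references above follow a submit-then-wait shape: post the request to the control ring, block on the completion flag with a timeout, then clamp and copy the response back to the caller. A sketch of that shape under assumed stand-ins (ioctl_ctx_t, submit_req(), wait_resp_timeout() are illustrative, not the driver's real interfaces):

    #include <string.h>
    #include <stdint.h>

    #define IOCTL_WAIT      0   /* completion handler sets a non-zero value */

    typedef struct {
        volatile int received;  /* flipped by the D2H completion handler */
        int          status;    /* firmware status from the completion   */
        uint16_t     resplen;
        void        *retbuf;    /* buffer the dongle wrote the response into */
    } ioctl_ctx_t;

    extern int  submit_req(unsigned cmd, const void *buf, uint16_t len);
    extern long wait_resp_timeout(volatile int *flag, unsigned timeout_ms);

    static int msgbuf_ioctl(ioctl_ctx_t *ctx, unsigned cmd, void *buf, uint16_t len)
    {
        ctx->received = IOCTL_WAIT;
        if (submit_req(cmd, buf, len) != 0)
            return -1;

        /* Block until the completion handler flips ctx->received, or time out;
         * on timeout the real driver dumps bus state and may trigger recovery. */
        if (wait_resp_timeout(&ctx->received, 2000 /* ms */) == 0)
            return -2;

        if (ctx->resplen > len)
            ctx->resplen = len;             /* never overflow the caller's buffer */
        if (ctx->resplen > 0)
            memcpy(buf, ctx->retbuf, ctx->resplen);

        return ctx->status;
    }
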
10455 dhd_msgbuf_set_ioctl(dhd_pub_t *dhd, int ifidx, uint cmd, void *buf, uint len, uint8 action) argument
10461 if (dhd->bus->is_linkdown) {
10467 if (dhd->busstate == DHD_BUS_DOWN) {
10473 if (dhd->hang_was_sent) {
10493 OSL_DISABLE_PREEMPTION(dhd->osh);
10494 dhd_set_request_id(dhd, dhd->prot->ioctl_trans_id+1, cmd);
10495 dhd_start_cmd_timer(dhd);
10496 dhd_start_bus_timer(dhd);
10500 ret = dhd_fillup_ioct_reqst(dhd, (uint16)len, cmd, buf, ifidx);
10506 dhd_stop_cmd_timer(dhd);
10507 dhd_stop_bus_timer(dhd);
10508 OSL_ENABLE_PREEMPTION(dhd->osh);
10512 OSL_ENABLE_PREEMPTION(dhd->osh);
10520 ret = dhd_msgbuf_wait_ioctl_cmplt(dhd, len, buf);
10527 int dhd_prot_ctl_complete(dhd_pub_t *dhd) argument
10533 int dhd_prot_iovar_op(dhd_pub_t *dhd, const char *name, argument
10540 int dhd_d2h_h2d_ring_dump(dhd_pub_t *dhd, void *file, const void *user_buf, argument
10549 if (!(dhd) || !(dhd->prot)) {
10552 prot = dhd->prot;
10556 if ((ret = dhd_ring_write(dhd, ring, file, user_buf, file_posn)) < 0)
10560 if ((ret = dhd_ring_write(dhd, ring, file, user_buf, file_posn)) < 0)
10564 if ((ret = dhd_ring_write(dhd, ring, file, user_buf, file_posn)) < 0)
10568 if ((ret = dhd_ring_write(dhd, ring, file, user_buf, file_posn)) < 0)
10572 if ((ret = dhd_ring_write(dhd, ring, file, user_buf, file_posn)) < 0)
10575 h2d_flowrings_total = dhd_get_max_flow_rings(dhd);
10577 if ((ret = dhd_ring_write(dhd, ring, file, user_buf, file_posn)) < 0) {
10583 if (dhd->dongle_edl_support) {
10585 if ((ret = dhd_edl_ring_hdr_write(dhd, ring, file, user_buf, file_posn)) < 0)
10588 else if (dhd->bus->api.fw_rev >= PCIE_SHARED_VERSION_6 && !dhd->dongle_edl_support)
10590 if (dhd->bus->api.fw_rev >= PCIE_SHARED_VERSION_6)
10594 if ((ret = dhd_ring_write(dhd, ring, file, user_buf, file_posn)) < 0)
10598 if ((ret = dhd_ring_write(dhd, ring, file, user_buf, file_posn)) < 0)
10608 int dhd_ring_write(dhd_pub_t *dhd, msgbuf_ring_t *ring, void *file, argument
10635 int dhd_edl_ring_hdr_write(dhd_pub_t *dhd, msgbuf_ring_t *ring, void *file, const void *user_buf, argument
10650 buf = MALLOCZ(dhd->osh, (D2HRING_EDL_MAX_ITEM * D2HRING_EDL_HDR_SIZE));
10678 MFREE(dhd->osh, buf, (D2HRING_EDL_MAX_ITEM * D2HRING_EDL_HDR_SIZE));
10686 void dhd_prot_dump(dhd_pub_t *dhd, struct bcmstrbuf *b) argument
10692 if (dhd->d2h_sync_mode & PCIE_SHARED_D2H_SYNC_SEQNUM)
10694 else if (dhd->d2h_sync_mode & PCIE_SHARED_D2H_SYNC_XORCSUM)
10699 dhd->prot->d2h_sync_wait_max, dhd->prot->d2h_sync_wait_tot);
10702 dhd->dma_h2d_ring_upd_support,
10703 dhd->dma_d2h_ring_upd_support,
10704 dhd->prot->rw_index_sz);
10706 h2d_max_txpost, dhd->prot->h2d_max_txpost);
10709 h2d_htput_max_txpost, dhd->prot->h2d_htput_max_txpost);
10711 bcm_bprintf(b, "pktid_txq_start_cnt: %d\n", dhd->prot->pktid_txq_start_cnt);
10712 bcm_bprintf(b, "pktid_txq_stop_cnt: %d\n", dhd->prot->pktid_txq_stop_cnt);
10713 bcm_bprintf(b, "pktid_depleted_cnt: %d\n", dhd->prot->pktid_depleted_cnt);
10714 bcm_bprintf(b, "txcpl_db_cnt: %d\n", dhd->prot->txcpl_db_cnt);
10716 bcm_bprintf(b, "host_seqnum %u dngl_seqnum %u\n", dhd_prot_read_seqnum(dhd, TRUE),
10717 dhd_prot_read_seqnum(dhd, FALSE));
10719 bcm_bprintf(b, "tx_h2d_db_cnt:%llu\n", dhd->prot->tx_h2d_db_cnt);
10724 dhd->prot->agg_h2d_db_info.timer_db_cnt, dhd->prot->agg_h2d_db_info.direct_db_cnt);
10725 dhd_agg_inflight_stats_dump(dhd, b);
10730 void dhd_prot_dstats(dhd_pub_t *dhd) argument
10736 int dhd_process_pkt_reorder_info(dhd_pub_t *dhd, uchar *reorder_info_buf, argument
10744 dhd_post_dummy_msg(dhd_pub_t *dhd) argument
10750 dhd_prot_t *prot = dhd->prot;
10754 if (dhd_prot_inc_hostactive_devwake_assert(dhd->bus) != BCME_OK)
10760 dhd_prot_alloc_ring_space(dhd, ring, 1, &alloced, FALSE);
10765 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
10783 dhd_prot_ring_write_complete(dhd, ring, hevent, 1);
10788 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
10799 BCMFASTPATH(dhd_prot_alloc_ring_space)(dhd_pub_t *dhd, msgbuf_ring_t *ring,
10814 if (dhd->dma_d2h_ring_upd_support) {
10815 ring->rd = dhd_prot_dma_indx_get(dhd, H2D_DMA_INDX_RD_UPD, ring->idx);
10817 dhd_bus_cmn_readshared(dhd->bus, &(ring->rd), RING_RD_UPD, ring->idx);
10822 dhd->bus->read_shm_fail = TRUE;
10853 dhd_fillup_ioct_reqst(dhd_pub_t *dhd, uint16 len, uint cmd, void* buf, int ifidx) argument
10855 dhd_prot_t *prot = dhd->prot;
10863 ulong addr = dhd->bus->ring_sh[ring->idx].ring_state_r;
10868 if (dhd_query_bus_erros(dhd)) {
10882 if (dhd_prot_inc_hostactive_devwake_assert(dhd->bus) != BCME_OK)
10888 R_REG(dhd->osh, (volatile uint16 *)(dhd->bus->tcm + addr));
10895 diff_ns, dhdpcie_bus_get_pcie_inband_dw_state(dhd->bus)));
10906 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
10915 dhd_prot_alloc_ring_space(dhd, ring, 1, &alloced, FALSE);
10923 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
10964 dhd_prot_ioctl_trace(dhd, ioct_rqst, buf, len);
10968 dhd_prot_ring_write_complete(dhd, ring, ioct_rqst, 1);
10973 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
10991 dhd_prot_ring_attach(dhd_pub_t *dhd, msgbuf_ring_t *ring, const char *name, argument
10996 dhd_prot_t *prot = dhd->prot;
10997 uint16 max_flowrings = dhd->bus->max_tx_flowrings;
11012 DHD_IS_FLOWID_HTPUT(dhd, DHD_RINGID_TO_FLOWID(ringid))) {
11047 (void)dhd_dma_buf_audit(dhd, &ring->dma_buf);
11050 if (ring == dhd->prot->d2hring_edl) {
11054 memcpy(&ring->dma_buf, &dhd->edl_ring_mem, sizeof(ring->dma_buf));
11063 dma_buf_alloced = dhd_dma_buf_alloc(dhd, &ring->dma_buf, dma_buf_len);
11073 ring->ring_lock = osl_spin_lock_init(dhd->osh);
11094 dhd_prot_ring_init(dhd_pub_t *dhd, msgbuf_ring_t *ring) argument
11101 dhd_bus_cmn_writeshared(dhd->bus, &ring->base_addr,
11103 dhd_bus_cmn_writeshared(dhd->bus, &ring->max_items,
11105 dhd_bus_cmn_writeshared(dhd->bus, &ring->item_len,
11108 dhd_bus_cmn_writeshared(dhd->bus, &(ring->wr),
11110 dhd_bus_cmn_writeshared(dhd->bus, &(ring->rd),
11123 dhd_prot_ring_reset(dhd_pub_t *dhd, msgbuf_ring_t *ring) argument
11127 dhd_dma_buf_reset(dhd, &ring->dma_buf);
11140 dhd_prot_ring_detach(dhd_pub_t *dhd, msgbuf_ring_t *ring) argument
11142 dhd_prot_t *prot = dhd->prot;
11143 uint16 max_flowrings = dhd->bus->max_tx_flowrings;
11153 (void)dhd_dma_buf_audit(dhd, &ring->dma_buf);
11157 if (ring == dhd->prot->d2hring_edl) {
11165 dhd_dma_buf_free(dhd, &ring->dma_buf);
11169 osl_spin_lock_deinit(dhd->osh, ring->ring_lock);
11175 dhd_get_max_flow_rings(dhd_pub_t *dhd) argument
11177 if (dhd->bus->api.fw_rev >= PCIE_SHARED_VERSION_6)
11178 return dhd->bus->max_tx_flowrings;
11180 return (dhd->bus->max_tx_flowrings - BCMPCIE_H2D_COMMON_MSGRINGS);
11201 dhd_prot_flowrings_pool_attach(dhd_pub_t *dhd) argument
11206 dhd_prot_t *prot = dhd->prot;
11215 prot->h2d_rings_total = (uint16)dhd_bus_max_h2d_queues(dhd->bus);
11224 h2d_flowrings_total = dhd_get_max_flow_rings(dhd);
11242 if (dhd_prot_ring_attach(dhd, ring, ring_name,
11255 dhd_prot_flowrings_pool_detach(dhd); /* Free entire pool of flowrings */
11279 dhd_prot_flowrings_pool_reset(dhd_pub_t *dhd) argument
11283 dhd_prot_t *prot = dhd->prot;
11289 h2d_flowrings_total = dhd_get_max_flow_rings(dhd);
11292 dhd_prot_ring_reset(dhd, ring);
11306 dhd_prot_flowrings_pool_detach(dhd_pub_t *dhd) argument
11311 dhd_prot_t *prot = dhd->prot;
11318 h2d_flowrings_total = dhd_get_max_flow_rings(dhd);
11321 dhd_prot_ring_detach(dhd, ring);
11342 dhd_prot_flowrings_pool_fetch(dhd_pub_t *dhd, uint16 flowid) argument
11345 dhd_prot_t *prot = dhd->prot;
11372 dhd_prot_flowrings_pool_release(dhd_pub_t *dhd, uint16 flowid, void *flow_ring) argument
11375 dhd_prot_t *prot = dhd->prot;
11386 (void)dhd_dma_buf_audit(dhd, &ring->dma_buf);
11397 dhd_prot_schedule_aggregate_h2d_db(dhd_pub_t *dhd, uint16 flowid) argument
11399 dhd_prot_t *prot = dhd->prot;
11407 dhd_prot_txdata_aggr_db_write_flush(dhd, flowid);
11409 inflight = OSL_ATOMIC_READ(dhd->osh, &ring->inflight);
11414 dhd_agg_inflights_stats_update(dhd, inflight);
11415 dhd_prot_aggregate_db_ring_door_bell(dhd, flowid, db_req);
11465 dhd_prot_agg_db_ring_write(dhd_pub_t *dhd, msgbuf_ring_t * ring, void* p, argument
11468 uint16 max_flowrings = dhd->bus->max_tx_flowrings;
11481 DHD_BUS_LP_STATE_LOCK(dhd->bus->bus_lp_state_lock, flags_bus);
11486 if (IDMA_ACTIVE(dhd) || dhd->dma_h2d_ring_upd_support) {
11487 dhd_prot_dma_indx_set(dhd, ring->wr,
11489 } else if (IFRM_ACTIVE(dhd) && DHD_IS_FLOWRING(ring->idx, max_flowrings)) {
11490 dhd_prot_dma_indx_set(dhd, ring->wr,
11493 dhd_bus_cmn_writeshared(dhd->bus, &(ring->wr),
11497 DHD_BUS_LP_STATE_UNLOCK(dhd->bus->bus_lp_state_lock, flags_bus);
11501 dhd_prot_aggregate_db_ring_door_bell(dhd_pub_t *dhd, uint16 flowid, bool ring_db) argument
11503 dhd_prot_t *prot = dhd->prot;
11504 flow_ring_table_t *flow_ring_table = (flow_ring_table_t *)dhd->flow_ring_table;
11511 dhd_msgbuf_agg_h2d_db_timer_cancel(dhd);
11514 if (IDMA_ACTIVE(dhd) || (IFRM_ACTIVE(dhd))) {
11517 if (dhd->bus->sih) {
11518 corerev = dhd->bus->sih->buscorerev;
11526 prot->mb_2_ring_fn(dhd->bus, db_index, TRUE);
11528 prot->mb_ring_fn(dhd->bus, DHD_WRPTR_UPDATE_H2D_DB_MAGIC(ring));
11547 BCMFASTPATH(__dhd_prot_ring_write_complete)(dhd_pub_t *dhd, msgbuf_ring_t * ring, void* p,
11550 dhd_prot_t *prot = dhd->prot;
11552 uint16 max_flowrings = dhd->bus->max_tx_flowrings;
11558 if (IDMA_ACTIVE(dhd) || dhd->dma_h2d_ring_upd_support) {
11559 dhd_prot_dma_indx_set(dhd, ring->wr,
11561 } else if (IFRM_ACTIVE(dhd) && DHD_IS_FLOWRING(ring->idx, max_flowrings)) {
11562 dhd_prot_dma_indx_set(dhd, ring->wr,
11565 dhd_bus_cmn_writeshared(dhd->bus, &(ring->wr),
11570 if (IDMA_ACTIVE(dhd) ||
11571 (IFRM_ACTIVE(dhd) && DHD_IS_FLOWRING(ring->idx, max_flowrings))) {
11574 if (dhd->bus->sih) {
11575 corerev = dhd->bus->sih->buscorerev;
11581 prot->mb_2_ring_fn(dhd->bus, db_index, TRUE);
11583 prot->mb_ring_fn(dhd->bus, DHD_WRPTR_UPDATE_H2D_DB_MAGIC(ring));
11588 BCMFASTPATH(dhd_prot_ring_write_complete)(dhd_pub_t *dhd, msgbuf_ring_t * ring, void* p,
11592 DHD_BUS_LP_STATE_LOCK(dhd->bus->bus_lp_state_lock, flags_bus);
11593 __dhd_prot_ring_write_complete(dhd, ring, p, nitems);
11594 DHD_BUS_LP_STATE_UNLOCK(dhd->bus->bus_lp_state_lock, flags_bus);
11598 BCMFASTPATH(dhd_prot_ring_doorbell)(dhd_pub_t *dhd, uint32 value)
11601 DHD_BUS_LP_STATE_LOCK(dhd->bus->bus_lp_state_lock, flags_bus);
11602 dhd->prot->mb_ring_fn(dhd->bus, value);
11603 DHD_BUS_LP_STATE_UNLOCK(dhd->bus->bus_lp_state_lock, flags_bus);
11612 BCMFASTPATH(dhd_prot_ring_write_complete_mbdata)(dhd_pub_t *dhd, msgbuf_ring_t * ring, void *p,
11617 DHD_BUS_LP_STATE_LOCK(dhd->bus->bus_lp_state_lock, flags_bus);
11619 __dhd_prot_ring_write_complete(dhd, ring, p, nitems);
11623 __DHD_SET_BUS_LPS_D3_INFORMED(dhd->bus);
11626 DHD_BUS_LP_STATE_UNLOCK(dhd->bus->bus_lp_state_lock, flags_bus);
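The __dhd_prot_ring_write_complete() and doorbell references above follow a publish pattern: advance the ring write index, expose it to the dongle either through a host-resident DMA index array (when index DMA / dma_h2d_ring_upd_support is active) or by writing the shared ring state, then ring the H2D doorbell. The sketch below folds the index advance and the publish into one helper for brevity, which may differ from the driver's exact split; the types and callbacks are stand-ins.

/* Sketch of the write-complete path: bump the ring write index, publish it,
 * then ring the doorbell. All names here are illustrative stand-ins.
 */
#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

struct demo_ring {
    uint16_t wr;         /* host write index */
    uint16_t max_items;  /* ring depth */
    uint16_t idx;        /* ring id used to address shared state / index array */
};

struct demo_ctx {
    bool     dma_index_active;                 /* IDMA / dma_h2d_ring_upd_support */
    uint16_t dma_indx_wr[8];                   /* host-resident H2D WR index array */
    void   (*write_shared_wr)(uint16_t ringid, uint16_t wr); /* shared-mem write */
    void   (*ring_doorbell)(uint16_t ringid);                /* PCIe doorbell */
};

static void demo_ring_write_complete(struct demo_ctx *ctx,
                                     struct demo_ring *ring, uint16_t nitems)
{
    ring->wr = (uint16_t)((ring->wr + nitems) % ring->max_items);

    if (ctx->dma_index_active)
        ctx->dma_indx_wr[ring->idx] = ring->wr;   /* dongle DMAs this array */
    else
        ctx->write_shared_wr(ring->idx, ring->wr); /* direct shared-memory update */

    ctx->ring_doorbell(ring->idx);                 /* tell the dongle to look */
}

static void demo_write_shared(uint16_t ringid, uint16_t wr)
{ printf("shared[%u].wr = %u\n", ringid, wr); }
static void demo_db(uint16_t ringid)
{ printf("doorbell for ring %u\n", ringid); }

int main(void)
{
    struct demo_ctx ctx = { false, {0}, demo_write_shared, demo_db };
    struct demo_ring ring = { .wr = 62, .max_items = 64, .idx = 3 };
    demo_ring_write_complete(&ctx, &ring, 4);  /* wraps from 62 to 2 */
    return 0;
}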
11635 dhd_prot_upd_read_idx(dhd_pub_t *dhd, msgbuf_ring_t * ring) argument
11637 dhd_prot_t *prot = dhd->prot;
11646 if (IDMA_ACTIVE(dhd)) {
11647 dhd_prot_dma_indx_set(dhd, ring->rd,
11650 if (dhd->bus->sih) {
11651 corerev = dhd->bus->sih->buscorerev;
11657 prot->mb_2_ring_fn(dhd->bus, db_index, FALSE);
11658 } else if (dhd->dma_h2d_ring_upd_support) {
11659 dhd_prot_dma_indx_set(dhd, ring->rd,
11662 dhd_bus_cmn_writeshared(dhd->bus, &(ring->rd),
11668 dhd_send_d2h_ringcreate(dhd_pub_t *dhd, msgbuf_ring_t *ring_to_create, argument
11675 uint16 max_h2d_rings = dhd->bus->max_submission_rings;
11676 msgbuf_ring_t *ctrl_ring = &dhd->prot->h2dring_ctrl_subn;
11679 if (dhd_prot_inc_hostactive_devwake_assert(dhd->bus) != BCME_OK)
11693 d2h_ring = (d2h_ring_create_req_t *) dhd_prot_alloc_ring_space(dhd,
11739 dhd_prot_ring_write_complete(dhd, ctrl_ring, d2h_ring,
11745 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
11753 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
11759 dhd_send_h2d_ringcreate(dhd_pub_t *dhd, msgbuf_ring_t *ring_to_create, uint8 ring_type, uint32 id) argument
11766 msgbuf_ring_t *ctrl_ring = &dhd->prot->h2dring_ctrl_subn;
11769 if (dhd_prot_inc_hostactive_devwake_assert(dhd->bus) != BCME_OK)
11783 h2d_ring = (h2d_ring_create_req_t *)dhd_prot_alloc_ring_space(dhd,
11819 dhd_prot_ring_write_complete(dhd, ctrl_ring, h2d_ring,
11825 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
11832 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
11843 dhd_prot_dma_indx_set(dhd_pub_t *dhd, uint16 new_index, uint8 type, uint16 ringid) argument
11847 dhd_prot_t *prot = dhd->prot;
11848 uint16 max_h2d_rings = dhd->bus->max_submission_rings;
11892 dhd_prot_dma_indx_get(dhd_pub_t *dhd, uint8 type, uint16 ringid) argument
11897 dhd_prot_t *prot = dhd->prot;
11898 uint16 max_h2d_rings = dhd->bus->max_submission_rings;
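The dhd_prot_dma_indx_set()/dhd_prot_dma_indx_get()/dhd_prot_dma_indx_init() references above manage per-ring read/write indices in DMA-able host buffers, one slot of rw_index_sz bytes per ring id, so host and dongle can exchange indices without per-ring register accesses. A hedged sketch, with allocation, cache maintenance and endian handling from the real driver omitted:

/* Sketch: a flat buffer of per-ring index slots addressed as
 * ringid * rw_index_sz. Sizes and names are illustrative only.
 */
#include <stdint.h>
#include <string.h>
#include <stdio.h>
#include <stdlib.h>

struct demo_indx_buf {
    uint8_t *va;          /* would be DMA-coherent memory in the driver */
    uint32_t rw_index_sz; /* bytes per index slot, e.g. sizeof(uint16_t) */
    uint32_t nslots;
};

static void demo_indx_set(struct demo_indx_buf *b, uint16_t ringid, uint16_t val)
{
    memcpy(b->va + (size_t)ringid * b->rw_index_sz, &val, sizeof(val));
}

static uint16_t demo_indx_get(const struct demo_indx_buf *b, uint16_t ringid)
{
    uint16_t val;
    memcpy(&val, b->va + (size_t)ringid * b->rw_index_sz, sizeof(val));
    return val;
}

int main(void)
{
    struct demo_indx_buf h2d_wr = { .rw_index_sz = sizeof(uint16_t), .nslots = 8 };
    h2d_wr.va = calloc(h2d_wr.nslots, h2d_wr.rw_index_sz);
    demo_indx_set(&h2d_wr, 5, 123);
    printf("ring 5 WR index = %u\n", demo_indx_get(&h2d_wr, 5));
    free(h2d_wr.va);
    return 0;
}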
11958 dhd_prot_write_host_seqnum(dhd_pub_t *dhd, uint32 seq_num) argument
11961 dhd_prot_t *prot = dhd->prot;
11973 dhd_prot_read_seqnum(dhd_pub_t *dhd, bool host) argument
11976 dhd_prot_t *prot = dhd->prot;
11996 dhd_prot_save_dmaidx(dhd_pub_t *dhd) argument
11998 dhd_prot_t *prot = dhd->prot;
12001 dngl_seqnum = dhd_prot_read_seqnum(dhd, FALSE);
12011 dhd_prot_write_host_seqnum(dhd, prot->host_seqnum);
12013 dhd_prot_ring_doorbell(dhd, DHD_DMA_INDX_SEQ_H2D_DB_MAGIC);
12021 dhd_prot_dma_indx_copybuf_init(dhd_pub_t *dhd, uint32 buf_sz, uint8 type) argument
12023 dhd_prot_t *prot = dhd->prot;
12027 prot->d2h_dma_indx_wr_copy_buf = MALLOCZ(dhd->osh, buf_sz);
12037 prot->h2d_dma_indx_rd_copy_buf = MALLOCZ(dhd->osh, buf_sz);
12066 dhd_prot_dma_indx_alloc(dhd_pub_t *dhd, uint8 type, argument
12074 rc = dhd_dma_buf_alloc(dhd, dma_buf, bufsz);
12080 dhd_prot_dma_indx_init(dhd_pub_t *dhd, uint32 rw_index_sz, uint8 type, uint32 length) argument
12083 dhd_prot_t *prot = dhd->prot;
12100 if (dhd_prot_dma_indx_alloc(dhd, type, dma_buf, bufsz))
12108 if (dhd_prot_dma_indx_alloc(dhd, type, dma_buf, bufsz))
12116 if (dhd_prot_dma_indx_alloc(dhd, type, dma_buf, bufsz))
12124 if (dhd_prot_dma_indx_alloc(dhd, type, dma_buf, bufsz))
12132 if (dhd_prot_dma_indx_alloc(dhd, type, dma_buf, bufsz))
12157 dhd_prot_get_read_addr(dhd_pub_t *dhd, msgbuf_ring_t *ring, uint32 *available_len) argument
12167 __FUNCTION__, (uint32 *)(dhd->prot->d2h_dma_indx_rd_buf.va),
12168 (uint32 *)(dhd->prot->d2h_dma_indx_wr_buf.va)));
12178 if (dhd->dma_d2h_ring_upd_support) {
12180 d2h_wr = dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_WR_UPD, ring->idx);
12183 dhd_bus_cmn_readshared(dhd->bus, &(ring->wr), RING_WR_UPD, ring->idx);
12207 dhd->busstate, dhd->bus->wait_for_d3_ack));
12211 dhd->bus->read_shm_fail = TRUE;
12215 if (dhd->memdump_enabled) {
12217 dhd->memdump_type = DUMP_TYPE_RESUMED_ON_INVALID_RING_RDWR;
12218 dhd_bus_mem_dump(dhd);
12225 dhd_schedule_reset(dhd);
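dhd_prot_get_read_addr() above refreshes the D2H write index (via the DMA index array or a shared-memory read) and derives how much completed work the host may process. A common way to compute a contiguous available region for a circular ring, processing only up to the wrap point, is sketched below; the exact wrap policy here is an assumption for illustration, not lifted from the driver.

/* Sketch: derive a contiguous item count from rd/wr of a circular ring. */
#include <stdint.h>
#include <stdio.h>

static uint16_t demo_items_avail(uint16_t rd, uint16_t wr, uint16_t max_items)
{
    if (wr >= rd)
        return (uint16_t)(wr - rd);          /* no wrap: [rd, wr) */
    return (uint16_t)(max_items - rd);       /* wrapped: read up to ring end */
}

int main(void)
{
    printf("%u\n", demo_items_avail(10, 14, 64));  /* 4 items */
    printf("%u\n", demo_items_avail(60, 6, 64));   /* 4 items up to the wrap */
    return 0;
}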
12256 int dhd_prot_h2d_mbdata_send_ctrlmsg(dhd_pub_t *dhd, uint32 mb_data) argument
12260 msgbuf_ring_t *ctrl_ring = &dhd->prot->h2dring_ctrl_subn;
12273 if ((INBAND_DW_ENAB(dhd->bus)) &&
12274 (dhdpcie_bus_get_pcie_inband_dw_state(dhd->bus) ==
12286 h2d_mb_data = (h2d_mailbox_data_t *)dhd_prot_alloc_ring_space(dhd,
12321 dhd_prot_ring_write_complete_mbdata(dhd, ctrl_ring, h2d_mb_data,
12338 dhd_prot_flow_ring_create(dhd_pub_t *dhd, flow_ring_node_t *flow_ring_node) argument
12342 dhd_prot_t *prot = dhd->prot;
12346 uint16 max_flowrings = dhd->bus->max_tx_flowrings;
12349 flow_ring = dhd_prot_flowrings_pool_fetch(dhd, flow_ring_node->flowid);
12357 if (dhd_prot_inc_hostactive_devwake_assert(dhd->bus) != BCME_OK)
12364 dhd_prot_alloc_ring_space(dhd, ctrl_ring, 1, &alloced, FALSE);
12367 dhd_prot_flowrings_pool_release(dhd, flow_ring_node->flowid, flow_ring);
12372 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
12404 if (dhd->hp2p_capable && dhd->hp2p_ring_more &&
12406 (dhd->hp2p_infra_enable || flow_create_rqst->msg.if_id) &&
12411 if (!dhd->hp2p_mf_enable) {
12412 dhd->hp2p_ring_more = FALSE;
12424 if (IFRM_ACTIVE(dhd))
12433 if (IDMA_ACTIVE(dhd) || dhd->dma_h2d_ring_upd_support) {
12434 dhd_prot_dma_indx_set(dhd, flow_ring->wr,
12436 } else if (IFRM_ACTIVE(dhd) && DHD_IS_FLOWRING(flow_ring->idx, max_flowrings)) {
12437 dhd_prot_dma_indx_set(dhd, flow_ring->wr,
12440 dhd_bus_cmn_writeshared(dhd->bus, &(flow_ring->wr),
12445 dhd_prot_ring_write_complete(dhd, ctrl_ring, flow_create_rqst, 1);
12450 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
12457 dhd_prot_flow_ring_create_response_process(dhd_pub_t *dhd, void *msg) argument
12465 dhd_bus_flow_ring_create_response(dhd->bus,
12472 dhd_prot_process_h2d_ring_create_complete(dhd_pub_t *dhd, void *buf) argument
12484 if (dhd->prot->h2dring_info_subn->create_req_id == ltoh32(resp->cmn_hdr.request_id) &&
12485 !dhd->prot->h2dring_info_subn->create_pending) {
12489 if (dhd->prot->h2dring_btlog_subn &&
12490 dhd->prot->h2dring_btlog_subn->create_req_id == ltoh32(resp->cmn_hdr.request_id) &&
12491 !dhd->prot->h2dring_btlog_subn->create_pending) {
12501 if (dhd->prot->h2dring_info_subn->create_req_id == ltoh32(resp->cmn_hdr.request_id)) {
12502 dhd->prot->h2dring_info_subn->create_pending = FALSE;
12503 dhd->prot->h2dring_info_subn->inited = TRUE;
12505 dhd_prot_infobufpost(dhd, dhd->prot->h2dring_info_subn);
12508 if (dhd->prot->h2dring_btlog_subn &&
12509 dhd->prot->h2dring_btlog_subn->create_req_id == ltoh32(resp->cmn_hdr.request_id)) {
12510 dhd->prot->h2dring_btlog_subn->create_pending = FALSE;
12511 dhd->prot->h2dring_btlog_subn->inited = TRUE;
12513 dhd_prot_infobufpost(dhd, dhd->prot->h2dring_btlog_subn);
12520 dhd_prot_process_d2h_ring_create_complete(dhd_pub_t *dhd, void *buf) argument
12539 if (!dhd->dongle_edl_support)
12543 if (!dhd->prot->d2hring_info_cpln->create_pending) {
12553 dhd->prot->d2hring_info_cpln->create_pending = FALSE;
12554 dhd->prot->d2hring_info_cpln->inited = TRUE;
12558 if (!dhd->prot->d2hring_edl->create_pending) {
12568 dhd->prot->d2hring_edl->create_pending = FALSE;
12569 dhd->prot->d2hring_edl->inited = TRUE;
12576 if (!dhd->prot->d2hring_btlog_cpln->create_pending) {
12586 dhd->prot->d2hring_btlog_cpln->create_pending = FALSE;
12587 dhd->prot->d2hring_btlog_cpln->inited = TRUE;
12591 if (dhd->prot->d2hring_hp2p_txcpl &&
12593 if (!dhd->prot->d2hring_hp2p_txcpl->create_pending) {
12603 dhd->prot->d2hring_hp2p_txcpl->create_pending = FALSE;
12604 dhd->prot->d2hring_hp2p_txcpl->inited = TRUE;
12606 if (dhd->prot->d2hring_hp2p_rxcpl &&
12608 if (!dhd->prot->d2hring_hp2p_rxcpl->create_pending) {
12618 dhd->prot->d2hring_hp2p_rxcpl->create_pending = FALSE;
12619 dhd->prot->d2hring_hp2p_rxcpl->inited = TRUE;
12625 dhd_prot_process_d2h_mb_data(dhd_pub_t *dhd, void* buf) argument
12632 dhd_bus_handle_mb_data(dhd->bus, d2h_data->d2h_mailbox_data);
12636 dhd_prot_process_d2h_host_ts_complete(dhd_pub_t *dhd, void* buf) argument
12641 dhd_prot_t *prot = dhd->prot;
12658 dhd_timesync_handle_host_ts_complete(dhd->ts, host_ts_cpl->xt_id,
12667 void dhd_prot_clean_flow_ring(dhd_pub_t *dhd, void *msgbuf_flow_info) argument
12670 dhd_prot_ring_detach(dhd, flow_ring);
12674 void dhd_prot_print_flow_ring(dhd_pub_t *dhd, void *msgbuf_flow_info, bool h2d, argument
12688 if (dhd->bus->is_linkdown) {
12693 dhd_bus_cmn_readshared(dhd->bus, &rd, RING_RD_UPD, flow_ring->idx);
12694 dhd_bus_cmn_readshared(dhd->bus, &wr, RING_WR_UPD, flow_ring->idx);
12695 if (dhd->dma_d2h_ring_upd_support) {
12697 drd = dhd_prot_dma_indx_get(dhd, H2D_DMA_INDX_RD_UPD, flow_ring->idx);
12698 dwr = dhd_prot_dma_indx_get(dhd, H2D_DMA_INDX_WR_UPD, flow_ring->idx);
12700 drd = dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_RD_UPD, flow_ring->idx);
12701 dwr = dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_WR_UPD, flow_ring->idx);
12712 void dhd_prot_print_info(dhd_pub_t *dhd, struct bcmstrbuf *strbuf) argument
12714 dhd_prot_t *prot = dhd->prot;
12716 dhd->prot->device_ipc_version,
12717 dhd->prot->host_ipc_version,
12718 dhd->prot->active_ipc_version);
12721 dhd->prot->max_tsbufpost, dhd->prot->cur_ts_bufs_posted);
12723 dhd->prot->max_infobufpost, dhd->prot->infobufpost);
12726 dhd->prot->max_btlogbufpost, dhd->prot->btlogbufpost);
12729 dhd->prot->max_eventbufpost, dhd->prot->cur_event_bufs_posted);
12731 dhd->prot->max_ioctlrespbufpost, dhd->prot->cur_ioctlresp_bufs_posted);
12733 dhd->prot->max_rxbufpost, dhd->prot->rxbufpost);
12736 dhd->prot->tot_rxbufpost, dhd->prot->tot_rxcpl);
12739 dhd->actual_tx_pkts, dhd->tot_txcpl);
12746 dhd_prot_print_flow_ring(dhd, &prot->h2dring_ctrl_subn, TRUE, strbuf,
12749 dhd_prot_print_flow_ring(dhd, &prot->d2hring_ctrl_cpln, FALSE, strbuf,
12752 dhd_prot_print_flow_ring(dhd, &prot->h2dring_rxp_subn, TRUE, strbuf,
12755 dhd_prot_print_flow_ring(dhd, &prot->d2hring_rx_cpln, FALSE, strbuf,
12758 dhd_prot_print_flow_ring(dhd, &prot->d2hring_tx_cpln, FALSE, strbuf,
12760 if (dhd->prot->h2dring_info_subn != NULL && dhd->prot->d2hring_info_cpln != NULL) {
12762 dhd_prot_print_flow_ring(dhd, prot->h2dring_info_subn, TRUE, strbuf,
12765 dhd_prot_print_flow_ring(dhd, prot->d2hring_info_cpln, FALSE, strbuf,
12768 if (dhd->prot->d2hring_edl != NULL) {
12770 dhd_prot_print_flow_ring(dhd, prot->d2hring_edl, FALSE, strbuf,
12775 OSL_ATOMIC_READ(dhd->osh, &dhd->prot->active_tx_count),
12776 DHD_PKTID_AVAIL(dhd->prot->pktid_ctrl_map),
12777 DHD_PKTID_AVAIL(dhd->prot->pktid_rx_map),
12778 DHD_PKTID_AVAIL(dhd->prot->pktid_tx_map));
12781 dhd_prot_ioctl_dump(dhd->prot, strbuf);
12784 dhd_dump_bus_mmio_trace(dhd->bus, strbuf);
12786 dhd_dump_bus_ds_trace(dhd->bus, strbuf);
12788 dhd_dump_bus_flow_ring_status_isr_trace(dhd->bus, strbuf);
12789 dhd_dump_bus_flow_ring_status_dpc_trace(dhd->bus, strbuf);
12794 dhd_prot_flow_ring_delete(dhd_pub_t *dhd, flow_ring_node_t *flow_ring_node) argument
12797 dhd_prot_t *prot = dhd->prot;
12803 if (dhd_prot_inc_hostactive_devwake_assert(dhd->bus) != BCME_OK)
12811 dhd_prot_alloc_ring_space(dhd, ring, 1, &alloced, FALSE);
12817 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
12841 dhd_prot_ring_write_complete(dhd, ring, flow_delete_rqst, 1);
12846 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
12852 BCMFASTPATH(dhd_prot_flow_ring_fastdelete)(dhd_pub_t *dhd, uint16 flowid, uint16 rd_idx)
12854 flow_ring_node_t *flow_ring_node = DHD_FLOW_RING(dhd, flowid);
12876 dhd_prot_txstatus_process(dhd, &txstatus);
12881 dhd_prot_flow_ring_delete_response_process(dhd_pub_t *dhd, void *msg) argument
12888 if (dhd->fast_delete_ring_support) {
12889 dhd_prot_flow_ring_fastdelete(dhd, flow_delete_resp->cmplt.flow_ring_id,
12892 dhd_bus_flow_ring_delete_response(dhd->bus, flow_delete_resp->cmplt.flow_ring_id,
12897 dhd_prot_process_flow_ring_resume_response(dhd_pub_t *dhd, void* msg) argument
12906 dhd_bus_flow_ring_resume_response(dhd->bus, flow_resume_resp->cmplt.flow_ring_id,
12912 dhd_prot_process_flow_ring_suspend_response(dhd_pub_t *dhd, void* msg) argument
12934 dhd_prot_flow_ring_flush(dhd_pub_t *dhd, flow_ring_node_t *flow_ring_node) argument
12937 dhd_prot_t *prot = dhd->prot;
12943 if (dhd_prot_inc_hostactive_devwake_assert(dhd->bus) != BCME_OK)
12951 dhd_prot_alloc_ring_space(dhd, ring, 1, &alloced, FALSE);
12956 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
12975 dhd_prot_ring_write_complete(dhd, ring, flow_flush_rqst, 1);
12980 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
12986 dhd_prot_flow_ring_flush_response_process(dhd_pub_t *dhd, void *msg) argument
12993 dhd_bus_flow_ring_flush_response(dhd->bus, flow_flush_resp->cmplt.flow_ring_id,
13003 dhd_msgbuf_ring_config_d2h_soft_doorbell(dhd_pub_t *dhd) argument
13011 dhd_prot_t *prot = dhd->prot;
13018 if (dhd_prot_inc_hostactive_devwake_assert(dhd->bus) != BCME_OK)
13023 msg_start = dhd_prot_alloc_ring_space(dhd, ctrl_ring, d2h_rings, &alloced, TRUE);
13030 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
13076 dhd_prot_ring_write_complete(dhd, ctrl_ring, msg_start, d2h_rings);
13081 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
13087 dhd_prot_process_d2h_ring_config_complete(dhd_pub_t *dhd, void *msg) argument
13096 copy_ext_trap_sig(dhd_pub_t *dhd, trap_t *tr) argument
13098 uint32 *ext_data = dhd->extended_trap_data;
13282 copy_hang_info_stack(dhd_pub_t *dhd, char *dest, int *bytes_written, int *cnt) argument
13287 uint32 *ext_data = dhd->extended_trap_data;
13348 copy_hang_info_specific(dhd_pub_t *dhd, char *dest, int *bytes_written, int *cnt) argument
13353 uint32 *ext_data = dhd->extended_trap_data;
13420 get_hang_info_trap_subtype(dhd_pub_t *dhd, uint32 *subtype) argument
13423 uint32 *ext_data = dhd->extended_trap_data;
13446 copy_hang_info_etd_base64(dhd_pub_t *dhd, char *dest, int *bytes_written, int *cnt) argument
13449 uint32 *ext_data = dhd->extended_trap_data;
13477 base64_out = MALLOCZ(dhd->osh, HANG_INFO_BASE64_BUFFER_SIZE);
13494 MFREE(dhd->osh, base64_out, HANG_INFO_BASE64_BUFFER_SIZE);
13499 copy_hang_info_trap(dhd_pub_t *dhd) argument
13505 if (!dhd || !dhd->hang_info) {
13507 dhd, (dhd ? dhd->hang_info : NULL)));
13511 if (!dhd->dongle_trap_occured) {
13518 copy_ext_trap_sig(dhd, &tr);
13519 get_hang_info_trap_subtype(dhd, &trap_subtype);
13525 dhd->hang_info_cnt = 0;
13526 get_debug_dump_time(dhd->debug_dump_time_hang_str);
13527 copy_debug_dump_time(dhd->debug_dump_time_str, dhd->debug_dump_time_hang_str);
13529 copy_hang_info_head(dhd->hang_info, &tr, VENDOR_SEND_HANG_EXT_INFO_LEN, FALSE,
13530 &bytes_written, &dhd->hang_info_cnt, dhd->debug_dump_time_hang_str);
13533 dhd->hang_info_cnt, (int)strlen(dhd->hang_info), dhd->hang_info));
13535 clear_debug_dump_time(dhd->debug_dump_time_hang_str);
13541 if (dhd->hang_info_cnt < HANG_FIELD_CNT_MAX) {
13542 copy_hang_info_etd_base64(dhd, dhd->hang_info, &bytes_written, &dhd->hang_info_cnt);
13544 dhd->hang_info_cnt, (int)strlen(dhd->hang_info), dhd->hang_info));
13547 if (dhd->hang_info_cnt < HANG_FIELD_CNT_MAX) {
13548 copy_hang_info_stack(dhd, dhd->hang_info, &bytes_written, &dhd->hang_info_cnt);
13550 dhd->hang_info_cnt, (int)strlen(dhd->hang_info), dhd->hang_info));
13553 if (dhd->hang_info_cnt < HANG_FIELD_CNT_MAX) {
13554 copy_hang_info_trap_t(dhd->hang_info, &tr, VENDOR_SEND_HANG_EXT_INFO_LEN, FALSE,
13555 &bytes_written, &dhd->hang_info_cnt, dhd->debug_dump_time_hang_str);
13557 dhd->hang_info_cnt, (int)strlen(dhd->hang_info), dhd->hang_info));
13560 if (dhd->hang_info_cnt < HANG_FIELD_CNT_MAX) {
13561 copy_hang_info_specific(dhd, dhd->hang_info, &bytes_written, &dhd->hang_info_cnt);
13563 dhd->hang_info_cnt, (int)strlen(dhd->hang_info), dhd->hang_info));
13569 copy_hang_info_linkdown(dhd_pub_t *dhd) argument
13574 if (!dhd || !dhd->hang_info) {
13576 dhd, (dhd ? dhd->hang_info : NULL)));
13580 if (!dhd->bus->is_linkdown) {
13585 dhd->hang_info_cnt = 0;
13587 get_debug_dump_time(dhd->debug_dump_time_hang_str);
13588 copy_debug_dump_time(dhd->debug_dump_time_str, dhd->debug_dump_time_hang_str);
13591 if (dhd->hang_info_cnt < HANG_FIELD_CNT_MAX) {
13593 bytes_written += scnprintf(&dhd->hang_info[bytes_written], remain_len, "%d%c",
13595 dhd->hang_info_cnt++;
13599 if (dhd->hang_info_cnt < HANG_FIELD_CNT_MAX) {
13601 bytes_written += scnprintf(&dhd->hang_info[bytes_written], remain_len, "%d%c",
13603 dhd->hang_info_cnt++;
13607 if (dhd->hang_info_cnt < HANG_FIELD_CNT_MAX) {
13609 bytes_written += scnprintf(&dhd->hang_info[bytes_written], remain_len, "%s%c",
13610 dhd->debug_dump_time_hang_str, HANG_KEY_DEL);
13611 dhd->hang_info_cnt++;
13614 clear_debug_dump_time(dhd->debug_dump_time_hang_str);
13617 dhd_dump_pcie_rc_regs_for_linkdown(dhd, &bytes_written);
13620 dhd->hang_info_cnt, (int)strlen(dhd->hang_info), dhd->hang_info));
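copy_hang_info_linkdown() above appends delimiter-separated fields into dhd->hang_info with scnprintf, bumping hang_info_cnt after each field and stopping at HANG_FIELD_CNT_MAX. A simplified userspace sketch of the same pattern (snprintf stands in for the kernel's scnprintf; buffer size, delimiter and field values are illustrative):

/* Sketch: append up to DEMO_FIELD_CNT_MAX delimiter-separated fields into a
 * fixed buffer, tracking bytes written and the field count.
 */
#include <stdio.h>
#include <string.h>

#define DEMO_BUF_LEN        64
#define DEMO_FIELD_CNT_MAX  3
#define DEMO_KEY_DEL        ' '

int main(void)
{
    char hang_info[DEMO_BUF_LEN] = "";
    int bytes_written = 0, cnt = 0;
    int fields[] = { 7, 42, 1, 99 };  /* the last field is dropped by the cap */

    for (size_t i = 0; i < sizeof(fields) / sizeof(fields[0]); i++) {
        if (cnt >= DEMO_FIELD_CNT_MAX)
            break;
        size_t remain = (size_t)(DEMO_BUF_LEN - bytes_written);
        bytes_written += snprintf(&hang_info[bytes_written], remain,
                                  "%d%c", fields[i], DEMO_KEY_DEL);
        cnt++;
    }
    printf("cnt=%d len=%zu info=\"%s\"\n", cnt, strlen(hang_info), hang_info);
    return 0;
}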
13626 dhd_prot_debug_info_print(dhd_pub_t *dhd) argument
13628 dhd_prot_t *prot = dhd->prot;
13642 DHD_ERROR(("memdump mode: %d\n", dhd->memdump_enabled));
13651 dhd->bus->d2h_intr_method ? "PCIE_MSI" : "PCIE_INTX",
13652 dhd->bus->d2h_intr_control ? "HOST_IRQ" : "D2H_INTMASK"));
13680 if (!dhd_pcie_dump_int_regs(dhd)) {
13682 dhd->bus->is_linkdown = TRUE;
13689 ring_tcm_rd_addr = dhd->bus->ring_sh[ring->idx].ring_state_r;
13690 ring_tcm_wr_addr = dhd->bus->ring_sh[ring->idx].ring_state_w;
13696 if (dhd->dma_d2h_ring_upd_support) {
13697 drd = dhd_prot_dma_indx_get(dhd, H2D_DMA_INDX_RD_UPD, ring->idx);
13698 dwr = dhd_prot_dma_indx_get(dhd, H2D_DMA_INDX_WR_UPD, ring->idx);
13701 if (dhd->bus->is_linkdown) {
13705 dhd_bus_cmn_readshared(dhd->bus, &rd, RING_RD_UPD, ring->idx);
13706 dhd_bus_cmn_readshared(dhd->bus, &wr, RING_WR_UPD, ring->idx);
13713 ring_tcm_rd_addr = dhd->bus->ring_sh[ring->idx].ring_state_r;
13714 ring_tcm_wr_addr = dhd->bus->ring_sh[ring->idx].ring_state_w;
13720 if (dhd->dma_d2h_ring_upd_support) {
13721 drd = dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_RD_UPD, ring->idx);
13722 dwr = dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_WR_UPD, ring->idx);
13725 if (dhd->bus->is_linkdown) {
13729 dhd_bus_cmn_readshared(dhd->bus, &rd, RING_RD_UPD, ring->idx);
13730 dhd_bus_cmn_readshared(dhd->bus, &wr, RING_WR_UPD, ring->idx);
13738 ring_tcm_rd_addr = dhd->bus->ring_sh[ring->idx].ring_state_r;
13739 ring_tcm_wr_addr = dhd->bus->ring_sh[ring->idx].ring_state_w;
13746 if (dhd->dma_d2h_ring_upd_support) {
13747 drd = dhd_prot_dma_indx_get(dhd, H2D_DMA_INDX_RD_UPD, ring->idx);
13748 dwr = dhd_prot_dma_indx_get(dhd, H2D_DMA_INDX_WR_UPD, ring->idx);
13751 if (dhd->bus->is_linkdown) {
13755 dhd_bus_cmn_readshared(dhd->bus, &rd, RING_RD_UPD, ring->idx);
13756 dhd_bus_cmn_readshared(dhd->bus, &wr, RING_WR_UPD, ring->idx);
13764 ring_tcm_rd_addr = dhd->bus->ring_sh[ring->idx].ring_state_r;
13765 ring_tcm_wr_addr = dhd->bus->ring_sh[ring->idx].ring_state_w;
13772 if (dhd->dma_d2h_ring_upd_support) {
13773 drd = dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_RD_UPD, ring->idx);
13774 dwr = dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_WR_UPD, ring->idx);
13777 if (dhd->bus->is_linkdown) {
13781 dhd_bus_cmn_readshared(dhd->bus, &rd, RING_RD_UPD, ring->idx);
13782 dhd_bus_cmn_readshared(dhd->bus, &wr, RING_WR_UPD, ring->idx);
13790 ring_tcm_rd_addr = dhd->bus->ring_sh[ring->idx].ring_state_r;
13791 ring_tcm_wr_addr = dhd->bus->ring_sh[ring->idx].ring_state_w;
13799 if (dhd->dma_d2h_ring_upd_support) {
13800 drd = dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_RD_UPD, ring->idx);
13801 dwr = dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_WR_UPD, ring->idx);
13804 if (dhd->bus->is_linkdown) {
13808 dhd_bus_cmn_readshared(dhd->bus, &rd, RING_RD_UPD, ring->idx);
13809 dhd_bus_cmn_readshared(dhd->bus, &wr, RING_WR_UPD, ring->idx);
13819 ring_tcm_rd_addr = dhd->bus->ring_sh[ring->idx].ring_state_r;
13820 ring_tcm_wr_addr = dhd->bus->ring_sh[ring->idx].ring_state_w;
13828 if (dhd->dma_d2h_ring_upd_support) {
13829 drd = dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_RD_UPD, ring->idx);
13830 dwr = dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_WR_UPD, ring->idx);
13833 if (dhd->bus->is_linkdown) {
13837 dhd_bus_cmn_readshared(dhd->bus, &rd, RING_RD_UPD, ring->idx);
13838 dhd_bus_cmn_readshared(dhd->bus, &wr, RING_WR_UPD, ring->idx);
13846 ring_tcm_rd_addr = dhd->bus->ring_sh[ring->idx].ring_state_r;
13847 ring_tcm_wr_addr = dhd->bus->ring_sh[ring->idx].ring_state_w;
13855 if (dhd->dma_d2h_ring_upd_support) {
13856 drd = dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_RD_UPD, ring->idx);
13857 dwr = dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_WR_UPD, ring->idx);
13860 if (dhd->bus->is_linkdown) {
13864 dhd_bus_cmn_readshared(dhd->bus, &rd, RING_RD_UPD, ring->idx);
13865 dhd_bus_cmn_readshared(dhd->bus, &wr, RING_WR_UPD, ring->idx);
13873 ring_tcm_rd_addr = dhd->bus->ring_sh[ring->idx].ring_state_r;
13874 ring_tcm_wr_addr = dhd->bus->ring_sh[ring->idx].ring_state_w;
13882 if (dhd->dma_d2h_ring_upd_support) {
13883 drd = dhd_prot_dma_indx_get(dhd, H2D_DMA_INDX_RD_UPD, ring->idx);
13884 dwr = dhd_prot_dma_indx_get(dhd, H2D_DMA_INDX_WR_UPD, ring->idx);
13887 if (dhd->bus->is_linkdown) {
13891 dhd_bus_cmn_readshared(dhd->bus, &rd, RING_RD_UPD, ring->idx);
13892 dhd_bus_cmn_readshared(dhd->bus, &wr, RING_WR_UPD, ring->idx);
13902 __FUNCTION__, dhd->multi_client_flow_rings, dhd->max_multi_client_flow_rings));
13908 dhd_pcie_debug_info_dump(dhd);
13911 dhd->lb_rxp_stop_thr_hitcnt, dhd->lb_rxp_strt_thr_hitcnt));
13913 dhd->lb_rxp_napi_sched_cnt, dhd->lb_rxp_napi_complete_cnt));
13916 dhd_timesync_debug_info_print(dhd);
13922 dhd_prot_ringupd_dump(dhd_pub_t *dhd, struct bcmstrbuf *b) argument
13927 if (dhd->prot->d2h_dma_indx_wr_buf.va) {
13929 uint32 max_h2d_queues = dhd_bus_max_h2d_queues(dhd->bus);
13931 OSL_CACHE_INV((void *)dhd->prot->d2h_dma_indx_wr_buf.va,
13932 dhd->prot->d2h_dma_indx_wr_buf.len);
13934 ptr = (uint32 *)(dhd->prot->d2h_dma_indx_wr_buf.va);
13954 if (dhd->prot->h2d_dma_indx_rd_buf.va) {
13955 OSL_CACHE_INV((void *)dhd->prot->h2d_dma_indx_rd_buf.va,
13956 dhd->prot->h2d_dma_indx_rd_buf.len);
13958 ptr = (uint32 *)(dhd->prot->h2d_dma_indx_rd_buf.va);
13975 dhd_prot_metadata_dbg_set(dhd_pub_t *dhd, bool val) argument
13977 dhd_prot_t *prot = dhd->prot;
13985 dhd_prot_metadata_dbg_get(dhd_pub_t *dhd) argument
13987 dhd_prot_t *prot = dhd->prot;
13992 dhd_prot_metadatalen_set(dhd_pub_t *dhd, uint32 val, bool rx) argument
13995 dhd_prot_t *prot = dhd->prot;
14001 return dhd_prot_metadatalen_get(dhd, rx);
14005 dhd_prot_metadatalen_get(dhd_pub_t *dhd, bool rx) argument
14007 dhd_prot_t *prot = dhd->prot;
14016 dhd_prot_txp_threshold(dhd_pub_t *dhd, bool set, uint32 val) argument
14018 dhd_prot_t *prot = dhd->prot;
14034 BCMFASTPATH(dhd_rxchain_frame)(dhd_pub_t *dhd, void *pkt, uint ifidx)
14038 dhd_prot_t *prot = dhd->prot;
14045 eh = PKTDATA(dhd->osh, pkt);
14048 if (rxchain->pkt_count && !(PKT_CTF_CHAINABLE(dhd, ifidx, eh, prio, rxchain->h_sa,
14051 dhd_rxchain_commit(dhd);
14073 if ((dhd_rx_pkt_chainable(dhd, ifidx)) && (!ETHER_ISMULTI(rxchain->h_da)) &&
14076 PKTSETCHAINED(dhd->osh, pkt);
14078 PKTCADDLEN(rxchain->pkthead, PKTLEN(dhd->osh, pkt));
14080 dhd_rxchain_commit(dhd);
14086 dhd_rxchain_commit(dhd);
14091 BCMFASTPATH(dhd_rxchain_commit)(dhd_pub_t *dhd)
14093 dhd_prot_t *prot = dhd->prot;
14100 dhd_bus_rx_frame(dhd->bus, rxchain->pkthead, rxchain->ifidx, rxchain->pkt_count);
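dhd_rxchain_frame()/dhd_rxchain_commit() above accumulate received packets into a chain while they remain chainable for the same flow, and hand the whole chain to dhd_bus_rx_frame() when a non-chainable packet arrives or the chain fills. The sketch below uses a plain linked list and a simple flow key in place of the driver's PKTC macros and ethernet-header checks; the chain-length cap is an assumed stand-in.

/* Sketch of rx chaining: append while the flow matches, flush on a flow
 * change or when the chain reaches its cap.
 */
#include <stdio.h>
#include <stddef.h>

#define DEMO_CHAIN_MAX 4

struct demo_pkt { int flow; struct demo_pkt *next; };

struct demo_rxchain {
    struct demo_pkt *head, *tail;
    int pkt_count;
    int flow;                               /* flow key of the current chain */
    void (*commit)(struct demo_pkt *head, int count);
};

static void demo_rxchain_commit(struct demo_rxchain *rc)
{
    if (rc->pkt_count)
        rc->commit(rc->head, rc->pkt_count);
    rc->head = rc->tail = NULL;
    rc->pkt_count = 0;
}

static void demo_rxchain_frame(struct demo_rxchain *rc, struct demo_pkt *pkt)
{
    if (rc->pkt_count && pkt->flow != rc->flow)
        demo_rxchain_commit(rc);            /* new flow: flush what we have */
    if (rc->pkt_count == 0) {
        rc->head = pkt;
        rc->flow = pkt->flow;
    } else {
        rc->tail->next = pkt;
    }
    rc->tail = pkt;
    pkt->next = NULL;
    if (++rc->pkt_count >= DEMO_CHAIN_MAX)
        demo_rxchain_commit(rc);            /* chain full: flush */
}

static void demo_deliver(struct demo_pkt *head, int count)
{ printf("deliver chain of %d pkts (flow %d)\n", count, head->flow); }

int main(void)
{
    struct demo_pkt pkts[6] = {{1},{1},{1},{2},{2},{2}};
    struct demo_rxchain rc = { .commit = demo_deliver };
    for (int i = 0; i < 6; i++)
        demo_rxchain_frame(&rc, &pkts[i]);
    demo_rxchain_commit(&rc);               /* flush the tail of the burst */
    return 0;
}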
14110 dhd_prot_flow_ring_resume(dhd_pub_t *dhd, flow_ring_node_t *flow_ring_node) argument
14114 dhd_prot_t *prot = dhd->prot;
14120 flow_ring = dhd_prot_flowrings_pool_fetch(dhd, flow_ring_node->flowid);
14128 if (dhd_prot_inc_hostactive_devwake_assert(dhd->bus) != BCME_OK)
14136 dhd_prot_alloc_ring_space(dhd, ctrl_ring, 1, &alloced, FALSE);
14139 dhd_prot_flowrings_pool_release(dhd, flow_ring_node->flowid, flow_ring);
14144 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
14164 if (IDMA_ACTIVE(dhd) || dhd->dma_h2d_ring_upd_support) {
14165 dhd_prot_dma_indx_set(dhd, flow_ring->wr,
14167 } else if (IFRM_ACTIVE(dhd) && (flow_ring->idx >= BCMPCIE_H2D_MSGRING_TXFLOW_IDX_START)) {
14168 dhd_prot_dma_indx_set(dhd, flow_ring->wr,
14172 dhd_bus_cmn_writeshared(dhd->bus, &(flow_ring->wr),
14177 dhd_prot_ring_write_complete(dhd, ctrl_ring, flow_resume_rqst, 1);
14182 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
14188 dhd_prot_flow_ring_batch_suspend_request(dhd_pub_t *dhd, uint16 *ringid, uint16 count) argument
14191 dhd_prot_t *prot = dhd->prot;
14198 if (dhd_prot_inc_hostactive_devwake_assert(dhd->bus) != BCME_OK)
14206 dhd_prot_alloc_ring_space(dhd, ring, 1, &alloced, FALSE);
14212 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
14235 dhd_prot_ring_write_complete(dhd, ring, flow_suspend_rqst, 1);
14240 dhd_prot_dec_hostactive_ack_pending_dsreq(dhd->bus);
14249 dhd_prot_ioctl_trace(dhd_pub_t *dhd, ioctl_req_msg_t *ioct_rqst, uchar *buf, int len) argument
14251 struct dhd_prot *prot = dhd->prot;
14837 dhd_prot_data_path_tx_timestamp_logging(dhd_pub_t *dhd, bool enable, bool set) argument
14840 dhd->prot->tx_ts_log_enabled = enable;
14842 return dhd->prot->tx_ts_log_enabled;
14846 dhd_prot_data_path_rx_timestamp_logging(dhd_pub_t *dhd, bool enable, bool set) argument
14849 dhd->prot->rx_ts_log_enabled = enable;
14851 return dhd->prot->rx_ts_log_enabled;
14855 dhd_prot_pkt_noretry(dhd_pub_t *dhd, bool enable, bool set) argument
14858 dhd->prot->no_retry = enable;
14860 return dhd->prot->no_retry;
14864 dhd_prot_pkt_noaggr(dhd_pub_t *dhd, bool enable, bool set) argument
14867 dhd->prot->no_aggr = enable;
14869 return dhd->prot->no_aggr;
14873 dhd_prot_pkt_fixed_rate(dhd_pub_t *dhd, bool enable, bool set) argument
14876 dhd->prot->fixed_rate = enable;
14878 return dhd->prot->fixed_rate;
14883 dhd_prot_dma_indx_free(dhd_pub_t *dhd) argument
14885 dhd_prot_t *prot = dhd->prot;
14887 dhd_dma_buf_free(dhd, &prot->h2d_dma_indx_wr_buf);
14888 dhd_dma_buf_free(dhd, &prot->d2h_dma_indx_rd_buf);
14892 dhd_msgbuf_delay_post_ts_bufs(dhd_pub_t *dhd) argument
14894 if (dhd->prot->max_tsbufpost > 0)
14895 dhd_msgbuf_rxbuf_post_ts_bufs(dhd);
14899 BCMFASTPATH(dhd_prot_process_fw_timestamp)(dhd_pub_t *dhd, void* buf)
14913 DHD_PKTID_AUDIT(dhd, dhd->prot->pktid_ctrl_map, pktid,
14920 if (!dhd->prot->cur_ts_bufs_posted) {
14925 dhd->prot->cur_ts_bufs_posted--;
14927 if (!dhd_timesync_delay_post_bufs(dhd)) {
14928 if (dhd->prot->max_tsbufpost > 0) {
14929 dhd_msgbuf_rxbuf_post_ts_bufs(dhd);
14933 pkt = dhd_prot_packet_get(dhd, pktid, PKTTYPE_TSBUF_RX, TRUE);
14940 PKTSETLEN(dhd->osh, pkt, buflen);
14941 dhd_timesync_handle_fw_timestamp(dhd->ts, PKTDATA(dhd->osh, pkt), buflen, seqnum);
14943 PKTFREE_STATIC(dhd->osh, pkt, TRUE);
14945 PKTFREE(dhd->osh, pkt, TRUE);
15071 int dhd_get_hscb_info(dhd_pub_t *dhd, void ** va, uint32 *len) argument
15073 if (!dhd->hscb_enable) {
15083 *va = dhd->prot->host_scb_buf.va;
15086 *len = dhd->prot->host_scb_buf.len;
15093 int dhd_get_hscb_buff(dhd_pub_t *dhd, uint32 offset, uint32 length, void * buff) argument
15095 if (!dhd->hscb_enable) {
15099 if (dhd->prot->host_scb_buf.va == NULL ||
15100 ((uint64)offset + length > (uint64)dhd->prot->host_scb_buf.len)) {
15104 memcpy(buff, (char*)dhd->prot->host_scb_buf.va + offset, length);
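dhd_get_hscb_buff() above widens offset + length to 64 bits before comparing against the host SCB buffer length, so a 32-bit wrap cannot slip past the bounds check before the memcpy. A minimal sketch of the same check (error codes and names are placeholders):

/* Sketch: 64-bit widened bounds check, then copy. */
#include <stdint.h>
#include <string.h>
#include <stdio.h>

static int demo_get_buff(const uint8_t *scb_va, uint32_t scb_len,
                         uint32_t offset, uint32_t length, void *out)
{
    if (scb_va == NULL ||
        (uint64_t)offset + length > (uint64_t)scb_len)
        return -1;                    /* out of range (or buffer missing) */
    memcpy(out, scb_va + offset, length);
    return 0;
}

int main(void)
{
    uint8_t scb[64] = { 0 }, out[16];
    printf("ok=%d\n",  demo_get_buff(scb, sizeof(scb), 8, 16, out));
    /* without the 64-bit widening this sum would wrap to 0x10 and pass */
    printf("bad=%d\n", demo_get_buff(scb, sizeof(scb), 0xFFFFFFF0u, 0x20u, out));
    return 0;
}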
15112 dhd_prot_pkt_threshold(dhd_pub_t *dhd, bool set, uint32 val) argument
15115 dhd->pkt_thresh = (uint16)val;
15117 val = dhd->pkt_thresh;
15123 dhd_prot_time_threshold(dhd_pub_t *dhd, bool set, uint32 val) argument
15126 dhd->time_thresh = (uint16)val;
15128 val = dhd->time_thresh;
15134 dhd_prot_pkt_expiry(dhd_pub_t *dhd, bool set, uint32 val) argument
15137 dhd->pkt_expiry = (uint16)val;
15139 val = dhd->pkt_expiry;
15145 dhd_prot_hp2p_enable(dhd_pub_t *dhd, bool set, int enable) argument
15149 dhd->hp2p_enable = (enable & 0xf) ? TRUE : FALSE;
15150 dhd->hp2p_infra_enable = ((enable >> 4) & 0xf) ? TRUE : FALSE;
15153 dhd_update_flow_prio_map(dhd, DHD_FLOW_PRIO_TID_MAP);
15155 dhd_update_flow_prio_map(dhd, DHD_FLOW_PRIO_AC_MAP);
15158 ret = dhd->hp2p_infra_enable ? 0x1:0x0;
15160 ret |= dhd->hp2p_enable ? 0x1:0x0;
15166 dhd_update_hp2p_rxstats(dhd_pub_t *dhd, host_rxbuf_cmpl_t *rxstatus) argument
15172 hp2p_info = &dhd->hp2p_info[0];
15186 dhd_update_hp2p_txstats(dhd_pub_t *dhd, host_txbuf_cmpl_t *txstatus) argument
15193 hp2p_flowid = dhd->bus->max_submission_rings -
15194 dhd->bus->max_cmn_rings - flowid + 1;
15195 hp2p_info = &dhd->hp2p_info[hp2p_flowid];
15246 dhd_calc_hp2p_burst(dhd_pub_t *dhd, msgbuf_ring_t *ring, uint16 flowid) argument
15251 hp2p_flowid = dhd->bus->max_submission_rings -
15252 dhd->bus->max_cmn_rings - flowid + 1;
15253 hp2p_info = &dhd->hp2p_info[hp2p_flowid];
15255 if (ring->pend_items_count == dhd->pkt_thresh) {
15256 dhd_prot_txdata_write_flush(dhd, flowid);
15265 __FUNCTION__, flowid, hp2p_flowid, dhd->pkt_thresh));
15270 hp2p_info->dhd_pub = dhd;
15275 ktime_set(0, dhd->time_thresh * 1000), HRTIMER_MODE_REL);
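dhd_update_hp2p_txstats() and dhd_calc_hp2p_burst() above map a TX flow ring id into a small hp2p_info[] slot with hp2p_flowid = max_submission_rings - max_cmn_rings - flowid + 1, so the highest-numbered flow rings land in the lowest slots. A worked example under assumed ring counts:

/* Worked example of the hp2p flow-id mapping; the ring counts are assumed. */
#include <stdio.h>
#include <stdint.h>

int main(void)
{
    uint16_t max_submission_rings = 40;   /* assumed */
    uint16_t max_cmn_rings = 2;           /* assumed: ctrl + rxpost */
    for (uint16_t flowid = 37; flowid <= 38; flowid++) {
        uint16_t hp2p_flowid = max_submission_rings - max_cmn_rings - flowid + 1;
        printf("flowid %u -> hp2p slot %u\n", flowid, hp2p_flowid);
    }
    return 0;   /* flowid 38 -> slot 1, flowid 37 -> slot 2 */
}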
15285 dhd_update_hp2p_txdesc(dhd_pub_t *dhd, host_txbuf_post_t *txdesc) argument
15295 txdesc->exp_time = dhd->pkt_expiry;
15334 dhd_prot_t *prot = bus->dhd->prot;
15411 dhd_bus_flow_ring_status_trace(dhd_pub_t *dhd, dhd_frs_trace_t *frs_trace) argument
15413 dhd_prot_t *prot = dhd->prot;
15418 dhd_prot_dma_indx_get(dhd, H2D_DMA_INDX_RD_UPD, ring->idx);
15420 dhd_prot_dma_indx_get(dhd, H2D_DMA_INDX_WR_UPD, ring->idx);
15424 dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_RD_UPD, ring->idx);
15426 dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_WR_UPD, ring->idx);
15430 dhd_prot_dma_indx_get(dhd, H2D_DMA_INDX_RD_UPD, ring->idx);
15432 dhd_prot_dma_indx_get(dhd, H2D_DMA_INDX_WR_UPD, ring->idx);
15436 dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_RD_UPD, ring->idx);
15438 dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_WR_UPD, ring->idx);
15442 dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_RD_UPD, ring->idx);
15444 dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_WR_UPD, ring->idx);
15446 if (dhd->prot->h2dring_info_subn != NULL && dhd->prot->d2hring_info_cpln != NULL) {
15449 dhd_prot_dma_indx_get(dhd, H2D_DMA_INDX_RD_UPD, ring->idx);
15451 dhd_prot_dma_indx_get(dhd, H2D_DMA_INDX_WR_UPD, ring->idx);
15455 dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_RD_UPD, ring->idx);
15457 dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_WR_UPD, ring->idx);
15462 dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_RD_UPD, ring->idx);
15464 dhd_prot_dma_indx_get(dhd, D2H_DMA_INDX_WR_UPD, ring->idx);
15470 dhd_bus_flow_ring_status_isr_trace(dhd_pub_t *dhd) argument
15472 uint32 cnt = dhd->bus->frs_isr_count % FRS_TRACE_SIZE;
15473 dhd_frs_trace_t *frs_isr_trace = &dhd->bus->frs_isr_trace[cnt];
15481 dhd_bus_flow_ring_status_trace(dhd, frs_isr_trace);
15484 dhd->bus->frs_isr_count ++;
15488 dhd_bus_flow_ring_status_dpc_trace(dhd_pub_t *dhd) argument
15490 uint32 cnt = dhd->bus->frs_dpc_count % FRS_TRACE_SIZE;
15491 dhd_frs_trace_t *frs_dpc_trace = &dhd->bus->frs_dpc_trace[cnt];
15499 dhd_bus_flow_ring_status_trace(dhd, frs_dpc_trace);
15502 dhd->bus->frs_dpc_count ++;
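dhd_bus_flow_ring_status_isr_trace()/dpc_trace() above keep a rolling history of ring-state snapshots by indexing a fixed array with count % FRS_TRACE_SIZE and incrementing the count after each snapshot. A minimal sketch of that rolling buffer, with a single integer standing in for the dhd_frs_trace_t payload:

/* Sketch: fixed-size trace array where the newest entries overwrite the oldest. */
#include <stdio.h>
#include <stdint.h>

#define DEMO_TRACE_SIZE 4

struct demo_trace { uint32_t count; int slots[DEMO_TRACE_SIZE]; };

static void demo_trace_record(struct demo_trace *t, int snapshot)
{
    t->slots[t->count % DEMO_TRACE_SIZE] = snapshot;
    t->count++;
}

int main(void)
{
    struct demo_trace t = { 0 };
    for (int i = 1; i <= 6; i++)
        demo_trace_record(&t, i);           /* 5 and 6 overwrite 1 and 2 */
    for (int i = 0; i < DEMO_TRACE_SIZE; i++)
        printf("slot %d = %d\n", i, t.slots[i]);
    return 0;
}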