Lines Matching +full:tcam +full:- +full:based

6  * Copyright (C) 1999-2017, Broadcom Corporation
27 * <<Broadcom-WL-IPTag/Open:>>
29 * $Id: dhd_pcie.c 702835 2017-06-05 07:19:55Z $
121 (bus)->shared_addr + OFFSETOF(pciedev_shared_t, member)
125 (bus)->pcie_sh->rings_info_ptr + OFFSETOF(ring_info_t, member)
129 (bus)->ring_sh[ringid].ring_mem_addr + OFFSETOF(ring_mem_t, member)
461 return bus->flr_force_fail; in dhd_bus_get_flr_force_fail()
507 uint dar_addr = DAR_PCIH2D_DB0_0(bus->sih->buscorerev); in dhd_bus_db0_addr_get()
509 return ((DAR_ACTIVE(bus->dhd)) ? dar_addr : addr); in dhd_bus_db0_addr_get()
515 return ((DAR_ACTIVE(bus->dhd)) ? DAR_PCIH2D_DB2_0(bus->sih->buscorerev) : PCIH2D_MailBox_2); in dhd_bus_db0_addr_2_get()
521 return ((DAR_ACTIVE(bus->dhd)) ? DAR_PCIH2D_DB0_1(bus->sih->buscorerev) : PCIH2D_DB1); in dhd_bus_db1_addr_get()
527 return ((DAR_ACTIVE(bus->dhd)) ? DAR_PCIH2D_DB1_1(bus->sih->buscorerev) : PCIH2D_DB1_1); in dhd_bus_db1_addr_1_get()
531 * WAR for SWWLAN-215055 - [4378B0] ARM fails to boot without DAR WL domain request
537 si_corereg(bus->sih, bus->sih->buscoreidx, in dhd_bus_pcie_pwr_req_wl_domain()
538 DAR_PCIE_PWR_CTRL((bus->sih)->buscorerev), in dhd_bus_pcie_pwr_req_wl_domain()
542 si_corereg(bus->sih, bus->sih->buscoreidx, in dhd_bus_pcie_pwr_req_wl_domain()
543 DAR_PCIE_PWR_CTRL((bus->sih)->buscorerev), in dhd_bus_pcie_pwr_req_wl_domain()
554 * If multiple de-asserts, decrement ref and return in _dhd_bus_pcie_pwr_req_clear_cmn()
558 if (bus->pwr_req_ref > 1) { in _dhd_bus_pcie_pwr_req_clear_cmn()
559 bus->pwr_req_ref--; in _dhd_bus_pcie_pwr_req_clear_cmn()
563 ASSERT(bus->pwr_req_ref == 1); in _dhd_bus_pcie_pwr_req_clear_cmn()
565 if (MULTIBP_ENAB(bus->sih)) { in _dhd_bus_pcie_pwr_req_clear_cmn()
572 si_srpwr_request(bus->sih, mask, 0); in _dhd_bus_pcie_pwr_req_clear_cmn()
573 bus->pwr_req_ref = 0; in _dhd_bus_pcie_pwr_req_clear_cmn()
581 DHD_GENERAL_LOCK(bus->dhd, flags); in dhd_bus_pcie_pwr_req_clear()
583 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhd_bus_pcie_pwr_req_clear()
598 if (bus->pwr_req_ref > 0) { in _dhd_bus_pcie_pwr_req_cmn()
599 bus->pwr_req_ref++; in _dhd_bus_pcie_pwr_req_cmn()
603 ASSERT(bus->pwr_req_ref == 0); in _dhd_bus_pcie_pwr_req_cmn()
605 if (MULTIBP_ENAB(bus->sih)) { in _dhd_bus_pcie_pwr_req_cmn()
614 si_srpwr_request(bus->sih, mask, val); in _dhd_bus_pcie_pwr_req_cmn()
616 bus->pwr_req_ref = 1; in _dhd_bus_pcie_pwr_req_cmn()
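
The two helpers listed above (_dhd_bus_pcie_pwr_req_cmn and _dhd_bus_pcie_pwr_req_clear_cmn) keep a reference count around the backplane power request: only the first request and the last clear actually call si_srpwr_request(), while intermediate callers just adjust pwr_req_ref. A minimal stand-alone sketch of that pattern, with hypothetical request_hw()/release_hw() callbacks standing in for si_srpwr_request():

    #include <assert.h>

    struct pwr_ctx {
        unsigned ref;                 /* how many callers currently need power */
        void (*request_hw)(void);     /* stand-in for si_srpwr_request(mask, val) */
        void (*release_hw)(void);     /* stand-in for si_srpwr_request(mask, 0) */
    };

    static void pwr_req(struct pwr_ctx *c)
    {
        if (c->ref > 0) {             /* already requested: just count the caller */
            c->ref++;
            return;
        }
        c->request_hw();              /* first caller really touches the hardware */
        c->ref = 1;
    }

    static void pwr_req_clear(struct pwr_ctx *c)
    {
        if (c->ref > 1) {             /* other callers still hold the request */
            c->ref--;
            return;
        }
        assert(c->ref == 1);          /* mirrors the ASSERT() in the driver */
        c->release_hw();              /* last caller drops the hardware request */
        c->ref = 0;
    }
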
624 DHD_GENERAL_LOCK(bus->dhd, flags); in dhd_bus_pcie_pwr_req()
626 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhd_bus_pcie_pwr_req()
634 mask = SRPWR_DMN_ALL_MASK(bus->sih); in _dhd_bus_pcie_pwr_req_pd0123_cmn()
635 val = SRPWR_DMN_ALL_MASK(bus->sih); in _dhd_bus_pcie_pwr_req_pd0123_cmn()
637 si_srpwr_request(bus->sih, mask, val); in _dhd_bus_pcie_pwr_req_pd0123_cmn()
645 DHD_GENERAL_LOCK(bus->dhd, flags); in dhd_bus_pcie_pwr_req_reload_war()
647 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhd_bus_pcie_pwr_req_reload_war()
655 mask = SRPWR_DMN_ALL_MASK(bus->sih); in _dhd_bus_pcie_pwr_req_clear_pd0123_cmn()
657 si_srpwr_request(bus->sih, mask, 0); in _dhd_bus_pcie_pwr_req_clear_pd0123_cmn()
665 DHD_GENERAL_LOCK(bus->dhd, flags); in dhd_bus_pcie_pwr_req_clear_reload_war()
667 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhd_bus_pcie_pwr_req_clear_reload_war()
680 __FUNCTION__, bus->sih->buscorerev, si_chipid(bus->sih))); in dhdpcie_chip_support_msi()
681 if (bus->sih->buscorerev <= 14 || in dhdpcie_chip_support_msi()
682 si_chipid(bus->sih) == BCM4375_CHIP_ID || in dhdpcie_chip_support_msi()
683 si_chipid(bus->sih) == BCM4362_CHIP_ID || in dhdpcie_chip_support_msi()
684 si_chipid(bus->sih) == BCM43751_CHIP_ID || in dhdpcie_chip_support_msi()
685 si_chipid(bus->sih) == BCM4361_CHIP_ID || in dhdpcie_chip_support_msi()
686 si_chipid(bus->sih) == CYW55560_CHIP_ID) { in dhdpcie_chip_support_msi()
724 bus->regs = regs; in dhdpcie_bus_attach()
725 bus->tcm = tcm; in dhdpcie_bus_attach()
726 bus->osh = osh; in dhdpcie_bus_attach()
728 bus->dev = (struct pci_dev *)pci_dev; in dhdpcie_bus_attach()
730 dll_init(&bus->flowring_active_list); in dhdpcie_bus_attach()
732 bus->active_list_last_process_ts = OSL_SYSUPTIME(); in dhdpcie_bus_attach()
736 if (!(bus->pcie_sh = MALLOCZ(osh, sizeof(pciedev_shared_t)))) { in dhdpcie_bus_attach()
737 DHD_ERROR(("%s: MALLOC of bus->pcie_sh failed\n", __FUNCTION__)); in dhdpcie_bus_attach()
755 /* read otp variable customvar and store in dhd->customvar1 and dhd->customvar2 */ in dhdpcie_bus_attach()
769 "OTPed-module_variant=0x%x\n", __func__, in dhdpcie_bus_attach()
774 "OTPed-module_variant=0x%x didn't match\n", in dhdpcie_bus_attach()
780 "OTPed-module_variant=0x%x match\n", in dhdpcie_bus_attach()
784 if (!(bus->dhd = dhd_attach(osh, bus, PCMSGBUF_HDRLEN))) { in dhdpcie_bus_attach()
790 bus->dhd->busstate = DHD_BUS_DOWN; in dhdpcie_bus_attach()
791 bus->dhd->hostrdy_after_init = TRUE; in dhdpcie_bus_attach()
792 bus->db1_for_mb = TRUE; in dhdpcie_bus_attach()
793 bus->dhd->hang_report = TRUE; in dhdpcie_bus_attach()
794 bus->use_mailbox = FALSE; in dhdpcie_bus_attach()
795 bus->use_d0_inform = FALSE; in dhdpcie_bus_attach()
796 bus->intr_enabled = FALSE; in dhdpcie_bus_attach()
797 bus->flr_force_fail = FALSE; in dhdpcie_bus_attach()
799 bus->hwa_enab_bmap = 0; in dhdpcie_bus_attach()
802 dhdpcie_set_dma_ring_indices(bus->dhd, dma_ring_indices); in dhdpcie_bus_attach()
805 bus->dhd->h2d_phase_supported = h2d_phase ? TRUE : FALSE; in dhdpcie_bus_attach()
807 bus->dhd->force_dongletrap_on_bad_h2d_phase = in dhdpcie_bus_attach()
810 bus->enable_idle_flowring_mgmt = FALSE; in dhdpcie_bus_attach()
812 bus->irq_registered = FALSE; in dhdpcie_bus_attach()
816 bus->d2h_intr_method = PCIE_MSI; in dhdpcie_bus_attach()
818 bus->d2h_intr_method = enable_msi && dhdpcie_chip_support_msi(bus) ? in dhdpcie_bus_attach()
822 bus->d2h_intr_method = PCIE_INTX; in dhdpcie_bus_attach()
826 bus->hp2p_txcpl_max_items = DHD_MAX_ITEMS_HPP_TXCPL_RING; in dhdpcie_bus_attach()
827 bus->hp2p_rxcpl_max_items = DHD_MAX_ITEMS_HPP_RXCPL_RING; in dhdpcie_bus_attach()
839 if (bus && bus->pcie_sh) { in dhdpcie_bus_attach()
840 MFREE(osh, bus->pcie_sh, sizeof(pciedev_shared_t)); in dhdpcie_bus_attach()
864 ASSERT(bus->sih != NULL); in dhd_bus_chip()
865 return bus->sih->chip; in dhd_bus_chip()
872 ASSERT(bus->sih != NULL); in dhd_bus_chiprev()
873 return bus->sih->chiprev; in dhd_bus_chiprev()
879 return bus->dhd; in dhd_bus_pub()
885 return (void *)bus->sih; in dhd_bus_sih()
891 return &bus->txq; in dhd_bus_txq()
897 dhd_bus_t *bus = dhdp->bus; in dhd_bus_chip_id()
898 return bus->sih->chip; in dhd_bus_chip_id()
904 dhd_bus_t *bus = dhdp->bus; in dhd_bus_chiprev_id()
905 return bus->sih->chiprev; in dhd_bus_chiprev_id()
919 dhd_bus_t *bus = dhdp->bus; in dhd_bus_chippkg_id()
920 return bus->sih->chippkg; in dhd_bus_chippkg_id()
964 dhdp->bus->dpc_sched_time = OSL_LOCALTIME_NS(); in dhd_bus_set_dpc_sched_time()
971 dhd_bus_t *bus = dhdp->bus; in dhd_bus_query_dpc_sched_errors()
974 if (bus->dpc_entry_time < bus->isr_exit_time) { in dhd_bus_query_dpc_sched_errors()
977 } else if (bus->dpc_entry_time < bus->resched_dpc_time) { in dhd_bus_query_dpc_sched_errors()
994 GET_SEC_USEC(bus->isr_entry_time), in dhd_bus_query_dpc_sched_errors()
995 GET_SEC_USEC(bus->isr_exit_time), in dhd_bus_query_dpc_sched_errors()
996 GET_SEC_USEC(bus->dpc_entry_time), in dhd_bus_query_dpc_sched_errors()
997 GET_SEC_USEC(bus->dpc_exit_time), in dhd_bus_query_dpc_sched_errors()
998 GET_SEC_USEC(bus->dpc_sched_time), in dhd_bus_query_dpc_sched_errors()
999 GET_SEC_USEC(bus->resched_dpc_time))); in dhd_bus_query_dpc_sched_errors()
1012 if (bus->bus_low_power_state == DHD_BUS_D3_ACK_RECIEVED) { in dhdpcie_bus_intstatus()
1016 if ((bus->sih->buscorerev == 6) || (bus->sih->buscorerev == 4) || in dhdpcie_bus_intstatus()
1017 (bus->sih->buscorerev == 2)) { in dhdpcie_bus_intstatus()
1023 intstatus = si_corereg(bus->sih, bus->sih->buscoreidx, bus->pcie_mailbox_int, 0, 0); in dhdpcie_bus_intstatus()
1026 intmask = si_corereg(bus->sih, bus->sih->buscoreidx, bus->pcie_mailbox_mask, 0, 0); in dhdpcie_bus_intstatus()
1028 if (intstatus == (uint32)-1 || intmask == (uint32)-1) { in dhdpcie_bus_intstatus()
1032 bus->is_linkdown = TRUE; in dhdpcie_bus_intstatus()
1033 dhd_pcie_debug_info_dump(bus->dhd); in dhdpcie_bus_intstatus()
1038 bus->no_cfg_restore = 1; in dhdpcie_bus_intstatus()
1041 bus->dhd->hang_reason = HANG_REASON_PCIE_LINK_DOWN_EP_DETECT; in dhdpcie_bus_intstatus()
1042 dhd_os_send_hang_message(bus->dhd); in dhdpcie_bus_intstatus()
1056 si_corereg(bus->sih, bus->sih->buscoreidx, bus->pcie_mailbox_int, bus->def_intmask, in dhdpcie_bus_intstatus()
1059 intstatus &= bus->def_intmask; in dhdpcie_bus_intstatus()
1068 dhd_bus_t *bus = dhd->bus; in dhdpcie_cto_recovery_handler()
1088 dhd_prot_debug_info_print(bus->dhd); in dhdpcie_cto_recovery_handler()
1093 if (!bus->is_linkdown && bus->dhd->memdump_enabled) { in dhdpcie_cto_recovery_handler()
1095 bus->dhd->collect_sssr = TRUE; in dhdpcie_cto_recovery_handler()
1097 bus->dhd->memdump_type = DUMP_TYPE_CTO_RECOVERY; in dhdpcie_cto_recovery_handler()
1105 bus->no_cfg_restore = 1; in dhdpcie_cto_recovery_handler()
1108 bus->is_linkdown = TRUE; in dhdpcie_cto_recovery_handler()
1109 bus->dhd->hang_reason = HANG_REASON_PCIE_CTO_DETECT; in dhdpcie_cto_recovery_handler()
1111 dhd_os_send_hang_message(bus->dhd); in dhdpcie_cto_recovery_handler()
1118 * 1: IN int irq -- interrupt vector
1119 * 2: IN void *arg -- handle to private data structure
1140 if (bus->dhd->dongle_reset) { in dhdpcie_bus_isr()
1145 if (bus->dhd->busstate == DHD_BUS_DOWN) { in dhdpcie_bus_isr()
1151 if (!bus->intr_enabled) { in dhdpcie_bus_isr()
1163 intstatus, bus->cto_enable)); in dhdpcie_bus_isr()
1164 bus->cto_triggered = 1; in dhdpcie_bus_isr()
1176 dhd_schedule_cto_recovery(bus->dhd); in dhdpcie_bus_isr()
1182 if (bus->d2h_intr_method == PCIE_MSI) { in dhdpcie_bus_isr()
1195 bus->non_ours_irq_count++; in dhdpcie_bus_isr()
1196 bus->last_non_ours_irq_time = OSL_LOCALTIME_NS(); in dhdpcie_bus_isr()
1202 bus->intstatus = intstatus; in dhdpcie_bus_isr()
1205 if (intstatus == (uint32)-1) { in dhdpcie_bus_isr()
1214 * - Mask further interrupts in dhdpcie_bus_isr()
1215 * - Read/ack intstatus in dhdpcie_bus_isr()
1216 * - Take action based on bits and state in dhdpcie_bus_isr()
1217 * - Reenable interrupts (as per state) in dhdpcie_bus_isr()
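
The comment above spells out the usual top-half/bottom-half split: the hard ISR masks further interrupts, reads and acknowledges the status, records what happened, and defers the real processing to a DPC, which re-enables interrupts when done. A hedged sketch of that shape; the callback names are placeholders, not driver APIs:

    #include <stdbool.h>
    #include <stdint.h>

    struct isr_ctx {
        volatile bool ipend;                /* interrupt pending, handled later by the DPC */
        uint32_t intstatus;                 /* latched status for the DPC */
        uint32_t (*read_ack_status)(void);  /* placeholder: read + ack intstatus */
        void (*mask_device_irq)(void);      /* placeholder: mask further interrupts */
        void (*schedule_dpc)(void);         /* placeholder: queue the bottom half */
    };

    static int pcie_isr(struct isr_ctx *c)
    {
        uint32_t status = c->read_ack_status();   /* read/ack intstatus */

        if (status == 0 || status == 0xffffffffu) /* not ours, or link down */
            return 0;

        c->mask_device_irq();          /* no further interrupts until the DPC runs */
        c->intstatus = status;         /* remember what to act on */
        c->ipend = true;
        c->schedule_dpc();             /* DPC takes action and re-enables interrupts */
        return 1;
    }
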
1221 bus->intrcount++; in dhdpcie_bus_isr()
1223 bus->ipend = TRUE; in dhdpcie_bus_isr()
1225 bus->isr_intr_disable_count++; in dhdpcie_bus_isr()
1235 bus->intdis = TRUE; in dhdpcie_bus_isr()
1240 DHD_OS_WAKE_LOCK(bus->dhd); in dhdpcie_bus_isr()
1242 DHD_OS_WAKE_UNLOCK(bus->dhd); in dhdpcie_bus_isr()
1244 bus->dpc_sched = TRUE; in dhdpcie_bus_isr()
1245 dhd_sched_dpc(bus->dhd); /* queue DPC now!! */ in dhdpcie_bus_isr()
1262 osl_t *osh = bus->osh; in dhdpcie_set_pwr_state()
1320 val = OSL_PCI_READ_CONFIG(bus->osh, PCI_CFG_VID, sizeof(uint32)); in dhdpcie_config_check()
1335 osl_t *osh = bus->osh; in dhdpcie_config_restore()
1342 OSL_PCI_WRITE_CONFIG(osh, i << 2, sizeof(uint32), bus->saved_config.header[i]); in dhdpcie_config_restore()
1344 OSL_PCI_WRITE_CONFIG(osh, PCI_CFG_CMD, sizeof(uint32), bus->saved_config.header[1]); in dhdpcie_config_restore()
1348 sizeof(uint32), bus->saved_config.pmcsr); in dhdpcie_config_restore()
1350 OSL_PCI_WRITE_CONFIG(osh, PCIECFGREG_MSI_CAP, sizeof(uint32), bus->saved_config.msi_cap); in dhdpcie_config_restore()
1352 bus->saved_config.msi_addr0); in dhdpcie_config_restore()
1354 sizeof(uint32), bus->saved_config.msi_addr1); in dhdpcie_config_restore()
1356 sizeof(uint32), bus->saved_config.msi_data); in dhdpcie_config_restore()
1359 sizeof(uint32), bus->saved_config.exp_dev_ctrl_stat); in dhdpcie_config_restore()
1361 sizeof(uint32), bus->saved_config.exp_dev_ctrl_stat2); in dhdpcie_config_restore()
1363 sizeof(uint32), bus->saved_config.exp_link_ctrl_stat); in dhdpcie_config_restore()
1365 sizeof(uint32), bus->saved_config.exp_link_ctrl_stat2); in dhdpcie_config_restore()
1368 sizeof(uint32), bus->saved_config.l1pm0); in dhdpcie_config_restore()
1370 sizeof(uint32), bus->saved_config.l1pm1); in dhdpcie_config_restore()
1372 OSL_PCI_WRITE_CONFIG(bus->osh, PCI_BAR0_WIN, sizeof(uint32), in dhdpcie_config_restore()
1373 bus->saved_config.bar0_win); in dhdpcie_config_restore()
1374 dhdpcie_setbar1win(bus, bus->saved_config.bar1_win); in dhdpcie_config_restore()
1383 osl_t *osh = bus->osh; in dhdpcie_config_save()
1390 bus->saved_config.header[i] = OSL_PCI_READ_CONFIG(osh, i << 2, sizeof(uint32)); in dhdpcie_config_save()
1393 bus->saved_config.pmcsr = OSL_PCI_READ_CONFIG(osh, PCIECFGREG_PM_CSR, sizeof(uint32)); in dhdpcie_config_save()
1395 bus->saved_config.msi_cap = OSL_PCI_READ_CONFIG(osh, PCIECFGREG_MSI_CAP, in dhdpcie_config_save()
1397 bus->saved_config.msi_addr0 = OSL_PCI_READ_CONFIG(osh, PCIECFGREG_MSI_ADDR_L, in dhdpcie_config_save()
1399 bus->saved_config.msi_addr1 = OSL_PCI_READ_CONFIG(osh, PCIECFGREG_MSI_ADDR_H, in dhdpcie_config_save()
1401 bus->saved_config.msi_data = OSL_PCI_READ_CONFIG(osh, PCIECFGREG_MSI_DATA, in dhdpcie_config_save()
1404 bus->saved_config.exp_dev_ctrl_stat = OSL_PCI_READ_CONFIG(osh, in dhdpcie_config_save()
1406 bus->saved_config.exp_dev_ctrl_stat2 = OSL_PCI_READ_CONFIG(osh, in dhdpcie_config_save()
1408 bus->saved_config.exp_link_ctrl_stat = OSL_PCI_READ_CONFIG(osh, in dhdpcie_config_save()
1410 bus->saved_config.exp_link_ctrl_stat2 = OSL_PCI_READ_CONFIG(osh, in dhdpcie_config_save()
1413 bus->saved_config.l1pm0 = OSL_PCI_READ_CONFIG(osh, PCIECFGREG_PML1_SUB_CTRL1, in dhdpcie_config_save()
1415 bus->saved_config.l1pm1 = OSL_PCI_READ_CONFIG(osh, PCIECFGREG_PML1_SUB_CTRL2, in dhdpcie_config_save()
1418 bus->saved_config.bar0_win = OSL_PCI_READ_CONFIG(osh, PCI_BAR0_WIN, in dhdpcie_config_save()
1420 bus->saved_config.bar1_win = OSL_PCI_READ_CONFIG(osh, PCI_BAR1_WIN, in dhdpcie_config_save()
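
dhdpcie_config_save() and dhdpcie_config_restore() above snapshot the first 64 dwords of PCI config space plus a handful of capability registers, then write them back after a reset or link recovery, re-writing the command register (header[1]) once more after the loop. A minimal, self-contained sketch of that save/restore idea; the fake_cfg_space array and osl_cfg_* accessors below are stand-ins for the real config-space accesses:

    #include <stdint.h>

    #define CFG_HDR_DWORDS 64                          /* first 256 bytes of config space */

    static uint32_t fake_cfg_space[CFG_HDR_DWORDS];    /* stand-in for real PCI config space */

    /* hypothetical accessors standing in for OSL_PCI_READ_CONFIG / OSL_PCI_WRITE_CONFIG */
    static uint32_t osl_cfg_read(unsigned offset)          { return fake_cfg_space[offset >> 2]; }
    static void osl_cfg_write(unsigned offset, uint32_t v) { fake_cfg_space[offset >> 2] = v; }

    struct cfg_snapshot { uint32_t header[CFG_HDR_DWORDS]; };

    static void cfg_save(struct cfg_snapshot *s)
    {
        for (unsigned i = 0; i < CFG_HDR_DWORDS; i++)
            s->header[i] = osl_cfg_read(i << 2);        /* save dword i */
    }

    static void cfg_restore(const struct cfg_snapshot *s)
    {
        for (unsigned i = 0; i < CFG_HDR_DWORDS; i++)
            osl_cfg_write(i << 2, s->header[i]);        /* write dwords back */
        osl_cfg_write(1 << 2, s->header[1]);            /* re-write command/status once more */
    }
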
1433 uint buscorerev = bus->sih->buscorerev; in dhdpcie_bus_intr_init()
1434 bus->pcie_mailbox_int = PCIMailBoxInt(buscorerev); in dhdpcie_bus_intr_init()
1435 bus->pcie_mailbox_mask = PCIMailBoxMask(buscorerev); in dhdpcie_bus_intr_init()
1436 bus->d2h_mb_mask = PCIE_MB_D2H_MB_MASK(buscorerev); in dhdpcie_bus_intr_init()
1437 bus->def_intmask = PCIE_MB_D2H_MB_MASK(buscorerev); in dhdpcie_bus_intr_init()
1439 bus->def_intmask |= PCIE_MB_TOPCIE_FN0_0 | PCIE_MB_TOPCIE_FN0_1; in dhdpcie_bus_intr_init()
1446 uint32 wd_en = (bus->sih->buscorerev >= 66) ? WD_SSRESET_PCIE_F0_EN : in dhdpcie_cc_watchdog_reset()
1448 pcie_watchdog_reset(bus->osh, bus->sih, WD_ENABLE_MASK, wd_en); in dhdpcie_cc_watchdog_reset()
1457 if (bus->is_linkdown) { in dhdpcie_dongle_reset()
1478 if (BCM4378_CHIP(bus->sih->chip)) { in dhdpcie_bus_mpu_disable()
1479 cr4_regs = si_setcore(bus->sih, ARMCR4_CORE_ID, 0); in dhdpcie_bus_mpu_disable()
1484 if (R_REG(bus->osh, cr4_regs + ARMCR4REG_CORECAP) & ACC_MPU_MASK) { in dhdpcie_bus_mpu_disable()
1486 W_REG(bus->osh, cr4_regs + ARMCR4REG_MPUCTRL, 0); in dhdpcie_bus_mpu_disable()
1495 osl_t *osh = bus->osh; in dhdpcie_dongle_attach()
1496 volatile void *regsva = (volatile void*)bus->regs; in dhdpcie_dongle_attach()
1508 link_recovery = bus->dhd; in dhdpcie_dongle_attach()
1511 bus->alp_only = TRUE; in dhdpcie_dongle_attach()
1512 bus->sih = NULL; in dhdpcie_dongle_attach()
1521 bus->cl_devid = devid; in dhdpcie_dongle_attach()
1545 sbpcieregs = (sbpcieregs_t*)(bus->regs); in dhdpcie_dongle_attach()
1547 reg_val = R_REG(osh, &sbpcieregs->u1.dar_64.d2h_msg_reg0); in dhdpcie_dongle_attach()
1551 if (!(bus->sih = si_attach((uint)devid, osh, regsva, PCI_BUS, bus, in dhdpcie_dongle_attach()
1552 &bus->vars, &bus->varsz))) { in dhdpcie_dongle_attach()
1565 DHD_ERROR(("%s: error - pre chipid access sequence error %d\n", in dhdpcie_dongle_attach()
1575 if (!(bus->sih = si_attach((uint)devid, osh, regsva, PCI_BUS, bus, in dhdpcie_dongle_attach()
1576 &bus->vars, &bus->varsz))) { in dhdpcie_dongle_attach()
1584 bus->cto_enable = FALSE; in dhdpcie_dongle_attach()
1587 if (bus->sih->buscorerev >= 24) { in dhdpcie_dongle_attach()
1589 bus->cto_enable = TRUE; in dhdpcie_dongle_attach()
1594 bus->cto_enable = FALSE; in dhdpcie_dongle_attach()
1603 bus->sih->secureboot = isset(&secureboot, PCIECFGREG_SECURE_MODE_SHIFT); in dhdpcie_dongle_attach()
1605 if (MULTIBP_ENAB(bus->sih) && (bus->sih->buscorerev >= 66)) { in dhdpcie_dongle_attach()
1607 * HW JIRA - CRWLPCIEGEN2-672 in dhdpcie_dongle_attach()
1611 if (PCIE_ENUM_RESET_WAR_ENAB(bus->sih->buscorerev)) { in dhdpcie_dongle_attach()
1616 * dhdpcie_bus_release_dongle() --> si_detach() in dhdpcie_dongle_attach()
1617 * dhdpcie_dongle_attach() --> si_attach() in dhdpcie_dongle_attach()
1619 bus->pwr_req_ref = 0; in dhdpcie_dongle_attach()
1622 if (MULTIBP_ENAB(bus->sih)) { in dhdpcie_dongle_attach()
1628 if ((si_setcore(bus->sih, ARM7S_CORE_ID, 0)) || in dhdpcie_dongle_attach()
1629 (si_setcore(bus->sih, ARMCM3_CORE_ID, 0)) || in dhdpcie_dongle_attach()
1630 (si_setcore(bus->sih, ARMCR4_CORE_ID, 0)) || in dhdpcie_dongle_attach()
1631 (si_setcore(bus->sih, ARMCA7_CORE_ID, 0))) { in dhdpcie_dongle_attach()
1632 bus->armrev = si_corerev(bus->sih); in dhdpcie_dongle_attach()
1633 bus->coreid = si_coreid(bus->sih); in dhdpcie_dongle_attach()
1640 if (bus->coreid == ARMCA7_CORE_ID) { in dhdpcie_dongle_attach()
1646 /* Olympic EFI requirement - stop driver load if FW is already running in dhdpcie_dongle_attach()
1664 dhdpcie_clkreq(bus->osh, 1, 1); in dhdpcie_dongle_attach()
1668 * bus->dhd will be NULL if it is called from dhd_bus_attach, so need to reset in dhdpcie_dongle_attach()
1670 * like quiesce FLR, then based on dongle_isolation flag, watchdog_reset should in dhdpcie_dongle_attach()
1673 if (bus->dhd == NULL) { in dhdpcie_dongle_attach()
1677 dongle_isolation = bus->dhd->dongle_isolation; in dhdpcie_dongle_attach()
1682 * Issue CC watchdog to reset all the cores on the chip - similar to rmmod dhd in dhdpcie_dongle_attach()
1684 * dongle to a sane state (on host soft-reboot / watchdog-reboot). in dhdpcie_dongle_attach()
1694 bus->force_bt_quiesce = TRUE; in dhdpcie_dongle_attach()
1699 if (bus->sih->buscorerev >= 66) { in dhdpcie_dongle_attach()
1700 bus->force_bt_quiesce = FALSE; in dhdpcie_dongle_attach()
1709 si_setcore(bus->sih, PCIE2_CORE_ID, 0); in dhdpcie_dongle_attach()
1710 sbpcieregs = (sbpcieregs_t*)(bus->regs); in dhdpcie_dongle_attach()
1713 W_REG(osh, &sbpcieregs->configaddr, 0x4e0); in dhdpcie_dongle_attach()
1714 val = R_REG(osh, &sbpcieregs->configdata); in dhdpcie_dongle_attach()
1715 W_REG(osh, &sbpcieregs->configdata, val); in dhdpcie_dongle_attach()
1717 if (si_setcore(bus->sih, SYSMEM_CORE_ID, 0)) { in dhdpcie_dongle_attach()
1721 if (!bus->ramsize_adjusted) { in dhdpcie_dongle_attach()
1722 if (!(bus->orig_ramsize = si_sysmem_size(bus->sih))) { in dhdpcie_dongle_attach()
1726 switch ((uint16)bus->sih->chip) { in dhdpcie_dongle_attach()
1729 bus->dongle_ram_base = CA7_4368_RAM_BASE; in dhdpcie_dongle_attach()
1730 bus->orig_ramsize = 0x1c0000; in dhdpcie_dongle_attach()
1733 bus->dongle_ram_base = CA7_4367_RAM_BASE; in dhdpcie_dongle_attach()
1734 bus->orig_ramsize = 0x1e0000; in dhdpcie_dongle_attach()
1739 bus->dongle_ram_base = CA7_4365_RAM_BASE; in dhdpcie_dongle_attach()
1740 bus->orig_ramsize = 0x1c0000; /* Reserve 1.75MB for CA7 */ in dhdpcie_dongle_attach()
1744 } else if (!si_setcore(bus->sih, ARMCR4_CORE_ID, 0)) { in dhdpcie_dongle_attach()
1745 if (!(bus->orig_ramsize = si_socram_size(bus->sih))) { in dhdpcie_dongle_attach()
1751 if (!(bus->orig_ramsize = si_tcm_size(bus->sih))) { in dhdpcie_dongle_attach()
1752 DHD_ERROR(("%s: failed to find CR4-TCM memory!\n", __FUNCTION__)); in dhdpcie_dongle_attach()
1756 switch ((uint16)bus->sih->chip) { in dhdpcie_dongle_attach()
1759 bus->dongle_ram_base = CR4_4335_RAM_BASE; in dhdpcie_dongle_attach()
1767 bus->dongle_ram_base = CR4_4350_RAM_BASE; in dhdpcie_dongle_attach()
1770 bus->dongle_ram_base = CR4_4360_RAM_BASE; in dhdpcie_dongle_attach()
1774 bus->dongle_ram_base = CR4_4364_RAM_BASE; in dhdpcie_dongle_attach()
1778 bus->dongle_ram_base = (bus->sih->chiprev < 6) /* changed at 4345C0 */ in dhdpcie_dongle_attach()
1782 bus->dongle_ram_base = CR4_43602_RAM_BASE; in dhdpcie_dongle_attach()
1785 /* RAM base changed from 4349c0(revid=9) onwards */ in dhdpcie_dongle_attach()
1786 bus->dongle_ram_base = ((bus->sih->chiprev < 9) ? in dhdpcie_dongle_attach()
1792 bus->dongle_ram_base = CR4_4347_RAM_BASE; in dhdpcie_dongle_attach()
1795 bus->dongle_ram_base = CR4_4362_RAM_BASE; in dhdpcie_dongle_attach()
1798 bus->dongle_ram_base = CR4_43751_RAM_BASE; in dhdpcie_dongle_attach()
1802 bus->dongle_ram_base = CR4_4373_RAM_BASE; in dhdpcie_dongle_attach()
1806 bus->dongle_ram_base = CR4_4378_RAM_BASE; in dhdpcie_dongle_attach()
1809 bus->dongle_ram_base = CR4_4377_RAM_BASE; in dhdpcie_dongle_attach()
1814 bus->dongle_ram_base = CR4_4369_RAM_BASE; in dhdpcie_dongle_attach()
1817 bus->dongle_ram_base = CR4_55560_RAM_BASE; in dhdpcie_dongle_attach()
1820 bus->dongle_ram_base = 0; in dhdpcie_dongle_attach()
1822 __FUNCTION__, bus->dongle_ram_base)); in dhdpcie_dongle_attach()
1826 /* 55560, Dedicated space for TCAM patching and TRX Header at RAMBASE */ in dhdpcie_dongle_attach()
1827 /* TCAM Patching - 2048[2K], TRX Header - 32Bytes */ in dhdpcie_dongle_attach()
1828 if (bus->sih->chip == CYW55560_CHIP_ID) { in dhdpcie_dongle_attach()
1829 bus->orig_ramsize -= (CR4_55560_TCAM_SZ + CR4_55560_TRX_HDR_SZ); in dhdpcie_dongle_attach()
1832 bus->ramsize = bus->orig_ramsize; in dhdpcie_dongle_attach()
1836 if (bus->ramsize > DONGLE_TCM_MAP_SIZE) { in dhdpcie_dongle_attach()
1838 __FUNCTION__, bus->ramsize, bus->ramsize)); in dhdpcie_dongle_attach()
1843 bus->ramsize, bus->orig_ramsize, bus->dongle_ram_base)); in dhdpcie_dongle_attach()
1845 bus->srmemsize = si_socram_srmem_size(bus->sih); in dhdpcie_dongle_attach()
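
Per the comments above, on the CYW55560 the first 2048 bytes at the CR4 RAM base are reserved for TCAM patching and another 32 bytes for the TRX header, so that amount is subtracted from orig_ramsize before the usable ramsize is reported. A tiny worked example of that bookkeeping; the total size below is made up for illustration:

    #include <stdio.h>
    #include <stdint.h>

    #define TCAM_PATCH_SZ 2048u   /* "TCAM Patching - 2048[2K]" from the comment */
    #define TRX_HDR_SZ      32u   /* "TRX Header - 32Bytes" from the comment */

    int main(void)
    {
        uint32_t orig_ramsize = 0x1E0000u;   /* hypothetical total TCM size */
        uint32_t usable = orig_ramsize - (TCAM_PATCH_SZ + TRX_HDR_SZ);

        printf("reserved %u bytes, usable 0x%x of 0x%x\n",
               TCAM_PATCH_SZ + TRX_HDR_SZ, (unsigned)usable, (unsigned)orig_ramsize);
        return 0;
    }
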
1850 bus->intr = (bool)dhd_intr; in dhdpcie_dongle_attach()
1852 bus->idma_enabled = TRUE; in dhdpcie_dongle_attach()
1853 bus->ifrm_enabled = TRUE; in dhdpcie_dongle_attach()
1856 if (MULTIBP_ENAB(bus->sih)) { in dhdpcie_dongle_attach()
1863 * On 4378A0 (rev 66), PCIe enum reset is disabled due to CRWLPCIEGEN2-672 in dhdpcie_dongle_attach()
1865 si_srpwr_request(bus->sih, SRPWR_DMN0_PCIE_MASK, 0); in dhdpcie_dongle_attach()
1871 if (bus->sih->buscorerev >= 68) { in dhdpcie_dongle_attach()
1879 if (bus->sih != NULL) { in dhdpcie_dongle_attach()
1880 if (MULTIBP_ENAB(bus->sih)) { in dhdpcie_dongle_attach()
1886 si_detach(bus->sih); in dhdpcie_dongle_attach()
1887 bus->sih = NULL; in dhdpcie_dongle_attach()
1890 return -1; in dhdpcie_dongle_attach()
1912 if (bus->sih && !bus->is_linkdown) { in dhdpcie_bus_intr_enable()
1914 if (bus->bus_low_power_state == DHD_BUS_D3_ACK_RECIEVED) { in dhdpcie_bus_intr_enable()
1917 if ((bus->sih->buscorerev == 2) || (bus->sih->buscorerev == 6) || in dhdpcie_bus_intr_enable()
1918 (bus->sih->buscorerev == 4)) { in dhdpcie_bus_intr_enable()
1922 dhd_bus_mmio_trace(bus, bus->pcie_mailbox_mask, in dhdpcie_bus_intr_enable()
1923 bus->def_intmask, TRUE); in dhdpcie_bus_intr_enable()
1925 si_corereg(bus->sih, bus->sih->buscoreidx, bus->pcie_mailbox_mask, in dhdpcie_bus_intr_enable()
1926 bus->def_intmask, bus->def_intmask); in dhdpcie_bus_intr_enable()
1940 if (bus && bus->sih && !bus->is_linkdown) { in dhdpcie_bus_intr_disable()
1942 if (bus->bus_low_power_state == DHD_BUS_D3_ACK_RECIEVED) { in dhdpcie_bus_intr_disable()
1945 if ((bus->sih->buscorerev == 2) || (bus->sih->buscorerev == 6) || in dhdpcie_bus_intr_disable()
1946 (bus->sih->buscorerev == 4)) { in dhdpcie_bus_intr_disable()
1949 si_corereg(bus->sih, bus->sih->buscoreidx, bus->pcie_mailbox_mask, in dhdpcie_bus_intr_disable()
1950 bus->def_intmask, 0); in dhdpcie_bus_intr_disable()
1974 dhdp->dhd_watchdog_ms_backup = dhd_watchdog_ms; in dhdpcie_advertise_bus_cleanup()
1975 if (dhdp->dhd_watchdog_ms_backup) { in dhdpcie_advertise_bus_cleanup()
1980 if (dhdp->busstate != DHD_BUS_DOWN) { in dhdpcie_advertise_bus_cleanup()
1982 dhdp->busstate = DHD_BUS_DOWN_IN_PROGRESS; in dhdpcie_advertise_bus_cleanup()
1986 timeleft = dhd_os_busbusy_wait_negation(dhdp, &dhdp->dhd_bus_busy_state); in dhdpcie_advertise_bus_cleanup()
1989 __FUNCTION__, dhdp->dhd_bus_busy_state)); in dhdpcie_advertise_bus_cleanup()
2002 DHD_GENERAL_LOCK(bus->dhd, flags); in dhdpcie_bus_remove_prep()
2004 bus->dhd->busstate = DHD_BUS_DOWN; in dhdpcie_bus_remove_prep()
2005 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhdpcie_bus_remove_prep()
2007 dhd_os_sdlock(bus->dhd); in dhdpcie_bus_remove_prep()
2009 if (bus->sih && !bus->dhd->dongle_isolation) { in dhdpcie_bus_remove_prep()
2010 if (PCIE_RELOAD_WAR_ENAB(bus->sih->buscorerev) && in dhdpcie_bus_remove_prep()
2011 bus->sih->chip != CYW55560_CHIP_ID) { in dhdpcie_bus_remove_prep()
2017 /* HW4347-909 */ in dhdpcie_bus_remove_prep()
2018 if ((bus->sih->buscorerev == 19) || (bus->sih->buscorerev == 23)) { in dhdpcie_bus_remove_prep()
2020 pcie_set_trefup_time_100us(bus->sih); in dhdpcie_bus_remove_prep()
2027 if ((PMUREV(bus->sih->pmurev) > 31) && in dhdpcie_bus_remove_prep()
2028 (bus->sih->buscorerev != 66) && in dhdpcie_bus_remove_prep()
2029 (bus->sih->buscorerev != 68) && in dhdpcie_bus_remove_prep()
2030 (bus->sih->buscorerev != 69) && in dhdpcie_bus_remove_prep()
2031 (bus->sih->buscorerev != 70)) { in dhdpcie_bus_remove_prep()
2032 si_pmu_fast_lpo_disable(bus->sih); in dhdpcie_bus_remove_prep()
2040 if (!bus->is_linkdown) { in dhdpcie_bus_remove_prep()
2051 bus->dhd->is_pcie_watchdog_reset = TRUE; in dhdpcie_bus_remove_prep()
2054 dhd_os_sdunlock(bus->dhd); in dhdpcie_bus_remove_prep()
2062 if (!bus->bus_lock) { in dhd_init_bus_lock()
2063 bus->bus_lock = dhd_os_spin_lock_init(bus->dhd->osh); in dhd_init_bus_lock()
2070 if (bus->bus_lock) { in dhd_deinit_bus_lock()
2071 dhd_os_spin_lock_deinit(bus->dhd->osh, bus->bus_lock); in dhd_deinit_bus_lock()
2072 bus->bus_lock = NULL; in dhd_deinit_bus_lock()
2079 if (!bus->backplane_access_lock) { in dhd_init_backplane_access_lock()
2080 bus->backplane_access_lock = dhd_os_spin_lock_init(bus->dhd->osh); in dhd_init_backplane_access_lock()
2087 if (bus->backplane_access_lock) { in dhd_deinit_backplane_access_lock()
2088 dhd_os_spin_lock_deinit(bus->dhd->osh, bus->backplane_access_lock); in dhd_deinit_backplane_access_lock()
2089 bus->backplane_access_lock = NULL; in dhd_deinit_backplane_access_lock()
2108 osh = bus->osh; in dhdpcie_bus_release()
2111 if (bus->dhd) { in dhdpcie_bus_release()
2115 dhdpcie_advertise_bus_cleanup(bus->dhd); in dhdpcie_bus_release()
2116 dongle_isolation = bus->dhd->dongle_isolation; in dhdpcie_bus_release()
2117 bus->dhd->is_pcie_watchdog_reset = FALSE; in dhdpcie_bus_release()
2120 if (bus->intr) { in dhdpcie_bus_release()
2121 DHD_BUS_LOCK(bus->bus_lock, flags_bus); in dhdpcie_bus_release()
2123 DHD_BUS_UNLOCK(bus->bus_lock, flags_bus); in dhdpcie_bus_release()
2129 if (IDMA_ACTIVE(bus->dhd)) { in dhdpcie_bus_release()
2135 buscorerev = bus->sih->buscorerev; in dhdpcie_bus_release()
2136 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_bus_release()
2141 * dhdpcie_bus_release_dongle free bus->sih handle, which is needed to in dhdpcie_bus_release()
2146 dhd_detach(bus->dhd); in dhdpcie_bus_release()
2148 dhd_free(bus->dhd); in dhdpcie_bus_release()
2149 bus->dhd = NULL; in dhdpcie_bus_release()
2152 if (bus->regs) { in dhdpcie_bus_release()
2153 dhdpcie_bus_reg_unmap(osh, bus->regs, DONGLE_REG_MAP_SIZE); in dhdpcie_bus_release()
2154 bus->regs = NULL; in dhdpcie_bus_release()
2156 if (bus->tcm) { in dhdpcie_bus_release()
2157 dhdpcie_bus_reg_unmap(osh, bus->tcm, DONGLE_TCM_MAP_SIZE); in dhdpcie_bus_release()
2158 bus->tcm = NULL; in dhdpcie_bus_release()
2163 if (bus->pcie_sh) { in dhdpcie_bus_release()
2164 MFREE(osh, bus->pcie_sh, sizeof(pciedev_shared_t)); in dhdpcie_bus_release()
2165 bus->pcie_sh = NULL; in dhdpcie_bus_release()
2168 if (bus->console.buf != NULL) { in dhdpcie_bus_release()
2169 MFREE(osh, bus->console.buf, bus->console.bufsize); in dhdpcie_bus_release()
2184 DHD_TRACE(("%s: Enter bus->dhd %p bus->dhd->dongle_reset %d \n", __FUNCTION__, in dhdpcie_bus_release_dongle()
2185 bus->dhd, bus->dhd->dongle_reset)); in dhdpcie_bus_release_dongle()
2187 if ((bus->dhd && bus->dhd->dongle_reset) && reset_flag) { in dhdpcie_bus_release_dongle()
2192 if (bus->is_linkdown) { in dhdpcie_bus_release_dongle()
2197 if (bus->sih) { in dhdpcie_bus_release_dongle()
2200 (bus->dhd && !bus->dhd->is_pcie_watchdog_reset)) { in dhdpcie_bus_release_dongle()
2206 if (bus->ltrsleep_on_unload) { in dhdpcie_bus_release_dongle()
2207 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_bus_release_dongle()
2211 if (bus->sih->buscorerev == 13) in dhdpcie_bus_release_dongle()
2212 pcie_serdes_iddqdisable(bus->osh, bus->sih, in dhdpcie_bus_release_dongle()
2213 (sbpcieregs_t *) bus->regs); in dhdpcie_bus_release_dongle()
2221 dhdpcie_clkreq(bus->osh, 1, 0); in dhdpcie_bus_release_dongle()
2228 if (bus->sih != NULL) { in dhdpcie_bus_release_dongle()
2229 si_detach(bus->sih); in dhdpcie_bus_release_dongle()
2230 bus->sih = NULL; in dhdpcie_bus_release_dongle()
2232 if (bus->vars && bus->varsz) in dhdpcie_bus_release_dongle()
2233 MFREE(osh, bus->vars, bus->varsz); in dhdpcie_bus_release_dongle()
2234 bus->vars = NULL; in dhdpcie_bus_release_dongle()
2243 uint32 data = OSL_PCI_READ_CONFIG(bus->osh, addr, size); in dhdpcie_bus_cfg_read_dword()
2251 OSL_PCI_WRITE_CONFIG(bus->osh, addr, size, data); in dhdpcie_bus_cfg_write_dword()
2257 OSL_PCI_WRITE_CONFIG(bus->osh, PCI_BAR0_WIN, 4, data); in dhdpcie_bus_cfg_set_bar0_win()
2268 (dhd_dongle_memsize < (int32)bus->orig_ramsize)) in dhdpcie_bus_dongle_setmemsize()
2269 bus->ramsize = dhd_dongle_memsize; in dhdpcie_bus_dongle_setmemsize()
2277 if (bus->dhd && bus->dhd->dongle_reset) in dhdpcie_bus_release_malloc()
2280 if (bus->vars && bus->varsz) { in dhdpcie_bus_release_malloc()
2281 MFREE(osh, bus->vars, bus->varsz); in dhdpcie_bus_release_malloc()
2282 bus->vars = NULL; in dhdpcie_bus_release_malloc()
2297 if (!bus->dhd) in dhd_bus_stop()
2300 if (bus->dhd->busstate == DHD_BUS_DOWN) { in dhd_bus_stop()
2305 DHD_DISABLE_RUNTIME_PM(bus->dhd); in dhd_bus_stop()
2307 DHD_GENERAL_LOCK(bus->dhd, flags); in dhd_bus_stop()
2309 bus->dhd->busstate = DHD_BUS_DOWN; in dhd_bus_stop()
2310 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhd_bus_stop()
2313 atomic_set(&bus->dhd->block_bus, TRUE); in dhd_bus_stop()
2316 DHD_BUS_LOCK(bus->bus_lock, flags_bus); in dhd_bus_stop()
2318 DHD_BUS_UNLOCK(bus->bus_lock, flags_bus); in dhd_bus_stop()
2320 if (!bus->is_linkdown) { in dhd_bus_stop()
2327 dhd_dpc_kill(bus->dhd); in dhd_bus_stop()
2338 dhd_wakeup_ioctl_event(bus->dhd, IOCTL_RETURN_ON_BUS_STOP); in dhd_bus_stop()
2351 dhd_bus_t *bus = dhd->bus; in dhd_bus_watchdog()
2367 if (dhd->busstate == DHD_BUS_DATA && in dhd_bus_watchdog()
2368 dhd->dhd_console_ms != 0 && in dhd_bus_watchdog()
2369 bus->bus_low_power_state == DHD_BUS_NO_LOW_POWER_STATE) { in dhd_bus_watchdog()
2370 bus->console.count += dhd_watchdog_ms; in dhd_bus_watchdog()
2371 if (bus->console.count >= dhd->dhd_console_ms) { in dhd_bus_watchdog()
2372 bus->console.count -= dhd->dhd_console_ms; in dhd_bus_watchdog()
2374 if (MULTIBP_ENAB(bus->sih)) { in dhd_bus_watchdog()
2380 dhd->dhd_console_ms = 0; /* On error, stop trying */ in dhd_bus_watchdog()
2383 if (MULTIBP_ENAB(bus->sih)) { in dhd_bus_watchdog()
2409 DHD_ERROR(("----- CHIP 4358 A0 -----\n")); in concate_revision_bcm4358()
2412 DHD_ERROR(("----- CHIP 4358 A1 -----\n")); in concate_revision_bcm4358()
2417 DHD_ERROR(("----- CHIP 4358 A3 -----\n")); in concate_revision_bcm4358()
2422 DHD_ERROR(("----- Unknown chip version, ver=%x -----\n", chiprev)); in concate_revision_bcm4358()
2442 DHD_ERROR(("----- Board Rev [%d]-----\n", system_rev)); in concate_revision_bcm4358()
2457 int module_type = -1; in concate_revision_bcm4359()
2460 chip_ver = bus->sih->chiprev; in concate_revision_bcm4359()
2462 DHD_ERROR(("----- CHIP 4359 B0 -----\n")); in concate_revision_bcm4359()
2465 DHD_ERROR(("----- CHIP 4359 B1 -----\n")); in concate_revision_bcm4359()
2468 DHD_ERROR(("----- CHIP 4359 C0 -----\n")); in concate_revision_bcm4359()
2471 DHD_ERROR(("----- Unknown chip version, ver=%x -----\n", chip_ver)); in concate_revision_bcm4359()
2472 return -1; in concate_revision_bcm4359()
2530 #define CIS_TUPLE_MAX_COUNT (uint32)((CIS_TUPLE_END_ADDRESS - CIS_TUPLE_START_ADDRESS\
2576 { {"r031_1kl_b0"}, {"_r030_b0"}, {"_b0"} }, /* exceptional case : r31 -> r30 */
2680 if (si_backplane_access(bus->sih, boardtype_backplane_addr[i], sizeof(int), in dhd_parse_board_information_bcm()
2686 if (si_backplane_access(bus->sih, boardtype_backplane_addr[i], sizeof(int), in dhd_parse_board_information_bcm()
2698 if (si_backplane_access(bus->sih, CIS_TUPLE_START_ADDRESS + i * sizeof(uint32), in dhd_parse_board_information_bcm()
2708 if (tuple->id != CIS_TUPLE_TAG_START) { in dhd_parse_board_information_bcm()
2715 while ((totlen >= (tuple->len + CIS_TUPLE_HDR_LEN)) && in dhd_parse_board_information_bcm()
2717 len = tuple->len; in dhd_parse_board_information_bcm()
2719 if ((tuple->tag == CIS_TUPLE_TAG_VENDOR) && in dhd_parse_board_information_bcm()
2722 memcpy(vid, tuple->data, tuple->len - CIS_TUPLE_TAG_LENGTH); in dhd_parse_board_information_bcm()
2723 *vid_length = tuple->len - CIS_TUPLE_TAG_LENGTH; in dhd_parse_board_information_bcm()
2724 prhex("OTP VID", tuple->data, tuple->len - CIS_TUPLE_TAG_LENGTH); in dhd_parse_board_information_bcm()
2726 else if ((tuple->tag == CIS_TUPLE_TAG_BOARDTYPE) && in dhd_parse_board_information_bcm()
2729 *boardtype = (int)tuple->data[0]; in dhd_parse_board_information_bcm()
2730 prhex("OTP boardtype", tuple->data, tuple->len - CIS_TUPLE_TAG_LENGTH); in dhd_parse_board_information_bcm()
2734 totlen -= (len + CIS_TUPLE_HDR_LEN); in dhd_parse_board_information_bcm()
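
The loop above is a standard TLV walk over the OTP CIS area: each tuple is a one-byte tag, a one-byte length, and len bytes of data, and the cursor advances by len plus the header size until the remaining length runs out. A generic sketch of that walk; the tuple layout and the two tag values are assumptions modelled on the code above, not the exact driver definitions:

    #include <stdint.h>
    #include <string.h>

    #define TAG_VENDOR     0x80u   /* assumed tag values, for illustration only */
    #define TAG_BOARDTYPE  0x81u
    #define TUPLE_HDR_LEN  2u      /* tag byte + length byte */

    struct tuple { uint8_t tag; uint8_t len; uint8_t data[]; };

    /* walk a CIS-style TLV buffer, copying out the vendor id and board type */
    static void cis_walk(const uint8_t *buf, size_t totlen,
                         uint8_t *vid, size_t *vid_len, int *boardtype)
    {
        size_t off = 0;

        while (off + TUPLE_HDR_LEN <= totlen) {
            const struct tuple *t = (const struct tuple *)(buf + off);

            if (off + TUPLE_HDR_LEN + t->len > totlen)
                break;                                  /* truncated tuple, stop */

            if (t->tag == TAG_VENDOR) {
                memcpy(vid, t->data, t->len);
                *vid_len = t->len;
            } else if (t->tag == TAG_BOARDTYPE && t->len >= 1) {
                *boardtype = t->data[0];
            }
            off += TUPLE_HDR_LEN + t->len;              /* advance past this tuple */
        }
    }
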
2756 if (!bus || !bus->sih) { in dhd_find_naming_info_by_chip_rev()
2757 DHD_ERROR(("%s:bus(%p) or bus->sih is NULL\n", __FUNCTION__, bus)); in dhd_find_naming_info_by_chip_rev()
2760 chip_rev = bus->sih->chiprev; in dhd_find_naming_info_by_chip_rev()
2833 strncat(nv_path, info->nvram_ext, strlen(info->nvram_ext)); in concate_revision_bcm4361()
2834 strncat(fw_path, info->fw_ext, strlen(info->fw_ext)); in concate_revision_bcm4361()
2871 strncat(nv_path, info->nvram_ext, strlen(info->nvram_ext)); in concate_revision_bcm4375()
2872 strncat(fw_path, info->fw_ext, strlen(info->fw_ext)); in concate_revision_bcm4375()
2892 if (!bus || !bus->sih) { in concate_revision()
2894 return -1; in concate_revision()
2902 switch (si_chipid(bus->sih)) { in concate_revision()
2931 dhd_bus_t *bus = dhd->bus; in dhd_get_chipid()
2933 if (bus && bus->sih) in dhd_get_chipid()
2934 return (uint16)si_chipid(bus->sih); in dhd_get_chipid()
2947 * If bus->fw_path is empty, or if the download of bus->fw_path failed, firmware contained in header
2958 bus->fw_path = pfw_path; in dhd_bus_download_firmware()
2959 bus->nv_path = pnv_path; in dhd_bus_download_firmware()
2962 if (concate_revision(bus, bus->fw_path, bus->nv_path) != 0) { in dhd_bus_download_firmware()
2970 dhd_set_blob_support(bus->dhd, bus->fw_path); in dhd_bus_download_firmware()
2974 __FUNCTION__, bus->fw_path, bus->nv_path)); in dhd_bus_download_firmware()
2983 * Loads firmware given by 'bus->fw_path' into PCIe dongle.
2990 * If bus->fw_path is empty, or if the download of bus->fw_path failed, firmware contained in header
3000 uint chipid = bus->sih->chip; in dhdpcie_download_firmware()
3001 uint revid = bus->sih->chiprev; in dhdpcie_download_firmware()
3004 bus->fw_path = fw_path; in dhdpcie_download_firmware()
3005 bus->nv_path = nv_path; in dhdpcie_download_firmware()
3028 snprintf(bus->nv_path, sizeof(nv_path), "%s.nvm", fw_path); in dhdpcie_download_firmware()
3030 snprintf(bus->fw_path, sizeof(fw_path), "%s-firmware.bin", fw_path); in dhdpcie_download_firmware()
3033 DHD_OS_WAKE_LOCK(bus->dhd); in dhdpcie_download_firmware()
3036 DHD_OS_WAKE_UNLOCK(bus->dhd); in dhdpcie_download_firmware()
3057 int offset_end = bus->ramsize; in dhdpcie_download_code_file()
3063 if (dhd_bus_get_fw_mode(bus->dhd) == DHD_FLAG_MFG_MODE) { in dhdpcie_download_code_file()
3081 imgbuf = dhd_os_open_image1(bus->dhd, pfw_path); in dhdpcie_download_code_file()
3092 memptr = memblock = MALLOC(bus->dhd->osh, MEMBLOCK + DHD_SDALIGN); in dhdpcie_download_code_file()
3099 memptr += (DHD_SDALIGN - ((uint32)(uintptr)memblock % DHD_SDALIGN)); in dhdpcie_download_code_file()
3103 store_reset = (si_setcore(bus->sih, ARMCR4_CORE_ID, 0) || in dhdpcie_download_code_file()
3104 si_setcore(bus->sih, ARMCA7_CORE_ID, 0)); in dhdpcie_download_code_file()
3116 len, read_len, file_size, (len - (read_len - file_size)))); in dhdpcie_download_code_file()
3117 len -= (read_len - file_size); in dhdpcie_download_code_file()
3123 bus->resetinstr = *(((uint32*)memptr)); in dhdpcie_download_code_file()
3125 offset += bus->dongle_ram_base; in dhdpcie_download_code_file()
3134 if (trx_hdr->magic == TRX_MAGIC) { in dhdpcie_download_code_file()
3136 offset -= sizeof(struct trx_header); in dhdpcie_download_code_file()
3161 MFREE(bus->dhd->osh, memblock, MEMBLOCK + DHD_SDALIGN); in dhdpcie_download_code_file()
3165 dhd_os_close_image1(bus->dhd, imgbuf); in dhdpcie_download_code_file()
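
The MALLOC of MEMBLOCK + DHD_SDALIGN and the memptr adjustment above are the usual over-allocate-then-align trick: the buffer is allocated with extra slack so the working pointer can be bumped up to an aligned address before the chunked writes to dongle RAM. A self-contained sketch of just that adjustment; the DHD_SDALIGN value here is assumed, not the driver's:

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define MEMBLOCK    2048
    #define DHD_SDALIGN   32   /* assumed alignment; the driver's value may differ */

    int main(void)
    {
        /* over-allocate so the working pointer can be pushed to an aligned address */
        uint8_t *memblock = malloc(MEMBLOCK + DHD_SDALIGN);
        uint8_t *memptr = memblock;

        if (memblock == NULL)
            return 1;

        /* same adjustment as the driver: skip the misaligned prefix (this advances
           a full DHD_SDALIGN when already aligned, which the slack absorbs) */
        memptr += (DHD_SDALIGN - ((uintptr_t)memblock % DHD_SDALIGN));

        printf("raw=%p aligned=%p\n", (void *)memblock, (void *)memptr);
        free(memblock);
        return 0;
    }
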
3186 pnv_path = bus->nv_path; in dhdpcie_download_nvram()
3192 dhd_get_download_buffer(bus->dhd, NULL, NVRAM, &memblock, (int *)&len); in dhdpcie_download_nvram()
3199 dhd_get_download_buffer(bus->dhd, pnv_path, NVRAM, &memblock, (int *)&len); in dhdpcie_download_nvram()
3235 len += 4 - (len % 4); in dhdpcie_download_nvram()
3250 MFREE(bus->dhd->osh, memblock, MAX_NVRAMBUF_SIZE); in dhdpcie_download_nvram()
3252 dhd_free_download_buffer(bus->dhd, memblock, MAX_NVRAMBUF_SIZE); in dhdpcie_download_nvram()
3269 if ((bus->fw_path != NULL) && (bus->fw_path[0] != '\0')) { in dhdpcie_ramsize_read_image()
3271 imgbuf = dhd_os_open_image1(bus->dhd, bus->fw_path); in dhdpcie_ramsize_read_image()
3288 dhd_os_close_image1(bus->dhd, imgbuf); in dhdpcie_ramsize_read_image()
3310 if (bus->ramsize_adjusted) { in dhdpcie_ramsize_adj()
3317 bus->ramsize_adjusted = TRUE; in dhdpcie_ramsize_adj()
3327 if ((bus->fw_path == NULL) || (bus->fw_path[0] == '\0')) { in dhdpcie_ramsize_adj()
3346 memptr = MALLOC(bus->dhd->osh, search_len); in dhdpcie_ramsize_adj()
3374 bus->orig_ramsize = LTOH32(ramsize_info.ram_size); in dhdpcie_ramsize_adj()
3375 bus->ramsize = LTOH32(ramsize_info.ram_size); in dhdpcie_ramsize_adj()
3377 bus->ramsize)); in dhdpcie_ramsize_adj()
3385 MFREE(bus->dhd->osh, memptr, search_len); in dhdpcie_ramsize_adj()
3391 * Downloads firmware file given by 'bus->fw_path' into PCIe dongle
3394 * If bus->fw_path is empty, or if the download of bus->fw_path failed, firmware contained in header
3401 int bcmerror = -1; in _dhdpcie_download_firmware()
3407 if ((bus->fw_path == NULL) || (bus->fw_path[0] == '\0')) { in _dhdpcie_download_firmware()
3421 if ((bus->fw_path != NULL) && (bus->fw_path[0] != '\0')) { in _dhdpcie_download_firmware()
3422 if (dhdpcie_download_code_file(bus, bus->fw_path)) { in _dhdpcie_download_firmware()
3463 dhd_console_t *c = &bus->console; in dhdpcie_bus_readconsole()
3474 if (bus->console_addr == 0) in dhdpcie_bus_readconsole()
3475 return -1; in dhdpcie_bus_readconsole()
3478 addr = bus->console_addr + OFFSETOF(hnd_cons_t, log); in dhdpcie_bus_readconsole()
3482 if (c->log_addr != addr) { in dhdpcie_bus_readconsole()
3484 c->last = 0; in dhdpcie_bus_readconsole()
3485 /* Re-allocate memory if console address changes */ in dhdpcie_bus_readconsole()
3486 if (c->buf) { in dhdpcie_bus_readconsole()
3487 MFREE(bus->dhd->osh, c->buf, c->bufsize); in dhdpcie_bus_readconsole()
3488 c->buf = NULL; in dhdpcie_bus_readconsole()
3491 c->log_addr = addr; in dhdpcie_bus_readconsole()
3494 if ((rv = dhdpcie_bus_membytes(bus, FALSE, addr, (uint8 *)&c->log, sizeof(c->log))) < 0) in dhdpcie_bus_readconsole()
3498 if (c->buf == NULL) { in dhdpcie_bus_readconsole()
3499 c->bufsize = ltoh32(c->log.buf_size); in dhdpcie_bus_readconsole()
3500 if ((c->buf = MALLOC(bus->dhd->osh, c->bufsize)) == NULL) in dhdpcie_bus_readconsole()
3502 DHD_ERROR(("conlog: bufsize=0x%x\n", c->bufsize)); in dhdpcie_bus_readconsole()
3504 idx = ltoh32(c->log.idx); in dhdpcie_bus_readconsole()
3507 if (idx > c->bufsize) in dhdpcie_bus_readconsole()
3511 if (idx == c->last) in dhdpcie_bus_readconsole()
3514 DHD_ERROR(("conlog: addr=0x%x, idx=0x%x, last=0x%x \n", c->log.buf, in dhdpcie_bus_readconsole()
3515 idx, c->last)); in dhdpcie_bus_readconsole()
3519 * important to handle wrap-around. in dhdpcie_bus_readconsole()
3521 addr = ltoh32(c->log.buf); in dhdpcie_bus_readconsole()
3523 /* wrap around case - write ptr < read ptr */ in dhdpcie_bus_readconsole()
3524 if (idx < c->last) { in dhdpcie_bus_readconsole()
3526 readlen = c->bufsize - c->last; in dhdpcie_bus_readconsole()
3528 addr + c->last, c->buf, readlen)) < 0) { in dhdpcie_bus_readconsole()
3534 addr, c->buf + readlen, in dhdpcie_bus_readconsole()
3541 /* non-wraparound case, write ptr > read ptr */ in dhdpcie_bus_readconsole()
3542 readlen = (uint)idx - c->last; in dhdpcie_bus_readconsole()
3544 addr + c->last, c->buf, readlen)) < 0) { in dhdpcie_bus_readconsole()
3550 c->last = idx; in dhdpcie_bus_readconsole()
3554 for (n = 0; n < CONSOLE_LINE_MAX - 2 && i < readlen; n++) { in dhdpcie_bus_readconsole()
3555 ch = c->buf[i]; in dhdpcie_bus_readconsole()
3563 if (line[n - 1] == '\r') in dhdpcie_bus_readconsole()
3564 n--; in dhdpcie_bus_readconsole()
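
The read logic above treats the dongle console as a circular buffer: when the write index has wrapped below the last read position it copies the tail (last..bufsize) and then the head (0..idx); otherwise it copies the straight span last..idx. A self-contained sketch of that two-part read:

    #include <stdint.h>
    #include <string.h>

    /* copy everything written since 'last' out of a circular buffer of size bufsize;
       returns the number of bytes copied and updates *last to the new read position */
    static size_t ring_read(const uint8_t *ring, size_t bufsize,
                            size_t idx /* write index */, size_t *last,
                            uint8_t *out)
    {
        size_t n = 0;

        if (idx == *last)
            return 0;                                /* nothing new */

        if (idx < *last) {                           /* wrapped: tail first, then head */
            size_t tail = bufsize - *last;
            memcpy(out, ring + *last, tail);
            memcpy(out + tail, ring, idx);
            n = tail + idx;
        } else {                                     /* straight span */
            n = idx - *last;
            memcpy(out, ring + *last, n);
        }

        *last = idx;                                 /* remember where we stopped */
        return n;
    }
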
3586 if (bus->is_linkdown) { in dhd_bus_dump_console_buffer()
3591 addr = bus->pcie_sh->console_addr + OFFSETOF(hnd_cons_t, log); in dhd_bus_dump_console_buffer()
3597 addr = bus->pcie_sh->console_addr + OFFSETOF(hnd_cons_t, log.buf_size); in dhd_bus_dump_console_buffer()
3603 addr = bus->pcie_sh->console_addr + OFFSETOF(hnd_cons_t, log.idx); in dhd_bus_dump_console_buffer()
3614 !(console_buffer = MALLOC(bus->dhd->osh, console_size))) { in dhd_bus_dump_console_buffer()
3624 for (n = 0; n < CONSOLE_LINE_MAX - 2; n++) { in dhd_bus_dump_console_buffer()
3632 if (line[n - 1] == '\r') in dhd_bus_dump_console_buffer()
3633 n--; in dhd_bus_dump_console_buffer()
3646 MFREE(bus->dhd->osh, console_buffer, console_size); in dhd_bus_dump_console_buffer()
3651 * Opens the file given by bus->fw_path, reads part of the file into a buffer and closes the file.
3663 pciedev_shared_t *local_pciedev_shared = bus->pcie_sh; in dhdpcie_checkdied()
3680 mbuffer = data = MALLOC(bus->dhd->osh, msize); in dhdpcie_checkdied()
3689 if ((str = MALLOC(bus->dhd->osh, maxstrlen)) == NULL) { in dhdpcie_checkdied()
3694 DHD_GENERAL_LOCK(bus->dhd, flags); in dhdpcie_checkdied()
3695 DHD_BUS_BUSY_SET_IN_CHECKDIED(bus->dhd); in dhdpcie_checkdied()
3696 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhdpcie_checkdied()
3698 if (MULTIBP_ENAB(bus->sih)) { in dhdpcie_checkdied()
3708 local_pciedev_shared->msgtrace_addr, local_pciedev_shared->console_addr); in dhdpcie_checkdied()
3710 if ((local_pciedev_shared->flags & PCIE_SHARED_ASSERT_BUILT) == 0) { in dhdpcie_checkdied()
3711 /* NOTE: Misspelled assert is intentional - DO NOT FIX. in dhdpcie_checkdied()
3717 if ((bus->pcie_sh->flags & (PCIE_SHARED_ASSERT|PCIE_SHARED_TRAP)) == 0) { in dhdpcie_checkdied()
3718 /* NOTE: Misspelled assert is intentional - DO NOT FIX. in dhdpcie_checkdied()
3722 (bus->pcie_sh->flags & PCIE_SHARED_ASSERT_BUILT) in dhdpcie_checkdied()
3725 if (bus->pcie_sh->flags & PCIE_SHARED_ASSERT) { in dhdpcie_checkdied()
3728 if (bus->pcie_sh->assert_exp_addr != 0) { in dhdpcie_checkdied()
3731 bus->pcie_sh->assert_exp_addr, in dhdpcie_checkdied()
3736 str[maxstrlen - 1] = '\0'; in dhdpcie_checkdied()
3740 if (bus->pcie_sh->assert_file_addr != 0) { in dhdpcie_checkdied()
3743 bus->pcie_sh->assert_file_addr, in dhdpcie_checkdied()
3748 str[maxstrlen - 1] = '\0'; in dhdpcie_checkdied()
3752 bcm_bprintf(&strbuf, " line %d ", bus->pcie_sh->assert_line); in dhdpcie_checkdied()
3755 if (bus->pcie_sh->flags & PCIE_SHARED_TRAP) { in dhdpcie_checkdied()
3756 trap_t *tr = &bus->dhd->last_trap_info; in dhdpcie_checkdied()
3759 bus->pcie_sh->trap_addr, (uint8*)tr, sizeof(trap_t))) < 0) { in dhdpcie_checkdied()
3760 bus->dhd->dongle_trap_occured = TRUE; in dhdpcie_checkdied()
3767 if (bus->pcie_sh->flags & (PCIE_SHARED_ASSERT | PCIE_SHARED_TRAP)) { in dhdpcie_checkdied()
3771 dhd_prot_debug_info_print(bus->dhd); in dhdpcie_checkdied()
3775 if (bus->dhd->memdump_enabled) { in dhdpcie_checkdied()
3777 bus->dhd->collect_sssr = TRUE; in dhdpcie_checkdied()
3779 bus->dhd->memdump_type = DUMP_TYPE_DONGLE_TRAP; in dhdpcie_checkdied()
3788 bus->dhd->dongle_trap_occured = TRUE; in dhdpcie_checkdied()
3792 dhd_wakeup_ioctl_event(bus->dhd, IOCTL_RETURN_ON_TRAP); in dhdpcie_checkdied()
3795 copy_hang_info_trap(bus->dhd); in dhdpcie_checkdied()
3797 dhd_schedule_reset(bus->dhd); in dhdpcie_checkdied()
3802 if (MULTIBP_ENAB(bus->sih)) { in dhdpcie_checkdied()
3806 DHD_GENERAL_LOCK(bus->dhd, flags); in dhdpcie_checkdied()
3807 DHD_BUS_BUSY_CLEAR_IN_CHECKDIED(bus->dhd); in dhdpcie_checkdied()
3808 dhd_os_busbusy_wake(bus->dhd); in dhdpcie_checkdied()
3809 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhdpcie_checkdied()
3812 MFREE(bus->dhd->osh, mbuffer, msize); in dhdpcie_checkdied()
3814 MFREE(bus->dhd->osh, str, maxstrlen); in dhdpcie_checkdied()
3832 start = bus->dongle_ram_base; in dhdpcie_mem_dump_bugcheck()
3845 size = bus->ramsize; in dhdpcie_mem_dump_bugcheck()
3855 size -= read_size; in dhdpcie_mem_dump_bugcheck()
3859 bus->dhd->soc_ram = buf; in dhdpcie_mem_dump_bugcheck()
3860 bus->dhd->soc_ram_length = bus->ramsize; in dhdpcie_mem_dump_bugcheck()
3879 if (!bus->dhd) { in dhdpcie_get_mem_dump()
3884 size = bus->ramsize; /* Full mem size */ in dhdpcie_get_mem_dump()
3885 start = bus->dongle_ram_base; /* Start address */ in dhdpcie_get_mem_dump()
3888 p_buf = dhd_get_fwdump_buf(bus->dhd, size); in dhdpcie_get_mem_dump()
3904 bus->dhd->memdump_success = FALSE; in dhdpcie_get_mem_dump()
3911 size -= read_size; in dhdpcie_get_mem_dump()
3930 if (bus->is_linkdown) { in dhdpcie_mem_dump()
3933 ASSERT(bus->dhd->memdump_enabled != DUMP_MEMFILE_BUGON); in dhdpcie_mem_dump()
3938 dhdp = bus->dhd; in dhdpcie_mem_dump()
3961 dhd_schedule_memdump(dhdp, dhdp->soc_ram, dhdp->soc_ram_length); in dhdpcie_mem_dump()
3980 return dhdpcie_get_mem_dump(dhdp->bus); in dhd_bus_get_mem_dump()
3986 dhd_bus_t *bus = dhdp->bus; in dhd_bus_mem_dump()
3989 if (dhdp->busstate == DHD_BUS_DOWN) { in dhd_bus_mem_dump()
4002 __FUNCTION__, dhdp->busstate, dhdp->dhd_bus_busy_state)); in dhd_bus_mem_dump()
4017 DHD_OS_WAKE_LOCK(bus->dhd); in dhd_socram_dump()
4018 dhd_bus_mem_dump(bus->dhd); in dhd_socram_dump()
4019 DHD_OS_WAKE_UNLOCK(bus->dhd); in dhd_socram_dump()
4022 return -1; in dhd_socram_dump()
4037 if (write && bus->is_linkdown) { in dhdpcie_bus_membytes()
4042 if (MULTIBP_ENAB(bus->sih)) { in dhdpcie_bus_membytes()
4078 if ((size -= dsize)) { in dhdpcie_bus_membytes()
4102 if ((size -= dsize) > 0) { in dhdpcie_bus_membytes()
4108 if (MULTIBP_ENAB(bus->sih)) { in dhdpcie_bus_membytes()
4124 dhd_pub_t *dhdp = bus->dhd; in dhd_bus_schedule_queue()
4129 if (flow_id >= bus->max_submission_rings) { in dhd_bus_schedule_queue()
4131 flow_id, bus->max_submission_rings)); in dhd_bus_schedule_queue()
4135 flow_ring_node = DHD_FLOW_RING(bus->dhd, flow_id); in dhd_bus_schedule_queue()
4137 if (flow_ring_node->prot_info == NULL) { in dhd_bus_schedule_queue()
4143 if ((dhdp->dequeue_prec_map & (1 << flow_ring_node->flow_info.tid)) == 0) { in dhd_bus_schedule_queue()
4145 __FUNCTION__, flow_ring_node->flow_info.tid)); in dhd_bus_schedule_queue()
4159 queue = &flow_ring_node->queue; /* queue associated with flow ring */ in dhd_bus_schedule_queue()
4161 DHD_FLOWRING_LOCK(flow_ring_node->lock, flags); in dhd_bus_schedule_queue()
4163 if (flow_ring_node->status != FLOW_RING_STATUS_OPEN) { in dhd_bus_schedule_queue()
4164 DHD_FLOWRING_UNLOCK(flow_ring_node->lock, flags); in dhd_bus_schedule_queue()
4168 while ((txp = dhd_flow_queue_dequeue(bus->dhd, queue)) != NULL) { in dhd_bus_schedule_queue()
4180 if (bus->dhd->tcpack_sup_mode != TCPACK_SUP_HOLD) { in dhd_bus_schedule_queue()
4181 ret = dhd_tcpack_check_xmit(bus->dhd, txp); in dhd_bus_schedule_queue()
4191 if (eh->ether_type == hton16(ETHER_TYPE_802_1X)) { in dhd_bus_schedule_queue()
4195 PKTSETPRIO(txp, dhdp->prio_8021x); in dhd_bus_schedule_queue()
4200 ret = dhd_prot_txdata(bus->dhd, txp, flow_ring_node->flow_info.ifindex); in dhd_bus_schedule_queue()
4203 dhd_prot_txdata_write_flush(bus->dhd, flow_id); in dhd_bus_schedule_queue()
4205 dhd_flow_queue_reinsert(bus->dhd, queue, txp); in dhd_bus_schedule_queue()
4206 DHD_FLOWRING_UNLOCK(flow_ring_node->lock, flags); in dhd_bus_schedule_queue()
4214 if (!flow_ring_node->hp2p_ring) { in dhd_bus_schedule_queue()
4215 dhd_prot_txdata_write_flush(bus->dhd, flow_id); in dhd_bus_schedule_queue()
4218 dhd_prot_txdata_write_flush(bus->dhd, flow_id); in dhd_bus_schedule_queue()
4220 DHD_FLOWRING_UNLOCK(flow_ring_node->lock, flags); in dhd_bus_schedule_queue()
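
The loop in dhd_bus_schedule_queue() above drains the per-flow packet queue into the message ring and, when a submission fails (typically because the ring is full), re-inserts the packet via dhd_flow_queue_reinsert() and stops so nothing is dropped. A minimal sketch of that dequeue / submit / re-insert-on-backpressure pattern; the queue type and submit callback are illustrative, not the driver's:

    #include <stdbool.h>
    #include <stddef.h>

    struct pkt { struct pkt *next; /* payload omitted */ };

    struct flow_queue {
        struct pkt *head;
        struct pkt *tail;
    };

    static struct pkt *queue_dequeue(struct flow_queue *q)
    {
        struct pkt *p = q->head;
        if (p) {
            q->head = p->next;
            if (q->head == NULL)
                q->tail = NULL;
            p->next = NULL;
        }
        return p;
    }

    static void queue_reinsert_head(struct flow_queue *q, struct pkt *p)
    {
        p->next = q->head;            /* packet goes back to the front */
        q->head = p;
        if (q->tail == NULL)
            q->tail = p;
    }

    /* drain the queue until it is empty or the ring reports "no resource" */
    static void schedule_queue(struct flow_queue *q,
                               bool (*ring_submit)(struct pkt *p))
    {
        struct pkt *p;

        while ((p = queue_dequeue(q)) != NULL) {
            if (!ring_submit(p)) {            /* ring full: keep the packet for later */
                queue_reinsert_head(q, p);
                break;
            }
        }
    }
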
4240 if (!bus->dhd->flowid_allocator) { in dhd_bus_txdata()
4247 flow_ring_node = DHD_FLOW_RING(bus->dhd, flowid); in dhd_bus_txdata()
4250 __FUNCTION__, flowid, flow_ring_node->status, flow_ring_node->active)); in dhd_bus_txdata()
4252 DHD_FLOWRING_LOCK(flow_ring_node->lock, flags); in dhd_bus_txdata()
4253 if ((flowid >= bus->dhd->num_flow_rings) || in dhd_bus_txdata()
4255 (!flow_ring_node->active)) in dhd_bus_txdata()
4257 (!flow_ring_node->active) || in dhd_bus_txdata()
4258 (flow_ring_node->status == FLOW_RING_STATUS_DELETE_PENDING) || in dhd_bus_txdata()
4259 (flow_ring_node->status == FLOW_RING_STATUS_STA_FREEING)) in dhd_bus_txdata()
4262 DHD_FLOWRING_UNLOCK(flow_ring_node->lock, flags); in dhd_bus_txdata()
4264 __FUNCTION__, flowid, flow_ring_node->status, in dhd_bus_txdata()
4265 flow_ring_node->active)); in dhd_bus_txdata()
4271 node_status = flow_ring_node->status; in dhd_bus_txdata()
4278 if (bus->enable_idle_flowring_mgmt) { in dhd_bus_txdata()
4304 DHD_FLOWRING_UNLOCK(flow_ring_node->lock, flags); in dhd_bus_txdata()
4310 queue = &flow_ring_node->queue; /* queue associated with flow ring */ in dhd_bus_txdata()
4312 if ((ret = dhd_flow_queue_enqueue(bus->dhd, queue, txp)) != BCME_OK) in dhd_bus_txdata()
4315 DHD_FLOWRING_UNLOCK(flow_ring_node->lock, flags); in dhd_bus_txdata()
4317 if (flow_ring_node->status) { in dhd_bus_txdata()
4319 __FUNCTION__, flowid, flow_ring_node->status, in dhd_bus_txdata()
4320 flow_ring_node->active)); in dhd_bus_txdata()
4331 DHD_FLOWRING_LOCK(flow_ring_node->lock, flags); in dhd_bus_txdata()
4333 if ((ret = dhd_flow_queue_enqueue(bus->dhd, queue, txp_pend)) != BCME_OK) { in dhd_bus_txdata()
4334 DHD_FLOWRING_UNLOCK(flow_ring_node->lock, flags); in dhd_bus_txdata()
4339 DHD_FLOWRING_UNLOCK(flow_ring_node->lock, flags); in dhd_bus_txdata()
4346 PKTCFREE(bus->dhd->osh, txp, TRUE); in dhd_bus_txdata()
4353 dhd_txflowcontrol(bus->dhd, ALL_INTERFACES, ON); in dhd_bus_stop_queue()
4363 * Note that we try to re-start network interface only in dhd_bus_start_queue()
4367 if (dhd_prot_check_tx_resource(bus->dhd)) { in dhd_bus_start_queue()
4372 dhd_txflowcontrol(bus->dhd, ALL_INTERFACES, OFF); in dhd_bus_start_queue()
4378 dhd_bus_t *bus = dhd->bus; in dhd_bus_console_in()
4382 if (bus->console_addr == 0) in dhd_bus_console_in()
4386 if (bus->dhd->dongle_reset) { in dhd_bus_console_in()
4391 addr = bus->console_addr + OFFSETOF(hnd_cons_t, cbuf_idx); in dhd_bus_console_in()
4397 addr = bus->console_addr + OFFSETOF(hnd_cons_t, cbuf); in dhd_bus_console_in()
4402 addr = bus->console_addr + OFFSETOF(hnd_cons_t, vcons_in); in dhd_bus_console_in()
4420 dhd_rx_frame(bus->dhd, ifidx, pkt, pkt_count, 0); in dhd_bus_rx_frame()
4433 if (bus->is_linkdown) { in dhdpcie_bus_wtcm8()
4445 if (bus->is_linkdown) { in dhdpcie_bus_rtcm8()
4447 data = (uint8)-1; in dhdpcie_bus_rtcm8()
4457 if (bus->is_linkdown) { in dhdpcie_bus_wtcm32()
4467 if (bus->is_linkdown) { in dhdpcie_bus_wtcm16()
4478 if (bus->is_linkdown) { in dhdpcie_bus_wtcm64()
4491 if (bus->is_linkdown) { in dhdpcie_bus_rtcm16()
4493 data = (uint16)-1; in dhdpcie_bus_rtcm16()
4504 if (bus->is_linkdown) { in dhdpcie_bus_rtcm32()
4506 data = (uint32)-1; in dhdpcie_bus_rtcm32()
4518 if (bus->is_linkdown) { in dhdpcie_bus_rtcm64()
4520 data = (uint64)-1; in dhdpcie_bus_rtcm64()
4537 if (bus->is_linkdown) { in dhd_bus_cmn_writeshared()
4542 if (MULTIBP_ENAB(bus->sih)) { in dhd_bus_cmn_writeshared()
4628 addr = bus->ring_sh[ringid].ring_state_w; in dhd_bus_cmn_writeshared()
4633 addr = bus->ring_sh[ringid].ring_state_r; in dhd_bus_cmn_writeshared()
4638 addr = bus->d2h_mb_data_ptr_addr; in dhd_bus_cmn_writeshared()
4643 addr = bus->h2d_mb_data_ptr_addr; in dhd_bus_cmn_writeshared()
4673 if (MULTIBP_ENAB(bus->sih)) { in dhd_bus_cmn_writeshared()
4684 if (MULTIBP_ENAB(bus->sih)) { in dhd_bus_cmn_readshared()
4689 addr = bus->ring_sh[ringid].ring_state_w; in dhd_bus_cmn_readshared()
4694 addr = bus->ring_sh[ringid].ring_state_r; in dhd_bus_cmn_readshared()
4704 addr = bus->h2d_mb_data_ptr_addr; in dhd_bus_cmn_readshared()
4709 addr = bus->d2h_mb_data_ptr_addr; in dhd_bus_cmn_readshared()
4726 if (MULTIBP_ENAB(bus->sih)) { in dhd_bus_cmn_readshared()
4733 return ((pciedev_shared_t*)bus->pcie_sh)->flags; in dhd_bus_get_sharedflags()
4750 dhd_bus_t *bus = dhdp->bus; in dhd_bus_iovar_op()
4781 if (MULTIBP_ENAB(bus->sih)) { in dhd_bus_iovar_op()
4782 if (vi->flags & DHD_IOVF_PWRREQ_BYPASS) { in dhd_bus_iovar_op()
4797 if (vi->type == IOVT_VOID) in dhd_bus_iovar_op()
4799 else if (vi->type == IOVT_BUFFER) in dhd_bus_iovar_op()
4805 actionid = set ? IOV_SVAL(vi->varid) : IOV_GVAL(vi->varid); in dhd_bus_iovar_op()
4810 * this includes dongle re-attach which initializes pwr_req_ref count to 0 and in dhd_bus_iovar_op()
4811 * causes a pwr_req_ref count mismatch in the pwr req clear function and a hang. in dhd_bus_iovar_op()
4817 if (MULTIBP_ENAB(bus->sih)) { in dhd_bus_iovar_op()
4818 if (vi->flags & DHD_IOVF_PWRREQ_BYPASS) { in dhd_bus_iovar_op()
4847 delta[ctr] = curr[ctr] + (~0U - prev[ctr]); in dhd_buzzz_dump_cntrs()
4849 delta[ctr] = (curr[ctr] - prev[ctr]); in dhd_buzzz_dump_cntrs()
4881 delta = curr + (~0U - prev); in dhd_bcm_buzzz_dump_cntrs6()
4883 delta = (curr - prev); in dhd_bcm_buzzz_dump_cntrs6()
4896 delta.u8[i] = curr.u8[i] + (max8 - prev.u8[i]); in dhd_bcm_buzzz_dump_cntrs6()
4898 delta.u8[i] = (curr.u8[i] - prev.u8[i]); in dhd_bcm_buzzz_dump_cntrs6()
4909 delta = curr + (max8 - prev); in dhd_bcm_buzzz_dump_cntrs6()
4911 delta = (curr - prev); in dhd_bcm_buzzz_dump_cntrs6()
4916 instrcnt = cyccnt - (cm3_cnts.u8[0] + cm3_cnts.u8[1] + cm3_cnts.u8[2] in dhd_bcm_buzzz_dump_cntrs6()
4932 if (buzzz->counters == 6) { in dhd_buzzz_dump_log()
4936 bytes += dhd_buzzz_dump_cntrs(p, core, log, buzzz->counters); in dhd_buzzz_dump_log()
4937 log += buzzz->counters; /* (N x 32bit) CR4=3, CA7=4 */ in dhd_buzzz_dump_log()
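
The repeated branches above take a delta from free-running hardware counters while allowing for a wrap between samples (if curr < prev, add the distance to the counter maximum). With unsigned arithmetic a plain modular subtraction already yields the wrapped distance, which the small sketch below demonstrates for 32-bit and 8-bit counters:

    #include <stdint.h>
    #include <stdio.h>
    #include <assert.h>

    /* unsigned subtraction is modular, so curr - prev is the wrapped delta
       as long as less than one full wrap happened between samples */
    static uint32_t delta_u32(uint32_t curr, uint32_t prev) { return curr - prev; }
    static uint8_t  delta_u8(uint8_t curr, uint8_t prev)    { return (uint8_t)(curr - prev); }

    int main(void)
    {
        /* counter wrapped: prev near the top, curr small again */
        assert(delta_u32(5u, 0xFFFFFFFBu) == 10u);
        assert(delta_u8(3u, 250u) == 9u);
        printf("wrap-safe deltas ok\n");
        return 0;
    }
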
4995 log_sz = buzzz_p->log_sz; in dhd_buzzz_dump()
4997 part1 = ((uint32)buzzz_p->cur - (uint32)buzzz_p->log) / log_sz; in dhd_buzzz_dump()
4999 if (buzzz_p->wrap == TRUE) { in dhd_buzzz_dump()
5000 part2 = ((uint32)buzzz_p->end - (uint32)buzzz_p->cur) / log_sz; in dhd_buzzz_dump()
5001 total = (buzzz_p->buffer_sz - BCM_BUZZZ_LOGENTRY_MAXSZ) / log_sz; in dhd_buzzz_dump()
5004 total = buzzz_p->count; in dhd_buzzz_dump()
5016 log = (void*)((size_t)buffer_p + (buzzz_p->cur - buzzz_p->log)); in dhd_buzzz_dump()
5017 while (part2--) { /* from cur to end : part2 */ in dhd_buzzz_dump()
5021 log = (void*)((size_t)log + buzzz_p->log_sz); in dhd_buzzz_dump()
5026 while (part1--) { in dhd_buzzz_dump()
5030 log = (void*)((size_t)log + buzzz_p->log_sz); in dhd_buzzz_dump()
5044 if (bus->dhd->busstate != DHD_BUS_DATA) { in dhd_buzzz_dump_dngl()
5047 if ((page_p = (char *)MALLOC(bus->dhd->osh, 4096)) == NULL) { in dhd_buzzz_dump_dngl()
5051 if ((buzzz_p = MALLOC(bus->dhd->osh, sizeof(bcm_buzzz_t))) == NULL) { in dhd_buzzz_dump_dngl()
5062 sh = bus->pcie_sh; in dhd_buzzz_dump_dngl()
5064 DHD_INFO(("%s buzzz:%08x\n", __FUNCTION__, sh->buzz_dbg_ptr)); in dhd_buzzz_dump_dngl()
5066 if (sh->buzz_dbg_ptr != 0U) { /* Fetch and display dongle BUZZZ Trace */ in dhd_buzzz_dump_dngl()
5068 dhdpcie_bus_membytes(bus, FALSE, (ulong)sh->buzz_dbg_ptr, in dhd_buzzz_dump_dngl()
5074 (int)sh->buzz_dbg_ptr, in dhd_buzzz_dump_dngl()
5075 (int)buzzz_p->log, (int)buzzz_p->cur, (int)buzzz_p->end, in dhd_buzzz_dump_dngl()
5076 buzzz_p->count, buzzz_p->status, buzzz_p->wrap, in dhd_buzzz_dump_dngl()
5077 buzzz_p->cpu_idcode, buzzz_p->counters, buzzz_p->group, in dhd_buzzz_dump_dngl()
5078 buzzz_p->buffer_sz, buzzz_p->log_sz); in dhd_buzzz_dump_dngl()
5080 if (buzzz_p->count == 0) { in dhd_buzzz_dump_dngl()
5086 buffer_p = MALLOC(bus->dhd->osh, buzzz_p->buffer_sz); in dhd_buzzz_dump_dngl()
5093 dhdpcie_bus_membytes(bus, FALSE, (uint32)buzzz_p->log, /* Trace */ in dhd_buzzz_dump_dngl()
5094 (uint8 *)buffer_p, buzzz_p->buffer_sz); in dhd_buzzz_dump_dngl()
5100 for (ctr = 0; ctr < buzzz_p->counters; ctr++) { in dhd_buzzz_dump_dngl()
5101 printf("<Evt[%02X]> ", buzzz_p->eventid[ctr]); in dhd_buzzz_dump_dngl()
5108 printf("----- End of dongle BCM BUZZZ Trace -----\n\n"); in dhd_buzzz_dump_dngl()
5110 MFREE(bus->dhd->osh, buffer_p, buzzz_p->buffer_sz); buffer_p = NULL; in dhd_buzzz_dump_dngl()
5115 if (page_p) MFREE(bus->dhd->osh, page_p, 4096); in dhd_buzzz_dump_dngl()
5116 if (buzzz_p) MFREE(bus->dhd->osh, buzzz_p, sizeof(bcm_buzzz_t)); in dhd_buzzz_dump_dngl()
5117 if (buffer_p) MFREE(bus->dhd->osh, buffer_p, buzzz_p->buffer_sz); in dhd_buzzz_dump_dngl()
5123 #define PCIE_GEN2(sih) ((BUSTYPE((sih)->bustype) == PCI_BUS) && \
5124 ((sih)->buscoretype == PCIE2_CORE_ID))
5135 si_corereg(bus->sih, bus->sih->buscoreidx, PCIE2_MDIO_CONTROL, ~0, mdioctrl); in pcie2_mdiosetblock()
5136 si_corereg(bus->sih, bus->sih->buscoreidx, PCIE2_MDIO_WR_DATA, ~0, mdiodata); in pcie2_mdiosetblock()
5141 uint mdioctrl_read = si_corereg(bus->sih, bus->sih->buscoreidx, PCIE2_MDIO_WR_DATA, in pcie2_mdiosetblock()
5171	 * Increase SSReset de-assert time to 8ms,
5172	 * since re-scan takes longer on 4378B0.
5180 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_enum_reg_init()
5185 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_enum_reg_init()
5188 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_enum_reg_init()
5190 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_enum_reg_init()
5194 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_enum_reg_init()
5197 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_enum_reg_init()
5200 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_enum_reg_init()
5202 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_enum_reg_init()
5206 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_enum_reg_init()
5209 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_enum_reg_init()
5211 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_enum_reg_init()
5215 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_enum_reg_init()
5218 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_enum_reg_init()
5220 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_enum_reg_init()
5233 if (PCIE_ENUM_RESET_WAR_ENAB(bus->sih->buscorerev)) { in dhd_bus_perform_flr()
5234 if (bus->pcie_mailbox_mask != 0) { in dhd_bus_perform_flr()
5242 val = OSL_PCI_READ_CONFIG(bus->osh, PCIE_CFG_DEVICE_CAPABILITY, sizeof(val)); in dhd_bus_perform_flr()
5251 else if ((bus->sih->chip == CYW55560_CHIP_ID) || (bus->sih->chip == BCM4375_CHIP_ID)) { in dhd_bus_perform_flr()
5263 val = OSL_PCI_READ_CONFIG(bus->osh, PCIE_CFG_DEVICE_CONTROL, sizeof(val)); in dhd_bus_perform_flr()
5267 OSL_PCI_WRITE_CONFIG(bus->osh, PCIE_CFG_DEVICE_CONTROL, sizeof(val), val); in dhd_bus_perform_flr()
5276 val = OSL_PCI_READ_CONFIG(bus->osh, PCIE_CFG_SUBSYSTEM_CONTROL, sizeof(val)); in dhd_bus_perform_flr()
5282 OSL_PCI_WRITE_CONFIG(bus->osh, PCIE_CFG_SUBSYSTEM_CONTROL, sizeof(val), val); in dhd_bus_perform_flr()
5284 val = OSL_PCI_READ_CONFIG(bus->osh, PCIE_CFG_SUBSYSTEM_CONTROL, sizeof(val)); in dhd_bus_perform_flr()
5292 val = OSL_PCI_READ_CONFIG(bus->osh, PCIE_CFG_DEVICE_CONTROL, sizeof(val)); in dhd_bus_perform_flr()
5296 OSL_PCI_WRITE_CONFIG(bus->osh, PCIE_CFG_DEVICE_CONTROL, sizeof(val), val); in dhd_bus_perform_flr()
5302 val = OSL_PCI_READ_CONFIG(bus->osh, PCIE_CFG_SUBSYSTEM_CONTROL, sizeof(val)); in dhd_bus_perform_flr()
5314 bus->flr_force_fail = FALSE; in dhd_bus_perform_flr()
5342 uint dar_clk_ctrl_status_reg = DAR_CLK_CTRL(bus->sih->buscorerev); in dhd_bus_perform_bp_reset()
5349 DHD_INFO(("Disable ASPM: Clear bits(1-0) of PCIECFGREG_LINK_STATUS_CTRL(0x%x)\n", in dhd_bus_perform_bp_reset()
5351 val = OSL_PCI_READ_CONFIG(bus->osh, PCIECFGREG_LINK_STATUS_CTRL, sizeof(val)); in dhd_bus_perform_bp_reset()
5355 OSL_PCI_WRITE_CONFIG(bus->osh, PCIECFGREG_LINK_STATUS_CTRL, sizeof(val), val); in dhd_bus_perform_bp_reset()
5364 val = OSL_PCI_READ_CONFIG(bus->osh, PCIECFGREG_SPROM_CTRL, sizeof(val)); in dhd_bus_perform_bp_reset()
5368 OSL_PCI_WRITE_CONFIG(bus->osh, PCIECFGREG_SPROM_CTRL, sizeof(val), val); in dhd_bus_perform_bp_reset()
5379 val = OSL_PCI_READ_CONFIG(bus->osh, PCIECFGREG_SPROM_CTRL, sizeof(val)); in dhd_bus_perform_bp_reset()
5397 val = si_corereg(bus->sih, bus->sih->buscoreidx, in dhd_bus_perform_bp_reset()
5415 val = OSL_PCI_READ_CONFIG(bus->osh, PCIECFGREG_LINK_STATUS_CTRL, sizeof(val)); in dhd_bus_perform_bp_reset()
5419 OSL_PCI_WRITE_CONFIG(bus->osh, PCIECFGREG_LINK_STATUS_CTRL, sizeof(val), val); in dhd_bus_perform_bp_reset()
5430 dhd_bus_t *bus = dhdp->bus; in dhd_bus_devreset()
5442 bus->dhd->up = FALSE; in dhd_bus_devreset()
5444 /* wait for other contexts to finish -- if required a call in dhd_bus_devreset()
5448 dhdpcie_advertise_bus_cleanup(bus->dhd); in dhd_bus_devreset()
5450 if (bus->dhd->busstate != DHD_BUS_DOWN) { in dhd_bus_devreset()
5452 atomic_set(&bus->dhd->block_bus, TRUE); in dhd_bus_devreset()
5453 dhd_flush_rx_tx_wq(bus->dhd); in dhd_bus_devreset()
5458 dhd_bus_oob_intr_set(bus->dhd, FALSE); in dhd_bus_devreset()
5459 dhd_bus_oob_intr_unregister(bus->dhd); in dhd_bus_devreset()
5463 if (bus->intr) { in dhd_bus_devreset()
5464 DHD_BUS_LOCK(bus->bus_lock, flags_bus); in dhd_bus_devreset()
5466 DHD_BUS_UNLOCK(bus->bus_lock, flags_bus); in dhd_bus_devreset()
5478 atomic_set(&bus->dhd->block_bus, FALSE); in dhd_bus_devreset()
5485 * DMA-unmapped memory access from the devices which use the in dhd_bus_devreset()
5487	 * possible that the dongle can access DMA-unmapped memory after	in dhd_bus_devreset()
5500 atomic_set(&bus->dhd->block_bus, FALSE); in dhd_bus_devreset()
5505 DHD_GENERAL_LOCK(bus->dhd, flags); in dhd_bus_devreset()
5507 bus->dhd->busstate = DHD_BUS_DOWN; in dhd_bus_devreset()
5508 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhd_bus_devreset()
5510 atomic_set(&bus->dhd->block_bus, FALSE); in dhd_bus_devreset()
5513 if (bus->intr) { in dhd_bus_devreset()
5518 dhd_bus_oob_intr_set(bus->dhd, FALSE); in dhd_bus_devreset()
5519 dhd_bus_oob_intr_unregister(bus->dhd); in dhd_bus_devreset()
5521 dhd_dpc_kill(bus->dhd); in dhd_bus_devreset()
5522 if (!bus->no_bus_init) { in dhd_bus_devreset()
5535	 * to attempt DMA-unmapped memory access from the devices	in dhd_bus_devreset()
5538	 * that the dongle can access DMA-unmapped memory after calling	in dhd_bus_devreset()
5546 bus->no_bus_init = FALSE; in dhd_bus_devreset()
5558 bus->dhd->dongle_reset = TRUE; in dhd_bus_devreset()
5562 if (bus->dhd->busstate == DHD_BUS_DOWN) { in dhd_bus_devreset()
5566 while (--retry) { in dhd_bus_devreset()
5586 bus->is_linkdown = 0; in dhd_bus_devreset()
5587 bus->cto_triggered = 0; in dhd_bus_devreset()
5589 bus->read_shm_fail = FALSE; in dhd_bus_devreset()
5619 bus->dhd->dongle_reset = FALSE; in dhd_bus_devreset()
5622 dhd_irq_set_affinity(bus->dhd, cpumask_of(1)); in dhd_bus_devreset()
5632 bus->dhd->up = TRUE; in dhd_bus_devreset()
5634 if (bus->dhd->dhd_watchdog_ms_backup) { in dhd_bus_devreset()
5637 dhd_os_wd_timer(bus->dhd, bus->dhd->dhd_watchdog_ms_backup); in dhd_bus_devreset()
5648 DHD_GENERAL_LOCK(bus->dhd, flags); in dhd_bus_devreset()
5650 bus->dhd->busstate = DHD_BUS_DOWN; in dhd_bus_devreset()
5651 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhd_bus_devreset()
5676 si_corereg(bus->sih, bus->sih->buscoreidx, PCIE2_MDIO_CONTROL, ~0, mdio_ctrl); in pcie2_mdioop()
5680 si_corereg(bus->sih, bus->sih->buscoreidx, PCIE2_MDIO_WR_DATA, ~0, in pcie2_mdioop()
5687 uint done_val = si_corereg(bus->sih, bus->sih->buscoreidx, reg32, 0, 0); in pcie2_mdioop()
5690 *val = si_corereg(bus->sih, bus->sih->buscoreidx, in pcie2_mdioop()
5699 return -1; in pcie2_mdioop()
5703 /* si_backplane_access() manages a shared resource - BAR0 mapping, hence its
5713 * spinlock-based.
5724 DHD_BACKPLANE_ACCESS_LOCK(bus->backplane_access_lock, flags); in serialized_backplane_access()
5725 ret = si_backplane_access(bus->sih, addr, size, val, read); in serialized_backplane_access()
5726 DHD_BACKPLANE_ACCESS_UNLOCK(bus->backplane_access_lock, flags); in serialized_backplane_access()
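
As the comment fragment above notes, serialized_backplane_access() wraps every si_backplane_access() call in a lock because the BAR0 window it reprograms is a shared resource. The pattern, illustrated here in user space with a POSIX spinlock (the window/register manipulation is a made-up placeholder, not the real access):

#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

/* Shared "window" state that every accessor reprograms before use,
 * standing in for the BAR0 mapping the driver comment refers to. */
static struct {
    pthread_spinlock_t lock;
    uint32_t window_base;
} bp;

/* Serialize: reprogram the shared window, then do the access, under one lock. */
static uint32_t backplane_read(uint32_t addr)
{
    uint32_t val;

    pthread_spin_lock(&bp.lock);
    bp.window_base = addr & ~0xFFFu;            /* select the 4 KB window */
    val = bp.window_base + (addr & 0xFFFu);     /* placeholder for the real read */
    pthread_spin_unlock(&bp.lock);
    return val;
}

int main(void)
{
    pthread_spin_init(&bp.lock, PTHREAD_PROCESS_PRIVATE);
    printf("0x%x\n", backplane_read(0x18001234));
    pthread_spin_destroy(&bp.lock);
    return 0;
}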
5735 d2h_support = dhd->dma_d2h_ring_upd_support ? 1 : 0; in dhdpcie_get_dma_ring_indices()
5736 h2d_support = dhd->dma_h2d_ring_upd_support ? 1 : 0; in dhdpcie_get_dma_ring_indices()
5745 if (dhd->busstate == DHD_BUS_DOWN) { in dhdpcie_set_dma_ring_indices()
5750 dhd->dma_d2h_ring_upd_support = (int_val & 1) ? TRUE : FALSE; in dhdpcie_set_dma_ring_indices()
5751 dhd->dma_h2d_ring_upd_support = (int_val & 2) ? TRUE : FALSE; in dhdpcie_set_dma_ring_indices()
5752 dhd->dma_ring_upd_overwrite = TRUE; in dhdpcie_set_dma_ring_indices()
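
dhdpcie_get_dma_ring_indices() and dhdpcie_set_dma_ring_indices() pack the two capabilities into one integer: bit 0 for D2H ring-update support and bit 1 for H2D, as the bit tests above show. A tiny standalone encoder/decoder with that layout (macro names are illustrative):

#include <stdbool.h>
#include <stdio.h>

#define DMA_RING_D2H_BIT 0x1   /* bit 0: DMA'd D2H write indices supported */
#define DMA_RING_H2D_BIT 0x2   /* bit 1: DMA'd H2D write indices supported */

static int encode_ring_indices(bool d2h, bool h2d)
{
    return (d2h ? DMA_RING_D2H_BIT : 0) | (h2d ? DMA_RING_H2D_BIT : 0);
}

static void decode_ring_indices(int v, bool *d2h, bool *h2d)
{
    *d2h = (v & DMA_RING_D2H_BIT) != 0;
    *h2d = (v & DMA_RING_H2D_BIT) != 0;
}

int main(void)
{
    bool d2h, h2d;
    int v = encode_ring_indices(true, false);   /* 0x1: only D2H supported */
    decode_ring_indices(v, &d2h, &h2d);
    printf("val=0x%x d2h=%d h2d=%d\n", v, d2h, h2d);
    return 0;
}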
5800 if (bus->dhd->dongle_reset && !(actionid == IOV_SVAL(IOV_DEVRESET) || in dhdpcie_bus_doiovar()
5813 si_corereg(bus->sih, bus->sih->buscoreidx, OFFSETOF(sbpcieregs_t, configaddr), ~0, in dhdpcie_bus_doiovar()
5815 si_corereg(bus->sih, bus->sih->buscoreidx, OFFSETOF(sbpcieregs_t, configdata), ~0, in dhdpcie_bus_doiovar()
5820 si_corereg(bus->sih, bus->sih->buscoreidx, OFFSETOF(sbpcieregs_t, configaddr), ~0, in dhdpcie_bus_doiovar()
5822 int_val = si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_bus_doiovar()
5828 si_corereg(bus->sih, bus->sih->buscoreidx, int_val, ~0, int_val2); in dhdpcie_bus_doiovar()
5874 addr = sdreg.offset | SI_ENUM_BASE(bus->sih); in dhdpcie_bus_doiovar()
5894 addr = sdreg.offset | SI_ENUM_BASE(bus->sih); in dhdpcie_bus_doiovar()
5907 if (!PCIE_GEN2(bus->sih)) { in dhdpcie_bus_doiovar()
5923 if (!PCIE_GEN2(bus->sih)) { in dhdpcie_bus_doiovar()
5934 int_val = si_corereg(bus->sih, bus->sih->buscoreidx, int_val, 0, 0); in dhdpcie_bus_doiovar()
5939 OSL_PCI_WRITE_CONFIG(bus->osh, int_val, 4, int_val2); in dhdpcie_bus_doiovar()
5943 int_val = OSL_PCI_READ_CONFIG(bus->osh, int_val, 4); in dhdpcie_bus_doiovar()
5956 if (dmaxfer->version != DHD_DMAXFER_VERSION) in dhdpcie_bus_doiovar()
5958 if (dmaxfer->length != sizeof(dma_xfer_info_t)) { in dhdpcie_bus_doiovar()
5962 bcmerror = dhdpcie_bus_dmaxfer_req(bus, dmaxfer->num_bytes, in dhdpcie_bus_doiovar()
5963 dmaxfer->src_delay, dmaxfer->dest_delay, in dhdpcie_bus_doiovar()
5964 dmaxfer->type, dmaxfer->core_num, in dhdpcie_bus_doiovar()
5965 dmaxfer->should_wait); in dhdpcie_bus_doiovar()
5967 if (dmaxfer->should_wait && bcmerror >= 0) { in dhdpcie_bus_doiovar()
5968 bcmerror = dhdmsgbuf_dmaxfer_status(bus->dhd, dmaxfer); in dhdpcie_bus_doiovar()
5977 if (dmaxfer->version != DHD_DMAXFER_VERSION) in dhdpcie_bus_doiovar()
5979 if (dmaxfer->length != sizeof(dma_xfer_info_t)) { in dhdpcie_bus_doiovar()
5982 bcmerror = dhdmsgbuf_dmaxfer_status(bus->dhd, dmaxfer); in dhdpcie_bus_doiovar()
5987 int_val = (bus->dhd->busstate == DHD_BUS_SUSPEND) ? 1 : 0; in dhdpcie_bus_doiovar()
6000 ret = dhd_os_busbusy_wait_condition(bus->dhd, in dhdpcie_bus_doiovar()
6001 &bus->dhd->dhd_bus_busy_state, DHD_BUS_BUSY_IN_DHD_IOVAR); in dhdpcie_bus_doiovar()
6004 __FUNCTION__, bus->dhd->dhd_bus_busy_state)); in dhdpcie_bus_doiovar()
6008 DHD_GENERAL_LOCK(bus->dhd, flags); in dhdpcie_bus_doiovar()
6009 DHD_BUS_BUSY_SET_SUSPEND_IN_PROGRESS(bus->dhd); in dhdpcie_bus_doiovar()
6010 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhdpcie_bus_doiovar()
6017 DHD_GENERAL_LOCK(bus->dhd, flags); in dhdpcie_bus_doiovar()
6018 DHD_BUS_BUSY_CLEAR_SUSPEND_IN_PROGRESS(bus->dhd); in dhdpcie_bus_doiovar()
6019 dhd_os_busbusy_wake(bus->dhd); in dhdpcie_bus_doiovar()
6020 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhdpcie_bus_doiovar()
6023 DHD_GENERAL_LOCK(bus->dhd, flags); in dhdpcie_bus_doiovar()
6024 DHD_BUS_BUSY_SET_RESUME_IN_PROGRESS(bus->dhd); in dhdpcie_bus_doiovar()
6025 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhdpcie_bus_doiovar()
6029 DHD_GENERAL_LOCK(bus->dhd, flags); in dhdpcie_bus_doiovar()
6030 DHD_BUS_BUSY_CLEAR_RESUME_IN_PROGRESS(bus->dhd); in dhdpcie_bus_doiovar()
6031 dhd_os_busbusy_wake(bus->dhd); in dhdpcie_bus_doiovar()
6032 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhdpcie_bus_doiovar()
6037 int_val = (int32)bus->ramsize; in dhdpcie_bus_doiovar()
6057 dsize = set ? plen - (2 * sizeof(int)) : len; in dhdpcie_bus_doiovar()
6069 if (si_setcore(bus->sih, ARMCR4_CORE_ID, 0) || in dhdpcie_bus_doiovar()
6070 si_setcore(bus->sih, SYSMEM_CORE_ID, 0)) { in dhdpcie_bus_doiovar()
6072 if (set && address == bus->dongle_ram_base) { in dhdpcie_bus_doiovar()
6073 bus->resetinstr = *(((uint32*)params) + 2); in dhdpcie_bus_doiovar()
6077 if ((bus->orig_ramsize) && in dhdpcie_bus_doiovar()
6078 ((address > bus->orig_ramsize) || (address + size > bus->orig_ramsize))) in dhdpcie_bus_doiovar()
6081 si_socdevram(bus->sih, FALSE, &enable, &protect, &remap); in dhdpcie_bus_doiovar()
6084 __FUNCTION__, bus->orig_ramsize, size, address)); in dhdpcie_bus_doiovar()
6092 uint32 devramsize = si_socdevram_size(bus->sih); in dhdpcie_bus_doiovar()
6103 address -= SOCDEVRAM_ARM_ADDR; in dhdpcie_bus_doiovar()
6134 uint32 *p = ddo->val; in dhdpcie_bus_doiovar()
6135 const uint max_offset = 4096 - 1; /* one core contains max 4096/4 registers */ in dhdpcie_bus_doiovar()
6144 ddo->n_bytes = 0; in dhdpcie_bus_doiovar()
6146 if (si_setcoreidx(bus->sih, ddi.index) == NULL) { in dhdpcie_bus_doiovar()
6150 ddo->address = si_addrspace(bus->sih, CORE_SLAVE_PORT_0, CORE_BASE_ADDR_0); in dhdpcie_bus_doiovar()
6151 ddo->address += ddi.offset; // BP address at which this dump starts in dhdpcie_bus_doiovar()
6153 ddo->id = si_coreid(bus->sih); in dhdpcie_bus_doiovar()
6154 ddo->rev = si_corerev(bus->sih); in dhdpcie_bus_doiovar()
6157 sizeof(dump_dongle_out_t) + ddo->n_bytes < (uint)len) { in dhdpcie_bus_doiovar()
6158 *p++ = si_corereg(bus->sih, ddi.index, ddi.offset, 0, 0); in dhdpcie_bus_doiovar()
6160 ddo->n_bytes += sizeof(uint32); in dhdpcie_bus_doiovar()
6174 strncpy(arg, bus->dhd->fw_capabilities, in dhdpcie_bus_doiovar()
6175 MIN(strlen(bus->dhd->fw_capabilities), (size_t)len)); in dhdpcie_bus_doiovar()
6176 ((char*)arg)[len - 1] = '\0'; in dhdpcie_bus_doiovar()
6183 DHD_PERIM_UNLOCK(bus->dhd); in dhdpcie_bus_doiovar()
6185 debugger_init((void *) bus, &bus_ops, int_val, SI_ENUM_BASE(bus->sih)); in dhdpcie_bus_doiovar()
6189 DHD_PERIM_LOCK(bus->dhd); in dhdpcie_bus_doiovar()
6205 int_val = (int32)bus->ramsize; in dhdpcie_bus_doiovar()
6210 bus->ramsize = int_val; in dhdpcie_bus_doiovar()
6211 bus->orig_ramsize = int_val; in dhdpcie_bus_doiovar()
6215 int_val = (int32)bus->dongle_ram_base; in dhdpcie_bus_doiovar()
6229 bool_val = bus->sleep_allowed; in dhdpcie_bus_doiovar()
6234 bus->sleep_allowed = bool_val; in dhdpcie_bus_doiovar()
6238 int_val = bus->dhd->dongle_isolation; in dhdpcie_bus_doiovar()
6243 bus->dhd->dongle_isolation = bool_val; in dhdpcie_bus_doiovar()
6247 int_val = bus->ltrsleep_on_unload; in dhdpcie_bus_doiovar()
6252 bus->ltrsleep_on_unload = bool_val; in dhdpcie_bus_doiovar()
6259 bcmerror = dhd_prot_ringupd_dump(bus->dhd, &dump_b); in dhdpcie_bus_doiovar()
6264 int_val = dhdpcie_get_dma_ring_indices(bus->dhd); in dhdpcie_bus_doiovar()
6269 bcmerror = dhdpcie_set_dma_ring_indices(bus->dhd, int_val); in dhdpcie_bus_doiovar()
6273 int_val = dhd_prot_metadata_dbg_get(bus->dhd); in dhdpcie_bus_doiovar()
6277 dhd_prot_metadata_dbg_set(bus->dhd, (int_val != 0)); in dhdpcie_bus_doiovar()
6281 int_val = dhd_prot_metadatalen_get(bus->dhd, TRUE); in dhdpcie_bus_doiovar()
6290 dhd_prot_metadatalen_set(bus->dhd, int_val, TRUE); in dhdpcie_bus_doiovar()
6294 dhd_prot_txp_threshold(bus->dhd, TRUE, int_val); in dhdpcie_bus_doiovar()
6298 int_val = dhd_prot_txp_threshold(bus->dhd, FALSE, int_val); in dhdpcie_bus_doiovar()
6304 bus->db1_for_mb = TRUE; in dhdpcie_bus_doiovar()
6306 bus->db1_for_mb = FALSE; in dhdpcie_bus_doiovar()
6310 if (bus->db1_for_mb) in dhdpcie_bus_doiovar()
6318 int_val = dhd_prot_metadatalen_get(bus->dhd, FALSE); in dhdpcie_bus_doiovar()
6327 dhd_prot_metadatalen_set(bus->dhd, int_val, FALSE); in dhdpcie_bus_doiovar()
6333 bcmerror = dhd_bus_devreset(bus->dhd, (uint8)int_val); in dhdpcie_bus_doiovar()
6336 bcmerror = dhd_bus_devreset(bus->dhd, (uint8)int_val); in dhdpcie_bus_doiovar()
6339 bcmerror = dhd_bus_perform_flr(bus, bus->flr_force_fail); in dhdpcie_bus_doiovar()
6342 bus->flr_force_fail = TRUE; in dhdpcie_bus_doiovar()
6350 if (bus->dhd->busstate == DHD_BUS_DATA) in dhdpcie_bus_doiovar()
6358 int_val = bus->dhd->flow_prio_map_type; in dhdpcie_bus_doiovar()
6363 int_val = (int32)dhd_update_flow_prio_map(bus->dhd, (uint8)int_val); in dhdpcie_bus_doiovar()
6369 if (!(bus->dhd->op_mode & DHD_FLAG_MFG_MODE)) { in dhdpcie_bus_doiovar()
6370 int_val = bus->idletime; in dhdpcie_bus_doiovar()
6381 bus->idletime = int_val; in dhdpcie_bus_doiovar()
6382 if (bus->idletime) { in dhdpcie_bus_doiovar()
6383 DHD_ENABLE_RUNTIME_PM(bus->dhd); in dhdpcie_bus_doiovar()
6385 DHD_DISABLE_RUNTIME_PM(bus->dhd); in dhdpcie_bus_doiovar()
6405 dhd_prot_init_info_rings(bus->dhd); in dhdpcie_bus_doiovar()
6409 if (bus->dhd->busstate != DHD_BUS_DOWN) { in dhdpcie_bus_doiovar()
6416 bus->dhd->h2d_phase_supported = TRUE; in dhdpcie_bus_doiovar()
6418 bus->dhd->h2d_phase_supported = FALSE; in dhdpcie_bus_doiovar()
6422 int_val = (int32) bus->dhd->h2d_phase_supported; in dhdpcie_bus_doiovar()
6427 if (bus->dhd->busstate != DHD_BUS_DOWN) { in dhdpcie_bus_doiovar()
6434 bus->dhd->force_dongletrap_on_bad_h2d_phase = TRUE; in dhdpcie_bus_doiovar()
6436 bus->dhd->force_dongletrap_on_bad_h2d_phase = FALSE; in dhdpcie_bus_doiovar()
6440 int_val = (int32) bus->dhd->force_dongletrap_on_bad_h2d_phase; in dhdpcie_bus_doiovar()
6445 if (bus->dhd->busstate != DHD_BUS_DOWN) { in dhdpcie_bus_doiovar()
6451 dhd_prot_set_h2d_max_txpost(bus->dhd, (uint16)int_val); in dhdpcie_bus_doiovar()
6455 int_val = dhd_prot_get_h2d_max_txpost(bus->dhd); in dhdpcie_bus_doiovar()
6472 bcmerror = dhd_prot_dump_extended_trap(bus->dhd, &dump_b, FALSE); in dhdpcie_bus_doiovar()
6480 bcmerror = dhd_prot_dump_extended_trap(bus->dhd, &dump_b, TRUE); in dhdpcie_bus_doiovar()
6494 clkreq = dhdpcie_clkreq(bus->dhd->osh, 0, 0); in dhdpcie_bus_doiovar()
6495 aspm = dhdpcie_lcreg(bus->dhd->osh, 0, 0); in dhdpcie_bus_doiovar()
6505 tmp = dhdpcie_lcreg(bus->dhd->osh, 0, 0); in dhdpcie_bus_doiovar()
6506 dhdpcie_lcreg(bus->dhd->osh, PCIE_ASPM_ENAB, in dhdpcie_bus_doiovar()
6509 dhdpcie_clkreq(bus->dhd->osh, 1, ((int_val & 0x100) >> 8)); in dhdpcie_bus_doiovar()
6514 bus->dhd->hang_report = bool_val; in dhdpcie_bus_doiovar()
6516 __FUNCTION__, bus->dhd->hang_report)); in dhdpcie_bus_doiovar()
6520 int_val = (int32)bus->dhd->hang_report; in dhdpcie_bus_doiovar()
6529 if (bus->sih->buscorerev < 19) { in dhdpcie_bus_doiovar()
6533 int_val = (int32)bus->cto_enable; in dhdpcie_bus_doiovar()
6539 if (bus->sih->buscorerev < 19) { in dhdpcie_bus_doiovar()
6543 bus->cto_threshold = (uint32)int_val; in dhdpcie_bus_doiovar()
6548 if (bus->sih->buscorerev < 19) { in dhdpcie_bus_doiovar()
6552 if (bus->cto_threshold) in dhdpcie_bus_doiovar()
6553 int_val = (int32)bus->cto_threshold; in dhdpcie_bus_doiovar()
6568 int_val = bus->hwa_enab_bmap; in dhdpcie_bus_doiovar()
6572 bus->hwa_enab_bmap = (uint8)int_val; in dhdpcie_bus_doiovar()
6575 int_val = bus->idma_enabled; in dhdpcie_bus_doiovar()
6579 bus->idma_enabled = (bool)int_val; in dhdpcie_bus_doiovar()
6582 int_val = bus->ifrm_enabled; in dhdpcie_bus_doiovar()
6586 bus->ifrm_enabled = (bool)int_val; in dhdpcie_bus_doiovar()
6590 dhd_flow_rings_flush(bus->dhd, 0); in dhdpcie_bus_doiovar()
6593 int_val = bus->dar_enabled; in dhdpcie_bus_doiovar()
6597 bus->dar_enabled = (bool)int_val; in dhdpcie_bus_doiovar()
6600 bcmerror = dhd_get_hscb_info(bus->dhd, NULL, (uint32 *)arg); in dhdpcie_bus_doiovar()
6604 bcmerror = dhd_get_hscb_buff(bus->dhd, int_val, int_val2, (void*)arg); in dhdpcie_bus_doiovar()
6610 dhd_prot_hp2p_enable(bus->dhd, TRUE, int_val); in dhdpcie_bus_doiovar()
6614 int_val = dhd_prot_hp2p_enable(bus->dhd, FALSE, int_val); in dhdpcie_bus_doiovar()
6619 dhd_prot_pkt_threshold(bus->dhd, TRUE, int_val); in dhdpcie_bus_doiovar()
6623 int_val = dhd_prot_pkt_threshold(bus->dhd, FALSE, int_val); in dhdpcie_bus_doiovar()
6628 dhd_prot_time_threshold(bus->dhd, TRUE, int_val); in dhdpcie_bus_doiovar()
6632 int_val = dhd_prot_time_threshold(bus->dhd, FALSE, int_val); in dhdpcie_bus_doiovar()
6637 dhd_prot_pkt_expiry(bus->dhd, TRUE, int_val); in dhdpcie_bus_doiovar()
6641 int_val = dhd_prot_pkt_expiry(bus->dhd, FALSE, int_val); in dhdpcie_bus_doiovar()
6645 if (bus->dhd->busstate != DHD_BUS_DOWN) { in dhdpcie_bus_doiovar()
6656 if (bus->dhd->busstate != DHD_BUS_DOWN) { in dhdpcie_bus_doiovar()
6668 if (bus->dhd->busstate != DHD_BUS_DOWN) { in dhdpcie_bus_doiovar()
6672 bus->dhd->extdtxs_in_txcpl = TRUE; in dhdpcie_bus_doiovar()
6674 bus->dhd->extdtxs_in_txcpl = FALSE; in dhdpcie_bus_doiovar()
6678 int_val = bus->dhd->extdtxs_in_txcpl; in dhdpcie_bus_doiovar()
6683 if (bus->dhd->busstate != DHD_BUS_DOWN) { in dhdpcie_bus_doiovar()
6687 bus->dhd->hostrdy_after_init = TRUE; in dhdpcie_bus_doiovar()
6689 bus->dhd->hostrdy_after_init = FALSE; in dhdpcie_bus_doiovar()
6693 int_val = bus->dhd->hostrdy_after_init; in dhdpcie_bus_doiovar()
6710 if (bus->dhd == NULL) { in dhdpcie_bus_lpback_req()
6714 if (bus->dhd->prot == NULL) { in dhdpcie_bus_lpback_req()
6718 if (bus->dhd->busstate != DHD_BUS_DATA) { in dhdpcie_bus_lpback_req()
6722 dhdmsgbuf_lpbk_req(bus->dhd, len); in dhdpcie_bus_lpback_req()
6734 if (bus->is_linkdown && !bus->cto_triggered) { in dhd_bus_dump_dar_registers()
6739 dar_clk_ctrl_reg = (uint32)DAR_CLK_CTRL(bus->sih->buscorerev); in dhd_bus_dump_dar_registers()
6740 dar_pwr_ctrl_reg = (uint32)DAR_PCIE_PWR_CTRL(bus->sih->buscorerev); in dhd_bus_dump_dar_registers()
6741 dar_intstat_reg = (uint32)DAR_INTSTAT(bus->sih->buscorerev); in dhd_bus_dump_dar_registers()
6742 dar_errlog_reg = (uint32)DAR_ERRLOG(bus->sih->buscorerev); in dhd_bus_dump_dar_registers()
6743 dar_erraddr_reg = (uint32)DAR_ERRADDR(bus->sih->buscorerev); in dhd_bus_dump_dar_registers()
6744 dar_pcie_mbint_reg = (uint32)DAR_PCIMailBoxInt(bus->sih->buscorerev); in dhd_bus_dump_dar_registers()
6746 if (bus->sih->buscorerev < 24) { in dhd_bus_dump_dar_registers()
6748 __FUNCTION__, bus->sih->buscorerev)); in dhd_bus_dump_dar_registers()
6752 dar_clk_ctrl_val = si_corereg(bus->sih, bus->sih->buscoreidx, dar_clk_ctrl_reg, 0, 0); in dhd_bus_dump_dar_registers()
6753 dar_pwr_ctrl_val = si_corereg(bus->sih, bus->sih->buscoreidx, dar_pwr_ctrl_reg, 0, 0); in dhd_bus_dump_dar_registers()
6754 dar_intstat_val = si_corereg(bus->sih, bus->sih->buscoreidx, dar_intstat_reg, 0, 0); in dhd_bus_dump_dar_registers()
6755 dar_errlog_val = si_corereg(bus->sih, bus->sih->buscoreidx, dar_errlog_reg, 0, 0); in dhd_bus_dump_dar_registers()
6756 dar_erraddr_val = si_corereg(bus->sih, bus->sih->buscoreidx, dar_erraddr_reg, 0, 0); in dhd_bus_dump_dar_registers()
6757 dar_pcie_mbint_val = si_corereg(bus->sih, bus->sih->buscoreidx, dar_pcie_mbint_reg, 0, 0); in dhd_bus_dump_dar_registers()
6772 if (!bus->dhd->d2h_hostrdy_supported) { in dhd_bus_hostready()
6776 if (bus->is_linkdown) { in dhd_bus_hostready()
6782 dhd_pcie_config_read(bus->osh, PCI_CFG_CMD, sizeof(uint32)))); in dhd_bus_hostready()
6790 si_corereg(bus->sih, bus->sih->buscoreidx, dhd_bus_db1_addr_get(bus), ~0, 0x12345678); in dhd_bus_hostready()
6791	si_corereg(bus->sih, bus->sih->buscoreidx, dhd_bus_db1_addr_get(bus), ~0, 0x12345678);	in dhd_bus_hostready()
6791	bus->hostready_count++;	in dhd_bus_hostready()
6792 DHD_ERROR(("%s: Ring Hostready:%d\n", __FUNCTION__, bus->hostready_count)); in dhd_bus_hostready()
6800 if ((bus->sih->buscorerev == 6) || (bus->sih->buscorerev == 4) || in dhdpcie_bus_clear_intstatus()
6801 (bus->sih->buscorerev == 2)) { in dhdpcie_bus_clear_intstatus()
6806 intstatus = si_corereg(bus->sih, bus->sih->buscoreidx, bus->pcie_mailbox_int, 0, 0); in dhdpcie_bus_clear_intstatus()
6807 si_corereg(bus->sih, bus->sih->buscoreidx, bus->pcie_mailbox_int, bus->def_intmask, in dhdpcie_bus_clear_intstatus()
6828 if (bus->dhd == NULL) { in dhdpcie_bus_suspend()
6832 if (bus->dhd->prot == NULL) { in dhdpcie_bus_suspend()
6837 if (dhd_query_bus_erros(bus->dhd)) { in dhdpcie_bus_suspend()
6841 DHD_GENERAL_LOCK(bus->dhd, flags); in dhdpcie_bus_suspend()
6842 if (!(bus->dhd->busstate == DHD_BUS_DATA || bus->dhd->busstate == DHD_BUS_SUSPEND)) { in dhdpcie_bus_suspend()
6844 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhdpcie_bus_suspend()
6847 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhdpcie_bus_suspend()
6848 if (bus->dhd->dongle_reset) { in dhdpcie_bus_suspend()
6850 return -EIO; in dhdpcie_bus_suspend()
6857 if (state == TRUE && bus->dhd->busstate == DHD_BUS_SUSPEND) { in dhdpcie_bus_suspend()
6860 } else if (state == FALSE && bus->dhd->busstate == DHD_BUS_DATA) { in dhdpcie_bus_suspend()
6871 if (bus->is_linkdown) { in dhdpcie_bus_suspend()
6880 bus->dhd->dhd_watchdog_ms_backup = dhd_watchdog_ms; in dhdpcie_bus_suspend()
6881 if (bus->dhd->dhd_watchdog_ms_backup) { in dhdpcie_bus_suspend()
6884 dhd_os_wd_timer(bus->dhd, 0); in dhdpcie_bus_suspend()
6887 DHD_GENERAL_LOCK(bus->dhd, flags); in dhdpcie_bus_suspend()
6888 if (DHD_BUS_BUSY_CHECK_IN_TX(bus->dhd)) { in dhdpcie_bus_suspend()
6890 bus->dhd->busstate = DHD_BUS_DATA; in dhdpcie_bus_suspend()
6891 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhdpcie_bus_suspend()
6892 return -EBUSY; in dhdpcie_bus_suspend()
6895 bus->last_suspend_start_time = OSL_LOCALTIME_NS(); in dhdpcie_bus_suspend()
6899 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhdpcie_bus_suspend()
6903 DHD_OS_WAKE_LOCK_WAIVE(bus->dhd); in dhdpcie_bus_suspend()
6905 bus->wait_for_d3_ack = 0; in dhdpcie_bus_suspend()
6908 timeleft = dhd_os_d3ack_wait(bus->dhd, &bus->wait_for_d3_ack); in dhdpcie_bus_suspend()
6909 DHD_OS_WAKE_LOCK_RESTORE(bus->dhd); in dhdpcie_bus_suspend()
6912 bus->wait_for_d3_ack = 0; in dhdpcie_bus_suspend()
6914 while (!bus->wait_for_d3_ack && d3_read_retry < MAX_D3_ACK_TIMEOUT) { in dhdpcie_bus_suspend()
6921 DHD_OS_WAKE_LOCK_WAIVE(bus->dhd); in dhdpcie_bus_suspend()
6923 bus->wait_for_d3_ack = 0; in dhdpcie_bus_suspend()
6925 * Send H2D_HOST_D3_INFORM to dongle and mark bus->bus_low_power_state in dhdpcie_bus_suspend()
6934 timeleft = dhd_os_d3ack_wait(bus->dhd, &bus->wait_for_d3_ack); in dhdpcie_bus_suspend()
6937 if (bus->wait_for_d3_ack == 0) { in dhdpcie_bus_suspend()
6939 uint32 intstatus = si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_bus_suspend()
6940 bus->pcie_mailbox_int, 0, 0); in dhdpcie_bus_suspend()
6942 if ((intstatus) && (intstatus != (uint32)-1) && in dhdpcie_bus_suspend()
6943 (timeleft == 0) && (!dhd_query_bus_erros(bus->dhd))) { in dhdpcie_bus_suspend()
6947 dhd_pcie_intr_count_dump(bus->dhd); in dhdpcie_bus_suspend()
6948 dhd_print_tasklet_status(bus->dhd); in dhdpcie_bus_suspend()
6949 if (bus->api.fw_rev >= PCIE_SHARED_VERSION_6 && in dhdpcie_bus_suspend()
6950 !bus->use_mailbox) { in dhdpcie_bus_suspend()
6951 dhd_prot_process_ctrlbuf(bus->dhd); in dhdpcie_bus_suspend()
6955 timeleft = dhd_os_d3ack_wait(bus->dhd, &bus->wait_for_d3_ack); in dhdpcie_bus_suspend()
6959 } /* bus->wait_for_d3_ack was 0 */ in dhdpcie_bus_suspend()
6962 DHD_OS_WAKE_LOCK_RESTORE(bus->dhd); in dhdpcie_bus_suspend()
6966 /* To allow threads that got pre-empted to complete. in dhdpcie_bus_suspend()
6968 while ((active = dhd_os_check_wakelock_all(bus->dhd)) && in dhdpcie_bus_suspend()
6975 if (bus->wait_for_d3_ack) { in dhdpcie_bus_suspend()
6983 if (bus->dhd->dhd_watchdog_ms_backup) { in dhdpcie_bus_suspend()
6986 dhd_os_wd_timer(bus->dhd, in dhdpcie_bus_suspend()
6987 bus->dhd->dhd_watchdog_ms_backup); in dhdpcie_bus_suspend()
7002 bus->wait_for_d3_ack = 0; in dhdpcie_bus_suspend()
7004 DHD_BUS_LOCK(bus->bus_lock, flags_bus); in dhdpcie_bus_suspend()
7005 bus->bus_low_power_state = DHD_BUS_NO_LOW_POWER_STATE; in dhdpcie_bus_suspend()
7009 bus->resume_intr_enable_count++; in dhdpcie_bus_suspend()
7013 * which were disabled in the dhdpcie_bus_isr()-> in dhdpcie_bus_suspend()
7021 DHD_BUS_UNLOCK(bus->bus_lock, flags_bus); in dhdpcie_bus_suspend()
7023 if (bus->use_d0_inform) { in dhdpcie_bus_suspend()
7024 DHD_OS_WAKE_LOCK_WAIVE(bus->dhd); in dhdpcie_bus_suspend()
7027 DHD_OS_WAKE_LOCK_RESTORE(bus->dhd); in dhdpcie_bus_suspend()
7032 DHD_GENERAL_LOCK(bus->dhd, flags); in dhdpcie_bus_suspend()
7033 bus->dhd->busstate = DHD_BUS_DATA; in dhdpcie_bus_suspend()
7036 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhdpcie_bus_suspend()
7041 /* At this time bus->bus_low_power_state will be in dhdpcie_bus_suspend()
7045 if (bus->use_d0_inform && in dhdpcie_bus_suspend()
7046 (bus->api.fw_rev < PCIE_SHARED_VERSION_6)) { in dhdpcie_bus_suspend()
7047 DHD_OS_WAKE_LOCK_WAIVE(bus->dhd); in dhdpcie_bus_suspend()
7049 DHD_OS_WAKE_LOCK_RESTORE(bus->dhd); in dhdpcie_bus_suspend()
7053 if (bus->dhd->dhd_induce_error == DHD_INDUCE_DROP_OOB_IRQ) { in dhdpcie_bus_suspend()
7060 DHD_GENERAL_LOCK(bus->dhd, flags); in dhdpcie_bus_suspend()
7067 * between DPC and suspend context and bus->bus_low_power_state in dhdpcie_bus_suspend()
7070 bus->dhd->d3ackcnt_timeout = 0; in dhdpcie_bus_suspend()
7071 bus->dhd->busstate = DHD_BUS_SUSPEND; in dhdpcie_bus_suspend()
7072 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhdpcie_bus_suspend()
7077 bus->last_suspend_end_time = OSL_LOCALTIME_NS(); in dhdpcie_bus_suspend()
7084 uint32 cur_memdump_mode = bus->dhd->memdump_enabled; in dhdpcie_bus_suspend()
7088 bus->dhd->is_sched_error = !dhd_query_bus_erros(bus->dhd) && in dhdpcie_bus_suspend()
7089 bus->isr_entry_time > bus->last_d3_inform_time && in dhdpcie_bus_suspend()
7090 dhd_bus_query_dpc_sched_errors(bus->dhd); in dhdpcie_bus_suspend()
7091 bus->dhd->d3ack_timeout_occured = TRUE; in dhdpcie_bus_suspend()
7093 bus->dhd->d3ackcnt_timeout++; in dhdpcie_bus_suspend()
7095 __FUNCTION__, bus->dhd->is_sched_error ? in dhdpcie_bus_suspend()
7096 " due to scheduling problem" : "", bus->dhd->d3ackcnt_timeout)); in dhdpcie_bus_suspend()
7098 if (bus->dhd->is_sched_error && cur_memdump_mode == DUMP_MEMFILE_BUGON) { in dhdpcie_bus_suspend()
7105 DHD_BUS_LOCK(bus->bus_lock, flags_bus); in dhdpcie_bus_suspend()
7106 bus->bus_low_power_state = DHD_BUS_NO_LOW_POWER_STATE; in dhdpcie_bus_suspend()
7107 DHD_BUS_UNLOCK(bus->bus_lock, flags_bus); in dhdpcie_bus_suspend()
7108 DHD_GENERAL_LOCK(bus->dhd, flags); in dhdpcie_bus_suspend()
7109 bus->dhd->busstate = DHD_BUS_DATA; in dhdpcie_bus_suspend()
7112 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhdpcie_bus_suspend()
7113 if (!bus->dhd->dongle_trap_occured && in dhdpcie_bus_suspend()
7114 !bus->is_linkdown && in dhdpcie_bus_suspend()
7115 !bus->cto_triggered) { in dhdpcie_bus_suspend()
7119 intstatus = si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_bus_suspend()
7120 bus->pcie_mailbox_int, 0, 0); in dhdpcie_bus_suspend()
7121 if (intstatus == (uint32)-1) { in dhdpcie_bus_suspend()
7123 bus->is_linkdown = 1; in dhdpcie_bus_suspend()
7127 dhd_prot_debug_info_print(bus->dhd); in dhdpcie_bus_suspend()
7131 bus->dhd->memdump_type = DUMP_TYPE_D3_ACK_TIMEOUT; in dhdpcie_bus_suspend()
7141 bus->no_cfg_restore = 1; in dhdpcie_bus_suspend()
7144 dhd_os_check_hang(bus->dhd, 0, -ETIMEDOUT); in dhdpcie_bus_suspend()
7148 dhd_schedule_reset(bus->dhd); in dhdpcie_bus_suspend()
7150 rc = -ETIMEDOUT; in dhdpcie_bus_suspend()
7155 bus->last_resume_start_time = OSL_LOCALTIME_NS(); in dhdpcie_bus_suspend()
7167 si_invalidate_second_bar0win(bus->sih); in dhdpcie_bus_suspend()
7170 DHD_OS_OOB_IRQ_WAKE_UNLOCK(bus->dhd); in dhdpcie_bus_suspend()
7176 DHD_BUS_LOCK(bus->bus_lock, flags_bus); in dhdpcie_bus_suspend()
7178 bus->bus_low_power_state = DHD_BUS_NO_LOW_POWER_STATE; in dhdpcie_bus_suspend()
7179 DHD_BUS_UNLOCK(bus->bus_lock, flags_bus); in dhdpcie_bus_suspend()
7181 if (!rc && bus->dhd->busstate == DHD_BUS_SUSPEND) { in dhdpcie_bus_suspend()
7182 if (bus->use_d0_inform) { in dhdpcie_bus_suspend()
7183 DHD_OS_WAKE_LOCK_WAIVE(bus->dhd); in dhdpcie_bus_suspend()
7185 DHD_OS_WAKE_LOCK_RESTORE(bus->dhd); in dhdpcie_bus_suspend()
7190 DHD_GENERAL_LOCK(bus->dhd, flags); in dhdpcie_bus_suspend()
7191 bus->dhd->busstate = DHD_BUS_DATA; in dhdpcie_bus_suspend()
7193 if (DHD_BUS_BUSY_CHECK_RPM_SUSPEND_DONE(bus->dhd)) { in dhdpcie_bus_suspend()
7194 bus->bus_wake = 1; in dhdpcie_bus_suspend()
7196 wake_up_interruptible(&bus->rpm_queue); in dhdpcie_bus_suspend()
7203 bus->resume_intr_enable_count++; in dhdpcie_bus_suspend()
7207 * which were disabled in the dhdpcie_bus_isr()->dhd_bus_handle_d3_ack(). in dhdpcie_bus_suspend()
7212 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhdpcie_bus_suspend()
7214 if (bus->dhd->dhd_watchdog_ms_backup) { in dhdpcie_bus_suspend()
7217 dhd_os_wd_timer(bus->dhd, bus->dhd->dhd_watchdog_ms_backup); in dhdpcie_bus_suspend()
7220 bus->last_resume_end_time = OSL_LOCALTIME_NS(); in dhdpcie_bus_suspend()
7222 DHD_EDL_RING_TCM_RD_UPDATE(bus->dhd); in dhdpcie_bus_suspend()
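
The suspend path above sends D3_INFORM, waits for D3_ACK with a timeout, and, when the control ring rather than the mailbox is in use, re-polls the device and waits again a bounded number of times before declaring a timeout. A condensed, hypothetical user-space model of that wait/retry shape using a condition variable; the constants and poll_device() stub are placeholders, not the driver's values:

#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>
#include <time.h>

#define MAX_ACK_RETRIES 3
#define ACK_WAIT_MS     200

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t  cond = PTHREAD_COND_INITIALIZER;
static bool ack_received;          /* set by the completion/ISR path */

/* Stand-in for re-polling the device for pending completions. */
static void poll_device(void) { }

static bool wait_for_ack(void)
{
    int attempt;

    pthread_mutex_lock(&lock);
    for (attempt = 0; attempt < MAX_ACK_RETRIES && !ack_received; attempt++) {
        struct timespec deadline;

        clock_gettime(CLOCK_REALTIME, &deadline);
        deadline.tv_nsec += (long)ACK_WAIT_MS * 1000000L;
        deadline.tv_sec  += deadline.tv_nsec / 1000000000L;
        deadline.tv_nsec %= 1000000000L;

        pthread_cond_timedwait(&cond, &lock, &deadline);
        if (!ack_received)
            poll_device();         /* retry path: poll, then wait again */
    }
    pthread_mutex_unlock(&lock);
    return ack_received;
}

int main(void)
{
    printf("ack %s\n", wait_for_ack() ? "received" : "timed out");
    return 0;
}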
7230 ASSERT(bus && bus->sih); in dhdpcie_force_alp()
7232 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_force_alp()
7235 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_force_alp()
7247 ASSERT(bus && bus->sih); in dhdpcie_set_l1_entry_time()
7249 si_corereg(bus->sih, bus->sih->buscoreidx, OFFSETOF(sbpcieregs_t, configaddr), ~0, in dhdpcie_set_l1_entry_time()
7251 reg_val = si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_set_l1_entry_time()
7254 si_corereg(bus->sih, bus->sih->buscoreidx, OFFSETOF(sbpcieregs_t, configdata), ~0, in dhdpcie_set_l1_entry_time()
7263 uint16 chipid = si_chipid(bus->sih); in dhd_apply_d11_war_length()
7283 if (bus->dhd == NULL) { in dhdpcie_bus_dmaxfer_req()
7287 if (bus->dhd->prot == NULL) { in dhdpcie_bus_dmaxfer_req()
7291 if (bus->dhd->busstate != DHD_BUS_DATA) { in dhdpcie_bus_dmaxfer_req()
7303 bus->dmaxfer_complete = FALSE; in dhdpcie_bus_dmaxfer_req()
7304 ret = dhdmsgbuf_dmaxfer_req(bus->dhd, len, srcdelay, destdelay, in dhdpcie_bus_dmaxfer_req()
7310 ret = dhd_os_dmaxfer_wait(bus->dhd, &bus->dmaxfer_complete); in dhdpcie_bus_dmaxfer_req()
7322 return MULTIBP_CAP(bus->sih); in dhd_bus_is_multibp_capable()
7336 if (bus->sih->chip == CYW55560_CHIP_ID) { in dhdpcie_bus_download_state()
7338 bcmerror = dhdpcie_dongle_host_get_handshake_address(bus->sih, bus->osh, in dhdpcie_bus_download_state()
7346 if (!bus->sih) { in dhdpcie_bus_download_state()
7351 do_flr = ((bus->sih->buscorerev != PCIE_REV_FOR_4378A0) && in dhdpcie_bus_download_state()
7352 (bus->sih->buscorerev != PCIE_REV_FOR_4378B0)); in dhdpcie_bus_download_state()
7354 if (MULTIBP_ENAB(bus->sih) && !do_flr) { in dhdpcie_bus_download_state()
7367 bus->alp_only = TRUE; in dhdpcie_bus_download_state()
7370 cr4_regs = si_setcore(bus->sih, ARMCR4_CORE_ID, 0); in dhdpcie_bus_download_state()
7372 if (cr4_regs == NULL && !(si_setcore(bus->sih, ARM7S_CORE_ID, 0)) && in dhdpcie_bus_download_state()
7373 !(si_setcore(bus->sih, ARMCM3_CORE_ID, 0)) && in dhdpcie_bus_download_state()
7374 !(si_setcore(bus->sih, ARMCA7_CORE_ID, 0))) { in dhdpcie_bus_download_state()
7380 if (si_setcore(bus->sih, ARMCA7_CORE_ID, 0)) { in dhdpcie_bus_download_state()
7382 si_core_reset(bus->sih, SICF_CPUHALT, SICF_CPUHALT); in dhdpcie_bus_download_state()
7383 if (!(si_setcore(bus->sih, SYSMEM_CORE_ID, 0))) { in dhdpcie_bus_download_state()
7388 si_core_reset(bus->sih, 0, 0); in dhdpcie_bus_download_state()
7392 si_core_disable(bus->sih, 0); in dhdpcie_bus_download_state()
7394 if (!(si_setcore(bus->sih, SOCRAM_CORE_ID, 0))) { in dhdpcie_bus_download_state()
7400 si_core_reset(bus->sih, 0, 0); in dhdpcie_bus_download_state()
7403 if (bus->ramsize) { in dhdpcie_bus_download_state()
7405 if (dhdpcie_bus_membytes(bus, TRUE, bus->ramsize - 4, in dhdpcie_bus_download_state()
7421 if (bus->sih->chip == CYW55560_CHIP_ID) { in dhdpcie_bus_download_state()
7426 if ((bcmerror = dhdpcie_dongle_host_pre_handshake(bus->sih, in dhdpcie_bus_download_state()
7427 bus->osh, &bl_hs_addrs))) { in dhdpcie_bus_download_state()
7442 /* Console buffer read - First pass */ in dhdpcie_bus_download_state()
7449 si_core_reset(bus->sih, SICF_CPUHALT, SICF_CPUHALT); in dhdpcie_bus_download_state()
7450 if (BCM43602_CHIP(bus->sih->chip)) { in dhdpcie_bus_download_state()
7451 W_REG(bus->pcie_mb_intr_osh, cr4_regs + ARMCR4REG_BANKIDX, in dhdpcie_bus_download_state()
7453 W_REG(bus->pcie_mb_intr_osh, cr4_regs + ARMCR4REG_BANKPDA, in dhdpcie_bus_download_state()
7455 W_REG(bus->pcie_mb_intr_osh, cr4_regs + ARMCR4REG_BANKIDX, in dhdpcie_bus_download_state()
7457 W_REG(bus->pcie_mb_intr_osh, cr4_regs + ARMCR4REG_BANKPDA, in dhdpcie_bus_download_state()
7465 if (si_setcore(bus->sih, ARMCA7_CORE_ID, 0)) { in dhdpcie_bus_download_state()
7480 if (!(si_setcore(bus->sih, ARMCA7_CORE_ID, 0))) { in dhdpcie_bus_download_state()
7487 (uint8 *)&bus->resetinstr, sizeof(bus->resetinstr)); in dhdpcie_bus_download_state()
7489 } else if (!si_setcore(bus->sih, ARMCR4_CORE_ID, 0)) { in dhdpcie_bus_download_state()
7490 if (!(si_setcore(bus->sih, SOCRAM_CORE_ID, 0))) { in dhdpcie_bus_download_state()
7496 if (!si_iscoreup(bus->sih)) { in dhdpcie_bus_download_state()
7505 if (!si_setcore(bus->sih, PCMCIA_CORE_ID, 0) && in dhdpcie_bus_download_state()
7506 !si_setcore(bus->sih, SDIOD_CORE_ID, 0)) { in dhdpcie_bus_download_state()
7512 if (!(si_setcore(bus->sih, ARM7S_CORE_ID, 0)) && in dhdpcie_bus_download_state()
7513 !(si_setcore(bus->sih, ARMCM3_CORE_ID, 0))) { in dhdpcie_bus_download_state()
7519 if (BCM43602_CHIP(bus->sih->chip)) { in dhdpcie_bus_download_state()
7521 if (!(si_setcore(bus->sih, SOCRAM_CORE_ID, 0))) { in dhdpcie_bus_download_state()
7527 si_core_reset(bus->sih, 0, 0); in dhdpcie_bus_download_state()
7528 si_setcore(bus->sih, ARMCR4_CORE_ID, 0); in dhdpcie_bus_download_state()
7531 if (bus->sih->chip == CYW55560_CHIP_ID) { in dhdpcie_bus_download_state()
7532 /* Console buffer read - Second pass */ in dhdpcie_bus_download_state()
7539 if ((bcmerror = dhdpcie_dongle_host_post_handshake(bus->sih, in dhdpcie_bus_download_state()
7540 bus->osh, &bl_hs_addrs))) { in dhdpcie_bus_download_state()
7556 if ((bcmerror = dhdpcie_dongle_host_chk_validation(bus->sih, in dhdpcie_bus_download_state()
7557 bus->osh, &bl_hs_addrs))) { in dhdpcie_bus_download_state()
7580 if (!(si_setcore(bus->sih, ARMCR4_CORE_ID, 0))) { in dhdpcie_bus_download_state()
7587 if (bus->sih->chip != CYW55560_CHIP_ID) { in dhdpcie_bus_download_state()
7589 (uint8 *)&bus->resetinstr, sizeof(bus->resetinstr)); in dhdpcie_bus_download_state()
7597 if (bcmerror == BCME_OK && tmp != bus->resetinstr) { in dhdpcie_bus_download_state()
7599 __FUNCTION__, bus->resetinstr)); in dhdpcie_bus_download_state()
7611 if (bus->sih->chip == CYW55560_CHIP_ID) { in dhdpcie_bus_download_state()
7612 /* Console buffer read - Final pass */ in dhdpcie_bus_download_state()
7627 si_core_reset(bus->sih, 0, 0); in dhdpcie_bus_download_state()
7630 bus->alp_only = FALSE; in dhdpcie_bus_download_state()
7632 bus->dhd->busstate = DHD_BUS_LOAD; in dhdpcie_bus_download_state()
7638 if (bus->sih->chip == CYW55560_CHIP_ID) { in dhdpcie_bus_download_state()
7654 si_setcore(bus->sih, PCIE2_CORE_ID, 0); in dhdpcie_bus_download_state()
7656 if (MULTIBP_ENAB(bus->sih) && !do_flr) { in dhdpcie_bus_download_state()
7675 addr->d2h = &pcieregs->u1.dar_64.d2h_msg_reg0; in dhdpcie_dongle_host_get_handshake_address()
7676 addr->h2d = &pcieregs->u1.dar_64.h2d_msg_reg0; in dhdpcie_dongle_host_get_handshake_address()
7678 addr->d2h = (void *)HS_IN_TCM; in dhdpcie_dongle_host_get_handshake_address()
7679 addr->h2d = (volatile uint32 *)addr->d2h + 1; in dhdpcie_dongle_host_get_handshake_address()
7721 for (countdown_ = (us) + (HS_POLL_PERIOD_US - 1U); countdown_ >= HS_POLL_PERIOD_US; in dhdpcie_dongle_host_handshake_spinwait()
7722 countdown_ -= HS_POLL_PERIOD_US) { in dhdpcie_dongle_host_handshake_spinwait()
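
dhdpcie_dongle_host_handshake_spinwait() polls a handshake register at a fixed period until either the expected bit is set or the time budget is exhausted, using the countdown loop shown above. A generic sketch of that loop; the register read is a stub and the HS_POLL_PERIOD_US value is illustrative, not taken from the driver headers:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <unistd.h>

#define HS_POLL_PERIOD_US 1000U   /* poll every 1 ms (illustrative value) */

/* Stub for reading the dongle-to-host handshake register. */
static uint32_t read_d2h_reg(void) { return 0; }

/* Poll until 'bit' is set in the register or 'budget_us' elapses. */
static bool spinwait_for_bit(unsigned int bit, unsigned int budget_us)
{
    unsigned int countdown;

    for (countdown = budget_us + (HS_POLL_PERIOD_US - 1U);
         countdown >= HS_POLL_PERIOD_US;
         countdown -= HS_POLL_PERIOD_US) {
        if (read_d2h_reg() & (1U << bit))
            return true;           /* dongle signalled readiness */
        usleep(HS_POLL_PERIOD_US); /* wait one poll period and retry */
    }
    return false;                  /* budget exhausted */
}

int main(void)
{
    printf("D2H_READY %s\n",
           spinwait_for_bit(0, 5000) ? "seen" : "timed out");
    return 0;
}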
7752 bcmerror = dhdpcie_handshake_msg_reg_write(sih, osh, addr->h2d, &h2d_reg); in dhdpcie_dongle_host_pre_handshake()
7757 bcmerror = dhdpcie_dongle_host_handshake_spinwait(sih, osh, addr->d2h, D2H_READY_SHIFT, in dhdpcie_dongle_host_pre_handshake()
7764 bcmerror = dhdpcie_handshake_msg_reg_write(sih, osh, addr->h2d, &h2d_reg); in dhdpcie_dongle_host_pre_handshake()
7785 bcmerror = dhdpcie_handshake_msg_reg_write(sih, osh, addr->h2d, &h2d_reg); in dhdpcie_dongle_host_post_handshake()
7790 bcmerror = dhdpcie_dongle_host_handshake_spinwait(sih, osh, addr->d2h, in dhdpcie_dongle_host_post_handshake()
7795 dhdpcie_handshake_msg_reg_read(sih, osh, addr->h2d, &h2d_reg); in dhdpcie_dongle_host_post_handshake()
7797 dhdpcie_handshake_msg_reg_write(sih, osh, addr->h2d, &h2d_reg); in dhdpcie_dongle_host_post_handshake()
7811 bcmerror = dhdpcie_dongle_host_handshake_spinwait(sih, osh, addr->d2h, D2H_VALDN_DONE_SHIFT, in dhdpcie_dongle_host_chk_validation()
7815 bcmerror = dhdpcie_handshake_msg_reg_read(sih, osh, addr->d2h, &d2h_reg); in dhdpcie_dongle_host_chk_validation()
7832 dhdpcie_handshake_msg_reg_read(sih, osh, addr->h2d, &h2d_reg); in dhdpcie_dongle_host_chk_validation()
7834 dhdpcie_handshake_msg_reg_write(sih, osh, addr->h2d, &h2d_reg); in dhdpcie_dongle_host_chk_validation()
7853 &pcieregs->u1.dar_64.h2d_msg_reg0, &reg_val); in dhdpcie_dongle_host_pre_wd_reset_sequence()
7873 &pcieregs->u1.dar_64.h2d_msg_reg0, &reg_val); in dhdpcie_dongle_host_post_wd_reset_sequence()
7875 for (idx = D2H_READY_WD_RESET_COUNT; idx > 0; idx--) { in dhdpcie_dongle_host_post_wd_reset_sequence()
7887 dhdpcie_handshake_msg_reg_read(sih, osh, &pcieregs->u1.dar_64.d2h_msg_reg0, in dhdpcie_dongle_host_post_wd_reset_sequence()
7895 DHD_ERROR(("%s: error - Waiting for D2H_READY timeout %d\n", in dhdpcie_dongle_host_post_wd_reset_sequence()
7922 W_REG(osh, &pcieregs->u1.dar_64.h2d_msg_reg0, reg_val); in dhdpcie_dongle_host_pre_chipid_access_sequence()
7925 for (idx = D2H_READY_WD_RESET_COUNT; idx > 0; idx--) { in dhdpcie_dongle_host_pre_chipid_access_sequence()
7936 reg_val = R_REG(osh, &pcieregs->u1.dar_64.d2h_msg_reg0); in dhdpcie_dongle_host_pre_chipid_access_sequence()
7943 DHD_ERROR(("%s: error - Waiting for D2H_READY timeout %d\n", in dhdpcie_dongle_host_pre_chipid_access_sequence()
7962 bcmerror = dhdpcie_handshake_msg_reg_write(bus->sih, bus->osh, addr->h2d, &h2d_reg); in dhdpcie_dongle_host_post_varswrite()
7980 varsize = bus->varsz ? ROUNDUP(bus->varsz, 4) : 0; in dhdpcie_bus_write_vars()
7981 varaddr = (bus->ramsize - 4) - varsize; in dhdpcie_bus_write_vars()
7983 varaddr += bus->dongle_ram_base; in dhdpcie_bus_write_vars()
7985 if (bus->vars) { in dhdpcie_bus_write_vars()
7987 vbuffer = (uint8 *)MALLOC(bus->dhd->osh, varsize); in dhdpcie_bus_write_vars()
7992 bcopy(bus->vars, vbuffer, bus->varsz); in dhdpcie_bus_write_vars()
8000 nvram_ularray = (uint8*)MALLOC(bus->dhd->osh, varsize); in dhdpcie_bus_write_vars()
8002 MFREE(bus->dhd->osh, vbuffer, varsize); in dhdpcie_bus_write_vars()
8023 MFREE(bus->dhd->osh, nvram_ularray, varsize); in dhdpcie_bus_write_vars()
8026 MFREE(bus->dhd->osh, vbuffer, varsize); in dhdpcie_bus_write_vars()
8029 phys_size = REMAP_ENAB(bus) ? bus->ramsize : bus->orig_ramsize; in dhdpcie_bus_write_vars()
8031 phys_size += bus->dongle_ram_base; in dhdpcie_bus_write_vars()
8035 phys_size, bus->ramsize)); in dhdpcie_bus_write_vars()
8038 varsize = ((phys_size - 4) - varaddr); in dhdpcie_bus_write_vars()
8042	 * Varsize, converted to words, in the lower 16 bits; checksum in the upper 16 bits.	in dhdpcie_bus_write_vars()
8046 bus->nvram_csm = varsizew; in dhdpcie_bus_write_vars()
8050 bus->nvram_csm = varsizew; in dhdpcie_bus_write_vars()
8057 bcmerror = dhdpcie_bus_membytes(bus, TRUE, (phys_size - 4), in dhdpcie_bus_write_vars()
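
The varsizew comment above describes the token written to the last word of RAM: the NVRAM length in 32-bit words in the lower 16 bits and a checksum in the upper 16 bits. A minimal sketch, assuming the checksum is the bitwise complement of the word count; that complement convention is an assumption here and should be confirmed against the firmware side:

#include <stdint.h>
#include <stdio.h>

/* Build a "varsizew"-style token: word count low, checksum (assumed to be the
 * one's complement of the word count) high. Returns 0 when there are no vars. */
static uint32_t make_varsizew(uint32_t varsize_bytes)
{
    uint32_t words;

    if (varsize_bytes == 0)
        return 0;

    words = varsize_bytes / 4;                   /* length in 32-bit words */
    return ((~words & 0xFFFFu) << 16) | (words & 0xFFFFu);
}

int main(void)
{
    uint32_t tok = make_varsizew(0x800);         /* 2 KB of NVRAM text */
    printf("varsizew = 0x%08x (words=0x%x, csum=0x%x)\n",
           (unsigned)tok, (unsigned)(tok & 0xFFFFu), (unsigned)(tok >> 16));
    return 0;
}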
8071 if (bus->dhd->up) { in dhdpcie_downloadvars()
8081 if (bus->vars) in dhdpcie_downloadvars()
8082 MFREE(bus->dhd->osh, bus->vars, bus->varsz); in dhdpcie_downloadvars()
8084 bus->vars = MALLOC(bus->dhd->osh, len); in dhdpcie_downloadvars()
8085 bus->varsz = bus->vars ? len : 0; in dhdpcie_downloadvars()
8086 if (bus->vars == NULL) { in dhdpcie_downloadvars()
8091 /* Copy the passed variables, which should include the terminating double-null */ in dhdpcie_downloadvars()
8092 bcopy(arg, bus->vars, bus->varsz); in dhdpcie_downloadvars()
8095 if (dhd_bus_get_fw_mode(bus->dhd) == DHD_FLAG_MFG_MODE) { in dhdpcie_downloadvars()
8103 sp = strnstr(bus->vars, tag[i], bus->varsz); in dhdpcie_downloadvars()
8106 __FUNCTION__, bus->nv_path)); in dhdpcie_downloadvars()
8115 if (ep && ((ep - sp) <= WLC_CNTRY_BUF_SZ)) { in dhdpcie_downloadvars()
8299 bus = dhd->bus; in dhd_dump_intr_counters()
8305 bcm_bprintf(strbuf, "\n ------- DUMPING INTR enable/disable counters-------\n"); in dhd_dump_intr_counters()
8309 bus->resume_intr_enable_count, bus->dpc_intr_enable_count, in dhd_dump_intr_counters()
8310 bus->isr_intr_disable_count, bus->suspend_intr_disable_count, in dhd_dump_intr_counters()
8311 bus->dpc_return_busdown_count, bus->non_ours_irq_count); in dhd_dump_intr_counters()
8317 bus->oob_intr_count, bus->oob_intr_enable_count, in dhd_dump_intr_counters()
8318 bus->oob_intr_disable_count, dhdpcie_get_oob_irq_num(bus), in dhd_dump_intr_counters()
8319 GET_SEC_USEC(bus->last_oob_irq_time), GET_SEC_USEC(bus->last_oob_irq_enable_time), in dhd_dump_intr_counters()
8320 GET_SEC_USEC(bus->last_oob_irq_disable_time), dhdpcie_get_oob_irq_status(bus), in dhd_dump_intr_counters()
8331 GET_SEC_USEC(current_time), GET_SEC_USEC(bus->isr_entry_time), in dhd_dump_intr_counters()
8332 GET_SEC_USEC(bus->isr_exit_time), GET_SEC_USEC(bus->dpc_sched_time), in dhd_dump_intr_counters()
8333 GET_SEC_USEC(bus->last_non_ours_irq_time), GET_SEC_USEC(bus->dpc_entry_time), in dhd_dump_intr_counters()
8334 GET_SEC_USEC(bus->last_process_ctrlbuf_time), in dhd_dump_intr_counters()
8335 GET_SEC_USEC(bus->last_process_flowring_time), in dhd_dump_intr_counters()
8336 GET_SEC_USEC(bus->last_process_txcpl_time), in dhd_dump_intr_counters()
8337 GET_SEC_USEC(bus->last_process_rxcpl_time), in dhd_dump_intr_counters()
8338 GET_SEC_USEC(bus->last_process_infocpl_time), in dhd_dump_intr_counters()
8339 GET_SEC_USEC(bus->last_process_edl_time), in dhd_dump_intr_counters()
8340 GET_SEC_USEC(bus->dpc_exit_time), GET_SEC_USEC(bus->resched_dpc_time), in dhd_dump_intr_counters()
8341 GET_SEC_USEC(bus->last_d3_inform_time)); in dhd_dump_intr_counters()
8345 SEC_USEC_FMT"\n", GET_SEC_USEC(bus->last_suspend_start_time), in dhd_dump_intr_counters()
8346 GET_SEC_USEC(bus->last_suspend_end_time), in dhd_dump_intr_counters()
8347 GET_SEC_USEC(bus->last_resume_start_time), in dhd_dump_intr_counters()
8348 GET_SEC_USEC(bus->last_resume_end_time)); in dhd_dump_intr_counters()
8356 GET_SEC_USEC(dhd->logtrace_thr_ts.entry_time), in dhd_dump_intr_counters()
8357 GET_SEC_USEC(dhd->logtrace_thr_ts.sem_down_time), in dhd_dump_intr_counters()
8358 GET_SEC_USEC(dhd->logtrace_thr_ts.flush_time), in dhd_dump_intr_counters()
8359 GET_SEC_USEC(dhd->logtrace_thr_ts.unexpected_break_time), in dhd_dump_intr_counters()
8360 GET_SEC_USEC(dhd->logtrace_thr_ts.complete_time)); in dhd_dump_intr_counters()
8371 intstatus = si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, in dhd_dump_intr_registers()
8372 dhd->bus->pcie_mailbox_int, 0, 0); in dhd_dump_intr_registers()
8373 intmask = si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, in dhd_dump_intr_registers()
8374 dhd->bus->pcie_mailbox_mask, 0, 0); in dhd_dump_intr_registers()
8375 d2h_db0 = si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, PCID2H_MailBox, 0, 0); in dhd_dump_intr_registers()
8376 dhd_bus_cmn_readshared(dhd->bus, &d2h_mb_data, D2H_MB_DATA, 0); in dhd_dump_intr_registers()
8381 d2h_mb_data, dhd->bus->def_intmask); in dhd_dump_intr_registers()
8396 if (dhdp->busstate != DHD_BUS_DATA) in dhd_bus_dump()
8404 bcmpcie_get_total_wake(dhdp->bus), dhdp->bus->wake_counts.rxwake, in dhd_bus_dump()
8405 dhdp->bus->wake_counts.rcwake); in dhd_bus_dump()
8408 dhdp->bus->wake_counts.rx_ucast, dhdp->bus->wake_counts.rx_mcast, in dhd_bus_dump()
8409 dhdp->bus->wake_counts.rx_bcast, dhdp->bus->wake_counts.rx_arp); in dhd_bus_dump()
8411 dhdp->bus->wake_counts.rx_multi_ipv4, dhdp->bus->wake_counts.rx_multi_ipv6, in dhd_bus_dump()
8412 dhdp->bus->wake_counts.rx_icmpv6, dhdp->bus->wake_counts.rx_multi_other); in dhd_bus_dump()
8414 dhdp->bus->wake_counts.rx_icmpv6_ra, dhdp->bus->wake_counts.rx_icmpv6_na, in dhd_bus_dump()
8415 dhdp->bus->wake_counts.rx_icmpv6_ns); in dhd_bus_dump()
8419 if (dhdp->bus->wake_counts.rc_event[flowid] != 0) in dhd_bus_dump()
8421 dhdp->bus->wake_counts.rc_event[flowid]); in dhd_bus_dump()
8430 dhdp->bus->h2d_mb_data_ptr_addr, dhdp->bus->d2h_mb_data_ptr_addr); in dhd_bus_dump()
8431 bcm_bprintf(strbuf, "dhd cumm_ctr %d\n", DHD_CUMM_CTR_READ(&dhdp->cumm_ctr)); in dhd_bus_dump()
8434 dhdp->multi_client_flow_rings, dhdp->max_multi_client_flow_rings); in dhd_bus_dump()
8448 for (flowid = 0; flowid < dhdp->num_flow_rings; flowid++) { in dhd_bus_dump()
8450 if (!flow_ring_node->active) in dhd_bus_dump()
8453 flow_info = &flow_ring_node->flow_info; in dhd_bus_dump()
8456 flow_ring_node->flowid, flow_info->ifindex, flow_info->tid, in dhd_bus_dump()
8457 MAC2STRDBG(flow_info->da), in dhd_bus_dump()
8458 DHD_FLOW_QUEUE_LEN(&flow_ring_node->queue), in dhd_bus_dump()
8459 DHD_CUMM_CTR_READ(DHD_FLOW_QUEUE_CLEN_PTR(&flow_ring_node->queue)), in dhd_bus_dump()
8460 DHD_CUMM_CTR_READ(DHD_FLOW_QUEUE_L2CLEN_PTR(&flow_ring_node->queue)), in dhd_bus_dump()
8461 DHD_FLOW_QUEUE_FAILURES(&flow_ring_node->queue)); in dhd_bus_dump()
8462 dhd_prot_print_flow_ring(dhdp, flow_ring_node->prot_info, strbuf, in dhd_bus_dump()
8467 flow_info->num_tx_pkts, in dhd_bus_dump()
8468 flow_info->num_tx_status ? in dhd_bus_dump()
8469 DIV_U64_BY_U64(flow_info->cum_tx_status_latency, in dhd_bus_dump()
8470 flow_info->num_tx_status) : 0); in dhd_bus_dump()
8472 ifindex = flow_info->ifindex; in dhd_bus_dump()
8475 if_tx_status_latency[ifindex].num_tx_status += flow_info->num_tx_status; in dhd_bus_dump()
8477 flow_info->cum_tx_status_latency; in dhd_bus_dump()
8488 if_flow_lkup = (if_flow_lkup_t *)dhdp->if_flow_lkup; in dhd_bus_dump()
8503 if (dhdp->hp2p_capable) { in dhd_bus_dump()
8510 hp2p_info = &dhdp->hp2p_info[flowid]; in dhd_bus_dump()
8511 if (hp2p_info->num_timer_start == 0) in dhd_bus_dump()
8514 bcm_bprintf(strbuf, "\n%d", hp2p_info->flowid); in dhd_bus_dump()
8519 hp2p_info->tx_t0[bin], hp2p_info->tx_t1[bin]); in dhd_bus_dump()
8523 bcm_bprintf(strbuf, "\n%d", hp2p_info->flowid); in dhd_bus_dump()
8528 hp2p_info->rx_t0[bin]); in dhd_bus_dump()
8533 bcm_bprintf(strbuf, "\n%d %24d %16d", hp2p_info->num_pkt_limit, in dhd_bus_dump()
8534 hp2p_info->num_timer_limit, hp2p_info->num_timer_start); in dhd_bus_dump()
8541 bcm_bprintf(strbuf, "D3 inform cnt %d\n", dhdp->bus->d3_inform_cnt); in dhd_bus_dump()
8542 bcm_bprintf(strbuf, "D0 inform cnt %d\n", dhdp->bus->d0_inform_cnt); in dhd_bus_dump()
8543 bcm_bprintf(strbuf, "D0 inform in use cnt %d\n", dhdp->bus->d0_inform_in_use_cnt); in dhd_bus_dump()
8544 if (dhdp->d2h_hostrdy_supported) { in dhd_bus_dump()
8545 bcm_bprintf(strbuf, "hostready count:%d\n", dhdp->bus->hostready_count); in dhd_bus_dump()
8547 bcm_bprintf(strbuf, "d2h_intr_method -> %s\n", in dhd_bus_dump()
8548 dhdp->bus->d2h_intr_method ? "PCIE_MSI" : "PCIE_INTX"); in dhd_bus_dump()
8555 uint32 axi_tcm_addr = dhdpcie_bus_rtcm32(dhdp->bus, dhdp->axierror_logbuf_addr); in dhd_axi_sig_match()
8557 if (dhdp->dhd_induce_error == DHD_INDUCE_DROP_AXI_SIG) { in dhd_axi_sig_match()
8563 __FUNCTION__, axi_tcm_addr, dhdp->bus->dongle_ram_base, in dhd_axi_sig_match()
8564 dhdp->bus->dongle_ram_base + dhdp->bus->ramsize)); in dhd_axi_sig_match()
8565 if (axi_tcm_addr >= dhdp->bus->dongle_ram_base && in dhd_axi_sig_match()
8566 axi_tcm_addr < dhdp->bus->dongle_ram_base + dhdp->bus->ramsize) { in dhd_axi_sig_match()
8567 uint32 axi_signature = dhdpcie_bus_rtcm32(dhdp->bus, (axi_tcm_addr + in dhd_axi_sig_match()
8594 axi_logbuf_addr = dhdp->axierror_logbuf_addr; in dhd_axi_error()
8600 axi_err_dump = dhdp->axi_err_dump; in dhd_axi_error()
8611 axi_tcm_addr = dhdpcie_bus_rtcm32(dhdp->bus, axi_logbuf_addr); in dhd_axi_error()
8613 axi_err_buf = MALLOCZ(dhdp->osh, size); in dhd_axi_error()
8620 err = dhdpcie_bus_membytes(dhdp->bus, FALSE, axi_tcm_addr, p_axi_err, size); in dhd_axi_error()
8629 err = memcpy_s(&axi_err_dump->etd_axi_error_v1, size, axi_err_buf, size); in dhd_axi_error()
8637 MFREE(dhdp->osh, axi_err_buf, size); in dhd_axi_error()
8651 DHD_ERROR(("%s: signature : 0x%x\n", __FUNCTION__, axi_err_v1->signature)); in dhd_log_dump_axi_error()
8652 DHD_ERROR(("%s: version : 0x%x\n", __FUNCTION__, axi_err_v1->version)); in dhd_log_dump_axi_error()
8653 DHD_ERROR(("%s: length : 0x%x\n", __FUNCTION__, axi_err_v1->length)); in dhd_log_dump_axi_error()
8655 __FUNCTION__, axi_err_v1->dma_fifo_valid_count)); in dhd_log_dump_axi_error()
8657 __FUNCTION__, axi_err_v1->axi_errorlog_status)); in dhd_log_dump_axi_error()
8659 __FUNCTION__, axi_err_v1->axi_errorlog_core)); in dhd_log_dump_axi_error()
8661 __FUNCTION__, axi_err_v1->axi_errorlog_hi)); in dhd_log_dump_axi_error()
8663 __FUNCTION__, axi_err_v1->axi_errorlog_lo)); in dhd_log_dump_axi_error()
8665 __FUNCTION__, axi_err_v1->axi_errorlog_id)); in dhd_log_dump_axi_error()
8668 dma_fifo = axi_err_v1->dma_fifo[i]; in dhd_log_dump_axi_error()
8685 dma_dentry = axi_err_v1->dma_fifo[i].dentry[j]; in dhd_log_dump_axi_error()
8713 struct dhd_bus *bus = dhd->bus; in dhd_update_txflowrings()
8720 DHD_FLOWRING_LIST_LOCK(bus->dhd->flowring_list_lock, flags); in dhd_update_txflowrings()
8721 for (item = dll_head_p(&bus->flowring_active_list); in dhd_update_txflowrings()
8722 (!dhd_is_device_removed(dhd) && !dll_end(&bus->flowring_active_list, item)); in dhd_update_txflowrings()
8724 if (dhd->hang_was_sent) { in dhd_update_txflowrings()
8735 ASSERT(flow_ring_node->prot_info != NULL); in dhd_update_txflowrings()
8737 dhd_prot_update_txflowring(dhd, flow_ring_node->flowid, flow_ring_node->prot_info); in dhd_update_txflowrings()
8739 DHD_FLOWRING_LIST_UNLOCK(bus->dhd->flowring_list_lock, flags); in dhd_update_txflowrings()
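
dhd_update_txflowrings() iterates the active flow-ring list under the flowring list lock and bails out early if a hang has been reported. A generic doubly-linked-list walk with the same shape, using a plain pthread mutex and a toy node type; none of this is the driver's dll API:

#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

struct node {
    struct node *next, *prev;
    int flowid;
};

static pthread_mutex_t list_lock = PTHREAD_MUTEX_INITIALIZER;
static bool hang_was_sent;            /* abort the walk if set */

static void update_ring(struct node *n) { printf("update flow %d\n", n->flowid); }

/* Walk the circular list starting at head->next, under the list lock. */
static void update_all(struct node *head)
{
    struct node *item, *next;

    pthread_mutex_lock(&list_lock);
    for (item = head->next; item != head && !hang_was_sent; item = next) {
        next = item->next;            /* grab next before touching the node */
        update_ring(item);
    }
    pthread_mutex_unlock(&list_lock);
}

int main(void)
{
    struct node head = { 0 }, a = { 0 }, b = { 0 };

    a.flowid = 1; b.flowid = 2;
    head.next = &a; a.next = &b; b.next = &head;      /* circular list */
    head.prev = &b; a.prev = &head; b.prev = &a;
    update_all(&head);
    return 0;
}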
8746 if ((bus->sih->buscorerev == 2) || (bus->sih->buscorerev == 6) || in dhd_bus_gen_devmb_intr()
8747 (bus->sih->buscorerev == 4)) { in dhd_bus_gen_devmb_intr()
8751 if (bus->db1_for_mb) { in dhd_bus_gen_devmb_intr()
8757 si_corereg(bus->sih, bus->sih->buscoreidx, dhd_bus_db1_addr_get(bus), in dhd_bus_gen_devmb_intr()
8776 (void)dhd_wl_ioctl_set_intiovar(bus->dhd, "bus:disconnect", 99, WLC_SET_VAR, TRUE, 0); in dhdpcie_fw_trap()
8784 if (bus->bus_low_power_state != DHD_BUS_NO_LOW_POWER_STATE) { in dhd_bus_ringbell()
8786 __FUNCTION__, bus->bus_low_power_state)); in dhd_bus_ringbell()
8791 if (bus->is_linkdown) { in dhd_bus_ringbell()
8796 if ((bus->sih->buscorerev == 2) || (bus->sih->buscorerev == 6) || in dhd_bus_ringbell()
8797 (bus->sih->buscorerev == 4)) { in dhd_bus_ringbell()
8798 si_corereg(bus->sih, bus->sih->buscoreidx, bus->pcie_mailbox_int, in dhd_bus_ringbell()
8803 if (IDMA_ACTIVE(bus->dhd)) { in dhd_bus_ringbell()
8807 si_corereg(bus->sih, bus->sih->buscoreidx, dhd_bus_db0_addr_2_get(bus), in dhd_bus_ringbell()
8813 si_corereg(bus->sih, bus->sih->buscoreidx, in dhd_bus_ringbell()
8825 if (bus->bus_low_power_state != DHD_BUS_NO_LOW_POWER_STATE) { in dhd_bus_ringbell_2()
8827 __FUNCTION__, bus->bus_low_power_state)); in dhd_bus_ringbell_2()
8832 if (bus->is_linkdown) { in dhd_bus_ringbell_2()
8841 si_corereg(bus->sih, bus->sih->buscoreidx, dhd_bus_db0_addr_2_get(bus), in dhd_bus_ringbell_2()
8849 if (bus->bus_low_power_state != DHD_BUS_NO_LOW_POWER_STATE) { in dhdpcie_bus_ringbell_fast()
8851 __FUNCTION__, bus->bus_low_power_state)); in dhdpcie_bus_ringbell_fast()
8856 if (bus->is_linkdown) { in dhdpcie_bus_ringbell_fast()
8866 if (bus->dhd->db0ts_capable) { in dhdpcie_bus_ringbell_fast()
8876 W_REG(bus->pcie_mb_intr_osh, bus->pcie_mb_intr_addr, value); in dhdpcie_bus_ringbell_fast()
8883 if (bus->bus_low_power_state != DHD_BUS_NO_LOW_POWER_STATE) { in dhdpcie_bus_ringbell_2_fast()
8885 __FUNCTION__, bus->bus_low_power_state)); in dhdpcie_bus_ringbell_2_fast()
8890 if (bus->is_linkdown) { in dhdpcie_bus_ringbell_2_fast()
8898 W_REG(bus->pcie_mb_intr_osh, bus->pcie_mb_intr_2_addr, value); in dhdpcie_bus_ringbell_2_fast()
8906 if (bus->bus_low_power_state != DHD_BUS_NO_LOW_POWER_STATE) { in dhd_bus_ringbell_oldpcie()
8908 __FUNCTION__, bus->bus_low_power_state)); in dhd_bus_ringbell_oldpcie()
8913 if (bus->is_linkdown) { in dhd_bus_ringbell_oldpcie()
8918 w = (R_REG(bus->pcie_mb_intr_osh, bus->pcie_mb_intr_addr) & ~PCIE_INTB) | PCIE_INTB; in dhd_bus_ringbell_oldpcie()
8919 W_REG(bus->pcie_mb_intr_osh, bus->pcie_mb_intr_addr, w); in dhd_bus_ringbell_oldpcie()
8925 if ((bus->sih->buscorerev == 2) || (bus->sih->buscorerev == 6) || in dhd_bus_get_mbintr_fn()
8926 (bus->sih->buscorerev == 4)) { in dhd_bus_get_mbintr_fn()
8927 bus->pcie_mb_intr_addr = si_corereg_addr(bus->sih, bus->sih->buscoreidx, in dhd_bus_get_mbintr_fn()
8928 bus->pcie_mailbox_int); in dhd_bus_get_mbintr_fn()
8929 if (bus->pcie_mb_intr_addr) { in dhd_bus_get_mbintr_fn()
8930 bus->pcie_mb_intr_osh = si_osh(bus->sih); in dhd_bus_get_mbintr_fn()
8934 bus->pcie_mb_intr_addr = si_corereg_addr(bus->sih, bus->sih->buscoreidx, in dhd_bus_get_mbintr_fn()
8936 if (bus->pcie_mb_intr_addr) { in dhd_bus_get_mbintr_fn()
8937 bus->pcie_mb_intr_osh = si_osh(bus->sih); in dhd_bus_get_mbintr_fn()
8947 bus->pcie_mb_intr_2_addr = si_corereg_addr(bus->sih, bus->sih->buscoreidx, in dhd_bus_get_mbintr_2_fn()
8949 if (bus->pcie_mb_intr_2_addr) { in dhd_bus_get_mbintr_2_fn()
8950 bus->pcie_mb_intr_osh = si_osh(bus->sih); in dhd_bus_get_mbintr_2_fn()
8964 bus->dpc_entry_time = OSL_LOCALTIME_NS(); in dhd_bus_dpc()
8966 DHD_GENERAL_LOCK(bus->dhd, flags); in dhd_bus_dpc()
8972 if (bus->dhd->busstate == DHD_BUS_DOWN) { in dhd_bus_dpc()
8974 bus->intstatus = 0; in dhd_bus_dpc()
8975 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhd_bus_dpc()
8976 bus->dpc_return_busdown_count++; in dhd_bus_dpc()
8980 bus->idlecount = 0; in dhd_bus_dpc()
8982 DHD_BUS_BUSY_SET_IN_DPC(bus->dhd); in dhd_bus_dpc()
8983 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhd_bus_dpc()
8985 resched = dhdpcie_bus_process_mailbox_intr(bus, bus->intstatus); in dhd_bus_dpc()
8987 bus->intstatus = 0; in dhd_bus_dpc()
8988 bus->dpc_intr_enable_count++; in dhd_bus_dpc()
8994 bus->dpc_exit_time = OSL_LOCALTIME_NS(); in dhd_bus_dpc()
8996 bus->resched_dpc_time = OSL_LOCALTIME_NS(); in dhd_bus_dpc()
8999 bus->dpc_sched = resched; in dhd_bus_dpc()
9001 DHD_GENERAL_LOCK(bus->dhd, flags); in dhd_bus_dpc()
9002 DHD_BUS_BUSY_CLEAR_IN_DPC(bus->dhd); in dhd_bus_dpc()
9003 dhd_os_busbusy_wake(bus->dhd); in dhd_bus_dpc()
9004 DHD_GENERAL_UNLOCK(bus->dhd, flags); in dhd_bus_dpc()
9017 if (bus->is_linkdown) { in dhdpcie_send_mb_data()
9022 if (bus->api.fw_rev >= PCIE_SHARED_VERSION_6 && !bus->use_mailbox) { in dhdpcie_send_mb_data()
9027 if (dhd_prot_h2d_mbdata_send_ctrlmsg(bus->dhd, h2d_mb_data)) { in dhdpcie_send_mb_data()
9059 bus->last_d3_inform_time = OSL_LOCALTIME_NS(); in dhdpcie_send_mb_data()
9060 bus->d3_inform_cnt++; in dhdpcie_send_mb_data()
9064 bus->d0_inform_in_use_cnt++; in dhdpcie_send_mb_data()
9068 bus->d0_inform_cnt++; in dhdpcie_send_mb_data()
9079 DHD_BUS_LOCK(bus->bus_lock, flags_bus); in dhd_bus_handle_d3_ack()
9080 bus->suspend_intr_disable_count++; in dhd_bus_handle_d3_ack()
9092 if (bus->dhd->dhd_induce_error != DHD_INDUCE_D3_ACK_TIMEOUT) { in dhd_bus_handle_d3_ack()
9094 bus->bus_low_power_state = DHD_BUS_D3_ACK_RECIEVED; in dhd_bus_handle_d3_ack()
9097 DHD_BUS_UNLOCK(bus->bus_lock, flags_bus); in dhd_bus_handle_d3_ack()
9101 if (bus->dhd->dhd_induce_error != DHD_INDUCE_D3_ACK_TIMEOUT) { in dhd_bus_handle_d3_ack()
9102 bus->wait_for_d3_ack = 1; in dhd_bus_handle_d3_ack()
9103 dhd_os_d3ack_wake(bus->dhd); in dhd_bus_handle_d3_ack()
9111 if (MULTIBP_ENAB(bus->sih)) { in dhd_bus_handle_mb_data()
9123 bus->no_cfg_restore = 1; in dhd_bus_handle_mb_data()
9126 dhd_os_check_hang(bus->dhd, 0, -EREMOTEIO); in dhd_bus_handle_mb_data()
9133 if (bus->bus_low_power_state == DHD_BUS_D3_ACK_RECIEVED) { in dhd_bus_handle_mb_data()
9134 DHD_ERROR(("DS-ENTRY AFTER D3-ACK!!!!! QUITTING\n")); in dhd_bus_handle_mb_data()
9136 bus->dhd->busstate = DHD_BUS_DOWN; in dhd_bus_handle_mb_data()
9157 if (!bus->wait_for_d3_ack) { in dhd_bus_handle_mb_data()
9159 if (bus->dhd->req_hang_type == HANG_REASON_D3_ACK_TIMEOUT) { in dhd_bus_handle_mb_data()
9171 if (MULTIBP_ENAB(bus->sih)) { in dhd_bus_handle_mb_data()
9182 if (MULTIBP_ENAB(bus->sih)) { in dhdpcie_handle_mb_data()
9199 /* not ready yet dhd_os_ind_firmware_stall(bus->dhd); */ in dhdpcie_handle_mb_data()
9215 if (!bus->wait_for_d3_ack) { in dhdpcie_handle_mb_data()
9217 if (bus->dhd->req_hang_type == HANG_REASON_D3_ACK_TIMEOUT) { in dhdpcie_handle_mb_data()
9229 if (MULTIBP_ENAB(bus->sih)) { in dhdpcie_handle_mb_data()
9240 if (bus->is_linkdown) { in dhdpcie_read_handle_mb_data()
9245 if (MULTIBP_ENAB(bus->sih)) { in dhdpcie_read_handle_mb_data()
9259 if (MULTIBP_ENAB(bus->sih)) { in dhdpcie_read_handle_mb_data()
9270 if (MULTIBP_ENAB(bus->sih)) { in dhdpcie_bus_process_mailbox_intr()
9273 if ((bus->sih->buscorerev == 2) || (bus->sih->buscorerev == 6) || in dhdpcie_bus_process_mailbox_intr()
9274 (bus->sih->buscorerev == 4)) { in dhdpcie_bus_process_mailbox_intr()
9283 bus->api.handle_mb_data(bus); in dhdpcie_bus_process_mailbox_intr()
9286 DHD_BUS_LOCK(bus->bus_lock, flags_bus); in dhdpcie_bus_process_mailbox_intr()
9287 if (bus->bus_low_power_state == DHD_BUS_D3_ACK_RECIEVED) { in dhdpcie_bus_process_mailbox_intr()
9290 DHD_BUS_UNLOCK(bus->bus_lock, flags_bus); in dhdpcie_bus_process_mailbox_intr()
9293 DHD_BUS_UNLOCK(bus->bus_lock, flags_bus); in dhdpcie_bus_process_mailbox_intr()
9296 if ((bus->d2h_intr_method == PCIE_MSI) || in dhdpcie_bus_process_mailbox_intr()
9297 ((bus->d2h_intr_method == PCIE_INTX) && (intstatus & bus->d2h_mb_mask))) { in dhdpcie_bus_process_mailbox_intr()
9311 if (MULTIBP_ENAB(bus->sih)) { in dhdpcie_bus_process_mailbox_intr()
9324 if ((bus->dhd->busstate == DHD_BUS_DATA) && in dhdpci_bus_rte_log_time_sync_poll()
9325 (bus->dhd->dhd_rte_time_sync_ms != 0) && in dhdpci_bus_rte_log_time_sync_poll()
9326 (bus->bus_low_power_state == DHD_BUS_NO_LOW_POWER_STATE)) { in dhdpci_bus_rte_log_time_sync_poll()
9327 time_elapsed = OSL_SYSUPTIME_US() - bus->dhd_rte_time_sync_count; in dhdpci_bus_rte_log_time_sync_poll()
9329 if ((time_elapsed / 1000) >= bus->dhd->dhd_rte_time_sync_ms) { in dhdpci_bus_rte_log_time_sync_poll()
9334 bus->dhd_rte_time_sync_count += time_elapsed; in dhdpci_bus_rte_log_time_sync_poll()
9337 dhd_h2d_log_time_sync_deferred_wq_schedule(bus->dhd); in dhdpci_bus_rte_log_time_sync_poll()
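The poll above compares a microsecond uptime delta against a millisecond period before scheduling the deferred log time-sync work. A small sketch of that bookkeeping, with the time source and period as assumed parameters:

#include <stdint.h>
#include <stdbool.h>

/* Returns true when at least period_ms has elapsed; advances *last_us when it fires. */
static bool time_sync_due(uint64_t now_us, uint64_t *last_us, uint32_t period_ms)
{
	uint64_t elapsed_us = now_us - *last_us;

	if (period_ms != 0 && (elapsed_us / 1000) >= period_ms) {
		*last_us += elapsed_us;   /* mirrors dhd_rte_time_sync_count += time_elapsed */
		return true;
	}
	return false;
}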
9350 if ((bus->api.fw_rev >= PCIE_SHARED_VERSION_6) && in dhdpci_bus_read_frames()
9351 (bus->dhd->dongle_trap_data = dhd_prot_process_trapbuf(bus->dhd))) { in dhdpci_bus_read_frames()
9353 if (bus->dhd->axi_error) { in dhdpci_bus_read_frames()
9363 DHD_PERIM_LOCK_ALL((bus->dhd->fwder_unit % FWDER_MAX_UNIT)); in dhdpci_bus_read_frames()
9365 dhd_prot_process_ctrlbuf(bus->dhd); in dhdpci_bus_read_frames()
9366 bus->last_process_ctrlbuf_time = OSL_LOCALTIME_NS(); in dhdpci_bus_read_frames()
9368 DHD_PERIM_UNLOCK_ALL((bus->dhd->fwder_unit % FWDER_MAX_UNIT)); in dhdpci_bus_read_frames()
9371 DHD_BUS_LOCK(bus->bus_lock, flags_bus); in dhdpci_bus_read_frames()
9372 if (bus->bus_low_power_state != DHD_BUS_NO_LOW_POWER_STATE) { in dhdpci_bus_read_frames()
9375 __FUNCTION__, bus->bus_low_power_state)); in dhdpci_bus_read_frames()
9376 DHD_BUS_UNLOCK(bus->bus_lock, flags_bus); in dhdpci_bus_read_frames()
9379 DHD_BUS_UNLOCK(bus->bus_lock, flags_bus); in dhdpci_bus_read_frames()
9381 DHD_PERIM_LOCK_ALL((bus->dhd->fwder_unit % FWDER_MAX_UNIT)); in dhdpci_bus_read_frames()
9383 dhd_update_txflowrings(bus->dhd); in dhdpci_bus_read_frames()
9384 bus->last_process_flowring_time = OSL_LOCALTIME_NS(); in dhdpci_bus_read_frames()
9390 more |= dhd_prot_process_msgbuf_txcpl(bus->dhd, dhd_txbound, DHD_HP2P_RING); in dhdpci_bus_read_frames()
9392 more |= dhd_prot_process_msgbuf_txcpl(bus->dhd, dhd_txbound, DHD_REGULAR_RING); in dhdpci_bus_read_frames()
9393 bus->last_process_txcpl_time = OSL_LOCALTIME_NS(); in dhdpci_bus_read_frames()
9399 more |= dhd_prot_process_msgbuf_rxcpl(bus->dhd, dhd_rxbound, DHD_HP2P_RING); in dhdpci_bus_read_frames()
9401 more |= dhd_prot_process_msgbuf_rxcpl(bus->dhd, dhd_rxbound, DHD_REGULAR_RING); in dhdpci_bus_read_frames()
9402 bus->last_process_rxcpl_time = OSL_LOCALTIME_NS(); in dhdpci_bus_read_frames()
9406 if (!bus->dhd->dongle_edl_support) in dhdpci_bus_read_frames()
9409 more |= dhd_prot_process_msgbuf_infocpl(bus->dhd, DHD_INFORING_BOUND); in dhdpci_bus_read_frames()
9410 bus->last_process_infocpl_time = OSL_LOCALTIME_NS(); in dhdpci_bus_read_frames()
9414 more |= dhd_prot_process_msgbuf_edl(bus->dhd); in dhdpci_bus_read_frames()
9415 bus->last_process_edl_time = OSL_LOCALTIME_NS(); in dhdpci_bus_read_frames()
9420 if (bus->enable_idle_flowring_mgmt) { in dhdpci_bus_read_frames()
9427 if (bus->dhd->hang_was_sent) { in dhdpci_bus_read_frames()
9430 DHD_PERIM_UNLOCK_ALL((bus->dhd->fwder_unit % FWDER_MAX_UNIT)); in dhdpci_bus_read_frames()
9433 if (bus->read_shm_fail) { in dhdpci_bus_read_frames()
9435 int intstatus = si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpci_bus_read_frames()
9436 bus->pcie_mailbox_int, 0, 0); in dhdpci_bus_read_frames()
9437 if (intstatus != (uint32)-1) { in dhdpci_bus_read_frames()
9440 if (bus->dhd->memdump_enabled) { in dhdpci_bus_read_frames()
9441 DHD_OS_WAKE_LOCK(bus->dhd); in dhdpci_bus_read_frames()
9442 bus->dhd->memdump_type = DUMP_TYPE_READ_SHM_FAIL; in dhdpci_bus_read_frames()
9443 dhd_bus_mem_dump(bus->dhd); in dhdpci_bus_read_frames()
9444 DHD_OS_WAKE_UNLOCK(bus->dhd); in dhdpci_bus_read_frames()
9450 bus->no_cfg_restore = 1; in dhdpci_bus_read_frames()
9452 bus->is_linkdown = 1; in dhdpci_bus_read_frames()
9455 dhd_prot_debug_info_print(bus->dhd); in dhdpci_bus_read_frames()
9456 bus->dhd->hang_reason = HANG_REASON_PCIE_LINK_DOWN_EP_DETECT; in dhdpci_bus_read_frames()
9457 dhd_os_send_hang_message(bus->dhd); in dhdpci_bus_read_frames()
9475 shaddr = bus->dongle_ram_base + bus->ramsize - 4; in dhdpcie_tcm_valid()
9480 if ((addr == 0) || (addr == bus->nvram_csm) || (addr < bus->dongle_ram_base) || in dhdpcie_tcm_valid()
9495 if (sh.console_addr != bus->pcie_sh->console_addr) { in dhdpcie_tcm_valid()
9538 pciedev_shared_t *sh = bus->pcie_sh; in dhdpcie_readshared_console()
9541 shaddr = bus->dongle_ram_base + bus->ramsize - 4; in dhdpcie_readshared_console()
9545 while (((addr == 0) || (addr == bus->nvram_csm)) && !dhd_timeout_expired(&tmo)) { in dhdpcie_readshared_console()
9550 if ((addr == 0) || (addr == bus->nvram_csm) || (addr < bus->dongle_ram_base) || in dhdpcie_readshared_console()
9557 bus->shared_addr = (ulong)addr; in dhdpcie_readshared_console()
9570 sh->console_addr = ltoh32(sh->console_addr); in dhdpcie_readshared_console()
9572 bus->console_addr = sh->console_addr; in dhdpcie_readshared_console()
9583 pciedev_shared_t *sh = bus->pcie_sh; in dhdpcie_readshared()
9587 if (MULTIBP_ENAB(bus->sih)) { in dhdpcie_readshared()
9591 shaddr = bus->dongle_ram_base + bus->ramsize - 4; in dhdpcie_readshared()
9595 while (((addr == 0) || (addr == bus->nvram_csm)) && !dhd_timeout_expired(&tmo)) { in dhdpcie_readshared()
9600 if (addr == (uint32)-1) { in dhdpcie_readshared()
9604 bus->no_cfg_restore = 1; in dhdpcie_readshared()
9607 bus->is_linkdown = 1; in dhdpcie_readshared()
9611 if ((addr == 0) || (addr == bus->nvram_csm) || (addr < bus->dongle_ram_base) || in dhdpcie_readshared()
9617 if (addr != (uint32)-1) { /* only attempt further PCIE reads if this addr read was valid */ in dhdpcie_readshared()
9619 bus->dhd->memdump_enabled = DUMP_MEMFILE_BUGON; in dhdpcie_readshared()
9621 if (bus->dhd->memdump_enabled) { in dhdpcie_readshared()
9622 bus->dhd->memdump_type = DUMP_TYPE_DONGLE_INIT_FAILURE; in dhdpcie_readshared()
9629 bus->shared_addr = (ulong)addr; in dhdpcie_readshared()
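The shared-area pointer is read from the last 32-bit word of dongle RAM and only accepted once the checks above pass. A compact sketch of those checks, using stand-in parameters for dongle_ram_base, ramsize and the NVRAM checksum word; the exact upper bound is assumed to be the last word of RAM, where the pointer itself lives:

#include <stdint.h>
#include <stdbool.h>

static bool shared_addr_valid(uint32_t addr, uint32_t ram_base,
                              uint32_t ram_size, uint32_t nvram_csm)
{
	uint32_t last_word = ram_base + ram_size - 4;  /* location of the pointer word   */

	if (addr == 0 || addr == nvram_csm || addr == 0xffffffffu)
		return false;                          /* unset, stale, or failed read   */
	return (addr >= ram_base) && (addr <= last_word);
}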
9642 sh->flags = ltoh32(sh->flags); in dhdpcie_readshared()
9643 sh->trap_addr = ltoh32(sh->trap_addr); in dhdpcie_readshared()
9644 sh->assert_exp_addr = ltoh32(sh->assert_exp_addr); in dhdpcie_readshared()
9645 sh->assert_file_addr = ltoh32(sh->assert_file_addr); in dhdpcie_readshared()
9646 sh->assert_line = ltoh32(sh->assert_line); in dhdpcie_readshared()
9647 sh->console_addr = ltoh32(sh->console_addr); in dhdpcie_readshared()
9648 sh->msgtrace_addr = ltoh32(sh->msgtrace_addr); in dhdpcie_readshared()
9649 sh->dma_rxoffset = ltoh32(sh->dma_rxoffset); in dhdpcie_readshared()
9650 sh->rings_info_ptr = ltoh32(sh->rings_info_ptr); in dhdpcie_readshared()
9651 sh->flags2 = ltoh32(sh->flags2); in dhdpcie_readshared()
9654 bus->console_addr = sh->console_addr; in dhdpcie_readshared()
9657 bus->dma_rxoffset = bus->pcie_sh->dma_rxoffset; in dhdpcie_readshared()
9658 dhd_prot_rx_dataoffset(bus->dhd, bus->dma_rxoffset); in dhdpcie_readshared()
9660 DHD_INFO(("DMA RX offset from shared Area %d\n", bus->dma_rxoffset)); in dhdpcie_readshared()
9662 bus->api.fw_rev = sh->flags & PCIE_SHARED_VERSION_MASK; in dhdpcie_readshared()
9663 if (!(dhdpcie_check_firmware_compatible(bus->api.fw_rev, PCIE_SHARED_VERSION))) in dhdpcie_readshared()
9668 bus->api.fw_rev)); in dhdpcie_readshared()
9671 dhdpcie_update_bus_api_revisions(bus->api.fw_rev, PCIE_SHARED_VERSION); in dhdpcie_readshared()
9673 bus->rw_index_sz = (sh->flags & PCIE_SHARED_2BYTE_INDICES) ? in dhdpcie_readshared()
9676 __FUNCTION__, bus->rw_index_sz)); in dhdpcie_readshared()
9679 if (sh->flags & PCIE_SHARED_IDLE_FLOW_RING) { in dhdpcie_readshared()
9682 bus->enable_idle_flowring_mgmt = TRUE; in dhdpcie_readshared()
9687 if (bus->sih->buscorerev == 23) { in dhdpcie_readshared()
9693 /* TODO: This needs to be selected based on IPC instead of at compile time */ in dhdpcie_readshared()
9694 bus->dhd->hwa_enable = TRUE; in dhdpcie_readshared()
9697 bus->dhd->idma_enable = (sh->flags & PCIE_SHARED_IDMA) ? TRUE : FALSE; in dhdpcie_readshared()
9698 bus->dhd->ifrm_enable = (sh->flags & PCIE_SHARED_IFRM) ? TRUE : FALSE; in dhdpcie_readshared()
9701 bus->dhd->d2h_sync_mode = sh->flags & PCIE_SHARED_D2H_SYNC_MODE_MASK; in dhdpcie_readshared()
9703 bus->dhd->dar_enable = (sh->flags & PCIE_SHARED_DAR) ? TRUE : FALSE; in dhdpcie_readshared()
9706 if (sh->flags & PCIE_SHARED_DMA_INDEX) { in dhdpcie_readshared()
9707 if (!bus->dhd->dma_ring_upd_overwrite) { in dhdpcie_readshared()
9709 if (!IFRM_ENAB(bus->dhd)) { in dhdpcie_readshared()
9710 bus->dhd->dma_h2d_ring_upd_support = TRUE; in dhdpcie_readshared()
9712 bus->dhd->dma_d2h_ring_upd_support = TRUE; in dhdpcie_readshared()
9716 if (bus->dhd->dma_d2h_ring_upd_support) in dhdpcie_readshared()
9717 bus->dhd->d2h_sync_mode = 0; in dhdpcie_readshared()
9719 DHD_INFO(("%s: Host supports DMAing indices: H2D:%d - D2H:%d. FW supports it\n", in dhdpcie_readshared()
9721 (bus->dhd->dma_h2d_ring_upd_support ? 1 : 0), in dhdpcie_readshared()
9722 (bus->dhd->dma_d2h_ring_upd_support ? 1 : 0))); in dhdpcie_readshared()
9723 } else if (!(sh->flags & PCIE_SHARED_D2H_SYNC_MODE_MASK)) { in dhdpcie_readshared()
9728 bus->dhd->dma_h2d_ring_upd_support = FALSE; in dhdpcie_readshared()
9729 bus->dhd->dma_d2h_ring_upd_support = FALSE; in dhdpcie_readshared()
9733 if (sh->flags2 & PCIE_SHARED2_FAST_DELETE_RING) { in dhdpcie_readshared()
9736 bus->dhd->fast_delete_ring_support = TRUE; in dhdpcie_readshared()
9740 bus->dhd->fast_delete_ring_support = FALSE; in dhdpcie_readshared()
9748 if (sh->rings_info_ptr > shaddr) { in dhdpcie_readshared()
9750 __FUNCTION__, sh->rings_info_ptr)); in dhdpcie_readshared()
9754 if ((rv = dhdpcie_bus_membytes(bus, FALSE, sh->rings_info_ptr, in dhdpcie_readshared()
9758 bus->h2d_mb_data_ptr_addr = ltoh32(sh->h2d_mb_data_ptr); in dhdpcie_readshared()
9759 bus->d2h_mb_data_ptr_addr = ltoh32(sh->d2h_mb_data_ptr); in dhdpcie_readshared()
9761 if (bus->api.fw_rev >= PCIE_SHARED_VERSION_6) { in dhdpcie_readshared()
9762 bus->max_tx_flowrings = ltoh16(ring_info.max_tx_flowrings); in dhdpcie_readshared()
9763 bus->max_submission_rings = ltoh16(ring_info.max_submission_queues); in dhdpcie_readshared()
9764 bus->max_completion_rings = ltoh16(ring_info.max_completion_rings); in dhdpcie_readshared()
9765 bus->max_cmn_rings = bus->max_submission_rings - bus->max_tx_flowrings; in dhdpcie_readshared()
9766 bus->api.handle_mb_data = dhdpcie_read_handle_mb_data; in dhdpcie_readshared()
9767 bus->use_mailbox = sh->flags & PCIE_SHARED_USE_MAILBOX; in dhdpcie_readshared()
9770 bus->max_tx_flowrings = ltoh16(ring_info.max_tx_flowrings); in dhdpcie_readshared()
9771 bus->max_submission_rings = bus->max_tx_flowrings; in dhdpcie_readshared()
9772 bus->max_completion_rings = BCMPCIE_D2H_COMMON_MSGRINGS; in dhdpcie_readshared()
9773 bus->max_cmn_rings = BCMPCIE_H2D_COMMON_MSGRINGS; in dhdpcie_readshared()
9774 bus->api.handle_mb_data = dhdpcie_handle_mb_data; in dhdpcie_readshared()
9775 bus->use_mailbox = TRUE; in dhdpcie_readshared()
9777 if (bus->max_completion_rings == 0) { in dhdpcie_readshared()
9779 bus->max_completion_rings)); in dhdpcie_readshared()
9782 if (bus->max_submission_rings == 0) { in dhdpcie_readshared()
9784 bus->max_submission_rings)); in dhdpcie_readshared()
9787 if (bus->max_tx_flowrings == 0) { in dhdpcie_readshared()
9788 DHD_ERROR(("dongle txflow rings are invalid %d\n", bus->max_tx_flowrings)); in dhdpcie_readshared()
9795 if (bus->dhd->dma_h2d_ring_upd_support || IDMA_ENAB(bus->dhd)) { in dhdpcie_readshared()
9796 dma_indx_wr_buf = dhd_prot_dma_indx_init(bus->dhd, bus->rw_index_sz, in dhdpcie_readshared()
9797 H2D_DMA_INDX_WR_BUF, bus->max_submission_rings); in dhdpcie_readshared()
9798 dma_indx_rd_buf = dhd_prot_dma_indx_init(bus->dhd, bus->rw_index_sz, in dhdpcie_readshared()
9799 D2H_DMA_INDX_RD_BUF, bus->max_completion_rings); in dhdpcie_readshared()
9805 bus->dhd->dma_h2d_ring_upd_support = FALSE; in dhdpcie_readshared()
9806 bus->dhd->idma_enable = FALSE; in dhdpcie_readshared()
9810 if (bus->dhd->dma_d2h_ring_upd_support) { in dhdpcie_readshared()
9811 dma_indx_wr_buf = dhd_prot_dma_indx_init(bus->dhd, bus->rw_index_sz, in dhdpcie_readshared()
9812 D2H_DMA_INDX_WR_BUF, bus->max_completion_rings); in dhdpcie_readshared()
9813 dma_indx_rd_buf = dhd_prot_dma_indx_init(bus->dhd, bus->rw_index_sz, in dhdpcie_readshared()
9814 H2D_DMA_INDX_RD_BUF, bus->max_submission_rings); in dhdpcie_readshared()
9820 bus->dhd->dma_d2h_ring_upd_support = FALSE; in dhdpcie_readshared()
9824 if (IFRM_ENAB(bus->dhd)) { in dhdpcie_readshared()
9825 dma_indx_wr_buf = dhd_prot_dma_indx_init(bus->dhd, bus->rw_index_sz, in dhdpcie_readshared()
9826 H2D_IFRM_INDX_WR_BUF, bus->max_tx_flowrings); in dhdpcie_readshared()
9831 bus->dhd->ifrm_enable = FALSE; in dhdpcie_readshared()
9847 __FUNCTION__, bus->h2d_mb_data_ptr_addr)); in dhdpcie_readshared()
9849 __FUNCTION__, bus->d2h_mb_data_ptr_addr)); in dhdpcie_readshared()
9853 __FUNCTION__, bus->dhd->d2h_sync_mode)); in dhdpcie_readshared()
9855 bus->dhd->d2h_hostrdy_supported = in dhdpcie_readshared()
9856 ((sh->flags & PCIE_SHARED_HOSTRDY_SUPPORT) == PCIE_SHARED_HOSTRDY_SUPPORT); in dhdpcie_readshared()
9858 bus->dhd->ext_trap_data_supported = in dhdpcie_readshared()
9859 ((sh->flags2 & PCIE_SHARED2_EXTENDED_TRAP_DATA) == PCIE_SHARED2_EXTENDED_TRAP_DATA); in dhdpcie_readshared()
9861 if ((sh->flags2 & PCIE_SHARED2_TXSTATUS_METADATA) == 0) in dhdpcie_readshared()
9862 bus->dhd->pcie_txs_metadata_enable = 0; in dhdpcie_readshared()
9864 bus->dhd->hscb_enable = in dhdpcie_readshared()
9865 (sh->flags2 & PCIE_SHARED2_HSCB) == PCIE_SHARED2_HSCB; in dhdpcie_readshared()
9869 bus->dhd->dongle_edl_support = (sh->flags2 & PCIE_SHARED2_EDL_RING) ? TRUE : FALSE; in dhdpcie_readshared()
9870 DHD_ERROR(("Dongle EDL support: %u\n", bus->dhd->dongle_edl_support)); in dhdpcie_readshared()
9874 bus->dhd->debug_buf_dest_support = in dhdpcie_readshared()
9875 (sh->flags2 & PCIE_SHARED2_DEBUG_BUF_DEST) ? TRUE : FALSE; in dhdpcie_readshared()
9877 bus->dhd->debug_buf_dest_support ? "Y" : "N")); in dhdpcie_readshared()
9880 if (bus->dhd->hp2p_enable) { in dhdpcie_readshared()
9881 bus->dhd->hp2p_ts_capable = in dhdpcie_readshared()
9882 (sh->flags2 & PCIE_SHARED2_PKT_TIMESTAMP) == PCIE_SHARED2_PKT_TIMESTAMP; in dhdpcie_readshared()
9883 bus->dhd->hp2p_capable = in dhdpcie_readshared()
9884 (sh->flags2 & PCIE_SHARED2_HP2P) == PCIE_SHARED2_HP2P; in dhdpcie_readshared()
9885 bus->dhd->hp2p_capable &= bus->dhd->hp2p_ts_capable; in dhdpcie_readshared()
9888 bus->dhd->hp2p_capable ? "Y" : "N")); in dhdpcie_readshared()
9890 if (bus->dhd->hp2p_capable) { in dhdpcie_readshared()
9891 bus->dhd->pkt_thresh = HP2P_PKT_THRESH; in dhdpcie_readshared()
9892 bus->dhd->pkt_expiry = HP2P_PKT_EXPIRY; in dhdpcie_readshared()
9893 bus->dhd->time_thresh = HP2P_TIME_THRESH; in dhdpcie_readshared()
9895 hp2p_info_t *hp2p_info = &bus->dhd->hp2p_info[addr]; in dhdpcie_readshared()
9897 hp2p_info->hrtimer_init = FALSE; in dhdpcie_readshared()
9898 hp2p_info->timer.function = &dhd_hp2p_write; in dhdpcie_readshared()
9900 tasklet_hrtimer_init(&hp2p_info->timer, in dhdpcie_readshared()
9903 hrtimer_init(&hp2p_info->timer, CLOCK_MONOTONIC, in dhdpcie_readshared()
9912 bus->dhd->db0ts_capable = in dhdpcie_readshared()
9913 (sh->flags & PCIE_SHARED_TIMESTAMP_DB0) == PCIE_SHARED_TIMESTAMP_DB0; in dhdpcie_readshared()
9916 if (MULTIBP_ENAB(bus->sih)) { in dhdpcie_readshared()
9921 * De-assert WL domain in DAR in dhdpcie_readshared()
9923 if (bus->sih->buscorerev >= 68) { in dhdpcie_readshared()
9938 uint16 max_tx_flowrings = bus->max_tx_flowrings; in dhd_fillup_ring_sharedptr_info()
9951 tcm_memloc = ltoh32(ring_info->ringmem_ptr); in dhd_fillup_ring_sharedptr_info()
9955 bus->ring_sh[i].ring_mem_addr = tcm_memloc; in dhd_fillup_ring_sharedptr_info()
9959 i, bus->ring_sh[i].ring_mem_addr)); in dhd_fillup_ring_sharedptr_info()
9965 d2h_w_idx_ptr = ltoh32(ring_info->d2h_w_idx_ptr); in dhd_fillup_ring_sharedptr_info()
9966 d2h_r_idx_ptr = ltoh32(ring_info->d2h_r_idx_ptr); in dhd_fillup_ring_sharedptr_info()
9967 h2d_w_idx_ptr = ltoh32(ring_info->h2d_w_idx_ptr); in dhd_fillup_ring_sharedptr_info()
9968 h2d_r_idx_ptr = ltoh32(ring_info->h2d_r_idx_ptr); in dhd_fillup_ring_sharedptr_info()
9972 bus->ring_sh[i].ring_state_w = h2d_w_idx_ptr; in dhd_fillup_ring_sharedptr_info()
9973 bus->ring_sh[i].ring_state_r = h2d_r_idx_ptr; in dhd_fillup_ring_sharedptr_info()
9976 h2d_w_idx_ptr = h2d_w_idx_ptr + bus->rw_index_sz; in dhd_fillup_ring_sharedptr_info()
9977 h2d_r_idx_ptr = h2d_r_idx_ptr + bus->rw_index_sz; in dhd_fillup_ring_sharedptr_info()
9980 bus->ring_sh[i].ring_state_w, bus->ring_sh[i].ring_state_r)); in dhd_fillup_ring_sharedptr_info()
9985 bus->ring_sh[i].ring_state_w = d2h_w_idx_ptr; in dhd_fillup_ring_sharedptr_info()
9986 bus->ring_sh[i].ring_state_r = d2h_r_idx_ptr; in dhd_fillup_ring_sharedptr_info()
9989 d2h_w_idx_ptr = d2h_w_idx_ptr + bus->rw_index_sz; in dhd_fillup_ring_sharedptr_info()
9990 d2h_r_idx_ptr = d2h_r_idx_ptr + bus->rw_index_sz; in dhd_fillup_ring_sharedptr_info()
9993 bus->ring_sh[i].ring_state_w, bus->ring_sh[i].ring_state_r)); in dhd_fillup_ring_sharedptr_info()
9997 if (bus->api.fw_rev < PCIE_SHARED_VERSION_6) { in dhd_fillup_ring_sharedptr_info()
9998 max_tx_flowrings -= BCMPCIE_H2D_COMMON_MSGRINGS; in dhd_fillup_ring_sharedptr_info()
10005 bus->ring_sh[i].ring_state_w = h2d_w_idx_ptr; in dhd_fillup_ring_sharedptr_info()
10006 bus->ring_sh[i].ring_state_r = h2d_r_idx_ptr; in dhd_fillup_ring_sharedptr_info()
10009 h2d_w_idx_ptr = h2d_w_idx_ptr + bus->rw_index_sz; in dhd_fillup_ring_sharedptr_info()
10010 h2d_r_idx_ptr = h2d_r_idx_ptr + bus->rw_index_sz; in dhd_fillup_ring_sharedptr_info()
10013 bus->ring_sh[i].ring_state_w, in dhd_fillup_ring_sharedptr_info()
10014 bus->ring_sh[i].ring_state_r)); in dhd_fillup_ring_sharedptr_info()
10017 bus->ring_sh[i].ring_state_w = d2h_w_idx_ptr; in dhd_fillup_ring_sharedptr_info()
10018 bus->ring_sh[i].ring_state_r = d2h_r_idx_ptr; in dhd_fillup_ring_sharedptr_info()
10019 d2h_w_idx_ptr = d2h_w_idx_ptr + bus->rw_index_sz; in dhd_fillup_ring_sharedptr_info()
10020 d2h_r_idx_ptr = d2h_r_idx_ptr + bus->rw_index_sz; in dhd_fillup_ring_sharedptr_info()
10022 bus->ring_sh[i].ring_state_w, bus->ring_sh[i].ring_state_r)); in dhd_fillup_ring_sharedptr_info()
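In the loops above, each ring's read/write index slots are laid out back to back and the running pointers simply advance by rw_index_sz per ring, so ring i lands at base + i * rw_index_sz. A tiny sketch of that stride arithmetic with assumed parameters:

#include <stdint.h>

/* Address of the index slot for a given ring, mirroring the += rw_index_sz walk above. */
static uint32_t ring_index_addr(uint32_t base, uint16_t ring, uint16_t rw_index_sz)
{
	return base + (uint32_t)ring * rw_index_sz;   /* rw_index_sz is 2 or 4 bytes */
}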
10032 dhd_bus_t *bus = dhdp->bus; in dhd_bus_init()
10037 ASSERT(bus->dhd); in dhd_bus_init()
10038 if (!bus->dhd) in dhd_bus_init()
10041 if (PCIE_RELOAD_WAR_ENAB(bus->sih->buscorerev)) { in dhd_bus_init()
10045 if (MULTIBP_ENAB(bus->sih)) { in dhd_bus_init()
10053 bus->reg = si_setcore(bus->sih, PCIE2_CORE_ID, 0); in dhd_bus_init()
10054 ASSERT(bus->reg != NULL); in dhd_bus_init()
10074 bus->reg = si_setcore(bus->sih, PCIE2_CORE_ID, 0); in dhd_bus_init()
10075 ASSERT(bus->reg != NULL); in dhd_bus_init()
10082 dhdp->busstate = DHD_BUS_DATA; in dhd_bus_init()
10083 bus->bus_low_power_state = DHD_BUS_NO_LOW_POWER_STATE; in dhd_bus_init()
10084 dhdp->dhd_bus_busy_state = 0; in dhd_bus_init()
10092 dhd_dpc_enable(bus->dhd); in dhd_bus_init()
10096 bus->intr_enabled = TRUE; in dhd_bus_init()
10098 /* bcmsdh_intr_unmask(bus->sdh); */ in dhd_bus_init()
10100 bus->idlecount = 0; in dhd_bus_init()
10101 bus->idletime = (int32)MAX_IDLE_COUNT; in dhd_bus_init()
10102 init_waitqueue_head(&bus->rpm_queue); in dhd_bus_init()
10103 mutex_init(&bus->pm_lock); in dhd_bus_init()
10105 bus->idletime = 0; in dhd_bus_init()
10109 if (bus->api.fw_rev < PCIE_SHARED_VERSION_6) { in dhd_bus_init()
10110 bus->use_d0_inform = TRUE; in dhd_bus_init()
10112 bus->use_d0_inform = FALSE; in dhd_bus_init()
10116 if (MULTIBP_ENAB(bus->sih)) { in dhd_bus_init()
10128 addr = bus->dongle_ram_base + bus->ramsize - 4; in dhdpcie_init_shared_addr()
10130 dhdpcie_runtime_bus_wake(bus->dhd, TRUE, __builtin_return_address(0)); in dhdpcie_init_shared_addr()
10145 return (-ENODEV); in dhdpcie_chipmatch()
10303 return (-ENODEV); in dhdpcie_chipmatch()
10332 cur_coreid = si_coreid(bus->sih); in dhdpcie_sromotp_customvar()
10334 chipcregs = (chipcregs_t *)si_setcore(bus->sih, CC_CORE_ID, 0); in dhdpcie_sromotp_customvar()
10336 chipc_corerev = si_corerev(bus->sih); in dhdpcie_sromotp_customvar()
10343 if (((uint16)bus->sih->chip != BCM4350_CHIP_ID) && !BCM4345_CHIP((uint16)bus->sih->chip) && in dhdpcie_sromotp_customvar()
10344 ((uint16)bus->sih->chip != BCM4355_CHIP_ID) && in dhdpcie_sromotp_customvar()
10345 ((uint16)bus->sih->chip != BCM4359_CHIP_ID) && in dhdpcie_sromotp_customvar()
10346 ((uint16)bus->sih->chip != BCM4349_CHIP_ID)) { in dhdpcie_sromotp_customvar()
10352 if (chipcregs->sromcontrol & SRC_PRESENT) { in dhdpcie_sromotp_customvar()
10354 sprom_size = (1 << (2 * ((chipcregs->sromcontrol & SRC_SIZE_MASK) in dhdpcie_sromotp_customvar()
10358 if (chipcregs->sromcontrol & SRC_OTPPRESENT) { in dhdpcie_sromotp_customvar()
10360 if (((chipcregs->otplayout & OTPL_WRAP_TYPE_MASK) >> OTPL_WRAP_TYPE_SHIFT) in dhdpcie_sromotp_customvar()
10367 otp_size = (((chipcregs->otplayout & OTPL_ROW_SIZE_MASK) in dhdpcie_sromotp_customvar()
10371 otp_size = (((chipcregs->capabilities & CC_CAP_OTPSIZE) in dhdpcie_sromotp_customvar()
10381 otp_size = otp_size_65nm[(chipcregs->otplayout & in dhdpcie_sromotp_customvar()
10385 otp_size = otp_size_65nm[(chipcregs->capabilities & in dhdpcie_sromotp_customvar()
10397 if (((chipcregs->sromcontrol & SRC_PRESENT) == 0) && in dhdpcie_sromotp_customvar()
10398 ((chipcregs->otplayout & OTPL_ROW_SIZE_MASK) == 0)) { in dhdpcie_sromotp_customvar()
10401 __FUNCTION__, chipcregs->sromcontrol, in dhdpcie_sromotp_customvar()
10402 chipcregs->otplayout)); in dhdpcie_sromotp_customvar()
10406 if (((chipcregs->sromcontrol & SRC_PRESENT) == 0) && in dhdpcie_sromotp_customvar()
10407 ((chipcregs->capabilities & CC_CAP_OTPSIZE) == 0)) { in dhdpcie_sromotp_customvar()
10410 __FUNCTION__, chipcregs->sromcontrol, in dhdpcie_sromotp_customvar()
10411 chipcregs->capabilities)); in dhdpcie_sromotp_customvar()
10416 if ((!(chipcregs->sromcontrol & SRC_PRESENT) || (chipcregs->sromcontrol & SRC_OTPSEL)) && in dhdpcie_sromotp_customvar()
10417 (chipcregs->sromcontrol & SRC_OTPPRESENT)) { in dhdpcie_sromotp_customvar()
10421 } else if (((chipcregs->sromcontrol & SRC_OTPSEL) == 0) && in dhdpcie_sromotp_customvar()
10422 (chipcregs->sromcontrol & SRC_PRESENT)) { in dhdpcie_sromotp_customvar()
10425 /* If SPROM > 8K, only 8 Kbits are mapped to ChipCommon (0x800 - 0xBFF) */ in dhdpcie_sromotp_customvar()
10433 if (bus->regs == NULL) { in dhdpcie_sromotp_customvar()
10444 nvm_shadow = (volatile uint16 *)si_setcore(bus->sih, GCI_CORE_ID, 0); in dhdpcie_sromotp_customvar()
10447 nvm_shadow = chipcregs->sromotp; in dhdpcie_sromotp_customvar()
10461 * dump_size & dump_offset in 16-bit words in dhdpcie_sromotp_customvar()
10513 si_setcore(bus->sih, cur_coreid, 0); in dhdpcie_sromotp_customvar()
10542 cur_coreid = si_coreid(bus->sih); in dhdpcie_cc_nvmshadow()
10544 chipcregs = (chipcregs_t *)si_setcore(bus->sih, CC_CORE_ID, 0); in dhdpcie_cc_nvmshadow()
10547 chipc_corerev = si_corerev(bus->sih); in dhdpcie_cc_nvmshadow()
10556 if (((uint16)bus->sih->chip != BCM4350_CHIP_ID) && !BCM4345_CHIP((uint16)bus->sih->chip) && in dhdpcie_cc_nvmshadow()
10557 ((uint16)bus->sih->chip != BCM4355_CHIP_ID) && in dhdpcie_cc_nvmshadow()
10558 ((uint16)bus->sih->chip != BCM4364_CHIP_ID)) { in dhdpcie_cc_nvmshadow()
10565 if (chipcregs->sromcontrol & SRC_PRESENT) { in dhdpcie_cc_nvmshadow()
10567 sprom_size = (1 << (2 * ((chipcregs->sromcontrol & SRC_SIZE_MASK) in dhdpcie_cc_nvmshadow()
10572 if (chipcregs->sromcontrol & SRC_OTPPRESENT) { in dhdpcie_cc_nvmshadow()
10575 if (((chipcregs->otplayout & OTPL_WRAP_TYPE_MASK) >> OTPL_WRAP_TYPE_SHIFT) in dhdpcie_cc_nvmshadow()
10582 otp_size = (((chipcregs->otplayout & OTPL_ROW_SIZE_MASK) in dhdpcie_cc_nvmshadow()
10586 otp_size = (((chipcregs->capabilities & CC_CAP_OTPSIZE) in dhdpcie_cc_nvmshadow()
10596 otp_size = otp_size_65nm[(chipcregs->otplayout & OTPL_ROW_SIZE_MASK) in dhdpcie_cc_nvmshadow()
10600 otp_size = otp_size_65nm[(chipcregs->capabilities & CC_CAP_OTPSIZE) in dhdpcie_cc_nvmshadow()
10613 if (((chipcregs->sromcontrol & SRC_PRESENT) == 0) && in dhdpcie_cc_nvmshadow()
10614 ((chipcregs->otplayout & OTPL_ROW_SIZE_MASK) == 0)) { in dhdpcie_cc_nvmshadow()
10617 __FUNCTION__, chipcregs->sromcontrol, chipcregs->otplayout)); in dhdpcie_cc_nvmshadow()
10621 if (((chipcregs->sromcontrol & SRC_PRESENT) == 0) && in dhdpcie_cc_nvmshadow()
10622 ((chipcregs->capabilities & CC_CAP_OTPSIZE) == 0)) { in dhdpcie_cc_nvmshadow()
10625 __FUNCTION__, chipcregs->sromcontrol, chipcregs->capabilities)); in dhdpcie_cc_nvmshadow()
10631 if ((!(chipcregs->sromcontrol & SRC_PRESENT) || (chipcregs->sromcontrol & SRC_OTPSEL)) && in dhdpcie_cc_nvmshadow()
10632 (chipcregs->sromcontrol & SRC_OTPPRESENT)) { in dhdpcie_cc_nvmshadow()
10639 } else if (((chipcregs->sromcontrol & SRC_OTPSEL) == 0) && in dhdpcie_cc_nvmshadow()
10640 (chipcregs->sromcontrol & SRC_PRESENT)) { in dhdpcie_cc_nvmshadow()
10645 /* If SPROM > 8K, only 8 Kbits are mapped to ChipCommon (0x800 - 0xBFF) */ in dhdpcie_cc_nvmshadow()
10654 if (bus->regs == NULL) { in dhdpcie_cc_nvmshadow()
10667 nvm_shadow = (volatile uint16 *)si_setcore(bus->sih, GCI_CORE_ID, 0); in dhdpcie_cc_nvmshadow()
10670 nvm_shadow = chipcregs->sromotp; in dhdpcie_cc_nvmshadow()
10680 * dump_size & dump_offset in 16-bit words in dhdpcie_cc_nvmshadow()
10693 si_setcore(bus->sih, cur_coreid, 0); in dhdpcie_cc_nvmshadow()
10706 queue = &flow_ring_node->queue; in dhd_bus_clean_flow_ring()
10712 dhd_tcpack_info_tbl_clean(bus->dhd); in dhd_bus_clean_flow_ring()
10716 if (flow_ring_node->hp2p_ring) { in dhd_bus_clean_flow_ring()
10717 bus->dhd->hp2p_ring_active = FALSE; in dhd_bus_clean_flow_ring()
10718 flow_ring_node->hp2p_ring = FALSE; in dhd_bus_clean_flow_ring()
10723 DHD_FLOWRING_LOCK(flow_ring_node->lock, flags); in dhd_bus_clean_flow_ring()
10726 while ((pkt = dhd_flow_queue_dequeue(bus->dhd, queue)) != NULL) { in dhd_bus_clean_flow_ring()
10727 PKTFREE(bus->dhd->osh, pkt, TRUE); in dhd_bus_clean_flow_ring()
10732 dhd_flow_queue_reinit(bus->dhd, queue, FLOW_RING_QUEUE_THRESHOLD); in dhd_bus_clean_flow_ring()
10733 flow_ring_node->status = FLOW_RING_STATUS_CLOSED; in dhd_bus_clean_flow_ring()
10734 flow_ring_node->active = FALSE; in dhd_bus_clean_flow_ring()
10736 DHD_FLOWRING_UNLOCK(flow_ring_node->lock, flags); in dhd_bus_clean_flow_ring()
10739 DHD_FLOWRING_LIST_LOCK(bus->dhd->flowring_list_lock, flags); in dhd_bus_clean_flow_ring()
10740 dll_delete(&flow_ring_node->list); in dhd_bus_clean_flow_ring()
10741 DHD_FLOWRING_LIST_UNLOCK(bus->dhd->flowring_list_lock, flags); in dhd_bus_clean_flow_ring()
10744 dhd_prot_flowrings_pool_release(bus->dhd, in dhd_bus_clean_flow_ring()
10745 flow_ring_node->flowid, flow_ring_node->prot_info); in dhd_bus_clean_flow_ring()
10748 dhd_flowid_free(bus->dhd, flow_ring_node->flow_info.ifindex, in dhd_bus_clean_flow_ring()
10749 flow_ring_node->flowid); in dhd_bus_clean_flow_ring()
10764 if (dhd_prot_flow_ring_create(bus->dhd, flow_ring_node) != BCME_OK) in dhd_bus_flow_ring_create_request()
10780 if (flowid >= bus->dhd->num_flow_rings) { in dhd_bus_flow_ring_create_response()
10782 flowid, bus->dhd->num_flow_rings)); in dhd_bus_flow_ring_create_response()
10786 flow_ring_node = DHD_FLOW_RING(bus->dhd, flowid); in dhd_bus_flow_ring_create_response()
10792 ASSERT(flow_ring_node->flowid == flowid); in dhd_bus_flow_ring_create_response()
10793 if (flow_ring_node->flowid != flowid) { in dhd_bus_flow_ring_create_response()
10796 flow_ring_node->flowid)); in dhd_bus_flow_ring_create_response()
10808 DHD_FLOWRING_LOCK(flow_ring_node->lock, flags); in dhd_bus_flow_ring_create_response()
10809 flow_ring_node->status = FLOW_RING_STATUS_OPEN; in dhd_bus_flow_ring_create_response()
10810 DHD_FLOWRING_UNLOCK(flow_ring_node->lock, flags); in dhd_bus_flow_ring_create_response()
10828 DHD_FLOWRING_LIST_LOCK(bus->dhd->flowring_list_lock, flags); in dhd_bus_flow_ring_create_response()
10829 dll_prepend(&bus->flowring_active_list, &flow_ring_node->list); in dhd_bus_flow_ring_create_response()
10830 DHD_FLOWRING_LIST_UNLOCK(bus->dhd->flowring_list_lock, flags); in dhd_bus_flow_ring_create_response()
10853 dhd_tcpack_info_tbl_clean(bus->dhd); in dhd_bus_flow_ring_delete_request()
10855 DHD_FLOWRING_LOCK(flow_ring_node->lock, flags); in dhd_bus_flow_ring_delete_request()
10856 if (flow_ring_node->status == FLOW_RING_STATUS_DELETE_PENDING) { in dhd_bus_flow_ring_delete_request()
10857 DHD_FLOWRING_UNLOCK(flow_ring_node->lock, flags); in dhd_bus_flow_ring_delete_request()
10858 DHD_ERROR(("%s :Delete Pending flowid %u\n", __FUNCTION__, flow_ring_node->flowid)); in dhd_bus_flow_ring_delete_request()
10861 flow_ring_node->status = FLOW_RING_STATUS_DELETE_PENDING; in dhd_bus_flow_ring_delete_request()
10863 queue = &flow_ring_node->queue; /* queue associated with flow ring */ in dhd_bus_flow_ring_delete_request()
10866 while ((pkt = dhd_flow_queue_dequeue(bus->dhd, queue)) != NULL) { in dhd_bus_flow_ring_delete_request()
10867 PKTFREE(bus->dhd->osh, pkt, TRUE); in dhd_bus_flow_ring_delete_request()
10871 DHD_FLOWRING_UNLOCK(flow_ring_node->lock, flags); in dhd_bus_flow_ring_delete_request()
10874 dhd_prot_flow_ring_delete(bus->dhd, flow_ring_node); in dhd_bus_flow_ring_delete_request()
10887 if (flowid >= bus->dhd->num_flow_rings) { in dhd_bus_flow_ring_delete_response()
10889 flowid, bus->dhd->num_flow_rings)); in dhd_bus_flow_ring_delete_response()
10893 flow_ring_node = DHD_FLOW_RING(bus->dhd, flowid); in dhd_bus_flow_ring_delete_response()
10899 ASSERT(flow_ring_node->flowid == flowid); in dhd_bus_flow_ring_delete_response()
10900 if (flow_ring_node->flowid != flowid) { in dhd_bus_flow_ring_delete_response()
10903 flow_ring_node->flowid)); in dhd_bus_flow_ring_delete_response()
10930 DHD_FLOWRING_LOCK(flow_ring_node->lock, flags); in dhd_bus_flow_ring_flush_request()
10931 queue = &flow_ring_node->queue; /* queue associated with flow ring */ in dhd_bus_flow_ring_flush_request()
10935 flow_ring_node->status = FLOW_RING_STATUS_FLUSH_PENDING; in dhd_bus_flow_ring_flush_request()
10941 dhd_tcpack_info_tbl_clean(bus->dhd); in dhd_bus_flow_ring_flush_request()
10945 while ((pkt = dhd_flow_queue_dequeue(bus->dhd, queue)) != NULL) { in dhd_bus_flow_ring_flush_request()
10946 PKTFREE(bus->dhd->osh, pkt, TRUE); in dhd_bus_flow_ring_flush_request()
10950 DHD_FLOWRING_UNLOCK(flow_ring_node->lock, flags); in dhd_bus_flow_ring_flush_request()
10953 dhd_prot_flow_ring_flush(bus->dhd, flow_ring_node); in dhd_bus_flow_ring_flush_request()
10970 if (flowid >= bus->dhd->num_flow_rings) { in dhd_bus_flow_ring_flush_response()
10972 flowid, bus->dhd->num_flow_rings)); in dhd_bus_flow_ring_flush_response()
10976 flow_ring_node = DHD_FLOW_RING(bus->dhd, flowid); in dhd_bus_flow_ring_flush_response()
10982 ASSERT(flow_ring_node->flowid == flowid); in dhd_bus_flow_ring_flush_response()
10983 if (flow_ring_node->flowid != flowid) { in dhd_bus_flow_ring_flush_response()
10986 flow_ring_node->flowid)); in dhd_bus_flow_ring_flush_response()
10990 flow_ring_node->status = FLOW_RING_STATUS_OPEN; in dhd_bus_flow_ring_flush_response()
10997 return bus->max_submission_rings; in dhd_bus_max_h2d_queues()
11010 dhdp->bus->is_linkdown = val; in dhd_bus_set_linkdown()
11016 return dhdp->bus->is_linkdown; in dhd_bus_get_linkdown()
11022 return dhdp->bus->cto_triggered; in dhd_bus_get_cto()
11032 DHD_ERROR(("%s :Flow Resume Request flow id %u\n", __FUNCTION__, flow_ring_node->flowid)); in dhd_bus_flow_ring_resume_request()
11034 flow_ring_node->status = FLOW_RING_STATUS_RESUME_PENDING; in dhd_bus_flow_ring_resume_request()
11037 dhd_prot_flow_ring_resume(bus->dhd, flow_ring_node); in dhd_bus_flow_ring_resume_request()
11051 flow_ring_node = DHD_FLOW_RING(bus->dhd, flowid); in dhd_bus_flow_ring_resume_response()
11052 ASSERT(flow_ring_node->flowid == flowid); in dhd_bus_flow_ring_resume_response()
11060 DHD_TRACE(("%s :Number of pkts queued in FlowId:%d is -> %u!!\n", in dhd_bus_flow_ring_resume_response()
11061 __FUNCTION__, flow_ring_node->flowid, flow_ring_node->queue.len)); in dhd_bus_flow_ring_resume_response()
11063 flow_ring_node->status = FLOW_RING_STATUS_OPEN; in dhd_bus_flow_ring_resume_response()
11077 diff = time_stamp - bus->active_list_last_process_ts; in dhd_bus_check_idle_scan()
11081 bus->active_list_last_process_ts = OSL_SYSUPTIME(); in dhd_bus_check_idle_scan()
11099 DHD_FLOWRING_LIST_LOCK(bus->dhd->flowring_list_lock, flags); in dhd_bus_idle_scan()
11101 for (item = dll_tail_p(&bus->flowring_active_list); in dhd_bus_idle_scan()
11102 !dll_end(&bus->flowring_active_list, item); item = prev) { in dhd_bus_idle_scan()
11107 if (flow_ring_node->flowid == (bus->max_submission_rings - 1)) in dhd_bus_idle_scan()
11110 if (flow_ring_node->status != FLOW_RING_STATUS_OPEN) { in dhd_bus_idle_scan()
11114 flow_ring_node->flowid)); in dhd_bus_idle_scan()
11119 diff = time_stamp - flow_ring_node->last_active_ts; in dhd_bus_idle_scan()
11121 if ((diff > IDLE_FLOW_RING_TIMEOUT) && !(flow_ring_node->queue.len)) { in dhd_bus_idle_scan()
11122 DHD_ERROR(("\nSuspending flowid %d\n", flow_ring_node->flowid)); in dhd_bus_idle_scan()
11125 flow_ring_node->status = FLOW_RING_STATUS_SUSPENDED; in dhd_bus_idle_scan()
11126 ringid[count] = flow_ring_node->flowid; in dhd_bus_idle_scan()
11130 dhd_prot_flow_ring_batch_suspend_request(bus->dhd, ringid, count); in dhd_bus_idle_scan()
11142 dhd_prot_flow_ring_batch_suspend_request(bus->dhd, ringid, count); in dhd_bus_idle_scan()
11145 DHD_FLOWRING_LIST_UNLOCK(bus->dhd->flowring_list_lock, flags); in dhd_bus_idle_scan()
11155 DHD_FLOWRING_LIST_LOCK(bus->dhd->flowring_list_lock, flags); in dhd_flow_ring_move_to_active_list_head()
11157 list = dll_head_p(&bus->flowring_active_list); in dhd_flow_ring_move_to_active_list_head()
11158 if (&flow_ring_node->list != list) { in dhd_flow_ring_move_to_active_list_head()
11159 dll_delete(&flow_ring_node->list); in dhd_flow_ring_move_to_active_list_head()
11160 dll_prepend(&bus->flowring_active_list, &flow_ring_node->list); in dhd_flow_ring_move_to_active_list_head()
11164 flow_ring_node->last_active_ts = OSL_SYSUPTIME(); in dhd_flow_ring_move_to_active_list_head()
11166 DHD_FLOWRING_LIST_UNLOCK(bus->dhd->flowring_list_lock, flags); in dhd_flow_ring_move_to_active_list_head()
11175 DHD_FLOWRING_LIST_LOCK(bus->dhd->flowring_list_lock, flags); in dhd_flow_ring_add_to_active_list()
11177 dll_prepend(&bus->flowring_active_list, &flow_ring_node->list); in dhd_flow_ring_add_to_active_list()
11179 flow_ring_node->last_active_ts = OSL_SYSUPTIME(); in dhd_flow_ring_add_to_active_list()
11181 DHD_FLOWRING_LIST_UNLOCK(bus->dhd->flowring_list_lock, flags); in dhd_flow_ring_add_to_active_list()
11187 dll_delete(&flow_ring_node->list); in __dhd_flow_ring_delete_from_active_list()
11194 DHD_FLOWRING_LIST_LOCK(bus->dhd->flowring_list_lock, flags); in dhd_flow_ring_delete_from_active_list()
11198 DHD_FLOWRING_LIST_UNLOCK(bus->dhd->flowring_list_lock, flags); in dhd_flow_ring_delete_from_active_list()
11261 osh = bus->osh; in dhd_bus_release_dongle()
11264 if (bus->dhd) { in dhd_bus_release_dongle()
11269 dongle_isolation = bus->dhd->dongle_isolation; in dhd_bus_release_dongle()
11297 if (bus->sih->buscorerev < 19) { in dhdpcie_cto_init()
11299 __FUNCTION__, bus->sih->buscorerev)); in dhdpcie_cto_init()
11303 if (bus->sih->buscorerev == 19) { in dhdpcie_cto_init()
11305 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_cto_init()
11308 pcie_lnkst = si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_cto_init()
11317 bus->cto_enable = enable; in dhdpcie_cto_init()
11322 if (bus->cto_threshold == 0) { in dhdpcie_cto_init()
11323 bus->cto_threshold = PCIE_CTO_TO_THRESH_DEFAULT; in dhdpcie_cto_init()
11325 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_cto_init()
11327 ((bus->cto_threshold << PCIE_CTO_TO_THRESHOLD_SHIFT) & in dhdpcie_cto_init()
11333 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_cto_init()
11338 __FUNCTION__, bus->cto_enable)); in dhdpcie_cto_init()
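The CTO setup above programs the completion-timeout threshold by shifting the value into its register field and masking it. A small sketch of composing such a field, with placeholder shift/mask values standing in for PCIE_CTO_TO_THRESHOLD_SHIFT and its mask:

#include <stdint.h>

#define CTO_THRESH_SHIFT_SKETCH  0            /* placeholder, not the hardware value */
#define CTO_THRESH_MASK_SKETCH   0x0000ffffu  /* placeholder field mask              */

/* Merge the threshold into reg without disturbing bits outside the field. */
static uint32_t cto_set_threshold(uint32_t reg, uint32_t thresh)
{
	reg &= ~CTO_THRESH_MASK_SKETCH;
	reg |= (thresh << CTO_THRESH_SHIFT_SKETCH) & CTO_THRESH_MASK_SKETCH;
	return reg;
}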
11353 DHD_OS_WAKE_LOCK(bus->dhd); in dhdpcie_cto_error_recovery()
11355 DHD_ERROR(("--- CTO Triggered --- %d\n", bus->pwr_req_ref)); in dhdpcie_cto_error_recovery()
11368 err_status = si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_cto_error_recovery()
11369 DAR_ERRLOG(bus->sih->buscorerev), in dhdpcie_cto_error_recovery()
11372 si_corereg(bus->sih, bus->sih->buscoreidx, in dhdpcie_cto_error_recovery()
11373 DAR_ERRLOG(bus->sih->buscorerev), in dhdpcie_cto_error_recovery()
11383 DHD_OS_WAKE_UNLOCK(bus->dhd); in dhdpcie_cto_error_recovery()
11405 DHD_OS_WAKE_UNLOCK(bus->dhd); in dhdpcie_cto_error_recovery()
11428 if (bus->pcie_sh->flags2 & PCIE_SHARED2_D2H_D11_TX_STATUS) { in dhdpcie_init_d11status()
11429 flags2 = bus->pcie_sh->flags2; in dhdpcie_init_d11status()
11430 addr = bus->shared_addr + OFFSETOF(pciedev_shared_t, flags2); in dhdpcie_init_d11status()
11439 bus->pcie_sh->flags2 = flags2; in dhdpcie_init_d11status()
11440 bus->dhd->d11_tx_status = TRUE; in dhdpcie_init_d11status()
11457 return dhdpcie_oob_intr_register(dhdp->bus); in dhd_bus_oob_intr_register()
11463 dhdpcie_oob_intr_unregister(dhdp->bus); in dhd_bus_oob_intr_unregister()
11469 dhdpcie_oob_intr_set(dhdp->bus, enable); in dhd_bus_oob_intr_set()
11476 return bus->dhd->d2h_hostrdy_supported; in dhdpcie_bus_get_pcie_hostready_supported()
11482 dhd_bus_t *bus = pub->bus; in dhd_pcie_dump_core_regs()
11484 uint32 core_addr = SI_ENUM_BASE(bus->sih) + coreoffset; in dhd_pcie_dump_core_regs()
11488 core_addr = SI_ENUM_BASE(bus->sih) + coreoffset + first_addr; in dhd_pcie_dump_core_regs()
11500 if (!bus->dhd) in dhdpcie_bus_get_pcie_hwa_supported()
11502 else if (bus->hwa_enab_bmap) { in dhdpcie_bus_get_pcie_hwa_supported()
11503 return bus->dhd->hwa_enable; in dhdpcie_bus_get_pcie_hwa_supported()
11512 if (!bus->dhd) in dhdpcie_bus_get_pcie_idma_supported()
11514 else if (bus->idma_enabled) { in dhdpcie_bus_get_pcie_idma_supported()
11515 return bus->dhd->idma_enable; in dhdpcie_bus_get_pcie_idma_supported()
11524 if (!bus->dhd) in dhdpcie_bus_get_pcie_ifrm_supported()
11526 else if (bus->ifrm_enabled) { in dhdpcie_bus_get_pcie_ifrm_supported()
11527 return bus->dhd->ifrm_enable; in dhdpcie_bus_get_pcie_ifrm_supported()
11536 if (!bus->dhd) { in dhdpcie_bus_get_pcie_dar_supported()
11538 } else if (bus->dar_enabled) { in dhdpcie_bus_get_pcie_dar_supported()
11539 return bus->dhd->dar_enable; in dhdpcie_bus_get_pcie_dar_supported()
11549 bus->dw_option = dw_option; in dhdpcie_bus_enab_pcie_dw()
11555 trap_t *tr = &bus->dhd->last_trap_info; in dhd_bus_dump_trap_info()
11562 ltoh32(tr->type), ltoh32(tr->epc), ltoh32(tr->cpsr), ltoh32(tr->spsr), in dhd_bus_dump_trap_info()
11563 ltoh32(tr->r13), ltoh32(tr->r14), ltoh32(tr->pc), in dhd_bus_dump_trap_info()
11564 ltoh32(bus->pcie_sh->trap_addr), in dhd_bus_dump_trap_info()
11565 ltoh32(tr->r0), ltoh32(tr->r1), ltoh32(tr->r2), ltoh32(tr->r3), in dhd_bus_dump_trap_info()
11566 ltoh32(tr->r4), ltoh32(tr->r5), ltoh32(tr->r6), ltoh32(tr->r7), in dhd_bus_dump_trap_info()
11567 ltoh32(tr->r8), ltoh32(tr->r9), ltoh32(tr->r10), in dhd_bus_dump_trap_info()
11568 ltoh32(tr->r11), ltoh32(tr->r12)); in dhd_bus_dump_trap_info()
11575 struct dhd_bus *bus = dhdp->bus; in dhd_bus_readwrite_bp_addr()
11588 return dhd->bus->idletime; in dhd_get_idletime()
11595 if (serialized_backplane_access(dhd->bus, addr, sizeof(uint), val, read) != BCME_OK) { in dhd_sbreg_op()
11632 if (serialized_backplane_access(dhd->bus, addr, in dhdpcie_get_sssr_fifo_dump()
11650 si_t *sih = dhd->bus->sih; in dhdpcie_get_sssr_dig_dump()
11666 if ((!dhd->sssr_reg_info.vasip_regs.vasip_sr_size) && in dhdpcie_get_sssr_dig_dump()
11667 dhd->sssr_reg_info.length > OFFSETOF(sssr_reg_info_v1_t, dig_mem_info)) { in dhdpcie_get_sssr_dig_dump()
11668 int err = dhdpcie_bus_membytes(dhd->bus, FALSE, addr_reg, (uint8 *)buf, in dhdpcie_get_sssr_dig_dump()
11676 addr = dhd->sssr_reg_info.vasip_regs.wrapper_regs.ioctrl; in dhdpcie_get_sssr_dig_dump()
11686 if (serialized_backplane_access(dhd->bus, addr, sizeof(uint), in dhdpcie_get_sssr_dig_dump()
11710 W_REG(si_osh(sih), &chipcregs->sr_memrw_addr, 0); in dhdpcie_get_sssr_dig_dump()
11714 buf[i] = R_REG(si_osh(sih), &chipcregs->sr_memrw_data); in dhdpcie_get_sssr_dig_dump()
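The digital SR dump above seeds chipcommon's sr_memrw_addr once and then reads sr_memrw_data in a loop, i.e. an address/data register pair where the read side is assumed to auto-advance, as the single address write followed by the read loop suggests. A generic sketch of that indirect-read pattern, with the register pointers as assumed parameters:

#include <stdint.h>
#include <stddef.h>

/* Indirect read through an address/data register pair; data reads auto-advance. */
static void indirect_read_words(volatile uint32_t *addr_reg,
                                volatile uint32_t *data_reg,
                                uint32_t start, uint32_t *buf, size_t nwords)
{
	size_t i;

	*addr_reg = start;               /* seed the window, 0 in the dump above */
	for (i = 0; i < nwords; i++)
		buf[i] = *data_reg;      /* each read returns the next word      */
}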
11743 if (!dhd->concise_dbg_buf) in dhdpcie_get_etd_preserve_logs()
11750 tlv = bcm_parse_tlvs(hdr->data, hdr->len, TAG_TRAP_LOG_DATA); in dhdpcie_get_etd_preserve_logs()
11753 uint32 endaddr = dhd->bus->dongle_ram_base + dhd->bus->ramsize - 4; in dhdpcie_get_etd_preserve_logs()
11755 etd_evtlog = (eventlog_trapdata_info_t *)tlv->data; in dhdpcie_get_etd_preserve_logs()
11758 (etd_evtlog->num_elements), in dhdpcie_get_etd_preserve_logs()
11759 ntoh32(etd_evtlog->seq_num), (etd_evtlog->log_arr_addr))); in dhdpcie_get_etd_preserve_logs()
11760 arr_size = (uint32)sizeof(*evtlog_buf_arr) * (etd_evtlog->num_elements); in dhdpcie_get_etd_preserve_logs()
11765 evtlog_buf_arr = MALLOCZ(dhd->osh, arr_size); in dhdpcie_get_etd_preserve_logs()
11772 baseaddr = etd_evtlog->log_arr_addr; in dhdpcie_get_etd_preserve_logs()
11773 if ((baseaddr < dhd->bus->dongle_ram_base) || in dhdpcie_get_etd_preserve_logs()
11781 err = dhdpcie_bus_membytes(dhd->bus, FALSE, in dhdpcie_get_etd_preserve_logs()
11782 (ulong)(etd_evtlog->log_arr_addr), in dhdpcie_get_etd_preserve_logs()
11793 seqnum = ntoh32(etd_evtlog->seq_num); in dhdpcie_get_etd_preserve_logs()
11794 memset(dhd->concise_dbg_buf, 0, CONCISE_DUMP_BUFLEN); in dhdpcie_get_etd_preserve_logs()
11795 for (i = 0; i < (etd_evtlog->num_elements); ++i) { in dhdpcie_get_etd_preserve_logs()
11798 if ((baseaddr < dhd->bus->dongle_ram_base) || in dhdpcie_get_etd_preserve_logs()
11805 err = dhdpcie_bus_membytes(dhd->bus, FALSE, in dhdpcie_get_etd_preserve_logs()
11807 dhd->concise_dbg_buf, (evtlog_buf_arr[i].len)); in dhdpcie_get_etd_preserve_logs()
11813 dhd_dbg_msgtrace_log_parser(dhd, dhd->concise_dbg_buf, in dhdpcie_get_etd_preserve_logs()
11819 MFREE(dhd->osh, evtlog_buf_arr, arr_size); in dhdpcie_get_etd_preserve_logs()
11835 addr = dhd->sssr_reg_info.chipcommon_regs.base_regs.powerctrl; in dhdpcie_resume_chipcommon_powerctrl()
11837 if (!(val & dhd->sssr_reg_info.chipcommon_regs.base_regs.powerctrl_mask)) { in dhdpcie_resume_chipcommon_powerctrl()
11838 addr = dhd->sssr_reg_info.chipcommon_regs.base_regs.powerctrl; in dhdpcie_resume_chipcommon_powerctrl()
11853 addr = dhd->sssr_reg_info.chipcommon_regs.base_regs.powerctrl; in dhdpcie_suspend_chipcommon_powerctrl()
11855 if (reg_val & dhd->sssr_reg_info.chipcommon_regs.base_regs.powerctrl_mask) { in dhdpcie_suspend_chipcommon_powerctrl()
11856 addr = dhd->sssr_reg_info.chipcommon_regs.base_regs.powerctrl; in dhdpcie_suspend_chipcommon_powerctrl()
11872 addr = dhd->sssr_reg_info.chipcommon_regs.base_regs.intmask; in dhdpcie_clear_intmask_and_timer()
11877 addr = dhd->sssr_reg_info.pmu_regs.base_regs.pmuintmask0; in dhdpcie_clear_intmask_and_timer()
11882 addr = dhd->sssr_reg_info.pmu_regs.base_regs.pmuintmask1; in dhdpcie_clear_intmask_and_timer()
11887 addr = dhd->sssr_reg_info.pmu_regs.base_regs.resreqtimer; in dhdpcie_clear_intmask_and_timer()
11892 addr = dhd->sssr_reg_info.pmu_regs.base_regs.macresreqtimer; in dhdpcie_clear_intmask_and_timer()
11897 addr = dhd->sssr_reg_info.pmu_regs.base_regs.macresreqtimer1; in dhdpcie_clear_intmask_and_timer()
11902 if (dhd->sssr_reg_info.vasip_regs.vasip_sr_size) { in dhdpcie_clear_intmask_and_timer()
11903 addr = dhd->sssr_reg_info.vasip_regs.wrapper_regs.ioctrl; in dhdpcie_clear_intmask_and_timer()
11922 if (dhd->sssr_d11_outofreset[i] && in dhdpcie_update_d11_status_from_trapdata()
11923 (dhd->dongle_trap_data & trap_data_mask[i])) { in dhdpcie_update_d11_status_from_trapdata()
11924 dhd->sssr_d11_outofreset[i] = TRUE; in dhdpcie_update_d11_status_from_trapdata()
11926 dhd->sssr_d11_outofreset[i] = FALSE; in dhdpcie_update_d11_status_from_trapdata()
11929 "trap_data:0x%x-0x%x\n", in dhdpcie_update_d11_status_from_trapdata()
11930 __FUNCTION__, i, dhd->sssr_d11_outofreset[i], in dhdpcie_update_d11_status_from_trapdata()
11931 dhd->dongle_trap_data, trap_data_mask[i])); in dhdpcie_update_d11_status_from_trapdata()
11947 addr = dhd->sssr_reg_info.mac_regs[i].wrapper_regs.resetctrl; in dhdpcie_d11_check_outofreset()
11955 dhd->sssr_d11_outofreset[i] = TRUE; in dhdpcie_d11_check_outofreset()
11957 dhd->sssr_d11_outofreset[i] = FALSE; in dhdpcie_d11_check_outofreset()
11960 __FUNCTION__, i, dhd->sssr_d11_outofreset[i])); in dhdpcie_d11_check_outofreset()
11977 if (dhd->sssr_d11_outofreset[i]) { in dhdpcie_d11_clear_clk_req()
11979 addr = dhd->sssr_reg_info.mac_regs[i].wrapper_regs.itopoobb; in dhdpcie_d11_clear_clk_req()
11983 addr = dhd->sssr_reg_info.mac_regs[i].base_regs.clockcontrolstatus; in dhdpcie_d11_clear_clk_req()
11985 dhd->sssr_reg_info.mac_regs[i].base_regs.clockcontrolstatus_val; in dhdpcie_d11_clear_clk_req()
12002 addr = dhd->sssr_reg_info.arm_regs.wrapper_regs.resetctrl; in dhdpcie_arm_clear_clk_req()
12006 addr = dhd->sssr_reg_info.arm_regs.wrapper_regs.itopoobb; in dhdpcie_arm_clear_clk_req()
12010 addr = dhd->sssr_reg_info.arm_regs.base_regs.clockcontrolstatus; in dhdpcie_arm_clear_clk_req()
12011 val = dhd->sssr_reg_info.arm_regs.base_regs.clockcontrolstatus_val; in dhdpcie_arm_clear_clk_req()
12027 addr = dhd->sssr_reg_info.pcie_regs.wrapper_regs.itopoobb; in dhdpcie_pcie_clear_clk_req()
12031 addr = dhd->sssr_reg_info.pcie_regs.base_regs.clockcontrolstatus; in dhdpcie_pcie_clear_clk_req()
12032 val = dhd->sssr_reg_info.pcie_regs.base_regs.clockcontrolstatus_val; in dhdpcie_pcie_clear_clk_req()
12046 addr = dhd->sssr_reg_info.pcie_regs.base_regs.ltrstate; in dhdpcie_pcie_send_ltrsleep()
12080 if (dhd->sssr_d11_outofreset[i]) { in dhdpcie_bring_d11_outofreset()
12082 addr = dhd->sssr_reg_info.mac_regs[i].wrapper_regs.resetctrl; in dhdpcie_bring_d11_outofreset()
12087 addr = dhd->sssr_reg_info.mac_regs[i].wrapper_regs.ioctrl; in dhdpcie_bring_d11_outofreset()
12088 val = dhd->sssr_reg_info.mac_regs[0].wrapper_regs.ioctrl_resetseq_val[0]; in dhdpcie_bring_d11_outofreset()
12091 val = dhd->sssr_reg_info.mac_regs[0].wrapper_regs.ioctrl_resetseq_val[1]; in dhdpcie_bring_d11_outofreset()
12095 addr = dhd->sssr_reg_info.mac_regs[i].wrapper_regs.resetctrl; in dhdpcie_bring_d11_outofreset()
12099 addr = dhd->sssr_reg_info.mac_regs[i].wrapper_regs.ioctrl; in dhdpcie_bring_d11_outofreset()
12100 val = dhd->sssr_reg_info.mac_regs[0].wrapper_regs.ioctrl_resetseq_val[2]; in dhdpcie_bring_d11_outofreset()
12103 val = dhd->sssr_reg_info.mac_regs[0].wrapper_regs.ioctrl_resetseq_val[3]; in dhdpcie_bring_d11_outofreset()
12106 val = dhd->sssr_reg_info.mac_regs[0].wrapper_regs.ioctrl_resetseq_val[4]; in dhdpcie_bring_d11_outofreset()
12121 if (dhd->sssr_d11_outofreset[i]) { in dhdpcie_sssr_dump_get_before_sr()
12122 dhdpcie_get_sssr_fifo_dump(dhd, dhd->sssr_d11_before[i], in dhdpcie_sssr_dump_get_before_sr()
12123 dhd->sssr_reg_info.mac_regs[i].sr_size, in dhdpcie_sssr_dump_get_before_sr()
12124 dhd->sssr_reg_info.mac_regs[i].base_regs.xmtaddress, in dhdpcie_sssr_dump_get_before_sr()
12125 dhd->sssr_reg_info.mac_regs[i].base_regs.xmtdata); in dhdpcie_sssr_dump_get_before_sr()
12129 if (dhd->sssr_reg_info.vasip_regs.vasip_sr_size) { in dhdpcie_sssr_dump_get_before_sr()
12130 dhdpcie_get_sssr_dig_dump(dhd, dhd->sssr_dig_buf_before, in dhdpcie_sssr_dump_get_before_sr()
12131 dhd->sssr_reg_info.vasip_regs.vasip_sr_size, in dhdpcie_sssr_dump_get_before_sr()
12132 dhd->sssr_reg_info.vasip_regs.vasip_sr_addr); in dhdpcie_sssr_dump_get_before_sr()
12133 } else if ((dhd->sssr_reg_info.length > OFFSETOF(sssr_reg_info_v1_t, dig_mem_info)) && in dhdpcie_sssr_dump_get_before_sr()
12134 dhd->sssr_reg_info.dig_mem_info.dig_sr_addr) { in dhdpcie_sssr_dump_get_before_sr()
12135 dhdpcie_get_sssr_dig_dump(dhd, dhd->sssr_dig_buf_before, in dhdpcie_sssr_dump_get_before_sr()
12136 dhd->sssr_reg_info.dig_mem_info.dig_sr_size, in dhdpcie_sssr_dump_get_before_sr()
12137 dhd->sssr_reg_info.dig_mem_info.dig_sr_addr); in dhdpcie_sssr_dump_get_before_sr()
12151 if (dhd->sssr_d11_outofreset[i]) { in dhdpcie_sssr_dump_get_after_sr()
12152 dhdpcie_get_sssr_fifo_dump(dhd, dhd->sssr_d11_after[i], in dhdpcie_sssr_dump_get_after_sr()
12153 dhd->sssr_reg_info.mac_regs[i].sr_size, in dhdpcie_sssr_dump_get_after_sr()
12154 dhd->sssr_reg_info.mac_regs[i].base_regs.xmtaddress, in dhdpcie_sssr_dump_get_after_sr()
12155 dhd->sssr_reg_info.mac_regs[i].base_regs.xmtdata); in dhdpcie_sssr_dump_get_after_sr()
12159 if (dhd->sssr_reg_info.vasip_regs.vasip_sr_size) { in dhdpcie_sssr_dump_get_after_sr()
12160 dhdpcie_get_sssr_dig_dump(dhd, dhd->sssr_dig_buf_after, in dhdpcie_sssr_dump_get_after_sr()
12161 dhd->sssr_reg_info.vasip_regs.vasip_sr_size, in dhdpcie_sssr_dump_get_after_sr()
12162 dhd->sssr_reg_info.vasip_regs.vasip_sr_addr); in dhdpcie_sssr_dump_get_after_sr()
12163 } else if ((dhd->sssr_reg_info.length > OFFSETOF(sssr_reg_info_v1_t, dig_mem_info)) && in dhdpcie_sssr_dump_get_after_sr()
12164 dhd->sssr_reg_info.dig_mem_info.dig_sr_addr) { in dhdpcie_sssr_dump_get_after_sr()
12165 dhdpcie_get_sssr_dig_dump(dhd, dhd->sssr_dig_buf_after, in dhdpcie_sssr_dump_get_after_sr()
12166 dhd->sssr_reg_info.dig_mem_info.dig_sr_size, in dhdpcie_sssr_dump_get_after_sr()
12167 dhd->sssr_reg_info.dig_mem_info.dig_sr_addr); in dhdpcie_sssr_dump_get_after_sr()
12178 if (!dhd->sssr_inited) { in dhdpcie_sssr_dump()
12183 if (dhd->bus->is_linkdown) { in dhdpcie_sssr_dump()
12212 dhd->sssr_dump_collected = TRUE; in dhdpcie_sssr_dump()
12221 if (!dhd->sssr_inited) { in dhdpcie_fis_trigger()
12226 if (dhd->bus->is_linkdown) { in dhdpcie_fis_trigger()
12232 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, in dhdpcie_fis_trigger()
12233 DAR_FIS_CTRL(dhd->bus->sih->buscorerev), ~0, DAR_FIS_START_MASK); in dhdpcie_fis_trigger()
12250 if (!dhd->sssr_inited) { in dhdpcie_fis_dump()
12255 if (dhd->bus->is_linkdown) { in dhdpcie_fis_dump()
12261 PMU_REG(dhd->bus->sih, min_res_mask, ~0, in dhdpcie_fis_dump()
12262 PMU_REG(dhd->bus->sih, max_res_mask, 0, 0)); in dhdpcie_fis_dump()
12266 dhd->sssr_d11_outofreset[i] = TRUE; in dhdpcie_fis_dump()
12273 PMU_REG(dhd->bus->sih, fis_ctrl_status, PMU_CLEAR_FIS_DONE_MASK, PMU_CLEAR_FIS_DONE_MASK); in dhdpcie_fis_dump()
12299 return &dhd->bus->wake_counts; in dhd_bus_get_wakecount()
12304 return bcmpcie_set_get_wake(dhd->bus, 0); in dhd_bus_get_bus_wake()
12318 uint32 addr = bus->dongle_ram_base + (bus->ramsize - BCM_NVRAM_OFFSET_TCM) - in dhdpcie_wrt_rnd()
12319 ((bus->nvram_csm & 0xffff)* BCM_NVRAM_IMG_COMPRS_FACTOR + sizeof(rnd_data)); in dhdpcie_wrt_rnd()
12327 addr -= count; in dhdpcie_wrt_rnd()
12330 bus->dhd->rnd_buf = NULL; in dhdpcie_wrt_rnd()
12332 ret = dhd_get_rnd_info(bus->dhd); in dhdpcie_wrt_rnd()
12333 if (bus->dhd->rnd_buf) { in dhdpcie_wrt_rnd()
12336 dhdpcie_bus_membytes(bus, TRUE, addr, bus->dhd->rnd_buf, bus->dhd->rnd_len); in dhdpcie_wrt_rnd()
12339 dhd_dump_rnd_info(bus->dhd, bus->dhd->rnd_buf, bus->dhd->rnd_len); in dhdpcie_wrt_rnd()
12341 /* bus->dhd->rnd_buf is allocated in dhd_get_rnd_info, free here */ in dhdpcie_wrt_rnd()
12342 MFREE(bus->dhd->osh, bus->dhd->rnd_buf, bus->dhd->rnd_len); in dhdpcie_wrt_rnd()
12343 bus->dhd->rnd_buf = NULL; in dhdpcie_wrt_rnd()
12357 dhd_dump_rnd_info(bus->dhd, rand_buf, count); in dhdpcie_wrt_rnd()
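dhdpcie_wrt_rnd() places the random-seed block just below the compressed NVRAM image near the top of dongle RAM, then backs off by the write length. A sketch of that address arithmetic; the offset and compression-factor constants reuse the driver's macro names but with assumed example values:

#include <stdint.h>
#include <stddef.h>

#define NVRAM_OFFSET_TCM_SKETCH       1024u  /* stand-in for BCM_NVRAM_OFFSET_TCM        */
#define NVRAM_IMG_COMPRS_FACTOR_SK    4u     /* stand-in for BCM_NVRAM_IMG_COMPRS_FACTOR */

/* Top-of-RAM layout assumed: [... fw/heap ...][rnd block][compressed nvram][offset]. */
static uint32_t rnd_write_addr(uint32_t ram_base, uint32_t ram_size,
                               uint32_t nvram_csm, size_t hdr_size, size_t count)
{
	uint32_t nvram_sz = (nvram_csm & 0xffffu) * NVRAM_IMG_COMPRS_FACTOR_SK;
	uint32_t addr = ram_base + (ram_size - NVRAM_OFFSET_TCM_SKETCH)
	              - (nvram_sz + (uint32_t)hdr_size);

	return addr - (uint32_t)count;   /* final -= count before the membytes write */
}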
12366 struct dhd_bus *bus = dhd->bus; in dhd_pcie_intr_count_dump()
12369 DHD_ERROR(("\n ------- DUMPING INTR enable/disable counters ------- \r\n")); in dhd_pcie_intr_count_dump()
12371 bus->resume_intr_enable_count, bus->dpc_intr_enable_count)); in dhd_pcie_intr_count_dump()
12373 bus->isr_intr_disable_count, bus->suspend_intr_disable_count)); in dhd_pcie_intr_count_dump()
12376 bus->oob_intr_count, bus->oob_intr_enable_count, in dhd_pcie_intr_count_dump()
12377 bus->oob_intr_disable_count)); in dhd_pcie_intr_count_dump()
12380 GET_SEC_USEC(bus->last_oob_irq_time))); in dhd_pcie_intr_count_dump()
12383 GET_SEC_USEC(bus->last_oob_irq_enable_time), in dhd_pcie_intr_count_dump()
12384 GET_SEC_USEC(bus->last_oob_irq_disable_time))); in dhd_pcie_intr_count_dump()
12390 bus->dpc_return_busdown_count, bus->non_ours_irq_count)); in dhd_pcie_intr_count_dump()
12397 GET_SEC_USEC(bus->isr_entry_time), in dhd_pcie_intr_count_dump()
12398 GET_SEC_USEC(bus->isr_exit_time))); in dhd_pcie_intr_count_dump()
12401 GET_SEC_USEC(bus->dpc_sched_time), in dhd_pcie_intr_count_dump()
12402 GET_SEC_USEC(bus->last_non_ours_irq_time))); in dhd_pcie_intr_count_dump()
12405 GET_SEC_USEC(bus->dpc_entry_time), in dhd_pcie_intr_count_dump()
12406 GET_SEC_USEC(bus->last_process_ctrlbuf_time))); in dhd_pcie_intr_count_dump()
12409 GET_SEC_USEC(bus->last_process_flowring_time), in dhd_pcie_intr_count_dump()
12410 GET_SEC_USEC(bus->last_process_txcpl_time))); in dhd_pcie_intr_count_dump()
12414 GET_SEC_USEC(bus->last_process_rxcpl_time), in dhd_pcie_intr_count_dump()
12415 GET_SEC_USEC(bus->last_process_infocpl_time), in dhd_pcie_intr_count_dump()
12416 GET_SEC_USEC(bus->last_process_edl_time))); in dhd_pcie_intr_count_dump()
12419 GET_SEC_USEC(bus->dpc_exit_time), in dhd_pcie_intr_count_dump()
12420 GET_SEC_USEC(bus->resched_dpc_time))); in dhd_pcie_intr_count_dump()
12422 GET_SEC_USEC(bus->last_d3_inform_time))); in dhd_pcie_intr_count_dump()
12426 GET_SEC_USEC(bus->last_suspend_start_time), in dhd_pcie_intr_count_dump()
12427 GET_SEC_USEC(bus->last_suspend_end_time))); in dhd_pcie_intr_count_dump()
12430 GET_SEC_USEC(bus->last_resume_start_time), in dhd_pcie_intr_count_dump()
12431 GET_SEC_USEC(bus->last_resume_end_time))); in dhd_pcie_intr_count_dump()
12439 GET_SEC_USEC(dhd->logtrace_thr_ts.entry_time), in dhd_pcie_intr_count_dump()
12440 GET_SEC_USEC(dhd->logtrace_thr_ts.sem_down_time), in dhd_pcie_intr_count_dump()
12441 GET_SEC_USEC(dhd->logtrace_thr_ts.flush_time), in dhd_pcie_intr_count_dump()
12442 GET_SEC_USEC(dhd->logtrace_thr_ts.unexpected_break_time), in dhd_pcie_intr_count_dump()
12443 GET_SEC_USEC(dhd->logtrace_thr_ts.complete_time))); in dhd_pcie_intr_count_dump()
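/* dhd_pcie_dump_wrapper_regs: dumps the ARM CR4/CA7 core registers (corecontrol,
 * corecapabilities, corestatus, NMI/interrupt masks, cycle counter, clk_ctl_st,
 * powerctl, ...) and the wrapper intstatus[0..3], then restores the saved core index.
 */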
12457 si_t *sih = dhd->bus->sih; in dhd_pcie_dump_wrapper_regs()
12489 val = R_REG(dhd->osh, ARM_CR4_REG(cr4regs, corecontrol)); in dhd_pcie_dump_wrapper_regs()
12491 val = R_REG(dhd->osh, ARM_CR4_REG(cr4regs, corecapabilities)); in dhd_pcie_dump_wrapper_regs()
12494 val = R_REG(dhd->osh, ARM_CR4_REG(cr4regs, corestatus)); in dhd_pcie_dump_wrapper_regs()
12496 val = R_REG(dhd->osh, ARM_CR4_REG(cr4regs, nmiisrst)); in dhd_pcie_dump_wrapper_regs()
12498 val = R_REG(dhd->osh, ARM_CR4_REG(cr4regs, nmimask)); in dhd_pcie_dump_wrapper_regs()
12500 val = R_REG(dhd->osh, ARM_CR4_REG(cr4regs, isrmask)); in dhd_pcie_dump_wrapper_regs()
12502 val = R_REG(dhd->osh, ARM_CR4_REG(cr4regs, swintreg)); in dhd_pcie_dump_wrapper_regs()
12504 val = R_REG(dhd->osh, ARM_CR4_REG(cr4regs, intstatus)); in dhd_pcie_dump_wrapper_regs()
12506 val = R_REG(dhd->osh, ARM_CR4_REG(cr4regs, cyclecnt)); in dhd_pcie_dump_wrapper_regs()
12508 val = R_REG(dhd->osh, ARM_CR4_REG(cr4regs, inttimer)); in dhd_pcie_dump_wrapper_regs()
12510 val = R_REG(dhd->osh, ARM_CR4_REG(cr4regs, clk_ctl_st)); in dhd_pcie_dump_wrapper_regs()
12512 val = R_REG(dhd->osh, ARM_CR4_REG(cr4regs, powerctl)); in dhd_pcie_dump_wrapper_regs()
12518 val = R_REG(dhd->osh, ARM_CA7_REG(ca7regs, corecontrol)); in dhd_pcie_dump_wrapper_regs()
12520 val = R_REG(dhd->osh, ARM_CA7_REG(ca7regs, corecapabilities)); in dhd_pcie_dump_wrapper_regs()
12523 val = R_REG(dhd->osh, ARM_CA7_REG(ca7regs, corestatus)); in dhd_pcie_dump_wrapper_regs()
12525 val = R_REG(dhd->osh, ARM_CA7_REG(ca7regs, tracecontrol)); in dhd_pcie_dump_wrapper_regs()
12527 val = R_REG(dhd->osh, ARM_CA7_REG(ca7regs, clk_ctl_st)); in dhd_pcie_dump_wrapper_regs()
12529 val = R_REG(dhd->osh, ARM_CA7_REG(ca7regs, powerctl)); in dhd_pcie_dump_wrapper_regs()
12543 val = R_REG(dhd->osh, &reg->intstatus[0]); in dhd_pcie_dump_wrapper_regs()
12545 val = R_REG(dhd->osh, &reg->intstatus[1]); in dhd_pcie_dump_wrapper_regs()
12547 val = R_REG(dhd->osh, &reg->intstatus[2]); in dhd_pcie_dump_wrapper_regs()
12549 val = R_REG(dhd->osh, &reg->intstatus[3]); in dhd_pcie_dump_wrapper_regs()
12562 si_setcoreidx(dhd->bus->sih, save_idx); in dhd_pcie_dump_wrapper_regs()
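/* dhd_pcie_dma_info_dump: skipped when the PCIe link is down; otherwise reads the
 * PCIe core DMA engine registers at offsets 0x200-0x274 through si_corereg().
 */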
12570 if (dhd->bus->is_linkdown) { in dhd_pcie_dma_info_dump()
12571 DHD_ERROR(("\n ------- SKIP DUMPING DMA Registers " in dhd_pcie_dma_info_dump()
12572 "due to PCIe link down ------- \r\n")); in dhd_pcie_dma_info_dump()
12576 DHD_ERROR(("\n ------- DUMPING DMA Registers ------- \r\n")); in dhd_pcie_dma_info_dump()
12580 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x200, 0, 0), in dhd_pcie_dma_info_dump()
12581 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x204, 0, 0))); in dhd_pcie_dma_info_dump()
12583 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x208, 0, 0), in dhd_pcie_dma_info_dump()
12584 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x20C, 0, 0))); in dhd_pcie_dma_info_dump()
12586 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x210, 0, 0), in dhd_pcie_dma_info_dump()
12587 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x214, 0, 0))); in dhd_pcie_dma_info_dump()
12590 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x220, 0, 0), in dhd_pcie_dma_info_dump()
12591 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x224, 0, 0))); in dhd_pcie_dma_info_dump()
12593 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x228, 0, 0), in dhd_pcie_dma_info_dump()
12594 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x22C, 0, 0))); in dhd_pcie_dma_info_dump()
12596 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x230, 0, 0), in dhd_pcie_dma_info_dump()
12597 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x234, 0, 0))); in dhd_pcie_dma_info_dump()
12601 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x240, 0, 0), in dhd_pcie_dma_info_dump()
12602 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x244, 0, 0))); in dhd_pcie_dma_info_dump()
12604 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x248, 0, 0), in dhd_pcie_dma_info_dump()
12605 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x24C, 0, 0))); in dhd_pcie_dma_info_dump()
12607 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x250, 0, 0), in dhd_pcie_dma_info_dump()
12608 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x254, 0, 0))); in dhd_pcie_dma_info_dump()
12611 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x260, 0, 0), in dhd_pcie_dma_info_dump()
12612 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x264, 0, 0))); in dhd_pcie_dma_info_dump()
12614 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x268, 0, 0), in dhd_pcie_dma_info_dump()
12615 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x26C, 0, 0))); in dhd_pcie_dma_info_dump()
12617 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x270, 0, 0), in dhd_pcie_dma_info_dump()
12618 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, 0x274, 0, 0))); in dhd_pcie_dma_info_dump()
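/* dhd_pcie_dump_int_regs: reads the PCIe mailbox interrupt status/mask and the D2H
 * doorbell register, treating an all-ones read as an inaccessible core, and also
 * prints the shared D2H mailbox data and the default interrupt mask.
 */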
12631 DHD_ERROR(("\n ------- DUMPING INTR Status and Masks ------- \r\n")); in dhd_pcie_dump_int_regs()
12632 intstatus = si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, in dhd_pcie_dump_int_regs()
12633 dhd->bus->pcie_mailbox_int, 0, 0); in dhd_pcie_dump_int_regs()
12634 if (intstatus == (uint32)-1) { in dhd_pcie_dump_int_regs()
12639 intmask = si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, in dhd_pcie_dump_int_regs()
12640 dhd->bus->pcie_mailbox_mask, 0, 0); in dhd_pcie_dump_int_regs()
12641 if (intmask == (uint32)-1) { in dhd_pcie_dump_int_regs()
12646 d2h_db0 = si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, in dhd_pcie_dump_int_regs()
12648 if (d2h_db0 == (uint32)-1) { in dhd_pcie_dump_int_regs()
12656 dhd_bus_cmn_readshared(dhd->bus, &d2h_mb_data, D2H_MB_DATA, 0); in dhd_pcie_dump_int_regs()
12658 dhd->bus->def_intmask)); in dhd_pcie_dump_int_regs()
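/* dhd_pcie_dump_rc_conf_space_cap: dumps the root-complex AER extended capability
 * (PCIE_EXTCAP_ID_ERR) registers from RC configuration space.
 */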
12666 DHD_ERROR(("\n ------- DUMPING PCIE RC config space Registers ------- \r\n")); in dhd_pcie_dump_rc_conf_space_cap()
12668 dhdpcie_rc_access_cap(dhd->bus, PCIE_EXTCAP_ID_ERR, in dhd_pcie_dump_rc_conf_space_cap()
12672 dhdpcie_rc_access_cap(dhd->bus, PCIE_EXTCAP_ID_ERR, in dhd_pcie_dump_rc_conf_space_cap()
12674 dhdpcie_rc_access_cap(dhd->bus, PCIE_EXTCAP_ID_ERR, in dhd_pcie_dump_rc_conf_space_cap()
12676 dhdpcie_rc_access_cap(dhd->bus, PCIE_EXTCAP_ID_ERR, in dhd_pcie_dump_rc_conf_space_cap()
12678 dhdpcie_rc_access_cap(dhd->bus, PCIE_EXTCAP_ID_ERR, in dhd_pcie_dump_rc_conf_space_cap()
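/* dhd_pcie_debug_info_dump: umbrella debug dump - bus low-power state, host IRQ
 * state, EP resource info, RC link capability, EP config space (status/command,
 * BARs, PM/link/device control, AER TLP header logs), DAR registers and, for
 * buscorerev >= 24, the PHY LTSSM history, TREFUP and per-function/power
 * interrupt status/mask registers.
 */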
12688 DHD_ERROR(("bus->bus_low_power_state = %d\n", dhd->bus->bus_low_power_state)); in dhd_pcie_debug_info_dump()
12689 host_irq_disabled = dhdpcie_irq_disabled(dhd->bus); in dhd_pcie_debug_info_dump()
12694 DHD_ERROR(("\n ------- DUMPING PCIE EP Resource Info ------- \r\n")); in dhd_pcie_debug_info_dump()
12695 dhdpcie_dump_resource(dhd->bus); in dhd_pcie_debug_info_dump()
12700 dhd_debug_get_rc_linkcap(dhd->bus))); in dhd_pcie_debug_info_dump()
12702 if (dhd->bus->is_linkdown && !dhd->bus->cto_triggered) { in dhd_pcie_debug_info_dump()
12708 DHD_ERROR(("\n ------- DUMPING PCIE EP config space Registers ------- \r\n")); in dhd_pcie_debug_info_dump()
12712 dhd_pcie_config_read(dhd->bus->osh, PCIECFGREG_STATUS_CMD, sizeof(uint32)), in dhd_pcie_debug_info_dump()
12714 dhd_pcie_config_read(dhd->bus->osh, PCIECFGREG_BASEADDR0, sizeof(uint32)), in dhd_pcie_debug_info_dump()
12716 dhd_pcie_config_read(dhd->bus->osh, PCIECFGREG_BASEADDR1, sizeof(uint32)), in dhd_pcie_debug_info_dump()
12718 dhd_pcie_config_read(dhd->bus->osh, PCIE_CFG_PMCSR, sizeof(uint32)))); in dhd_pcie_debug_info_dump()
12721 dhd_pcie_config_read(dhd->bus->osh, PCIECFGREG_LINK_STATUS_CTRL, in dhd_pcie_debug_info_dump()
12723 dhd_pcie_config_read(dhd->bus->osh, PCIECFGGEN_DEV_STATUS_CTRL2, in dhd_pcie_debug_info_dump()
12725 dhd_pcie_config_read(dhd->bus->osh, PCIECFGREG_PML1_SUB_CTRL1, in dhd_pcie_debug_info_dump()
12729 dhdpcie_ep_access_cap(dhd->bus, PCIE_EXTCAP_ID_ERR, in dhd_pcie_debug_info_dump()
12733 dhd_pcie_config_read(dhd->bus->osh, PCI_TLP_HDR_LOG1, sizeof(uint32)), in dhd_pcie_debug_info_dump()
12735 dhd_pcie_config_read(dhd->bus->osh, PCI_TLP_HDR_LOG2, sizeof(uint32)), in dhd_pcie_debug_info_dump()
12737 dhd_pcie_config_read(dhd->bus->osh, PCI_TLP_HDR_LOG3, sizeof(uint32)), in dhd_pcie_debug_info_dump()
12739 dhd_pcie_config_read(dhd->bus->osh, PCI_TLP_HDR_LOG4, sizeof(uint32)))); in dhd_pcie_debug_info_dump()
12740 if (dhd->bus->sih->buscorerev >= 24) { in dhd_pcie_debug_info_dump()
12743 dhd_pcie_config_read(dhd->bus->osh, PCIECFGREG_DEV_STATUS_CTRL, in dhd_pcie_debug_info_dump()
12745 dhd_pcie_config_read(dhd->bus->osh, PCIE_CFG_SUBSYSTEM_CONTROL, in dhd_pcie_debug_info_dump()
12747 dhd_pcie_config_read(dhd->bus->osh, PCIECFGREG_PML1_SUB_CTRL2, in dhd_pcie_debug_info_dump()
12749 dhd_bus_dump_dar_registers(dhd->bus); in dhd_pcie_debug_info_dump()
12753 if (dhd->bus->is_linkdown) { in dhd_pcie_debug_info_dump()
12758 DHD_ERROR(("\n ------- DUMPING PCIE core Registers ------- \r\n")); in dhd_pcie_debug_info_dump()
12762 dhd_pcie_corereg_read(dhd->bus->sih, PCIECFGREG_PHY_DBG_CLKREQ0), in dhd_pcie_debug_info_dump()
12764 dhd_pcie_corereg_read(dhd->bus->sih, PCIECFGREG_PHY_DBG_CLKREQ1), in dhd_pcie_debug_info_dump()
12766 dhd_pcie_corereg_read(dhd->bus->sih, PCIECFGREG_PHY_DBG_CLKREQ2), in dhd_pcie_debug_info_dump()
12768 dhd_pcie_corereg_read(dhd->bus->sih, PCIECFGREG_PHY_DBG_CLKREQ3))); in dhd_pcie_debug_info_dump()
12771 if (dhd->bus->sih->buscorerev >= 24) { in dhd_pcie_debug_info_dump()
12776 dhd_pcie_corereg_read(dhd->bus->sih, PCIECFGREG_PHY_LTSSM_HIST_0), in dhd_pcie_debug_info_dump()
12778 dhd_pcie_corereg_read(dhd->bus->sih, PCIECFGREG_PHY_LTSSM_HIST_1), in dhd_pcie_debug_info_dump()
12780 dhd_pcie_corereg_read(dhd->bus->sih, PCIECFGREG_PHY_LTSSM_HIST_2), in dhd_pcie_debug_info_dump()
12782 dhd_pcie_corereg_read(dhd->bus->sih, PCIECFGREG_PHY_LTSSM_HIST_3))); in dhd_pcie_debug_info_dump()
12786 dhd_pcie_corereg_read(dhd->bus->sih, PCIECFGREG_TREFUP), in dhd_pcie_debug_info_dump()
12788 dhd_pcie_corereg_read(dhd->bus->sih, PCIECFGREG_TREFUP_EXT))); in dhd_pcie_debug_info_dump()
12794 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, in dhd_pcie_debug_info_dump()
12797 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, in dhd_pcie_debug_info_dump()
12799 PCIFunctionIntstatus(dhd->bus->sih->buscorerev), in dhd_pcie_debug_info_dump()
12800 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, in dhd_pcie_debug_info_dump()
12801 PCIFunctionIntstatus(dhd->bus->sih->buscorerev), 0, 0), in dhd_pcie_debug_info_dump()
12802 PCIFunctionIntmask(dhd->bus->sih->buscorerev), in dhd_pcie_debug_info_dump()
12803 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, in dhd_pcie_debug_info_dump()
12804 PCIFunctionIntmask(dhd->bus->sih->buscorerev), 0, 0), in dhd_pcie_debug_info_dump()
12805 PCIPowerIntstatus(dhd->bus->sih->buscorerev), in dhd_pcie_debug_info_dump()
12806 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, in dhd_pcie_debug_info_dump()
12807 PCIPowerIntstatus(dhd->bus->sih->buscorerev), 0, 0), in dhd_pcie_debug_info_dump()
12808 PCIPowerIntmask(dhd->bus->sih->buscorerev), in dhd_pcie_debug_info_dump()
12809 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, in dhd_pcie_debug_info_dump()
12810 PCIPowerIntmask(dhd->bus->sih->buscorerev), 0, 0))); in dhd_pcie_debug_info_dump()
12814 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, in dhd_pcie_debug_info_dump()
12817 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, in dhd_pcie_debug_info_dump()
12820 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, in dhd_pcie_debug_info_dump()
12823 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, in dhd_pcie_debug_info_dump()
12827 si_corereg(dhd->bus->sih, dhd->bus->sih->buscoreidx, in dhd_pcie_debug_info_dump()
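/* Small accessors: the force_bt_quiesce flag plus the HP2P TX/RX completion ring
 * max-item getters and setters.
 */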
12842 return bus->force_bt_quiesce; in dhd_bus_force_bt_quiesce_enabled()
12850 return bus->hp2p_txcpl_max_items; in dhd_bus_get_hp2p_ring_max_size()
12852 return bus->hp2p_rxcpl_max_items; in dhd_bus_get_hp2p_ring_max_size()
12859 bus->hp2p_txcpl_max_items = val; in dhd_bus_set_hp2p_ring_max_size()
12861 bus->hp2p_rxcpl_max_items = val; in dhd_bus_set_hp2p_ring_max_size()
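/* dhd_bus_tcm_test: verifies dongle TCM by allocating MEMBLOCK-sized read/write
 * buffers, walking RAM from dongle_ram_base over ramsize bytes while writing and
 * reading back each block; the buffers are freed on both error and success paths.
 */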
12885 read_buf = MALLOCZ(bus->dhd->osh, MEMBLOCK); in dhd_bus_tcm_test()
12892 write_buf = MALLOCZ(bus->dhd->osh, MEMBLOCK); in dhd_bus_tcm_test()
12895 MFREE(bus->dhd->osh, read_buf, MEMBLOCK); in dhd_bus_tcm_test()
12900 DHD_ERROR(("%s: start %x, size: %x\n", __FUNCTION__, bus->dongle_ram_base, bus->ramsize)); in dhd_bus_tcm_test()
12904 start = bus->dongle_ram_base; in dhd_bus_tcm_test()
12906 size = bus->ramsize; in dhd_bus_tcm_test()
12916 MFREE(bus->dhd->osh, read_buf, MEMBLOCK); in dhd_bus_tcm_test()
12917 MFREE(bus->dhd->osh, write_buf, MEMBLOCK); in dhd_bus_tcm_test()
12924 MFREE(bus->dhd->osh, read_buf, MEMBLOCK); in dhd_bus_tcm_test()
12925 MFREE(bus->dhd->osh, write_buf, MEMBLOCK); in dhd_bus_tcm_test()
12935 MFREE(bus->dhd->osh, read_buf, MEMBLOCK); in dhd_bus_tcm_test()
12936 MFREE(bus->dhd->osh, write_buf, MEMBLOCK); in dhd_bus_tcm_test()
12941 size -= read_size; in dhd_bus_tcm_test()
12947 MFREE(bus->dhd->osh, read_buf, MEMBLOCK); in dhd_bus_tcm_test()
12948 MFREE(bus->dhd->osh, write_buf, MEMBLOCK); in dhd_bus_tcm_test()