Lines Matching refs:pkt

71 struct cpucp_packet pkt = {}; in hl_fw_send_pci_access_msg() local
73 pkt.ctl = cpu_to_le32(opcode << CPUCP_PKT_CTL_OPCODE_SHIFT); in hl_fw_send_pci_access_msg()
75 return hdev->asic_funcs->send_cpu_message(hdev, (u32 *) &pkt, in hl_fw_send_pci_access_msg()
76 sizeof(pkt), 0, NULL); in hl_fw_send_pci_access_msg()
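The references above (source lines 71-76) show the simplest CPU-CP exchange in hl_fw_send_pci_access_msg(): a zero-initialized struct cpucp_packet on the stack, only the opcode encoded into ctl, and a send with no result pointer. A minimal sketch of that shape follows; the function signature and the surrounding driver headers (habanalabs.h and the cpucp packet definitions) are assumed rather than taken from this listing.

int hl_fw_send_pci_access_msg(struct hl_device *hdev, u32 opcode)
{
	struct cpucp_packet pkt = {};

	/* The opcode is placed in the ctl word at CPUCP_PKT_CTL_OPCODE_SHIFT */
	pkt.ctl = cpu_to_le32(opcode << CPUCP_PKT_CTL_OPCODE_SHIFT);

	/* ASIC-specific callback queues the packet to the device CPU;
	 * the NULL result pointer means the firmware's reply value is not needed.
	 */
	return hdev->asic_funcs->send_cpu_message(hdev, (u32 *) &pkt,
						  sizeof(pkt), 0, NULL);
}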
82 struct cpucp_packet *pkt; in hl_fw_send_cpu_message() local
87 pkt = hdev->asic_funcs->cpu_accessible_dma_pool_alloc(hdev, len, in hl_fw_send_cpu_message()
89 if (!pkt) { in hl_fw_send_cpu_message()
95 memcpy(pkt, msg, len); in hl_fw_send_cpu_message()
113 rc = hl_poll_timeout_memory(hdev, &pkt->fence, tmp, in hl_fw_send_cpu_message()
125 tmp = le32_to_cpu(pkt->ctl); in hl_fw_send_cpu_message()
135 *result = (long) le64_to_cpu(pkt->result); in hl_fw_send_cpu_message()
141 hdev->asic_funcs->cpu_accessible_dma_pool_free(hdev, len, pkt); in hl_fw_send_cpu_message()
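hl_fw_send_cpu_message() is the generic send path behind the send_cpu_message callback used by every other function in this listing. Its referenced lines (82-141) show the full life cycle: the message is staged in CPU-accessible DMA memory, the driver polls the packet's fence field until firmware completes it, then reads the status from ctl and the 64-bit reply from result before freeing the staging buffer. The sketch below fills the gaps between those lines; the signature, the queueing step, CPUCP_PACKET_FENCE_VAL, the RC mask/shift macros and the poll interval are assumptions, not part of the listing.

int hl_fw_send_cpu_message(struct hl_device *hdev, u32 hw_queue_id, u32 *msg,
			   u16 len, u32 timeout, long *result)
{
	struct cpucp_packet *pkt;
	dma_addr_t pkt_dma_addr;
	u32 tmp;
	int rc;

	/* Stage the message where both the host and the device CPU can see it */
	pkt = hdev->asic_funcs->cpu_accessible_dma_pool_alloc(hdev, len,
							      &pkt_dma_addr);
	if (!pkt)
		return -ENOMEM;

	memcpy(pkt, msg, len);

	/* Queue pkt_dma_addr on the device CPU queue (elided here), then wait
	 * for firmware to stamp the fence field of the staged packet.
	 */
	rc = hl_poll_timeout_memory(hdev, &pkt->fence, tmp,
				    (tmp == CPUCP_PACKET_FENCE_VAL),
				    1000, timeout, true);
	if (rc)
		goto out;

	/* Firmware writes its status into ctl and its reply into result */
	tmp = le32_to_cpu(pkt->ctl);
	if ((tmp & CPUCP_PKT_CTL_RC_MASK) >> CPUCP_PKT_CTL_RC_SHIFT)
		rc = -EIO;
	else if (result)
		*result = (long) le64_to_cpu(pkt->result);

out:
	hdev->asic_funcs->cpu_accessible_dma_pool_free(hdev, len, pkt);
	return rc;
}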
148 struct cpucp_packet pkt; in hl_fw_unmask_irq() local
152 memset(&pkt, 0, sizeof(pkt)); in hl_fw_unmask_irq()
154 pkt.ctl = cpu_to_le32(CPUCP_PACKET_UNMASK_RAZWI_IRQ << in hl_fw_unmask_irq()
156 pkt.value = cpu_to_le64(event_type); in hl_fw_unmask_irq()
158 rc = hdev->asic_funcs->send_cpu_message(hdev, (u32 *) &pkt, sizeof(pkt), in hl_fw_unmask_irq()
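hl_fw_unmask_irq() (source lines 148-158) uses the same fixed-size packet but carries a payload: the event type travels in the 64-bit value field and a reply is requested. A short sketch, with the signature and the local result variable assumed:

int hl_fw_unmask_irq(struct hl_device *hdev, u16 event_type)
{
	struct cpucp_packet pkt;
	long result;

	memset(&pkt, 0, sizeof(pkt));

	pkt.ctl = cpu_to_le32(CPUCP_PACKET_UNMASK_RAZWI_IRQ <<
			      CPUCP_PKT_CTL_OPCODE_SHIFT);
	/* Which event's RAZWI interrupt the firmware should unmask */
	pkt.value = cpu_to_le64(event_type);

	return hdev->asic_funcs->send_cpu_message(hdev, (u32 *) &pkt,
						  sizeof(pkt), 0, &result);
}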
170 struct cpucp_unmask_irq_arr_packet *pkt; in hl_fw_unmask_irq_arr() local
187 pkt = kzalloc(total_pkt_size, GFP_KERNEL); in hl_fw_unmask_irq_arr()
188 if (!pkt) in hl_fw_unmask_irq_arr()
191 pkt->length = cpu_to_le32(irq_arr_size / sizeof(irq_arr[0])); in hl_fw_unmask_irq_arr()
192 memcpy(&pkt->irqs, irq_arr, irq_arr_size); in hl_fw_unmask_irq_arr()
194 pkt->cpucp_pkt.ctl = cpu_to_le32(CPUCP_PACKET_UNMASK_RAZWI_IRQ_ARRAY << in hl_fw_unmask_irq_arr()
197 rc = hdev->asic_funcs->send_cpu_message(hdev, (u32 *) pkt, in hl_fw_unmask_irq_arr()
203 kfree(pkt); in hl_fw_unmask_irq_arr()
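hl_fw_unmask_irq_arr() (source lines 170-203) handles a variable-length payload: the driver kzalloc()s a cpucp_unmask_irq_arr_packet, which embeds the cpucp_packet header as cpucp_pkt and is followed by the IRQ list, fills in the element count, and sends the whole allocation. In the sketch the size calculation, alignment handling and error reporting are assumptions; the field accesses follow the listed lines.

int hl_fw_unmask_irq_arr(struct hl_device *hdev, const u32 *irq_arr,
			 size_t irq_arr_size)
{
	struct cpucp_unmask_irq_arr_packet *pkt;
	size_t total_pkt_size;
	long result;
	int rc;

	/* Header plus the trailing IRQ array (alignment checks elided) */
	total_pkt_size = sizeof(*pkt) + irq_arr_size;

	pkt = kzalloc(total_pkt_size, GFP_KERNEL);
	if (!pkt)
		return -ENOMEM;

	/* Element count, then the raw IRQ numbers right after the header */
	pkt->length = cpu_to_le32(irq_arr_size / sizeof(irq_arr[0]));
	memcpy(&pkt->irqs, irq_arr, irq_arr_size);

	pkt->cpucp_pkt.ctl = cpu_to_le32(CPUCP_PACKET_UNMASK_RAZWI_IRQ_ARRAY <<
					 CPUCP_PKT_CTL_OPCODE_SHIFT);

	rc = hdev->asic_funcs->send_cpu_message(hdev, (u32 *) pkt,
						total_pkt_size, 0, &result);

	kfree(pkt);
	return rc;
}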
274 struct cpucp_packet pkt = {}; in hl_fw_cpucp_info_get() local
292 pkt.ctl = cpu_to_le32(CPUCP_PACKET_INFO_GET << in hl_fw_cpucp_info_get()
294 pkt.addr = cpu_to_le64(cpucp_info_dma_addr); in hl_fw_cpucp_info_get()
295 pkt.data_max_size = cpu_to_le32(sizeof(struct cpucp_info)); in hl_fw_cpucp_info_get()
297 rc = hdev->asic_funcs->send_cpu_message(hdev, (u32 *) &pkt, sizeof(pkt), in hl_fw_cpucp_info_get()
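hl_fw_cpucp_info_get() (source lines 274-297) shows the bulk-query pattern: firmware fills a caller-provided DMA buffer, so the packet carries the buffer's bus address in addr and its capacity in data_max_size. The sketch below uses a hypothetical helper name and takes an already-allocated DMA address as a parameter; buffer allocation, the real timeout constant and the copy back into driver-private structures are elided. The EEPROM and total-energy queries further down follow the same shape with different opcodes and payload sizes.

/* Hypothetical helper; the real function allocates the DMA buffer itself */
static int cpucp_info_query(struct hl_device *hdev,
			    dma_addr_t cpucp_info_dma_addr)
{
	struct cpucp_packet pkt = {};
	long result;

	pkt.ctl = cpu_to_le32(CPUCP_PACKET_INFO_GET <<
			      CPUCP_PKT_CTL_OPCODE_SHIFT);
	/* Where firmware should write, and the most it may write there */
	pkt.addr = cpu_to_le64(cpucp_info_dma_addr);
	pkt.data_max_size = cpu_to_le32(sizeof(struct cpucp_info));

	return hdev->asic_funcs->send_cpu_message(hdev, (u32 *) &pkt,
						  sizeof(pkt), 0, &result);
}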
325 struct cpucp_packet pkt = {}; in hl_fw_get_eeprom_data() local
342 pkt.ctl = cpu_to_le32(CPUCP_PACKET_EEPROM_DATA_GET << in hl_fw_get_eeprom_data()
344 pkt.addr = cpu_to_le64(eeprom_info_dma_addr); in hl_fw_get_eeprom_data()
345 pkt.data_max_size = cpu_to_le32(max_size); in hl_fw_get_eeprom_data()
347 rc = hdev->asic_funcs->send_cpu_message(hdev, (u32 *) &pkt, sizeof(pkt), in hl_fw_get_eeprom_data()
370 struct cpucp_packet pkt = {}; in hl_fw_cpucp_pci_counters_get() local
374 pkt.ctl = cpu_to_le32(CPUCP_PACKET_PCIE_THROUGHPUT_GET << in hl_fw_cpucp_pci_counters_get()
378 pkt.index = cpu_to_le32(cpucp_pcie_throughput_rx); in hl_fw_cpucp_pci_counters_get()
379 rc = hdev->asic_funcs->send_cpu_message(hdev, (u32 *) &pkt, sizeof(pkt), in hl_fw_cpucp_pci_counters_get()
388 memset(&pkt, 0, sizeof(pkt)); in hl_fw_cpucp_pci_counters_get()
389 pkt.ctl = cpu_to_le32(CPUCP_PACKET_PCIE_THROUGHPUT_GET << in hl_fw_cpucp_pci_counters_get()
393 pkt.index = cpu_to_le32(cpucp_pcie_throughput_tx); in hl_fw_cpucp_pci_counters_get()
394 rc = hdev->asic_funcs->send_cpu_message(hdev, (u32 *) &pkt, sizeof(pkt), in hl_fw_cpucp_pci_counters_get()
404 memset(&pkt, 0, sizeof(pkt)); in hl_fw_cpucp_pci_counters_get()
405 pkt.ctl = cpu_to_le32(CPUCP_PACKET_PCIE_REPLAY_CNT_GET << in hl_fw_cpucp_pci_counters_get()
408 rc = hdev->asic_funcs->send_cpu_message(hdev, (u32 *) &pkt, sizeof(pkt), in hl_fw_cpucp_pci_counters_get()
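hl_fw_cpucp_pci_counters_get() (source lines 370-408) reuses one stack packet for several queries, clearing it with memset() between them; pkt.index selects which counter firmware returns in its 64-bit reply, and the final CPUCP_PACKET_PCIE_REPLAY_CNT_GET query needs no index at all. A sketch of the first two queries, with a hypothetical helper name and assumed output parameters:

static int pcie_throughput_query(struct hl_device *hdev, u64 *rx, u64 *tx)
{
	struct cpucp_packet pkt = {};
	long result;
	int rc;

	/* RX throughput counter */
	pkt.ctl = cpu_to_le32(CPUCP_PACKET_PCIE_THROUGHPUT_GET <<
			      CPUCP_PKT_CTL_OPCODE_SHIFT);
	pkt.index = cpu_to_le32(cpucp_pcie_throughput_rx);
	rc = hdev->asic_funcs->send_cpu_message(hdev, (u32 *) &pkt,
						sizeof(pkt), 0, &result);
	if (rc)
		return rc;
	*rx = result;

	/* TX throughput counter: clear and rebuild the same packet */
	memset(&pkt, 0, sizeof(pkt));
	pkt.ctl = cpu_to_le32(CPUCP_PACKET_PCIE_THROUGHPUT_GET <<
			      CPUCP_PKT_CTL_OPCODE_SHIFT);
	pkt.index = cpu_to_le32(cpucp_pcie_throughput_tx);
	rc = hdev->asic_funcs->send_cpu_message(hdev, (u32 *) &pkt,
						sizeof(pkt), 0, &result);
	if (rc)
		return rc;
	*tx = result;

	return 0;
}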
422 struct cpucp_packet pkt = {}; in hl_fw_cpucp_total_energy_get() local
426 pkt.ctl = cpu_to_le32(CPUCP_PACKET_TOTAL_ENERGY_GET << in hl_fw_cpucp_total_energy_get()
429 rc = hdev->asic_funcs->send_cpu_message(hdev, (u32 *) &pkt, sizeof(pkt), in hl_fw_cpucp_total_energy_get()