Lines Matching refs:trans_pcie

201 struct iwl_trans_pcie *trans_pcie = in iwlagn_rx_queue_restock() local
204 struct iwl_rx_queue *rxq = &trans_pcie->rxq; in iwlagn_rx_queue_restock()
230 schedule_work(&trans_pcie->rx_replenish); in iwlagn_rx_queue_restock()
253 struct iwl_trans_pcie *trans_pcie = in iwlagn_rx_allocate() local
256 struct iwl_rx_queue *rxq = &trans_pcie->rxq; in iwlagn_rx_allocate()
342 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwlagn_rx_replenish() local
347 spin_lock_irqsave(&trans_pcie->irq_lock, flags); in iwlagn_rx_replenish()
349 spin_unlock_irqrestore(&trans_pcie->irq_lock, flags); in iwlagn_rx_replenish()
361 struct iwl_trans_pcie *trans_pcie = in iwl_bg_rx_replenish() local
364 iwlagn_rx_replenish(trans_pcie->trans); in iwl_bg_rx_replenish()
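
The four functions above form the RX refill chain: iwlagn_rx_queue_restock() can run from the interrupt path, so when the pool of ready buffers drops it only schedules trans_pcie->rx_replenish, and the work handler iwl_bg_rx_replenish() calls iwlagn_rx_replenish(), which republishes buffers under irq_lock. A minimal kernel-style sketch of that deferral pattern follows; the struct, field names and threshold are simplified assumptions, not the driver's definitions.

#include <linux/kernel.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>

#define DEMO_RX_LOW_WATERMARK	8	/* assumed threshold */

struct demo_rx_trans {
	spinlock_t irq_lock;
	int free_count;			/* buffers ready for the hardware ring */
	struct work_struct rx_replenish;
};

/* May be called in interrupt context: no allocation here, just kick a
 * work item once the pool of ready RX buffers runs low. */
static void demo_rx_queue_restock(struct demo_rx_trans *trans)
{
	/* ... move buffers from the free list onto the hardware ring ... */
	if (trans->free_count <= DEMO_RX_LOW_WATERMARK)
		schedule_work(&trans->rx_replenish);
}

/* Work handler (process context): allocate fresh buffers, then publish
 * them with irq_lock held, mirroring the spin_lock_irqsave() calls in
 * iwlagn_rx_replenish() above. */
static void demo_bg_rx_replenish(struct work_struct *work)
{
	struct demo_rx_trans *trans =
		container_of(work, struct demo_rx_trans, rx_replenish);
	unsigned long flags;

	/* ... allocate pages with GFP_KERNEL here ... */

	spin_lock_irqsave(&trans->irq_lock, flags);
	trans->free_count++;		/* hand the new buffer(s) back */
	spin_unlock_irqrestore(&trans->irq_lock, flags);
}
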
370 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_rx_handle_rxbuf() local
371 struct iwl_rx_queue *rxq = &trans_pcie->rxq; in iwl_rx_handle_rxbuf()
372 struct iwl_tx_queue *txq = &trans_pcie->txq[trans_pcie->cmd_queue]; in iwl_rx_handle_rxbuf()
411 for (i = 0; i < trans_pcie->n_no_reclaim_cmds; i++) { in iwl_rx_handle_rxbuf()
412 if (trans_pcie->no_reclaim_cmds[i] == pkt->hdr.cmd) { in iwl_rx_handle_rxbuf()
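
The loop over no_reclaim_cmds in iwl_rx_handle_rxbuf() exists because some firmware packets are unsolicited notifications with no host command behind them; reclaiming a command-queue entry for those would corrupt the queue. A hedged sketch of that check (array size and names are illustrative only):

#include <linux/types.h>

#define DEMO_MAX_NO_RECLAIM_CMDS	6	/* assumed list size */

struct demo_rx_ctx {
	u8 no_reclaim_cmds[DEMO_MAX_NO_RECLAIM_CMDS];
	int n_no_reclaim_cmds;
};

/* Returns false for notification-style cmd ids, i.e. packets that must
 * not trigger a command-queue reclaim. */
static bool demo_cmd_needs_reclaim(const struct demo_rx_ctx *ctx, u8 cmd)
{
	int i;

	for (i = 0; i < ctx->n_no_reclaim_cmds; i++)
		if (ctx->no_reclaim_cmds[i] == cmd)
			return false;
	return true;
}
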
488 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_rx_handle() local
489 struct iwl_rx_queue *rxq = &trans_pcie->rxq; in iwl_rx_handle()
616 struct iwl_trans_pcie *trans_pcie = in iwl_dump_nic_error_log() local
645 trans_pcie->isr_stats.err_code = table.error_id; in iwl_dump_nic_error_log()
960 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_irq_tasklet() local
961 struct isr_statistics *isr_stats = &trans_pcie->isr_stats; in iwl_irq_tasklet()
964 spin_lock_irqsave(&trans_pcie->irq_lock, flags); in iwl_irq_tasklet()
978 trans_pcie->inta | ~trans_pcie->inta_mask); in iwl_irq_tasklet()
980 inta = trans_pcie->inta; in iwl_irq_tasklet()
992 trans_pcie->inta = 0; in iwl_irq_tasklet()
994 spin_unlock_irqrestore(&trans_pcie->irq_lock, flags); in iwl_irq_tasklet()
1065 iwl_rx_queue_update_write_ptr(trans, &trans_pcie->rxq); in iwl_irq_tasklet()
1068 &trans_pcie->txq[i]); in iwl_irq_tasklet()
1131 trans_pcie->ucode_write_complete = true; in iwl_irq_tasklet()
1132 wake_up(&trans_pcie->ucode_write_waitq); in iwl_irq_tasklet()
1140 if (inta & ~(trans_pcie->inta_mask)) { in iwl_irq_tasklet()
1142 inta & ~trans_pcie->inta_mask); in iwl_irq_tasklet()
1147 if (test_bit(STATUS_INT_ENABLED, &trans_pcie->status)) in iwl_irq_tasklet()
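
iwl_irq_tasklet() is the bottom half of a split interrupt handler: the hard IRQ only accumulates cause bits into trans_pcie->inta, and the tasklet snapshots and clears that word under irq_lock before servicing RX, TX and firmware-load completion, then re-enables interrupts only if STATUS_INT_ENABLED is still set. A hedged sketch of the snapshot-and-handle shape, with simplified fields and a placeholder cause bit standing in for CSR_INT_BIT_FH_TX:

#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/wait.h>

#define DEMO_INT_FH_TX	(1U << 27)	/* placeholder for the FH_TX cause bit */

struct demo_irq_trans {
	spinlock_t irq_lock;
	u32 inta;			/* causes accumulated by the hard IRQ */
	u32 inta_mask;			/* causes actually enabled on the device */
	bool ucode_write_complete;
	wait_queue_head_t ucode_write_waitq;
};

/* Tasklet body (old-style: takes an unsigned long cookie). */
static void demo_irq_tasklet(unsigned long data)
{
	struct demo_irq_trans *trans = (void *)data;
	unsigned long flags;
	u32 inta;

	/* Snapshot and clear the pending causes atomically w.r.t. the ISR. */
	spin_lock_irqsave(&trans->irq_lock, flags);
	inta = trans->inta;
	trans->inta = 0;
	spin_unlock_irqrestore(&trans->irq_lock, flags);

	/* Firmware chunk DMA finished: wake the loader, as in the
	 * ucode_write_complete / ucode_write_waitq lines above. */
	if (inta & DEMO_INT_FH_TX) {
		trans->ucode_write_complete = true;
		wake_up(&trans->ucode_write_waitq);
	}

	/* Anything we never enabled is only reported, not handled. */
	if (inta & ~trans->inta_mask)
		pr_debug("unhandled interrupt causes 0x%08x\n",
			 inta & ~trans->inta_mask);

	/* ... RX and TX service, queue write-pointer updates, etc. ... */
}
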
1168 struct iwl_trans_pcie *trans_pcie = in iwl_free_isr_ict() local
1171 if (trans_pcie->ict_tbl) { in iwl_free_isr_ict()
1173 trans_pcie->ict_tbl, in iwl_free_isr_ict()
1174 trans_pcie->ict_tbl_dma); in iwl_free_isr_ict()
1175 trans_pcie->ict_tbl = NULL; in iwl_free_isr_ict()
1176 trans_pcie->ict_tbl_dma = 0; in iwl_free_isr_ict()
1188 struct iwl_trans_pcie *trans_pcie = in iwl_alloc_isr_ict() local
1191 trans_pcie->ict_tbl = in iwl_alloc_isr_ict()
1193 &trans_pcie->ict_tbl_dma, in iwl_alloc_isr_ict()
1195 if (!trans_pcie->ict_tbl) in iwl_alloc_isr_ict()
1199 if (WARN_ON(trans_pcie->ict_tbl_dma & (ICT_SIZE - 1))) { in iwl_alloc_isr_ict()
1205 (unsigned long long)trans_pcie->ict_tbl_dma); in iwl_alloc_isr_ict()
1207 IWL_DEBUG_ISR(trans, "ict vir addr %p\n", trans_pcie->ict_tbl); in iwl_alloc_isr_ict()
1210 memset(trans_pcie->ict_tbl, 0, ICT_SIZE); in iwl_alloc_isr_ict()
1211 trans_pcie->ict_index = 0; in iwl_alloc_isr_ict()
1214 trans_pcie->inta_mask |= CSR_INT_BIT_RX_PERIODIC; in iwl_alloc_isr_ict()
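
iwl_free_isr_ict() and iwl_alloc_isr_ict() above manage the ICT (interrupt cause table): a small DMA-coherent buffer the device writes causes into. Because the hardware is only handed the upper bits of the table's bus address, the buffer must be aligned to its own size, which is what the WARN_ON(ict_tbl_dma & (ICT_SIZE - 1)) line checks. A sketch of that allocate/align-check/free pattern; the 4 KiB size and all names are assumptions:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/string.h>
#include <linux/bug.h>

#define DEMO_ICT_SIZE	4096		/* assumed: one page of 32-bit slots */

struct demo_ict_tbl {
	struct device *dev;
	__le32 *ict_tbl;
	dma_addr_t ict_tbl_dma;
};

static int demo_alloc_isr_ict(struct demo_ict_tbl *ict)
{
	ict->ict_tbl = dma_alloc_coherent(ict->dev, DEMO_ICT_SIZE,
					  &ict->ict_tbl_dma, GFP_KERNEL);
	if (!ict->ict_tbl)
		return -ENOMEM;

	/* The device only sees the upper address bits, so the table must be
	 * self-aligned; bail out if the allocator did not give us that. */
	if (WARN_ON(ict->ict_tbl_dma & (DEMO_ICT_SIZE - 1))) {
		dma_free_coherent(ict->dev, DEMO_ICT_SIZE,
				  ict->ict_tbl, ict->ict_tbl_dma);
		ict->ict_tbl = NULL;
		return -EINVAL;
	}

	memset(ict->ict_tbl, 0, DEMO_ICT_SIZE);
	return 0;
}

static void demo_free_isr_ict(struct demo_ict_tbl *ict)
{
	if (ict->ict_tbl) {
		dma_free_coherent(ict->dev, DEMO_ICT_SIZE,
				  ict->ict_tbl, ict->ict_tbl_dma);
		ict->ict_tbl = NULL;
		ict->ict_tbl_dma = 0;
	}
}
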
1225 struct iwl_trans_pcie *trans_pcie = in iwl_reset_ict() local
1228 if (!trans_pcie->ict_tbl) in iwl_reset_ict()
1231 spin_lock_irqsave(&trans_pcie->irq_lock, flags); in iwl_reset_ict()
1234 memset(trans_pcie->ict_tbl, 0, ICT_SIZE); in iwl_reset_ict()
1236 val = trans_pcie->ict_tbl_dma >> ICT_SHIFT; in iwl_reset_ict()
1244 trans_pcie->use_ict = true; in iwl_reset_ict()
1245 trans_pcie->ict_index = 0; in iwl_reset_ict()
1246 iwl_write32(trans, CSR_INT, trans_pcie->inta_mask); in iwl_reset_ict()
1248 spin_unlock_irqrestore(&trans_pcie->irq_lock, flags); in iwl_reset_ict()
1254 struct iwl_trans_pcie *trans_pcie = in iwl_disable_ict() local
1259 spin_lock_irqsave(&trans_pcie->irq_lock, flags); in iwl_disable_ict()
1260 trans_pcie->use_ict = false; in iwl_disable_ict()
1261 spin_unlock_irqrestore(&trans_pcie->irq_lock, flags); in iwl_disable_ict()
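
iwl_reset_ict() and iwl_disable_ict() above arm and disarm the ICT path: reset zeroes the table, programs the device with the table's DMA address shifted by ICT_SHIFT, writes inta_mask to CSR_INT to acknowledge stale causes, and resets ict_index; disable simply clears use_ict under irq_lock so the ISR falls back to MMIO reads. The sketch below mirrors that flow with placeholder register offsets and bit names (the real driver uses its own CSR_* definitions):

#include <linux/io.h>
#include <linux/spinlock.h>
#include <linux/string.h>

#define DEMO_ICT_SHIFT		12		/* assumed: page-sized, page-aligned table */
#define DEMO_ICT_SIZE		(1 << DEMO_ICT_SHIFT)
#define DEMO_CSR_INT		0x008		/* placeholder offsets, not the  */
#define DEMO_CSR_DRAM_INT_TBL	0x0A0		/* driver's CSR_* constants       */
#define DEMO_DRAM_INT_TBL_ENABLE	(1U << 31)

struct demo_ict_ctl {
	void __iomem *csr_base;
	spinlock_t irq_lock;
	__le32 *ict_tbl;
	dma_addr_t ict_tbl_dma;
	u32 ict_index;
	u32 inta_mask;
	bool use_ict;
};

/* Point the device at a freshly zeroed table and start reading at slot 0. */
static void demo_reset_ict(struct demo_ict_ctl *st)
{
	unsigned long flags;
	u32 val;

	if (!st->ict_tbl)
		return;

	spin_lock_irqsave(&st->irq_lock, flags);
	memset(st->ict_tbl, 0, DEMO_ICT_SIZE);

	val = st->ict_tbl_dma >> DEMO_ICT_SHIFT;	/* hardware keeps only the upper bits */
	val |= DEMO_DRAM_INT_TBL_ENABLE;
	writel(val, st->csr_base + DEMO_CSR_DRAM_INT_TBL);

	st->use_ict = true;
	st->ict_index = 0;
	writel(st->inta_mask, st->csr_base + DEMO_CSR_INT);	/* ack stale causes */
	spin_unlock_irqrestore(&st->irq_lock, flags);
}

/* Disarm: the ISR will read the cause register directly again. */
static void demo_disable_ict(struct demo_ict_ctl *st)
{
	unsigned long flags;

	spin_lock_irqsave(&st->irq_lock, flags);
	st->use_ict = false;
	spin_unlock_irqrestore(&st->irq_lock, flags);
}
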
1267 struct iwl_trans_pcie *trans_pcie; in iwl_isr() local
1278 trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_isr()
1280 spin_lock_irqsave(&trans_pcie->irq_lock, flags); in iwl_isr()
1323 trans_pcie->inta |= inta; in iwl_isr()
1326 tasklet_schedule(&trans_pcie->irq_tasklet); in iwl_isr()
1327 else if (test_bit(STATUS_INT_ENABLED, &trans_pcie->status) && in iwl_isr()
1328 !trans_pcie->inta) in iwl_isr()
1332 spin_unlock_irqrestore(&trans_pcie->irq_lock, flags); in iwl_isr()
1338 if (test_bit(STATUS_INT_ENABLED, &trans_pcie->status) && in iwl_isr()
1339 !trans_pcie->inta) in iwl_isr()
1342 spin_unlock_irqrestore(&trans_pcie->irq_lock, flags); in iwl_isr()
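
iwl_isr() is the pre-ICT hard IRQ handler: under irq_lock it reads the pending cause register, ORs it into trans_pcie->inta, schedules the tasklet, and on the "nothing pending" path re-enables interrupts only if STATUS_INT_ENABLED is set and no causes are still queued. A simplified sketch of that top half, using a placeholder register offset for the MMIO read:

#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/io.h>

#define DEMO_CSR_INT	0x008		/* placeholder cause-register offset */

struct demo_isr_trans {
	void __iomem *csr_base;
	spinlock_t irq_lock;
	u32 inta;
	struct tasklet_struct irq_tasklet;
};

static irqreturn_t demo_isr(int irq, void *data)
{
	struct demo_isr_trans *trans = data;
	unsigned long flags;
	u32 inta;

	spin_lock_irqsave(&trans->irq_lock, flags);

	inta = readl(trans->csr_base + DEMO_CSR_INT);
	if (!inta) {
		/* shared line, not ours; the driver also re-enables
		 * interrupts here when nothing is left pending */
		spin_unlock_irqrestore(&trans->irq_lock, flags);
		return IRQ_NONE;
	}
	if (inta == 0xFFFFFFFF) {
		/* all-ones usually means the device fell off the bus */
		spin_unlock_irqrestore(&trans->irq_lock, flags);
		return IRQ_HANDLED;
	}

	trans->inta |= inta;			/* accumulate for the tasklet */
	tasklet_schedule(&trans->irq_tasklet);

	spin_unlock_irqrestore(&trans->irq_lock, flags);
	return IRQ_HANDLED;
}
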
1357 struct iwl_trans_pcie *trans_pcie; in iwl_isr_ict() local
1366 trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_isr_ict()
1371 if (!trans_pcie->use_ict) in iwl_isr_ict()
1376 spin_lock_irqsave(&trans_pcie->irq_lock, flags); in iwl_isr_ict()
1390 read = le32_to_cpu(trans_pcie->ict_tbl[trans_pcie->ict_index]); in iwl_isr_ict()
1391 trace_iwlwifi_dev_ict_read(trans->dev, trans_pcie->ict_index, read); in iwl_isr_ict()
1404 trans_pcie->ict_index, read); in iwl_isr_ict()
1405 trans_pcie->ict_tbl[trans_pcie->ict_index] = 0; in iwl_isr_ict()
1406 trans_pcie->ict_index = in iwl_isr_ict()
1407 iwl_queue_inc_wrap(trans_pcie->ict_index, ICT_COUNT); in iwl_isr_ict()
1409 read = le32_to_cpu(trans_pcie->ict_tbl[trans_pcie->ict_index]); in iwl_isr_ict()
1410 trace_iwlwifi_dev_ict_read(trans->dev, trans_pcie->ict_index, in iwl_isr_ict()
1432 inta &= trans_pcie->inta_mask; in iwl_isr_ict()
1433 trans_pcie->inta |= inta; in iwl_isr_ict()
1437 tasklet_schedule(&trans_pcie->irq_tasklet); in iwl_isr_ict()
1438 else if (test_bit(STATUS_INT_ENABLED, &trans_pcie->status) && in iwl_isr_ict()
1439 !trans_pcie->inta) { in iwl_isr_ict()
1447 spin_unlock_irqrestore(&trans_pcie->irq_lock, flags); in iwl_isr_ict()
1454 if (test_bit(STATUS_INT_ENABLED, &trans_pcie->status) && in iwl_isr_ict()
1455 !trans_pcie->inta) in iwl_isr_ict()
1458 spin_unlock_irqrestore(&trans_pcie->irq_lock, flags); in iwl_isr_ict()
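
iwl_isr_ict() replaces the MMIO read with the ICT table: the device DMAs each cause into the next slot, so the handler drains non-zero slots starting at ict_index, zeroes them, wraps the index over ICT_COUNT (the iwl_queue_inc_wrap() calls above), masks the result with inta_mask and hands it to the tasklet. The sketch below keeps that loop shape but skips the driver's re-expansion of the compressed ICT value into CSR_INT format; all names and the table size are illustrative:

#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/spinlock.h>

#define DEMO_ICT_COUNT	1024		/* assumed: 4 KiB table of 32-bit slots */

struct demo_ict_isr {
	spinlock_t irq_lock;
	bool use_ict;
	__le32 *ict_tbl;		/* DMA-coherent, written by the device */
	u32 ict_index;			/* next slot to read */
	u32 inta;
	u32 inta_mask;
	struct tasklet_struct irq_tasklet;
};

static irqreturn_t demo_isr_ict(int irq, void *data)
{
	struct demo_ict_isr *trans = data;
	unsigned long flags;
	u32 val = 0, read;

	if (!trans->use_ict)
		return IRQ_NONE;	/* the driver falls back to its MMIO ISR here */

	spin_lock_irqsave(&trans->irq_lock, flags);

	read = le32_to_cpu(trans->ict_tbl[trans->ict_index]);
	if (!read) {
		spin_unlock_irqrestore(&trans->irq_lock, flags);
		return IRQ_NONE;	/* nothing pending for us */
	}

	/* Drain every slot the device filled since the last interrupt,
	 * clearing each one and wrapping the read index over the table. */
	do {
		val |= read;
		trans->ict_tbl[trans->ict_index] = 0;
		trans->ict_index = (trans->ict_index + 1) % DEMO_ICT_COUNT;
		read = le32_to_cpu(trans->ict_tbl[trans->ict_index]);
	} while (read);

	/* Keep only the causes we enabled, then defer to the tasklet. */
	trans->inta |= val & trans->inta_mask;
	tasklet_schedule(&trans->irq_tasklet);

	spin_unlock_irqrestore(&trans->irq_lock, flags);
	return IRQ_HANDLED;
}
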