Lines matching refs: rba (references to the rba member, the struct iwl_rb_allocator embedded in struct iwl_trans_pcie as trans_pcie->rba)

516 struct iwl_rb_allocator *rba = &trans_pcie->rba; in iwl_pcie_rx_allocator() local
518 int pending = atomic_read(&rba->req_pending); in iwl_pcie_rx_allocator()
523 spin_lock_bh(&rba->lock); in iwl_pcie_rx_allocator()
525 list_replace_init(&rba->rbd_empty, &local_empty); in iwl_pcie_rx_allocator()
526 spin_unlock_bh(&rba->lock); in iwl_pcie_rx_allocator()
575 atomic_dec(&rba->req_pending); in iwl_pcie_rx_allocator()
579 pending = atomic_read(&rba->req_pending); in iwl_pcie_rx_allocator()
586 spin_lock_bh(&rba->lock); in iwl_pcie_rx_allocator()
588 list_splice_tail(&local_allocated, &rba->rbd_allocated); in iwl_pcie_rx_allocator()
590 list_splice_tail_init(&rba->rbd_empty, &local_empty); in iwl_pcie_rx_allocator()
591 spin_unlock_bh(&rba->lock); in iwl_pcie_rx_allocator()
593 atomic_inc(&rba->req_ready); in iwl_pcie_rx_allocator()
597 spin_lock_bh(&rba->lock); in iwl_pcie_rx_allocator()
599 list_splice_tail(&local_empty, &rba->rbd_empty); in iwl_pcie_rx_allocator()
600 spin_unlock_bh(&rba->lock); in iwl_pcie_rx_allocator()
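
Read together, the iwl_pcie_rx_allocator() references above trace the producer half of a two-list allocator: check the pending-request counter, detach the shared empty list under the spinlock, do the slow page allocation with the lock dropped, then publish the filled descriptors, hand back any leftovers, and signal completion through req_ready. Below is a minimal kernel-style sketch of that pattern only; every name in it (my_buf, my_allocator, my_allocator_fill) is hypothetical, and the real worker additionally loops over all pending batches and maps the pages for DMA.

    #include <linux/atomic.h>
    #include <linux/gfp.h>
    #include <linux/list.h>
    #include <linux/spinlock.h>
    #include <linux/workqueue.h>

    struct my_buf {
        struct list_head list;
        struct page *page;
    };

    struct my_allocator {
        spinlock_t lock;                /* protects the two lists below */
        struct list_head empty;         /* descriptors waiting for pages */
        struct list_head allocated;     /* descriptors ready to be claimed */
        atomic_t req_pending;           /* batches requested by the RX path */
        atomic_t req_ready;             /* batches completed by the worker */
        struct workqueue_struct *wq;    /* runs the fill work item */
        struct work_struct rx_alloc;    /* embedded work item */
    };

    /* Producer side: runs from a workqueue, never in the RX hot path. */
    static void my_allocator_fill(struct my_allocator *alloc, int batch)
    {
        LIST_HEAD(local_empty);
        LIST_HEAD(local_allocated);
        struct my_buf *buf, *tmp;
        int done = 0;

        if (!atomic_read(&alloc->req_pending))
            return;

        /* Detach the shared empty list so allocation can run unlocked. */
        spin_lock_bh(&alloc->lock);
        list_replace_init(&alloc->empty, &local_empty);
        spin_unlock_bh(&alloc->lock);

        list_for_each_entry_safe(buf, tmp, &local_empty, list) {
            buf->page = alloc_page(GFP_KERNEL);
            if (!buf->page)
                break;
            list_move_tail(&buf->list, &local_allocated);
            if (++done == batch)
                break;
        }

        /* Publish the filled descriptors and hand back whatever is left. */
        spin_lock_bh(&alloc->lock);
        list_splice_tail(&local_allocated, &alloc->allocated);
        list_splice_tail(&local_empty, &alloc->empty);
        spin_unlock_bh(&alloc->lock);

        if (done == batch) {
            atomic_dec(&alloc->req_pending);
            atomic_inc(&alloc->req_ready);  /* a full batch is now claimable */
        }
    }
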
617 struct iwl_rb_allocator *rba = &trans_pcie->rba; in iwl_pcie_rx_allocator_get() local
630 if (atomic_dec_if_positive(&rba->req_ready) < 0) in iwl_pcie_rx_allocator_get()
633 spin_lock(&rba->lock); in iwl_pcie_rx_allocator_get()
637 list_first_entry(&rba->rbd_allocated, in iwl_pcie_rx_allocator_get()
642 spin_unlock(&rba->lock); in iwl_pcie_rx_allocator_get()
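
The iwl_pcie_rx_allocator_get() references are the matching consumer step: atomic_dec_if_positive() acts as a lock-free try-claim on a completed batch, and only when it succeeds is the allocated list touched under the lock. A sketch of that step, continuing the hypothetical my_allocator above (also needs <linux/errno.h>); how the claimed buffers are handed to the RX queue is driver detail left out here.

    /* Consumer side: claim one completed batch, or back off without locking. */
    static int my_allocator_get(struct my_allocator *alloc,
                                struct list_head *out, int batch)
    {
        struct my_buf *buf;
        int i;

        if (atomic_dec_if_positive(&alloc->req_ready) < 0)
            return -ENOMEM;

        /* req_ready is only raised once a full batch sits on ->allocated,
         * so taking exactly 'batch' entries here is safe. */
        spin_lock(&alloc->lock);
        for (i = 0; i < batch; i++) {
            buf = list_first_entry(&alloc->allocated, struct my_buf, list);
            list_move_tail(&buf->list, out);
        }
        spin_unlock(&alloc->lock);

        return 0;
    }
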
653 container_of(rba_p, struct iwl_trans_pcie, rba); in iwl_pcie_rx_allocator_work()
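
The lone iwl_pcie_rx_allocator_work() reference is the standard workqueue idiom: the callback receives only a pointer to the embedded work item, and container_of() walks back out to the enclosing structures, here ending at the transport that embeds the allocator as its rba member. A sketch, again with hypothetical names (my_trans, my_allocator_work) and an arbitrary batch size:

    #include <linux/kernel.h>

    /* The enclosing transport: the allocator is embedded, not pointed to,
     * mirroring how the driver embeds rba in its transport structure. */
    struct my_trans {
        /* ... */
        struct my_allocator rba;
    };

    static void my_allocator_work(struct work_struct *work)
    {
        /* From the embedded work item back to the allocator ... */
        struct my_allocator *rba =
            container_of(work, struct my_allocator, rx_alloc);
        /* ... and from the embedded allocator back to the transport. */
        struct my_trans *trans = container_of(rba, struct my_trans, rba);

        (void)trans;    /* the transport is unused in this sketch */
        my_allocator_fill(rba, 8 /* arbitrary batch size for this sketch */);
    }
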
760 struct iwl_rb_allocator *rba = &trans_pcie->rba; in iwl_pcie_rx_alloc() local
784 spin_lock_init(&rba->lock); in iwl_pcie_rx_alloc()
1060 struct iwl_rb_allocator *rba = &trans_pcie->rba; in _iwl_pcie_rx_init() local
1070 cancel_work_sync(&rba->rx_alloc); in _iwl_pcie_rx_init()
1072 spin_lock_bh(&rba->lock); in _iwl_pcie_rx_init()
1073 atomic_set(&rba->req_pending, 0); in _iwl_pcie_rx_init()
1074 atomic_set(&rba->req_ready, 0); in _iwl_pcie_rx_init()
1075 INIT_LIST_HEAD(&rba->rbd_allocated); in _iwl_pcie_rx_init()
1076 INIT_LIST_HEAD(&rba->rbd_empty); in _iwl_pcie_rx_init()
1077 spin_unlock_bh(&rba->lock); in _iwl_pcie_rx_init()
1130 list_add(&rxb->list, &rba->rbd_empty); in _iwl_pcie_rx_init()
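
The _iwl_pcie_rx_init() references show the reset sequence: make sure the worker is idle with cancel_work_sync(), then rebuild the allocator state from scratch under the lock, and finally seed the empty list with every unused buffer descriptor. Continuing the sketch:

    /* (Re)initialisation: quiesce the worker, then start from a clean state. */
    static void my_allocator_reset(struct my_allocator *alloc)
    {
        cancel_work_sync(&alloc->rx_alloc);

        spin_lock_bh(&alloc->lock);
        atomic_set(&alloc->req_pending, 0);
        atomic_set(&alloc->req_ready, 0);
        INIT_LIST_HEAD(&alloc->allocated);
        INIT_LIST_HEAD(&alloc->empty);
        spin_unlock_bh(&alloc->lock);
    }

Unused descriptors are then queued for the worker one by one, as at line 1130: list_add(&buf->list, &alloc->empty);
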
1180 struct iwl_rb_allocator *rba = &trans_pcie->rba; in iwl_pcie_rx_free() local
1195 cancel_work_sync(&rba->rx_alloc); in iwl_pcie_rx_free()
1227 struct iwl_rb_allocator *rba) in iwl_pcie_rx_move_to_allocator() argument
1229 spin_lock(&rba->lock); in iwl_pcie_rx_move_to_allocator()
1230 list_splice_tail_init(&rxq->rx_used, &rba->rbd_empty); in iwl_pcie_rx_move_to_allocator()
1231 spin_unlock(&rba->lock); in iwl_pcie_rx_move_to_allocator()
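
iwl_pcie_rx_move_to_allocator() is the hand-off in the other direction: a queue's used buffers are spliced onto the allocator's empty list in one step under the allocator lock. A sketch, with a plain list head standing in for the driver's rxq->rx_used:

    /* Hand a queue's used buffers back to the allocator in one splice. */
    static void my_move_to_allocator(struct list_head *rx_used,
                                     struct my_allocator *alloc)
    {
        spin_lock(&alloc->lock);
        list_splice_tail_init(rx_used, &alloc->empty);
        spin_unlock(&alloc->lock);
    }
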
1245 struct iwl_rb_allocator *rba = &trans_pcie->rba; in iwl_pcie_rx_reuse_rbd() local
1265 iwl_pcie_rx_move_to_allocator(rxq, rba); in iwl_pcie_rx_reuse_rbd()
1267 atomic_inc(&rba->req_pending); in iwl_pcie_rx_reuse_rbd()
1268 queue_work(rba->alloc_wq, &rba->rx_alloc); in iwl_pcie_rx_reuse_rbd()
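
iwl_pcie_rx_reuse_rbd() then turns that hand-off into a new allocation request: once enough used buffers have accumulated, move them to the allocator, bump req_pending and kick the work item. Completing the sketch (alloc->wq is assumed to have been created elsewhere, e.g. with alloc_workqueue()):

    /* Ask the worker for a fresh batch once enough used buffers pile up. */
    static void my_reuse_rbd(struct list_head *rx_used, struct my_allocator *alloc)
    {
        my_move_to_allocator(rx_used, alloc);
        atomic_inc(&alloc->req_pending);
        queue_work(alloc->wq, &alloc->rx_alloc);
    }

The remaining iwl_pcie_rx_handle() references below use the same move-to-allocator helper from the RX path and read req_pending, apparently scaled by the per-request batch size, to gauge how many buffers are still held by the allocator.
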
1504 struct iwl_rb_allocator *rba = &trans_pcie->rba; in iwl_pcie_rx_handle() local
1508 atomic_read(&trans_pcie->rba.req_pending) * in iwl_pcie_rx_handle()
1514 iwl_pcie_rx_move_to_allocator(rxq, rba); in iwl_pcie_rx_handle()
1561 iwl_pcie_rx_move_to_allocator(rxq, rba); in iwl_pcie_rx_handle()