Lines matching references to wq in the HiNIC work-queue layer (drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c), grouped by the macro or function they belong to.
/* total queue size in bytes */
#define WQ_SIZE(wq)             ((wq)->q_depth * (wq)->wqebb_size)
#define WQ_BASE_VADDR(wqs, wq) \
                ((void *)((wqs)->page_vaddr[(wq)->page_idx]) \
                        + (wq)->block_idx * WQ_BLOCK_SIZE)

#define WQ_BASE_PADDR(wqs, wq) \
                ((wqs)->page_paddr[(wq)->page_idx] \
                        + (wq)->block_idx * WQ_BLOCK_SIZE)

#define WQ_BASE_ADDR(wqs, wq) \
                ((void *)((wqs)->shadow_page_vaddr[(wq)->page_idx]) \
                        + (wq)->block_idx * WQ_BLOCK_SIZE)
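The WQS carves each of its DMA pages into fixed-size blocks, and a queue locates its block by the pair (page_idx, block_idx). Below is a minimal user-space sketch of the same arithmetic; the sizes, the two-page pool, and struct pool are invented for illustration (the driver's real block size is its WQ_BLOCK_SIZE constant):

#include <stdio.h>
#include <stdlib.h>

#define BLOCK_SIZE      4096
#define BLOCKS_PER_PAGE 4
#define POOL_PAGE_SIZE  (BLOCKS_PER_PAGE * BLOCK_SIZE)

struct pool {
        void *page_vaddr[2];    /* one entry per allocated pool page */
};

/* Same shape as WQ_BASE_VADDR(): page base plus block offset. */
static void *block_vaddr(struct pool *p, int page_idx, int block_idx)
{
        return (char *)p->page_vaddr[page_idx] + block_idx * BLOCK_SIZE;
}

int main(void)
{
        struct pool p = {
                .page_vaddr = { malloc(POOL_PAGE_SIZE), malloc(POOL_PAGE_SIZE) },
        };

        printf("page 1, block 2 -> %p\n", block_vaddr(&p, 1, 2));

        free(p.page_vaddr[0]);
        free(p.page_vaddr[1]);
        return 0;
}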
/* all cmdq WQs live in a single page, so only block_idx varies */
#define CMDQ_BASE_VADDR(cmdq_pages, wq) \
                ((void *)((cmdq_pages)->page_vaddr) \
                        + (wq)->block_idx * CMDQ_BLOCK_SIZE)

#define CMDQ_BASE_PADDR(cmdq_pages, wq) \
                ((cmdq_pages)->page_paddr \
                        + (wq)->block_idx * CMDQ_BLOCK_SIZE)

#define CMDQ_BASE_ADDR(cmdq_pages, wq) \
                ((void *)((cmdq_pages)->shadow_page_vaddr) \
                        + (wq)->block_idx * CMDQ_BLOCK_SIZE)
#define WQ_PAGE_ADDR(wq, idx) \
                ((wq)->shadow_block_vaddr[WQE_PAGE_NUM(wq, idx)])

#define MASKED_WQE_IDX(wq, idx) ((idx) & (wq)->mask)

#define WQE_IN_RANGE(wqe, start, end) \
                (((unsigned long)(wqe) >= (unsigned long)(start)) && \
                 ((unsigned long)(wqe) < (unsigned long)(end)))

#define WQE_SHADOW_PAGE(wq, wqe) \
                (((unsigned long)(wqe) - (unsigned long)(wq)->shadow_wqe) \
                        / (wq)->max_wqe_size)
static inline int WQE_PAGE_OFF(struct hinic_wq *wq, u16 idx)
{
        /* byte offset of the WQEBB inside its queue page */
        return (((idx) & ((wq)->num_wqebbs_per_page - 1))
                << (wq)->wqebb_size_shift);
}

static inline int WQE_PAGE_NUM(struct hinic_wq *wq, u16 idx)
{
        /* queue page holding the WQEBB; num_q_pages is a power of two */
        return (((idx) >> ((wq)->wqebbs_per_page_shift))
                & ((wq)->num_q_pages - 1));
}
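Because the WQEBB size, the WQEBBs-per-page count, and the page count are all powers of two, the index-to-location math reduces to shifts and masks. A stand-alone sketch of the same decomposition, with a made-up geometry (64-byte WQEBBs, 64 per page, 4 pages):

#include <stdio.h>
#include <stdint.h>

#define WQEBB_SHIFT     6       /* log2(64-byte WQEBB) */
#define PER_PAGE_SHIFT  6       /* log2(64 WQEBBs per page) */
#define NUM_PAGES       4       /* power of two */

int main(void)
{
        uint16_t idx = 200;     /* logical WQEBB index in the ring */

        unsigned page = (idx >> PER_PAGE_SHIFT) & (NUM_PAGES - 1);
        unsigned off  = (idx & ((1u << PER_PAGE_SHIFT) - 1)) << WQEBB_SHIFT;

        /* idx 200 -> page 3, byte offset (200 % 64) * 64 = 512 */
        printf("idx %u -> page %u, offset %u bytes\n", idx, page, off);
        return 0;
}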
static int alloc_wqes_shadow(struct hinic_wq *wq)
{
        struct hinic_hwif *hwif = wq->hwif;
        struct pci_dev *pdev = hwif->pdev;

        /* one shadow buffer per queue page, sized for the largest WQE */
        wq->shadow_wqe = devm_kcalloc(&pdev->dev, wq->num_q_pages,
                                      wq->max_wqe_size, GFP_KERNEL);
        if (!wq->shadow_wqe)
                return -ENOMEM;

        wq->shadow_idx = devm_kcalloc(&pdev->dev, wq->num_q_pages,
                                      sizeof(*wq->shadow_idx), GFP_KERNEL);
        if (!wq->shadow_idx)
                goto err_shadow_idx;

        return 0;

err_shadow_idx:
        devm_kfree(&pdev->dev, wq->shadow_wqe);
        return -ENOMEM;
}
static void free_wqes_shadow(struct hinic_wq *wq)
{
        struct hinic_hwif *hwif = wq->hwif;
        struct pci_dev *pdev = hwif->pdev;

        devm_kfree(&pdev->dev, wq->shadow_idx);
        devm_kfree(&pdev->dev, wq->shadow_wqe);
}
static void free_wq_pages(struct hinic_wq *wq, struct hinic_hwif *hwif,
                          int num_q_pages)
{
        struct pci_dev *pdev = hwif->pdev;
        int i;

        for (i = 0; i < num_q_pages; i++) {
                void **vaddr = &wq->shadow_block_vaddr[i];
                u64 *paddr = &wq->block_vaddr[i];
                dma_addr_t dma_addr;

                /* the block table entry was stored big-endian for the HW */
                dma_addr = (dma_addr_t)be64_to_cpu(*paddr);
                dma_free_coherent(&pdev->dev, wq->wq_page_size, *vaddr,
                                  dma_addr);
        }

        free_wqes_shadow(wq);
}
static int alloc_wq_pages(struct hinic_wq *wq, struct hinic_hwif *hwif,
                          int max_pages)
{
        struct pci_dev *pdev = hwif->pdev;
        int i, err, num_q_pages;

        /* round the queue size up to a whole number of queue pages;
         * the driver also validates that the result is a power of two
         */
        num_q_pages = ALIGN(WQ_SIZE(wq), wq->wq_page_size) / wq->wq_page_size;
        if (num_q_pages > max_pages)
                return -EINVAL;

        wq->num_q_pages = num_q_pages;

        err = alloc_wqes_shadow(wq);
        if (err)
                return err;

        for (i = 0; i < num_q_pages; i++) {
                void **vaddr = &wq->shadow_block_vaddr[i];
                u64 *paddr = &wq->block_vaddr[i];
                dma_addr_t dma_addr;

                *vaddr = dma_alloc_coherent(&pdev->dev, wq->wq_page_size,
                                            &dma_addr, GFP_KERNEL);
                if (!*vaddr)
                        goto err_alloc_wq_pages;

                /* the HW reads the block table in big-endian format */
                *paddr = cpu_to_be64(dma_addr);
        }

        return 0;

err_alloc_wq_pages:
        free_wq_pages(wq, hwif, i);
        return -ENOMEM;
}
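The failure path uses the usual partial-unwind idiom: when page i fails to allocate, free_wq_pages() is told to release only the i pages that succeeded. A stand-alone user-space sketch of the same idiom (all names and sizes here are invented):

#include <stdlib.h>

#define NPAGES  8
#define PAGE_SZ 4096

/* Allocate all pages or none: on failure, free only what succeeded. */
static int alloc_pages_all_or_nothing(void *pages[NPAGES])
{
        int i;

        for (i = 0; i < NPAGES; i++) {
                pages[i] = malloc(PAGE_SZ);
                if (!pages[i])
                        goto err_unwind;
        }
        return 0;

err_unwind:
        while (--i >= 0)        /* i pages were allocated before the failure */
                free(pages[i]);
        return -1;
}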
int hinic_wq_allocate(struct hinic_wqs *wqs, struct hinic_wq *wq,
                      u16 wqebb_size, u32 wq_page_size, u16 q_depth,
                      u16 max_wqe_size)
{
        struct hinic_hwif *hwif = wqs->hwif;
        u16 num_wqebbs_per_page;
        u16 wqebb_size_shift;
        int err;

        /* the driver first validates that wqebb_size, q_depth and the
         * resulting wqebbs-per-page count are all powers of two
         */
        wqebb_size_shift = ilog2(wqebb_size);
        num_wqebbs_per_page = ALIGN(wq_page_size, wqebb_size)
                                >> wqebb_size_shift;

        wq->hwif = hwif;

        err = wqs_next_block(wqs, &wq->page_idx, &wq->block_idx);
        if (err)
                return err;

        wq->wqebb_size = wqebb_size;
        wq->wq_page_size = wq_page_size;
        wq->q_depth = q_depth;
        wq->max_wqe_size = max_wqe_size;
        wq->num_wqebbs_per_page = num_wqebbs_per_page;
        wq->wqebbs_per_page_shift = ilog2(num_wqebbs_per_page);
        wq->wqebb_size_shift = wqebb_size_shift;
        wq->block_vaddr = WQ_BASE_VADDR(wqs, wq);
        wq->shadow_block_vaddr = WQ_BASE_ADDR(wqs, wq);
        wq->block_paddr = WQ_BASE_PADDR(wqs, wq);

        err = alloc_wq_pages(wq, wqs->hwif, WQ_MAX_PAGES);
        if (err)
                goto err_alloc_wq_pages;

        /* the queue starts empty: all q_depth WQEBBs are free */
        atomic_set(&wq->cons_idx, 0);
        atomic_set(&wq->prod_idx, 0);
        atomic_set(&wq->delta, q_depth);
        wq->mask = q_depth - 1;         /* q_depth is a power of two */

        return 0;

err_alloc_wq_pages:
        wqs_return_block(wqs, wq->page_idx, wq->block_idx);
        return err;
}
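A hedged usage sketch of allocating one WQ from a pre-initialized WQS pool. The numeric parameters (64-byte WQEBBs, 4 KB queue pages, depth 256, max WQE size 256) and the helper name are illustrative, not values mandated by the hardware:

/* Hypothetical caller; assumes wqs was set up elsewhere. */
static int example_setup_wq(struct hinic_wqs *wqs, struct hinic_wq *wq)
{
        int err;

        err = hinic_wq_allocate(wqs, wq, 64, 4096, 256, 256);
        if (err)
                return err;

        /* ... post WQEs with hinic_get_wqe()/hinic_write_wqe() ... */

        hinic_wq_free(wqs, wq);
        return 0;
}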
void hinic_wq_free(struct hinic_wqs *wqs, struct hinic_wq *wq)
{
        free_wq_pages(wq, wqs->hwif, wq->num_q_pages);

        wqs_return_block(wqs, wq->page_idx, wq->block_idx);
}
int hinic_wqs_cmdq_alloc(struct hinic_cmdq_pages *cmdq_pages,
                         struct hinic_wq *wq, struct hinic_hwif *hwif,
                         int cmdq_blocks, u16 wqebb_size, u32 wq_page_size,
                         u16 q_depth, u16 max_wqe_size)
{
        u16 num_wqebbs_per_page_shift, num_wqebbs_per_page;
        u16 wqebb_size_shift;
        int i, j, err;

        /* ... geometry checks and shared cmdq page allocation ... */

        for (i = 0; i < cmdq_blocks; i++) {
                wq[i].hwif = hwif;
                wq[i].page_idx = 0;     /* all cmdq blocks share one page */
                wq[i].block_idx = i;

                wq[i].wqebb_size = wqebb_size;
                wq[i].wq_page_size = wq_page_size;
                wq[i].q_depth = q_depth;
                wq[i].max_wqe_size = max_wqe_size;
                wq[i].num_wqebbs_per_page = num_wqebbs_per_page;
                wq[i].wqebbs_per_page_shift = num_wqebbs_per_page_shift;
                wq[i].wqebb_size_shift = wqebb_size_shift;
                wq[i].block_vaddr = CMDQ_BASE_VADDR(cmdq_pages, &wq[i]);
                wq[i].shadow_block_vaddr = CMDQ_BASE_ADDR(cmdq_pages, &wq[i]);
                wq[i].block_paddr = CMDQ_BASE_PADDR(cmdq_pages, &wq[i]);

                err = alloc_wq_pages(&wq[i], cmdq_pages->hwif,
                                     CMDQ_WQ_MAX_PAGES);
                if (err)
                        goto err_cmdq_block;

                atomic_set(&wq[i].cons_idx, 0);
                atomic_set(&wq[i].prod_idx, 0);
                atomic_set(&wq[i].delta, q_depth);
                wq[i].mask = q_depth - 1;
        }

        return 0;

err_cmdq_block:
        /* unwind only the blocks that were fully set up */
        for (j = 0; j < i; j++)
                free_wq_pages(&wq[j], cmdq_pages->hwif, wq[j].num_q_pages);

        /* ... free the shared cmdq page ... */
        return err;
}
void hinic_wqs_cmdq_free(struct hinic_cmdq_pages *cmdq_pages,
                         struct hinic_wq *wq, int cmdq_blocks)
{
        int i;

        for (i = 0; i < cmdq_blocks; i++)
                free_wq_pages(&wq[i], cmdq_pages->hwif, wq[i].num_q_pages);

        /* ... the shared cmdq page is freed afterwards ... */
}
static void copy_wqe_to_shadow(struct hinic_wq *wq, void *shadow_addr,
                               int num_wqebbs, u16 idx)
{
        void *wqebb_addr;
        int i;

        /* gather the page-crossing WQE into one linear shadow buffer */
        for (i = 0; i < num_wqebbs; i++, idx++) {
                idx = MASKED_WQE_IDX(wq, idx);
                wqebb_addr = WQ_PAGE_ADDR(wq, idx) +
                             WQE_PAGE_OFF(wq, idx);

                memcpy(shadow_addr, wqebb_addr, wq->wqebb_size);

                shadow_addr += wq->wqebb_size;
        }
}
static void copy_wqe_from_shadow(struct hinic_wq *wq, void *shadow_addr,
                                 int num_wqebbs, u16 idx)
{
        void *wqebb_addr;
        int i;

        /* scatter the shadow buffer back into the real queue pages */
        for (i = 0; i < num_wqebbs; i++, idx++) {
                idx = MASKED_WQE_IDX(wq, idx);
                wqebb_addr = WQ_PAGE_ADDR(wq, idx) +
                             WQE_PAGE_OFF(wq, idx);

                memcpy(wqebb_addr, shadow_addr, wq->wqebb_size);
                shadow_addr += wq->wqebb_size;
        }
}
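The shadow copies exist because a multi-WQEBB WQE can straddle a queue-page boundary, where the WQEBBs are contiguous logically but not in memory. A toy stand-alone sketch of the same gather step (ring geometry and names are invented):

#include <stdio.h>
#include <string.h>

/* Toy ring: 2 "pages" of 4 entries each, entry size 4 bytes. */
#define ENTRY_SZ  4
#define PER_PAGE  4
#define NUM_PAGES 2
#define MASK      (PER_PAGE * NUM_PAGES - 1)

static char pages[NUM_PAGES][PER_PAGE * ENTRY_SZ];

/* Gather num entries starting at idx into a linear shadow buffer,
 * the way copy_wqe_to_shadow() linearizes a page-crossing WQE. */
static void gather(char *shadow, int num, unsigned idx)
{
        int i;

        for (i = 0; i < num; i++, idx++) {
                idx &= MASK;
                memcpy(shadow + i * ENTRY_SZ,
                       &pages[idx / PER_PAGE][(idx % PER_PAGE) * ENTRY_SZ],
                       ENTRY_SZ);
        }
}

int main(void)
{
        char shadow[2 * ENTRY_SZ];

        memcpy(&pages[0][3 * ENTRY_SZ], "AAAA", ENTRY_SZ); /* idx 3: end of page 0 */
        memcpy(&pages[1][0],            "BBBB", ENTRY_SZ); /* idx 4: start of page 1 */

        gather(shadow, 2, 3);           /* entry spans the page boundary */
        printf("%.8s\n", shadow);       /* prints AAAABBBB */
        return 0;
}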
struct hinic_hw_wqe *hinic_get_wqe(struct hinic_wq *wq, unsigned int wqe_size,
                                   u16 *prod_idx)
{
        int curr_pg, end_pg, num_wqebbs;
        u16 curr_prod_idx, end_prod_idx;

        *prod_idx = MASKED_WQE_IDX(wq, atomic_read(&wq->prod_idx));

        /* round the WQE size up to a whole number of WQEBBs */
        num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) >> wq->wqebb_size_shift;

        /* reserve space; back out if the queue does not have room */
        if (atomic_sub_return(num_wqebbs, &wq->delta) <= 0) {
                atomic_add(num_wqebbs, &wq->delta);
                return ERR_PTR(-EBUSY);
        }

        end_prod_idx = atomic_add_return(num_wqebbs, &wq->prod_idx);

        end_prod_idx = MASKED_WQE_IDX(wq, end_prod_idx);
        curr_prod_idx = end_prod_idx - num_wqebbs;
        curr_prod_idx = MASKED_WQE_IDX(wq, curr_prod_idx);

        /* end idx, not start idx */
        end_prod_idx = MASKED_WQE_IDX(wq, end_prod_idx - 1);

        curr_pg = WQE_PAGE_NUM(wq, curr_prod_idx);
        end_pg = WQE_PAGE_NUM(wq, end_prod_idx);

        *prod_idx = curr_prod_idx;

        /* WQE crosses a page boundary: hand out the linear shadow copy */
        if (curr_pg != end_pg) {
                void *shadow_addr = &wq->shadow_wqe[curr_pg * wq->max_wqe_size];

                copy_wqe_to_shadow(wq, shadow_addr, num_wqebbs, *prod_idx);

                wq->shadow_idx[curr_pg] = *prod_idx;
                return shadow_addr;
        }

        return WQ_PAGE_ADDR(wq, *prod_idx) + WQE_PAGE_OFF(wq, *prod_idx);
}
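The free-space accounting is a single atomic counter: delta starts at q_depth, producers reserve by subtracting, and the <= 0 check keeps at least one WQEBB unused so a full ring stays distinguishable from an empty one. A stand-alone sketch of just that reservation step, with C11 atomics standing in for the kernel's atomic_t (the shadow logic is not modeled):

#include <stdatomic.h>
#include <stdio.h>

#define Q_DEPTH 8       /* power of two, as the driver requires */

static atomic_int delta = Q_DEPTH;      /* free WQEBBs */
static atomic_int prod_idx;

/* Try to reserve n entries; return the start index, or -1 if full.
 * Mirrors the delta/prod_idx dance in hinic_get_wqe(). */
static int reserve(int n)
{
        if (atomic_fetch_sub(&delta, n) - n <= 0) {
                atomic_fetch_add(&delta, n);    /* back out, queue full */
                return -1;
        }
        return atomic_fetch_add(&prod_idx, n) & (Q_DEPTH - 1);
}

int main(void)
{
        printf("start %d\n", reserve(2));       /* 0 */
        printf("start %d\n", reserve(2));       /* 2 */
        printf("start %d\n", reserve(4));       /* -1: would leave delta <= 0 */
        return 0;
}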
void hinic_return_wqe(struct hinic_wq *wq, unsigned int wqe_size)
{
        int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size;

        atomic_sub(num_wqebbs, &wq->prod_idx);

        atomic_add(num_wqebbs, &wq->delta);
}
void hinic_put_wqe(struct hinic_wq *wq, unsigned int wqe_size)
{
        int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size)
                        >> wq->wqebb_size_shift;

        atomic_add(num_wqebbs, &wq->cons_idx);

        atomic_add(num_wqebbs, &wq->delta);
}
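hinic_put_wqe() is the consumer-side mirror of the reservation above: cons_idx advances and the WQEBBs return to delta, preserving delta + (prod_idx - cons_idx) == q_depth across produce/consume pairs. A short continuation of the toy ring sketch from hinic_get_wqe(), reusing that sketch's delta and Q_DEPTH definitions:

static atomic_int cons_idx;

/* Release n entries back to the pool, as hinic_put_wqe() does. */
static void release(int n)
{
        atomic_fetch_add(&cons_idx, n);
        atomic_fetch_add(&delta, n);
        /* invariant: delta + (prod_idx - cons_idx) == Q_DEPTH */
}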
struct hinic_hw_wqe *hinic_read_wqe(struct hinic_wq *wq, unsigned int wqe_size,
                                    u16 *cons_idx)
{
        int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size)
                        >> wq->wqebb_size_shift;
        u16 curr_cons_idx, end_cons_idx;
        int curr_pg, end_pg;

        /* fewer than num_wqebbs entries are outstanding: nothing to read */
        if ((atomic_read(&wq->delta) + num_wqebbs) > wq->q_depth)
                return ERR_PTR(-EBUSY);

        curr_cons_idx = atomic_read(&wq->cons_idx);

        curr_cons_idx = MASKED_WQE_IDX(wq, curr_cons_idx);
        end_cons_idx = MASKED_WQE_IDX(wq, curr_cons_idx + num_wqebbs - 1);

        curr_pg = WQE_PAGE_NUM(wq, curr_cons_idx);
        end_pg = WQE_PAGE_NUM(wq, end_cons_idx);

        *cons_idx = curr_cons_idx;

        /* WQE crosses a page boundary: read it through the shadow copy */
        if (curr_pg != end_pg) {
                void *shadow_addr = &wq->shadow_wqe[curr_pg * wq->max_wqe_size];

                copy_wqe_to_shadow(wq, shadow_addr, num_wqebbs, *cons_idx);
                return shadow_addr;
        }

        return WQ_PAGE_ADDR(wq, *cons_idx) + WQE_PAGE_OFF(wq, *cons_idx);
}
struct hinic_hw_wqe *hinic_read_wqe_direct(struct hinic_wq *wq, u16 cons_idx)
{
        return WQ_PAGE_ADDR(wq, cons_idx) + WQE_PAGE_OFF(wq, cons_idx);
}
static inline bool wqe_shadow(struct hinic_wq *wq, struct hinic_hw_wqe *wqe)
{
        size_t wqe_shadow_size = wq->num_q_pages * wq->max_wqe_size;

        return WQE_IN_RANGE(wqe, wq->shadow_wqe,
                            &wq->shadow_wqe[wqe_shadow_size]);
}
void hinic_write_wqe(struct hinic_wq *wq, struct hinic_hw_wqe *wqe,
                     unsigned int wqe_size)
{
        int curr_pg, num_wqebbs;
        void *shadow_addr;
        u16 prod_idx;

        /* a WQE handed out from the shadow area must be copied back to
         * the real queue pages before the hardware can see it
         */
        if (wqe_shadow(wq, wqe)) {
                curr_pg = WQE_SHADOW_PAGE(wq, wqe);

                prod_idx = wq->shadow_idx[curr_pg];
                num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size;
                shadow_addr = &wq->shadow_wqe[curr_pg * wq->max_wqe_size];

                copy_wqe_from_shadow(wq, shadow_addr, num_wqebbs, prod_idx);
        }
}
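Putting the pieces together, a typical post path is get, fill, write back, and (on completion) put. A hedged sketch of a caller, assuming the ERR_PTR convention shown in hinic_get_wqe() above; the helper name is invented:

/* Hypothetical post routine: reserve a WQE, fill it, and write it back
 * so a shadow copy (if one was handed out) reaches the queue pages. */
static int example_post_wqe(struct hinic_wq *wq, unsigned int wqe_size)
{
        struct hinic_hw_wqe *wqe;
        u16 prod_idx;

        wqe = hinic_get_wqe(wq, wqe_size, &prod_idx);
        if (IS_ERR(wqe))
                return PTR_ERR(wqe);

        /* ... fill the WQE fields here ... */

        hinic_write_wqe(wq, wqe, wqe_size);     /* no-op unless wqe is a shadow */
        return 0;
}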