Lines matching refs:eq — cross-reference hits for the identifier `eq` in the Linux HiNIC event-queue code (drivers/net/ethernet/huawei/hinic/hinic_hw_eqs.c). Each entry gives the source line number, the line itself, and how `eq` is used there: as a macro or function argument, as a local variable, or inside the named function.

27 #define GET_EQ_NUM_PAGES(eq, pg_size)           \  argument
28 (ALIGN((eq)->q_len * (eq)->elem_size, pg_size) / (pg_size))
30 #define GET_EQ_NUM_ELEMS_IN_PG(eq, pg_size) ((pg_size) / (eq)->elem_size) argument
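The two macros above derive the queue geometry: GET_EQ_NUM_PAGES rounds the total queue size (q_len * elem_size) up to a whole number of pages, and GET_EQ_NUM_ELEMS_IN_PG gives how many elements fit in one page. A minimal userspace sketch of the same arithmetic, with align_up() standing in for the kernel's ALIGN() and purely illustrative example values:

/*
 * Standalone sketch of the queue-geometry math behind GET_EQ_NUM_PAGES and
 * GET_EQ_NUM_ELEMS_IN_PG; align_up() stands in for the kernel's ALIGN().
 */
#include <stdio.h>

static unsigned int align_up(unsigned int len, unsigned int boundary)
{
        return (len + boundary - 1) / boundary * boundary;
}

int main(void)
{
        unsigned int q_len = 64;      /* elements in the queue (example value) */
        unsigned int elem_size = 64;  /* bytes per element (example value) */
        unsigned int pg_size = 4096;  /* queue page size (example value) */

        unsigned int num_pages = align_up(q_len * elem_size, pg_size) / pg_size;
        unsigned int elems_per_pg = pg_size / elem_size;

        /* 64 * 64 = 4096 bytes -> 1 page, 64 elements per page */
        printf("num_pages=%u elems_per_pg=%u\n", num_pages, elems_per_pg);
        return 0;
}
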
32 #define EQ_CONS_IDX_REG_ADDR(eq) (((eq)->type == HINIC_AEQ) ? \ argument
33 HINIC_CSR_AEQ_CONS_IDX_ADDR((eq)->q_id) : \
34 HINIC_CSR_CEQ_CONS_IDX_ADDR((eq)->q_id))
36 #define EQ_PROD_IDX_REG_ADDR(eq) (((eq)->type == HINIC_AEQ) ? \ argument
37 HINIC_CSR_AEQ_PROD_IDX_ADDR((eq)->q_id) : \
38 HINIC_CSR_CEQ_PROD_IDX_ADDR((eq)->q_id))
40 #define EQ_HI_PHYS_ADDR_REG(eq, pg_num) (((eq)->type == HINIC_AEQ) ? \ argument
41 HINIC_CSR_AEQ_HI_PHYS_ADDR_REG((eq)->q_id, pg_num) : \
42 HINIC_CSR_CEQ_HI_PHYS_ADDR_REG((eq)->q_id, pg_num))
44 #define EQ_LO_PHYS_ADDR_REG(eq, pg_num) (((eq)->type == HINIC_AEQ) ? \ argument
45 HINIC_CSR_AEQ_LO_PHYS_ADDR_REG((eq)->q_id, pg_num) : \
46 HINIC_CSR_CEQ_LO_PHYS_ADDR_REG((eq)->q_id, pg_num))
48 #define GET_EQ_ELEMENT(eq, idx) \ argument
49 ((eq)->virt_addr[(idx) / (eq)->num_elem_in_pg] + \
50 (((idx) & ((eq)->num_elem_in_pg - 1)) * (eq)->elem_size))
52 #define GET_AEQ_ELEM(eq, idx) ((struct hinic_aeq_elem *) \ argument
53 GET_EQ_ELEMENT(eq, idx))
55 #define GET_CEQ_ELEM(eq, idx) ((u32 *) \ argument
56 GET_EQ_ELEMENT(eq, idx))
58 #define GET_CURR_AEQ_ELEM(eq) GET_AEQ_ELEM(eq, (eq)->cons_idx) argument
60 #define GET_CURR_CEQ_ELEM(eq) GET_CEQ_ELEM(eq, (eq)->cons_idx) argument
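GET_EQ_ELEMENT turns a queue-wide element index into an address: idx / num_elem_in_pg selects the page, and idx & (num_elem_in_pg - 1) gives the offset within it, which only works because init_eq() rejects geometries where elements-per-page is not a power of two. A standalone sketch of that addressing (struct toy_eq and the values below are illustrative, not the driver's types):

/*
 * Sketch of the GET_EQ_ELEMENT addressing: a queue spread over several
 * pages, indexed by page number plus a power-of-two offset mask.
 */
#include <stdio.h>
#include <stdlib.h>

struct toy_eq {
        void **virt_addr;            /* one virtual address per page */
        unsigned int num_elem_in_pg; /* must be a power of two */
        unsigned int elem_size;
};

static void *toy_eq_element(struct toy_eq *eq, unsigned int idx)
{
        unsigned int pg = idx / eq->num_elem_in_pg;
        unsigned int off = idx & (eq->num_elem_in_pg - 1);

        return (char *)eq->virt_addr[pg] + (size_t)off * eq->elem_size;
}

int main(void)
{
        void *pages[2] = { malloc(4096), malloc(4096) };
        struct toy_eq eq = {
                .virt_addr = pages, .num_elem_in_pg = 64, .elem_size = 64,
        };

        /* element 70 lives in page 1 (70 / 64) at offset 6 (70 & 63) */
        printf("elem 70 at %p, page 1 base %p\n",
               toy_eq_element(&eq, 70), pages[1]);
        free(pages[0]);
        free(pages[1]);
        return 0;
}
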
63 #define EQ_SET_HW_PAGE_SIZE_VAL(eq) (ilog2(PAGE_IN_4K((eq)->page_size))) argument
65 #define ELEMENT_SIZE_IN_32B(eq) (((eq)->elem_size) >> 5) argument
66 #define EQ_SET_HW_ELEM_SIZE_VAL(eq) (ilog2(ELEMENT_SIZE_IN_32B(eq))) argument
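The hardware does not take these sizes in bytes: EQ_SET_HW_PAGE_SIZE_VAL encodes the page size as log2 of the number of 4 KB units, and EQ_SET_HW_ELEM_SIZE_VAL encodes the element size as log2 of the number of 32-byte units. A small worked example, with ilog2_u32() standing in for the kernel's ilog2():

/*
 * Worked example of the CTRL1 size encodings: page size as log2(4 KB units),
 * element size as log2(32-byte units).
 */
#include <stdio.h>

static unsigned int ilog2_u32(unsigned int v)
{
        unsigned int r = 0;

        while (v >>= 1)
                r++;
        return r;
}

int main(void)
{
        unsigned int page_size = 4096, elem_size = 64;  /* example values */

        /* 4096 bytes = 1 * 4 KB -> ilog2(1) = 0 */
        printf("hw page size val = %u\n", ilog2_u32(page_size >> 12));
        /* 64 bytes = 2 * 32 B  -> ilog2(2) = 1 */
        printf("hw elem size val = %u\n", ilog2_u32(elem_size >> 5));
        return 0;
}
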
79 #define aeq_to_aeqs(eq) \ argument
80 container_of((eq) - (eq)->q_id, struct hinic_aeqs, aeq[0])
82 #define ceq_to_ceqs(eq) \ argument
83 container_of((eq) - (eq)->q_id, struct hinic_ceqs, ceq[0])
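aeq_to_aeqs and ceq_to_ceqs recover the containing hinic_aeqs/hinic_ceqs structure from a pointer to one queue: stepping the pointer back by q_id lands on element 0 of the embedded array, and container_of() then subtracts the offset of that array within the parent. A standalone sketch of the same trick with simplified stand-in types and a simplified container_of():

/*
 * Sketch of the aeq_to_aeqs() trick: from a pointer to aeq[q_id], step back
 * to aeq[0], then use container_of() to reach the enclosing structure.
 */
#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

struct toy_eq {
        int q_id;
};

struct toy_aeqs {
        int num_aeqs;
        struct toy_eq aeq[4];
};

#define aeq_to_aeqs(eq) \
        container_of((eq) - (eq)->q_id, struct toy_aeqs, aeq[0])

int main(void)
{
        struct toy_aeqs aeqs = { .num_aeqs = 4 };
        int i;

        for (i = 0; i < 4; i++)
                aeqs.aeq[i].q_id = i;

        /* any queue can find its parent container without a back-pointer */
        printf("parent of aeq[2]: %p, expected %p\n",
               (void *)aeq_to_aeqs(&aeqs.aeq[2]), (void *)&aeqs);
        return 0;
}
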
193 static void eq_update_ci(struct hinic_eq *eq, u32 arm_state) in eq_update_ci() argument
195 u32 val, addr = EQ_CONS_IDX_REG_ADDR(eq); in eq_update_ci()
198 val = hinic_hwif_read_reg(eq->hwif, addr); in eq_update_ci()
205 val |= HINIC_EQ_CI_SET(eq->cons_idx, IDX) | in eq_update_ci()
206 HINIC_EQ_CI_SET(eq->wrapped, WRAPPED) | in eq_update_ci()
211 hinic_hwif_write_reg(eq->hwif, addr, val); in eq_update_ci()
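eq_update_ci() is a read-modify-write of the consumer index register: read the current value, clear the index/wrapped/arm fields, then write back the software consumer index, the current wrap flag and the requested arm state. A hedged sketch of that bit packing; the shifts and masks below are invented for illustration, the real layout lives in the HINIC_EQ_CI_* macros in the driver headers:

/*
 * Sketch of the eq_update_ci() read-modify-write against a fake register.
 */
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define CI_IDX_SHIFT     0
#define CI_IDX_MASK      0x1FFFFFu
#define CI_WRAPPED_SHIFT 27
#define CI_ARMED_SHIFT   31

static uint32_t fake_reg = 0x80000000u;  /* pretend current register value */

static uint32_t read_reg(void)
{
        return fake_reg;
}

static void write_reg(uint32_t val)
{
        fake_reg = val;
}

static void update_ci(uint32_t cons_idx, int wrapped, int armed)
{
        uint32_t val = read_reg();

        /* clear the fields we own, keep any other bits untouched */
        val &= ~((CI_IDX_MASK << CI_IDX_SHIFT) |
                 (1u << CI_WRAPPED_SHIFT) | (1u << CI_ARMED_SHIFT));

        val |= ((cons_idx & CI_IDX_MASK) << CI_IDX_SHIFT) |
               ((uint32_t)wrapped << CI_WRAPPED_SHIFT) |
               ((uint32_t)armed << CI_ARMED_SHIFT);

        write_reg(val);
}

int main(void)
{
        update_ci(42, 1, 1);  /* consumer index 42, wrapped once, re-armed */
        printf("reg = 0x%08" PRIx32 "\n", fake_reg);
        return 0;
}
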
218 static void aeq_irq_handler(struct hinic_eq *eq) in aeq_irq_handler() argument
220 struct hinic_aeqs *aeqs = aeq_to_aeqs(eq); in aeq_irq_handler()
230 for (i = 0; i < eq->q_len; i++) { in aeq_irq_handler()
231 aeqe_curr = GET_CURR_AEQ_ELEM(eq); in aeq_irq_handler()
237 if (HINIC_EQ_ELEM_DESC_GET(aeqe_desc, WRAPPED) == eq->wrapped) in aeq_irq_handler()
268 eq->cons_idx++; in aeq_irq_handler()
270 if (eq->cons_idx == eq->q_len) { in aeq_irq_handler()
271 eq->cons_idx = 0; in aeq_irq_handler()
272 eq->wrapped = !eq->wrapped; in aeq_irq_handler()
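The AEQ handler walks at most q_len entries per interrupt. Hardware toggles a WRAPPED bit each time it writes an element, so an entry is only new while that bit differs from the driver's eq->wrapped flag; the driver flips its flag whenever cons_idx wraps back to 0 (the CEQ handler below follows the same pattern). A simplified, self-contained sketch of that handshake with stand-in types:

/*
 * Sketch of the wrapped-bit handshake in aeq_irq_handler(): an entry is new
 * only while its hardware-written wrapped bit differs from the driver's
 * eq->wrapped flag, which flips every time cons_idx wraps around.
 */
#include <stdbool.h>
#include <stdio.h>

#define Q_LEN 4

struct toy_elem {
        bool wrapped;   /* written by "hardware" with each new event */
        int data;
};

struct toy_eq {
        struct toy_elem elem[Q_LEN];
        unsigned int cons_idx;
        bool wrapped;   /* software copy, toggled on wrap-around */
};

static void poll_eq(struct toy_eq *eq)
{
        unsigned int i;

        for (i = 0; i < Q_LEN; i++) {
                struct toy_elem *cur = &eq->elem[eq->cons_idx];

                /* equal bits mean the entry has not been (re)written yet */
                if (cur->wrapped == eq->wrapped)
                        break;

                printf("consumed idx=%u data=%d\n", eq->cons_idx, cur->data);

                if (++eq->cons_idx == Q_LEN) {
                        eq->cons_idx = 0;
                        eq->wrapped = !eq->wrapped;
                }
        }
}

int main(void)
{
        struct toy_eq eq = { 0 };

        /* "hardware" posts two entries with the wrapped bit toggled to 1 */
        eq.elem[0] = (struct toy_elem){ .wrapped = true, .data = 10 };
        eq.elem[1] = (struct toy_elem){ .wrapped = true, .data = 11 };

        poll_eq(&eq);   /* consumes entries 0 and 1, stops at stale entry 2 */
        return 0;
}
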
314 static void ceq_irq_handler(struct hinic_eq *eq) in ceq_irq_handler() argument
316 struct hinic_ceqs *ceqs = ceq_to_ceqs(eq); in ceq_irq_handler()
320 for (i = 0; i < eq->q_len; i++) { in ceq_irq_handler()
321 ceqe = *(GET_CURR_CEQ_ELEM(eq)); in ceq_irq_handler()
327 if (HINIC_EQ_ELEM_DESC_GET(ceqe, WRAPPED) == eq->wrapped) in ceq_irq_handler()
332 eq->cons_idx++; in ceq_irq_handler()
334 if (eq->cons_idx == eq->q_len) { in ceq_irq_handler()
335 eq->cons_idx = 0; in ceq_irq_handler()
336 eq->wrapped = !eq->wrapped; in ceq_irq_handler()
347 struct hinic_eq *eq = data; in eq_irq_handler() local
349 if (eq->type == HINIC_AEQ) in eq_irq_handler()
350 aeq_irq_handler(eq); in eq_irq_handler()
351 else if (eq->type == HINIC_CEQ) in eq_irq_handler()
352 ceq_irq_handler(eq); in eq_irq_handler()
354 eq_update_ci(eq, EQ_ARMED); in eq_irq_handler()
421 static u32 get_ctrl0_val(struct hinic_eq *eq, u32 addr) in get_ctrl0_val() argument
423 struct msix_entry *msix_entry = &eq->msix_entry; in get_ctrl0_val()
424 enum hinic_eq_type type = eq->type; in get_ctrl0_val()
429 addr = HINIC_CSR_AEQ_CTRL_0_ADDR(eq->q_id); in get_ctrl0_val()
431 val = hinic_hwif_read_reg(eq->hwif, addr); in get_ctrl0_val()
440 HINIC_AEQ_CTRL_0_SET(HINIC_HWIF_PCI_INTF(eq->hwif), in get_ctrl0_val()
447 addr = HINIC_CSR_CEQ_CTRL_0_ADDR(eq->q_id); in get_ctrl0_val()
449 val = hinic_hwif_read_reg(eq->hwif, addr); in get_ctrl0_val()
460 HINIC_CEQ_CTRL_0_SET(HINIC_HWIF_PCI_INTF(eq->hwif), in get_ctrl0_val()
469 static void set_ctrl0(struct hinic_eq *eq) in set_ctrl0() argument
473 if (eq->type == HINIC_AEQ) in set_ctrl0()
474 addr = HINIC_CSR_AEQ_CTRL_0_ADDR(eq->q_id); in set_ctrl0()
476 addr = HINIC_CSR_CEQ_CTRL_0_ADDR(eq->q_id); in set_ctrl0()
478 val = get_ctrl0_val(eq, addr); in set_ctrl0()
480 hinic_hwif_write_reg(eq->hwif, addr, val); in set_ctrl0()
483 static u32 get_ctrl1_val(struct hinic_eq *eq, u32 addr) in get_ctrl1_val() argument
486 enum hinic_eq_type type = eq->type; in get_ctrl1_val()
490 addr = HINIC_CSR_AEQ_CTRL_1_ADDR(eq->q_id); in get_ctrl1_val()
492 page_size_val = EQ_SET_HW_PAGE_SIZE_VAL(eq); in get_ctrl1_val()
493 elem_size = EQ_SET_HW_ELEM_SIZE_VAL(eq); in get_ctrl1_val()
495 val = hinic_hwif_read_reg(eq->hwif, addr); in get_ctrl1_val()
501 ctrl1 = HINIC_AEQ_CTRL_1_SET(eq->q_len, LEN) | in get_ctrl1_val()
508 addr = HINIC_CSR_CEQ_CTRL_1_ADDR(eq->q_id); in get_ctrl1_val()
510 page_size_val = EQ_SET_HW_PAGE_SIZE_VAL(eq); in get_ctrl1_val()
512 val = hinic_hwif_read_reg(eq->hwif, addr); in get_ctrl1_val()
517 ctrl1 = HINIC_CEQ_CTRL_1_SET(eq->q_len, LEN) | in get_ctrl1_val()
525 static void set_ctrl1(struct hinic_eq *eq) in set_ctrl1() argument
529 if (eq->type == HINIC_AEQ) in set_ctrl1()
530 addr = HINIC_CSR_AEQ_CTRL_1_ADDR(eq->q_id); in set_ctrl1()
532 addr = HINIC_CSR_CEQ_CTRL_1_ADDR(eq->q_id); in set_ctrl1()
534 val = get_ctrl1_val(eq, addr); in set_ctrl1()
536 hinic_hwif_write_reg(eq->hwif, addr, val); in set_ctrl1()
539 static int set_ceq_ctrl_reg(struct hinic_eq *eq) in set_ceq_ctrl_reg() argument
542 struct hinic_hwdev *hwdev = eq->hwdev; in set_ceq_ctrl_reg()
551 addr = HINIC_CSR_CEQ_CTRL_0_ADDR(eq->q_id); in set_ceq_ctrl_reg()
552 ceq_ctrl.ctrl0 = get_ctrl0_val(eq, addr); in set_ceq_ctrl_reg()
553 addr = HINIC_CSR_CEQ_CTRL_1_ADDR(eq->q_id); in set_ceq_ctrl_reg()
554 ceq_ctrl.ctrl1 = get_ctrl1_val(eq, addr); in set_ceq_ctrl_reg()
557 ceq_ctrl.q_id = eq->q_id; in set_ceq_ctrl_reg()
566 eq->q_id, err, ceq_ctrl.status, out_size); in set_ceq_ctrl_reg()
577 static int set_eq_ctrls(struct hinic_eq *eq) in set_eq_ctrls() argument
579 if (HINIC_IS_VF(eq->hwif) && eq->type == HINIC_CEQ) in set_eq_ctrls()
580 return set_ceq_ctrl_reg(eq); in set_eq_ctrls()
582 set_ctrl0(eq); in set_eq_ctrls()
583 set_ctrl1(eq); in set_eq_ctrls()
592 static void aeq_elements_init(struct hinic_eq *eq, u32 init_val) in aeq_elements_init() argument
597 for (i = 0; i < eq->q_len; i++) { in aeq_elements_init()
598 aeqe = GET_AEQ_ELEM(eq, i); in aeq_elements_init()
610 static void ceq_elements_init(struct hinic_eq *eq, u32 init_val) in ceq_elements_init() argument
615 for (i = 0; i < eq->q_len; i++) { in ceq_elements_init()
616 ceqe = GET_CEQ_ELEM(eq, i); in ceq_elements_init()
629 static int alloc_eq_pages(struct hinic_eq *eq) in alloc_eq_pages() argument
631 struct hinic_hwif *hwif = eq->hwif; in alloc_eq_pages()
636 eq->dma_addr = devm_kcalloc(&pdev->dev, eq->num_pages, in alloc_eq_pages()
637 sizeof(*eq->dma_addr), GFP_KERNEL); in alloc_eq_pages()
638 if (!eq->dma_addr) in alloc_eq_pages()
641 eq->virt_addr = devm_kcalloc(&pdev->dev, eq->num_pages, in alloc_eq_pages()
642 sizeof(*eq->virt_addr), GFP_KERNEL); in alloc_eq_pages()
643 if (!eq->virt_addr) { in alloc_eq_pages()
648 for (pg = 0; pg < eq->num_pages; pg++) { in alloc_eq_pages()
649 eq->virt_addr[pg] = dma_alloc_coherent(&pdev->dev, in alloc_eq_pages()
650 eq->page_size, in alloc_eq_pages()
651 &eq->dma_addr[pg], in alloc_eq_pages()
653 if (!eq->virt_addr[pg]) { in alloc_eq_pages()
658 addr = EQ_HI_PHYS_ADDR_REG(eq, pg); in alloc_eq_pages()
659 val = upper_32_bits(eq->dma_addr[pg]); in alloc_eq_pages()
663 addr = EQ_LO_PHYS_ADDR_REG(eq, pg); in alloc_eq_pages()
664 val = lower_32_bits(eq->dma_addr[pg]); in alloc_eq_pages()
669 init_val = HINIC_EQ_ELEM_DESC_SET(eq->wrapped, WRAPPED); in alloc_eq_pages()
671 if (eq->type == HINIC_AEQ) in alloc_eq_pages()
672 aeq_elements_init(eq, init_val); in alloc_eq_pages()
673 else if (eq->type == HINIC_CEQ) in alloc_eq_pages()
674 ceq_elements_init(eq, init_val); in alloc_eq_pages()
680 dma_free_coherent(&pdev->dev, eq->page_size, in alloc_eq_pages()
681 eq->virt_addr[pg], in alloc_eq_pages()
682 eq->dma_addr[pg]); in alloc_eq_pages()
684 devm_kfree(&pdev->dev, eq->virt_addr); in alloc_eq_pages()
687 devm_kfree(&pdev->dev, eq->dma_addr); in alloc_eq_pages()
695 static void free_eq_pages(struct hinic_eq *eq) in free_eq_pages() argument
697 struct hinic_hwif *hwif = eq->hwif; in free_eq_pages()
701 for (pg = 0; pg < eq->num_pages; pg++) in free_eq_pages()
702 dma_free_coherent(&pdev->dev, eq->page_size, in free_eq_pages()
703 eq->virt_addr[pg], in free_eq_pages()
704 eq->dma_addr[pg]); in free_eq_pages()
706 devm_kfree(&pdev->dev, eq->virt_addr); in free_eq_pages()
707 devm_kfree(&pdev->dev, eq->dma_addr); in free_eq_pages()
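alloc_eq_pages() allocates each queue page as its own DMA-coherent buffer, programs the page's physical address into the per-page HI/LO address registers, and unwinds the pages already allocated if a later allocation fails; free_eq_pages() mirrors the loop. A simplified kernel-style sketch of that pattern (struct toy_eq and write_page_addr() are illustrative; the real error path also releases the devm-allocated address arrays):

/*
 * Simplified kernel-style sketch of the alloc_eq_pages() pattern: one
 * DMA-coherent buffer per queue page, unwinding earlier pages on failure.
 */
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>

struct toy_eq {
        struct device *dev;
        u32 page_size;
        int num_pages;
        dma_addr_t *dma_addr;
        void **virt_addr;
};

/* stand-in for the EQ_HI_PHYS_ADDR_REG/EQ_LO_PHYS_ADDR_REG writes */
static void write_page_addr(struct toy_eq *eq, int pg)
{
}

static int toy_alloc_eq_pages(struct toy_eq *eq)
{
        int pg, err;

        for (pg = 0; pg < eq->num_pages; pg++) {
                eq->virt_addr[pg] = dma_alloc_coherent(eq->dev, eq->page_size,
                                                       &eq->dma_addr[pg],
                                                       GFP_KERNEL);
                if (!eq->virt_addr[pg]) {
                        err = -ENOMEM;
                        goto err_dma_alloc;
                }

                write_page_addr(eq, pg);    /* tell the HW where the page is */
        }

        return 0;

err_dma_alloc:
        while (--pg >= 0)
                dma_free_coherent(eq->dev, eq->page_size,
                                  eq->virt_addr[pg], eq->dma_addr[pg]);
        return err;
}
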
722 static int init_eq(struct hinic_eq *eq, struct hinic_hwif *hwif, in init_eq() argument
729 eq->hwif = hwif; in init_eq()
730 eq->type = type; in init_eq()
731 eq->q_id = q_id; in init_eq()
732 eq->q_len = q_len; in init_eq()
733 eq->page_size = page_size; in init_eq()
736 hinic_hwif_write_reg(eq->hwif, EQ_CONS_IDX_REG_ADDR(eq), 0); in init_eq()
737 hinic_hwif_write_reg(eq->hwif, EQ_PROD_IDX_REG_ADDR(eq), 0); in init_eq()
739 eq->cons_idx = 0; in init_eq()
740 eq->wrapped = 0; in init_eq()
743 eq->elem_size = HINIC_AEQE_SIZE; in init_eq()
745 eq->elem_size = HINIC_CEQE_SIZE; in init_eq()
751 eq->num_pages = GET_EQ_NUM_PAGES(eq, page_size); in init_eq()
752 eq->num_elem_in_pg = GET_EQ_NUM_ELEMS_IN_PG(eq, page_size); in init_eq()
754 eq->msix_entry = entry; in init_eq()
756 if (eq->num_elem_in_pg & (eq->num_elem_in_pg - 1)) { in init_eq()
761 if (eq->num_pages > EQ_MAX_PAGES) { in init_eq()
766 err = set_eq_ctrls(eq); in init_eq()
772 eq_update_ci(eq, EQ_ARMED); in init_eq()
774 err = alloc_eq_pages(eq); in init_eq()
781 struct hinic_eq_work *aeq_work = &eq->aeq_work; in init_eq()
785 tasklet_setup(&eq->ceq_tasklet, ceq_tasklet); in init_eq()
789 hinic_msix_attr_set(eq->hwif, eq->msix_entry.entry, in init_eq()
797 snprintf(eq->irq_name, sizeof(eq->irq_name), "hinic_aeq%d@pci:%s", eq->q_id, in init_eq()
799 err = request_irq(entry.vector, aeq_interrupt, 0, eq->irq_name, eq); in init_eq()
801 snprintf(eq->irq_name, sizeof(eq->irq_name), "hinic_ceq%d@pci:%s", eq->q_id, in init_eq()
803 err = request_irq(entry.vector, ceq_interrupt, 0, eq->irq_name, eq); in init_eq()
814 free_eq_pages(eq); in init_eq()
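Before touching hardware, init_eq() derives num_pages and num_elem_in_pg from the requested geometry and rejects configurations where elements-per-page is not a power of two (GET_EQ_ELEMENT relies on the `& (num_elem_in_pg - 1)` mask) or where the page count exceeds EQ_MAX_PAGES, presumably because only a fixed number of HI/LO address register pairs exist per queue. A small sketch of those two checks, with EQ_MAX_PAGES_SKETCH as an illustrative stand-in for the driver's limit:

/*
 * Sketch of the two geometry checks in init_eq(): elements per page must be
 * a power of two (so GET_EQ_ELEMENT can mask instead of taking a modulo),
 * and the page count must stay within the per-queue address registers.
 */
#include <stdio.h>

#define EQ_MAX_PAGES_SKETCH 8

static int check_eq_geometry(unsigned int num_pages,
                             unsigned int num_elem_in_pg)
{
        /* n & (n - 1) is zero only when n is a power of two */
        if (num_elem_in_pg & (num_elem_in_pg - 1))
                return -1;

        if (num_pages > EQ_MAX_PAGES_SKETCH)
                return -1;

        return 0;
}

int main(void)
{
        printf("64 elems/pg, 4 pages: %s\n",
               check_eq_geometry(4, 64) ? "rejected" : "ok");
        printf("48 elems/pg, 4 pages: %s\n",
               check_eq_geometry(4, 48) ? "rejected" : "ok");
        return 0;
}
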
822 static void remove_eq(struct hinic_eq *eq) in remove_eq() argument
824 hinic_set_msix_state(eq->hwif, eq->msix_entry.entry, in remove_eq()
826 free_irq(eq->msix_entry.vector, eq); in remove_eq()
828 if (eq->type == HINIC_AEQ) { in remove_eq()
829 struct hinic_eq_work *aeq_work = &eq->aeq_work; in remove_eq()
833 hinic_hwif_write_reg(eq->hwif, in remove_eq()
834 HINIC_CSR_AEQ_CTRL_1_ADDR(eq->q_id), 0); in remove_eq()
835 } else if (eq->type == HINIC_CEQ) { in remove_eq()
836 tasklet_kill(&eq->ceq_tasklet); in remove_eq()
838 hinic_hwif_write_reg(eq->hwif, in remove_eq()
839 HINIC_CSR_CEQ_CTRL_1_ADDR(eq->q_id), 0); in remove_eq()
843 eq->cons_idx = hinic_hwif_read_reg(eq->hwif, EQ_PROD_IDX_REG_ADDR(eq)); in remove_eq()
844 eq_update_ci(eq, EQ_NOT_ARMED); in remove_eq()
846 free_eq_pages(eq); in remove_eq()
961 struct hinic_eq *eq = NULL; in hinic_dump_ceq_info() local
966 eq = &hwdev->func_to_io.ceqs.ceq[q_id]; in hinic_dump_ceq_info()
967 addr = EQ_CONS_IDX_REG_ADDR(eq); in hinic_dump_ceq_info()
969 addr = EQ_PROD_IDX_REG_ADDR(eq); in hinic_dump_ceq_info()
972 q_id, ci, eq->cons_idx, pi, in hinic_dump_ceq_info()
973 eq->ceq_tasklet.state, in hinic_dump_ceq_info()
974 eq->wrapped, be32_to_cpu(*(__be32 *)(GET_CURR_CEQ_ELEM(eq)))); in hinic_dump_ceq_info()
981 struct hinic_eq *eq = NULL; in hinic_dump_aeq_info() local
986 eq = &hwdev->aeqs.aeq[q_id]; in hinic_dump_aeq_info()
987 addr = EQ_CONS_IDX_REG_ADDR(eq); in hinic_dump_aeq_info()
989 addr = EQ_PROD_IDX_REG_ADDR(eq); in hinic_dump_aeq_info()
991 aeqe_pos = GET_CURR_AEQ_ELEM(eq); in hinic_dump_aeq_info()
993 q_id, ci, pi, work_busy(&eq->aeq_work.work), in hinic_dump_aeq_info()
994 eq->wrapped, be32_to_cpu(aeqe_pos->desc)); in hinic_dump_aeq_info()