/linux-5.19.10/arch/xtensa/variants/test_mmuhifi_c3/include/variant/ |
D | tie.h | 114 XCHAL_SA_REG(s,0,0,2,0, aeq0, 8, 8, 8,0x0068, aeq,0 , 56,0,0,0) \
            115 XCHAL_SA_REG(s,0,0,2,0, aeq1, 8, 8, 8,0x0069, aeq,1 , 56,0,0,0) \
            116 XCHAL_SA_REG(s,0,0,2,0, aeq2, 8, 8, 8,0x006A, aeq,2 , 56,0,0,0) \
            117 XCHAL_SA_REG(s,0,0,2,0, aeq3, 8, 8, 8,0x006B, aeq,3 , 56,0,0,0)
|
/linux-5.19.10/arch/xtensa/variants/test_kc705_be/include/variant/ |
D | tie.h | 145 XCHAL_SA_REG(s,0,0,2,0, aeq0, 8, 8, 8,0x0068, aeq,0 , 56,0,0,0) \
            146 XCHAL_SA_REG(s,0,0,2,0, aeq1, 8, 8, 8,0x0069, aeq,1 , 56,0,0,0) \
            147 XCHAL_SA_REG(s,0,0,2,0, aeq2, 8, 8, 8,0x006A, aeq,2 , 56,0,0,0) \
            148 XCHAL_SA_REG(s,0,0,2,0, aeq3, 8, 8, 8,0x006B, aeq,3 , 56,0,0,0)
|
/linux-5.19.10/drivers/infiniband/hw/irdma/ |
D | hw.c | 205 struct irdma_aeq *aeq = &rf->aeq; in irdma_process_aeq() local
           206 struct irdma_sc_aeq *sc_aeq = &aeq->sc_aeq; in irdma_process_aeq()
           583 struct irdma_aeq *aeq = &rf->aeq; in irdma_destroy_virt_aeq() local
           584 u32 pg_cnt = DIV_ROUND_UP(aeq->mem.size, PAGE_SIZE); in irdma_destroy_virt_aeq()
           585 dma_addr_t *pg_arr = (dma_addr_t *)aeq->palloc.level1.addr; in irdma_destroy_virt_aeq()
           588 irdma_free_pble(rf->pble_rsrc, &aeq->palloc); in irdma_destroy_virt_aeq()
           589 vfree(aeq->mem.va); in irdma_destroy_virt_aeq()
           603 struct irdma_aeq *aeq = &rf->aeq; in irdma_destroy_aeq() local
           613 aeq->sc_aeq.size = 0; in irdma_destroy_aeq()
           614 status = irdma_cqp_aeq_cmd(dev, &aeq->sc_aeq, IRDMA_OP_AEQ_DESTROY); in irdma_destroy_aeq()
           [all …]
|
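The hw.c matches above include irdma_destroy_virt_aeq(), which works out how many pages back the virtually mapped queue, releases the page-list (PBLE) resource, and only then frees the queue memory itself. The sketch below is a simplified userspace analogue of that teardown order; the demo_aeq type, demo_pble_free() helper, and all sizes are invented for illustration and are not the driver's real definitions.

/* Simplified userspace analogue of the irdma_destroy_virt_aeq() teardown
 * order shown above.  All names here are illustrative stand-ins. */
#include <stdio.h>
#include <stdlib.h>

#define DEMO_PAGE_SIZE 4096u
/* same rounding rule as the kernel's DIV_ROUND_UP() */
#define DEMO_DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

struct demo_aeq {
	void *va;        /* queue memory; freed last, like vfree(aeq->mem.va) */
	size_t size;     /* size of that memory in bytes */
	void *page_list; /* stand-in for the PBLE/page-list resource */
};

static void demo_pble_free(void *page_list)
{
	free(page_list); /* the driver releases its PBLE resource here */
}

static void demo_destroy_virt_aeq(struct demo_aeq *aeq)
{
	size_t pg_cnt = DEMO_DIV_ROUND_UP(aeq->size, DEMO_PAGE_SIZE);

	printf("unmapping %zu pages\n", pg_cnt); /* driver unmaps the page array first */
	demo_pble_free(aeq->page_list);          /* then releases the page-list resource */
	free(aeq->va);                           /* and finally frees the queue memory */
}

int main(void)
{
	struct demo_aeq aeq = {
		.size = 3 * DEMO_PAGE_SIZE + 100,
		.page_list = malloc(64),
	};

	aeq.va = malloc(aeq.size);
	demo_destroy_virt_aeq(&aeq);
	return 0;
}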
D | ctrl.c | 3870 int irdma_sc_aeq_init(struct irdma_sc_aeq *aeq, in irdma_sc_aeq_init() argument
             3884 aeq->size = sizeof(*aeq); in irdma_sc_aeq_init()
             3885 aeq->polarity = 1; in irdma_sc_aeq_init()
             3886 aeq->aeqe_base = (struct irdma_sc_aeqe *)info->aeqe_base; in irdma_sc_aeq_init()
             3887 aeq->dev = info->dev; in irdma_sc_aeq_init()
             3888 aeq->elem_cnt = info->elem_cnt; in irdma_sc_aeq_init()
             3889 aeq->aeq_elem_pa = info->aeq_elem_pa; in irdma_sc_aeq_init()
             3890 IRDMA_RING_INIT(aeq->aeq_ring, aeq->elem_cnt); in irdma_sc_aeq_init()
             3891 aeq->virtual_map = info->virtual_map; in irdma_sc_aeq_init()
             3892 aeq->pbl_list = (aeq->virtual_map ? info->pbl_list : NULL); in irdma_sc_aeq_init()
             [all …]
|
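The ctrl.c matches above show irdma_sc_aeq_init() copying the caller-supplied init info into the AEQ control structure and seeding the descriptor ring. Below is a minimal userspace sketch of that pattern under heavily simplified assumptions: the demo_* structs, the demo_ring_init() helper, and the values in main() are invented for illustration and do not match the driver's actual definitions.

/* Minimal sketch of the "copy init info, then seed the ring" pattern
 * visible in irdma_sc_aeq_init() above.  All demo_* names are stand-ins. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct demo_ring {            /* stand-in for the driver's ring bookkeeping */
	uint32_t size;
	uint32_t head;
	uint32_t tail;
};

struct demo_aeq_init_info {   /* stand-in for the caller-supplied init info */
	void *aeqe_base;      /* virtual address of the AEQ element array */
	uint64_t aeq_elem_pa; /* DMA address of that array */
	uint32_t elem_cnt;    /* number of AEQ elements */
	bool virtual_map;     /* true when the queue is page-list mapped */
};

struct demo_sc_aeq {          /* stand-in for struct irdma_sc_aeq */
	uint32_t size;
	uint8_t polarity;
	void *aeqe_base;
	uint64_t aeq_elem_pa;
	uint32_t elem_cnt;
	bool virtual_map;
	struct demo_ring ring;
};

static void demo_ring_init(struct demo_ring *ring, uint32_t size)
{
	ring->size = size;    /* stands in for IRDMA_RING_INIT() */
	ring->head = 0;
	ring->tail = 0;
}

static void demo_aeq_init(struct demo_sc_aeq *aeq,
			  const struct demo_aeq_init_info *info)
{
	aeq->size = sizeof(*aeq);
	aeq->polarity = 1;    /* mirrors aeq->polarity = 1 in the fragment above */
	aeq->aeqe_base = info->aeqe_base;
	aeq->aeq_elem_pa = info->aeq_elem_pa;
	aeq->elem_cnt = info->elem_cnt;
	aeq->virtual_map = info->virtual_map;
	demo_ring_init(&aeq->ring, aeq->elem_cnt);
}

int main(void)
{
	static uint8_t elems[4096];      /* pretend AEQ element memory */
	struct demo_aeq_init_info info = {
		.aeqe_base = elems,
		.aeq_elem_pa = 0x1000,   /* made-up address, demo only */
		.elem_cnt = 128,
		.virtual_map = false,
	};
	struct demo_sc_aeq aeq;

	demo_aeq_init(&aeq, &info);
	printf("AEQ ready: %u elements, ring size %u\n",
	       (unsigned int)aeq.elem_cnt, (unsigned int)aeq.ring.size);
	return 0;
}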
D | type.h | 646 struct irdma_sc_aeq *aeq; member
             1221 int irdma_sc_aeq_init(struct irdma_sc_aeq *aeq,
             1223 int irdma_sc_get_next_aeqe(struct irdma_sc_aeq *aeq,
             1390 struct irdma_sc_aeq *aeq; member
             1395 struct irdma_sc_aeq *aeq; member
|
D | icrdma_hw.c | 62 if (dev->ceq_itr && dev->aeq->msix_idx != idx) in icrdma_ena_irq()
|
D | main.h | 302 struct irdma_aeq aeq; member
|
D | utils.c | 1987 cqp_info->in.u.aeq_create.aeq = sc_aeq; in irdma_cqp_aeq_cmd()
|
/linux-5.19.10/drivers/net/ethernet/huawei/hinic/ |
D | hinic_hw_eqs.c | 80 container_of((eq) - (eq)->q_id, struct hinic_aeqs, aeq[0])
                     364 struct hinic_eq *aeq; in eq_irq_work() local
                     366 aeq = aeq_work->data; in eq_irq_work()
                     367 eq_irq_handler(aeq); in eq_irq_work()
                     389 struct hinic_eq *aeq = data; in aeq_interrupt() local
                     393 hinic_msix_attr_cnt_clear(aeq->hwif, aeq->msix_entry.entry); in aeq_interrupt()
                     395 aeq_work = &aeq->aeq_work; in aeq_interrupt()
                     396 aeq_work->data = aeq; in aeq_interrupt()
                     398 aeqs = aeq_to_aeqs(aeq); in aeq_interrupt()
                     875 err = init_eq(&aeqs->aeq[q_id], hwif, HINIC_AEQ, q_id, q_len, in hinic_aeqs_init()
                     [all …]
|
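The hinic_hw_eqs.c matches above show the usual top-half/bottom-half split: aeq_interrupt() clears the MSI-X pending counter, stashes the queue pointer in a work item, and defers event processing to eq_irq_work(). The sketch below reproduces that shape in plain userspace C; the fake_* types and the synchronous run_pending_work() call stand in for the kernel's workqueue machinery, and the aeq_interrupt()/eq_irq_work() bodies are simplified analogues rather than the driver's real code.

/* Userspace sketch of the aeq_interrupt()/eq_irq_work() split shown above.
 * fake_* types and run_pending_work() stand in for the kernel workqueue API. */
#include <stdio.h>

struct fake_eq {
	int q_id;                  /* which event queue this is */
};

struct fake_work {
	void (*fn)(struct fake_work *work);
	void *data;                /* mirrors aeq_work->data = aeq */
};

static struct fake_work *pending;  /* one-slot "workqueue" for the demo */

static void schedule_fake_work(struct fake_work *work)
{
	pending = work;            /* a real driver would queue the work item here */
}

static void run_pending_work(void)
{
	struct fake_work *work = pending;

	if (work) {
		pending = NULL;
		work->fn(work);    /* the worker thread would do this later */
	}
}

static void eq_irq_work(struct fake_work *work)
{
	struct fake_eq *aeq = work->data;

	/* bottom half: walk and handle completed AEQ elements; just report here */
	printf("handling events for AEQ %d\n", aeq->q_id);
}

static struct fake_work aeq_work = { .fn = eq_irq_work };

static void aeq_interrupt(struct fake_eq *aeq)
{
	/* top half: the driver clears the MSI-X pending counter here, then
	 * records which queue fired and defers the heavy lifting */
	aeq_work.data = aeq;
	schedule_fake_work(&aeq_work);
}

int main(void)
{
	struct fake_eq aeq = { .q_id = 0 };

	aeq_interrupt(&aeq);       /* simulate the interrupt firing */
	run_pending_work();        /* simulate the deferred work running */
	return 0;
}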
D | hinic_hw_eqs.h | 208 struct hinic_eq aeq[HINIC_MAX_AEQS]; member
|