Lines matching references to eint in drivers/pinctrl/mediatek/mtk-eint.c (the numbers below are line numbers in that file, grouped by the function containing the reference):
In mtk_eint_get_offset(), where eint is an argument:
     67  static void __iomem *mtk_eint_get_offset(struct mtk_eint *eint,
     74          if (eint_num >= eint->hw->ap_num)
     75                  eint_base = eint->hw->ap_num;
     77          reg = eint->base + offset + ((eint_num - eint_base) / 32) * 4;
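The arithmetic on line 77 is the addressing scheme the rest of the file relies on: every group of 32 EINT lines shares one 32-bit register, so a line's register is the block base, plus the per-function offset (status, mask, polarity, and so on), plus 4 bytes per group of 32. A standalone sketch of that calculation follows; the base address and offset are made-up values, and the eint_base correction from lines 74-75 (for lines past ap_num) is left out for brevity.

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-ins for eint->base and a per-function register offset. */
    #define EINT_BASE_ADDR 0x1000b000u
    #define EINT_STAT_OFF  0x000u

    /* Mirrors line 77: reg = base + offset + ((eint_num - eint_base) / 32) * 4,
     * with the eint_base adjustment dropped for simplicity. */
    static uint32_t eint_reg_addr(uint32_t base, uint32_t offset, unsigned int eint_num)
    {
        return base + offset + (eint_num / 32) * 4;   /* 32 lines per register */
    }

    int main(void)
    {
        /* EINT 0..31 share the first register, 32..63 the next one, and so on. */
        printf("eint 5  -> 0x%08" PRIx32 "\n", eint_reg_addr(EINT_BASE_ADDR, EINT_STAT_OFF, 5));
        printf("eint 37 -> 0x%08" PRIx32 "\n", eint_reg_addr(EINT_BASE_ADDR, EINT_STAT_OFF, 37));
        return 0;
    }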
In mtk_eint_can_en_debounce(), where eint is an argument:
     82  static unsigned int mtk_eint_can_en_debounce(struct mtk_eint *eint,
     87          void __iomem *reg = mtk_eint_get_offset(eint, eint_num,
     88                                                  eint->regs->sens);
     95          if (eint_num < eint->hw->db_cnt && sens != MTK_EINT_EDGE_SENSITIVE)
In mtk_eint_flip_edge(), where eint is an argument:
    101  static int mtk_eint_flip_edge(struct mtk_eint *eint, int hwirq)
    106          u32 port = (hwirq >> 5) & eint->hw->port_mask;
    107          void __iomem *reg = eint->base + (port << 2);
    109          curr_level = eint->gpio_xlate->get_gpio_state(eint->pctl, hwirq);
    114                  reg_offset = eint->regs->pol_clr;
    116                  reg_offset = eint->regs->pol_set;
    119                  curr_level = eint->gpio_xlate->get_gpio_state(eint->pctl,
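mtk_eint_flip_edge() is how dual-edge triggering is emulated on hardware that latches only one polarity at a time: the driver samples the current pin level through gpio_xlate->get_gpio_state(), arms the opposite edge (pol_clr when the line is high, pol_set when it is low), then samples again and, in the full function, repeats until the level stops changing so an edge that arrives mid-update is not lost. A standalone model of that loop, with a fake level source in place of the gpio_xlate callback:

    #include <stdio.h>

    /* Fake pin level standing in for gpio_xlate->get_gpio_state(). */
    static int fake_level;
    static int get_level(void) { return fake_level; }

    /* Returns 1 when the rising edge ends up armed (pol_set), 0 for the falling
     * edge (pol_clr).  Re-samples until the level is stable, like the loop
     * around lines 109-119 above. */
    static int flip_edge(void)
    {
        int start, curr = get_level();
        int armed_rising = 0;

        do {
            start = curr;
            armed_rising = (start == 0);   /* low now: wait for the rising edge */
            /* ...the polarity register write would happen here... */
            curr = get_level();            /* catch a level change that raced us */
        } while (start != curr);

        return armed_rising;
    }

    int main(void)
    {
        fake_level = 1;
        printf("level high -> %s\n", flip_edge() ? "pol_set (rising)" : "pol_clr (falling)");
        fake_level = 0;
        printf("level low  -> %s\n", flip_edge() ? "pol_set (rising)" : "pol_clr (falling)");
        return 0;
    }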
In mtk_eint_mask(), where eint is a local variable:
    128          struct mtk_eint *eint = irq_data_get_irq_chip_data(d);
    130          void __iomem *reg = mtk_eint_get_offset(eint, d->hwirq,
    131                                                  eint->regs->mask_set);
    133          eint->cur_mask[d->hwirq >> 5] &= ~mask;
In mtk_eint_unmask(), where eint is a local variable:
    140          struct mtk_eint *eint = irq_data_get_irq_chip_data(d);
    142          void __iomem *reg = mtk_eint_get_offset(eint, d->hwirq,
    143                                                  eint->regs->mask_clr);
    145          eint->cur_mask[d->hwirq >> 5] |= mask;
    149          if (eint->dual_edge[d->hwirq])
    150                  mtk_eint_flip_edge(eint, d->hwirq);
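mtk_eint_mask() and mtk_eint_unmask() do two things at once: they poke the hardware through the write-1-to-set mask_set and write-1-to-clear mask_clr registers, and they keep a software shadow, cur_mask[], with one bit per line packed into 32-bit words (word d->hwirq >> 5, bit d->hwirq & 0x1f), a set bit meaning the line is unmasked. That shadow is what the resume path and the wake-only check in the interrupt handler consult later. A standalone sketch of just the shadow bookkeeping; the array size and the BIT() macro here are local stand-ins:

    #include <stdint.h>
    #include <stdio.h>

    #define BIT(n) (1u << (n))
    #define NUM_PORTS 8                    /* assumed number of 32-line banks */

    static uint32_t cur_mask[NUM_PORTS];   /* software shadow: bit set = unmasked */

    /* Mirror of line 133 in mtk_eint_mask(): remember the line as masked. */
    static void shadow_mask(unsigned int hwirq)
    {
        cur_mask[hwirq >> 5] &= ~BIT(hwirq & 0x1f);
    }

    /* Mirror of line 145 in mtk_eint_unmask(): remember the line as unmasked. */
    static void shadow_unmask(unsigned int hwirq)
    {
        cur_mask[hwirq >> 5] |= BIT(hwirq & 0x1f);
    }

    int main(void)
    {
        shadow_unmask(37);                 /* EINT 37 lives in word 1, bit 5 */
        printf("word 1 = 0x%08x\n", cur_mask[1]);
        shadow_mask(37);
        printf("word 1 = 0x%08x\n", cur_mask[1]);
        return 0;
    }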
In mtk_eint_get_mask(), where eint is an argument:
    153  static unsigned int mtk_eint_get_mask(struct mtk_eint *eint,
    157          void __iomem *reg = mtk_eint_get_offset(eint, eint_num,
    158                                                  eint->regs->mask);
In mtk_eint_ack(), where eint is a local variable:
    165          struct mtk_eint *eint = irq_data_get_irq_chip_data(d);
    167          void __iomem *reg = mtk_eint_get_offset(eint, d->hwirq,
    168                                                  eint->regs->ack);
In mtk_eint_set_type(), where eint is a local variable:
    175          struct mtk_eint *eint = irq_data_get_irq_chip_data(d);
    182                  dev_err(eint->dev,
    189                  eint->dual_edge[d->hwirq] = 1;
    191                  eint->dual_edge[d->hwirq] = 0;
    193          if (!mtk_eint_get_mask(eint, d->hwirq)) {
    201                  reg = mtk_eint_get_offset(eint, d->hwirq, eint->regs->pol_clr);
    204                  reg = mtk_eint_get_offset(eint, d->hwirq, eint->regs->pol_set);
    209                  reg = mtk_eint_get_offset(eint, d->hwirq, eint->regs->sens_clr);
    212                  reg = mtk_eint_get_offset(eint, d->hwirq, eint->regs->sens_set);
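mtk_eint_set_type() translates the requested trigger into writes to two set/clear register pairs: pol_set/pol_clr pick the polarity and sens_set/sens_clr pick level versus edge sensitivity, while a both-edges request additionally sets dual_edge[] so the unmask path and the handler keep flipping the polarity at run time. The branch conditions are not part of the listing, so the sketch below assumes the mapping suggested by the register names (pol_clr for low/falling triggers, sens_clr for edge triggers); the TRIG_* flags are local stand-ins for the kernel's IRQ_TYPE_* values:

    #include <stdio.h>

    /* Local stand-ins for the trigger types (values are illustrative). */
    #define TRIG_EDGE_RISING  0x1
    #define TRIG_EDGE_FALLING 0x2
    #define TRIG_LEVEL_HIGH   0x4
    #define TRIG_LEVEL_LOW    0x8

    /* Which register of each set/clr pair would be written for a trigger type,
     * following the assumed mapping described above. */
    static void describe_type(unsigned int type)
    {
        const char *pol  = (type & (TRIG_LEVEL_LOW | TRIG_EDGE_FALLING)) ?
                               "pol_clr (active low / falling)" :
                               "pol_set (active high / rising)";
        const char *sens = (type & (TRIG_EDGE_RISING | TRIG_EDGE_FALLING)) ?
                               "sens_clr (edge sensitive)" :
                               "sens_set (level sensitive)";
        printf("type 0x%x -> %s, %s\n", type, pol, sens);
    }

    int main(void)
    {
        describe_type(TRIG_EDGE_RISING);
        describe_type(TRIG_EDGE_FALLING);
        describe_type(TRIG_LEVEL_LOW);
        /* Both edges: the polarity is then managed at run time by the flip-edge path. */
        describe_type(TRIG_EDGE_RISING | TRIG_EDGE_FALLING);
        return 0;
    }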
In mtk_eint_irq_set_wake(), where eint is a local variable:
    225          struct mtk_eint *eint = irq_data_get_irq_chip_data(d);
    230                  eint->wake_mask[reg] |= BIT(shift);
    232                  eint->wake_mask[reg] &= ~BIT(shift);
In mtk_eint_chip_write_mask(), where eint is an argument:
    237  static void mtk_eint_chip_write_mask(const struct mtk_eint *eint,
    243          for (port = 0; port < eint->hw->ports; port++) {
    245                  writel_relaxed(~buf[port], reg + eint->regs->mask_set);
    246                  writel_relaxed(buf[port], reg + eint->regs->mask_clr);
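mtk_eint_chip_write_mask() applies an entire mask array in one sweep: for each 32-line port it writes the complement of the wanted bits to mask_set (masking every line that should be off) and the wanted bits themselves to mask_clr (unmasking the rest). This is the routine the suspend and resume paths further down use to load wake_mask or cur_mask wholesale. A standalone model with plain arrays in place of the MMIO registers; the port count is made up:

    #include <stdint.h>
    #include <stdio.h>

    #define NUM_PORTS 2                        /* assumed number of 32-line banks */

    /* Fake write-1-to-set / write-1-to-clear mask registers, one per port. */
    static uint32_t hw_mask[NUM_PORTS];        /* bit set = line masked in "hardware" */

    static void write_mask_set(int port, uint32_t val) { hw_mask[port] |= val;  }
    static void write_mask_clr(int port, uint32_t val) { hw_mask[port] &= ~val; }

    /* Mirror of lines 243-246: buf[] holds "1 = line should be unmasked" bits. */
    static void chip_write_mask(const uint32_t *buf)
    {
        for (int port = 0; port < NUM_PORTS; port++) {
            write_mask_set(port, ~buf[port]);   /* mask everything not wanted */
            write_mask_clr(port, buf[port]);    /* unmask the wanted lines */
        }
    }

    int main(void)
    {
        uint32_t wanted[NUM_PORTS] = { 0x00000021u, 0x80000000u };

        chip_write_mask(wanted);
        for (int port = 0; port < NUM_PORTS; port++)
            printf("port %d hw mask = 0x%08x\n", port, hw_mask[port]);
        return 0;
    }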
In mtk_eint_irq_request_resources(), where eint is a local variable:
    252          struct mtk_eint *eint = irq_data_get_irq_chip_data(d);
    257          err = eint->gpio_xlate->get_gpio_n(eint->pctl, d->hwirq,
    260                  dev_err(eint->dev, "Can not find pin\n");
    266                  dev_err(eint->dev, "unable to lock HW IRQ %lu for IRQ\n",
    271          err = eint->gpio_xlate->set_gpio_as_eint(eint->pctl, d->hwirq);
    273                  dev_err(eint->dev, "Can not eint mode\n");
In mtk_eint_irq_release_resources(), where eint is a local variable:
    282          struct mtk_eint *eint = irq_data_get_irq_chip_data(d);
    286          eint->gpio_xlate->get_gpio_n(eint->pctl, d->hwirq, &gpio_n,
In mtk_eint_hw_init(), where eint is an argument:
    304  static unsigned int mtk_eint_hw_init(struct mtk_eint *eint)
    306          void __iomem *dom_en = eint->base + eint->regs->dom_en;
    307          void __iomem *mask_set = eint->base + eint->regs->mask_set;
    310          for (i = 0; i < eint->hw->ap_num; i += 32) {
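The body of the loop in mtk_eint_hw_init() is not included in the listing; the two pointers it sets up suggest what it does, namely walk the controller one 32-line bank at a time, handing each bank to the AP interrupt domain through dom_en and leaving every line masked through mask_set. A minimal standalone sketch under that assumption, with fake register arrays:

    #include <stdint.h>
    #include <stdio.h>

    #define AP_NUM   96                      /* assumed number of EINT lines */
    #define NUM_REGS ((AP_NUM + 31) / 32)

    static uint32_t dom_en[NUM_REGS];        /* fake domain-enable registers */
    static uint32_t mask_set[NUM_REGS];      /* fake write-1-to-set mask registers */

    static void hw_init(void)
    {
        /* One 32-bit register per 32 EINT lines, as in the offset math above. */
        for (int i = 0; i < AP_NUM; i += 32) {
            dom_en[i / 32]   = 0xffffffffu;  /* route the bank to the AP */
            mask_set[i / 32] = 0xffffffffu;  /* start with the bank fully masked */
        }
    }

    int main(void)
    {
        hw_init();
        printf("bank 0: dom_en=0x%08x mask=0x%08x\n", dom_en[0], mask_set[0]);
        return 0;
    }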
In mtk_eint_debounce_process(), where eint is an argument:
    321  mtk_eint_debounce_process(struct mtk_eint *eint, int index)
    326          ctrl_offset = (index / 4) * 4 + eint->regs->dbnc_ctrl;
    327          dbnc = readl(eint->base + ctrl_offset);
    330                  ctrl_offset = (index / 4) * 4 + eint->regs->dbnc_set;
    332                  writel(rst, eint->base + ctrl_offset);
In mtk_eint_irq_handler(), where eint is a local variable:
    339          struct mtk_eint *eint = irq_desc_get_handler_data(desc);
    342          void __iomem *reg = mtk_eint_get_offset(eint, 0, eint->regs->stat);
    346          for (eint_num = 0; eint_num < eint->hw->ap_num; eint_num += 32,
    361                  if (eint->wake_mask[mask_offset] & BIT(offset) &&
    362                      !(eint->cur_mask[mask_offset] & BIT(offset))) {
    364                                  eint->regs->stat +
    365                                  eint->regs->mask_set);
    368                  dual_edge = eint->dual_edge[index];
    374                          writel(BIT(offset), reg - eint->regs->stat +
    375                                 eint->regs->soft_clr);
    378                          eint->gpio_xlate->get_gpio_state(eint->pctl,
    382                  generic_handle_domain_irq(eint->domain, index);
    385                          curr_level = mtk_eint_flip_edge(eint, index);
    393                                 eint->regs->stat +
    394                                 eint->regs->soft_set);
    397                  if (index < eint->hw->db_cnt)
    398                          mtk_eint_debounce_process(eint, index);
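The chained handler reads one 32-bit status word per bank and walks its set bits. For each pending line it first checks the wake-only case (bit set in wake_mask but clear in cur_mask), in which case the line is masked again via mask_set; for dual-edge lines it clears any previously raised soft interrupt, dispatches the IRQ into the domain, flips the polarity, and raises a soft interrupt through soft_set if the level moved again in the meantime; lines with debounce hardware finally go through mtk_eint_debounce_process(). The sketch below models only the per-word bit walk and the wake-only test, with printouts in place of the register writes and generic_handle_domain_irq(), and __builtin_ctz() standing in for the kernel's __ffs():

    #include <stdint.h>
    #include <stdio.h>

    #define BIT(n) (1u << (n))

    /* One status word covers 32 EINT lines starting at eint_base; each set bit
     * is cleared from the local copy and examined, as in mtk_eint_irq_handler(). */
    static void scan_status_word(unsigned int eint_base, uint32_t status,
                                 uint32_t wake_mask, uint32_t cur_mask)
    {
        while (status) {
            unsigned int offset = (unsigned int)__builtin_ctz(status);
            unsigned int index = eint_base + offset;

            status &= ~BIT(offset);

            /* Armed only as a wakeup source: the driver writes mask_set here. */
            if ((wake_mask & BIT(offset)) && !(cur_mask & BIT(offset)))
                printf("eint %u: wake-only source, mask it again\n", index);

            /* The driver would dispatch into the irq domain at this point. */
            printf("eint %u: handle\n", index);
        }
    }

    int main(void)
    {
        /* Pending bits 1 and 7 in the bank starting at EINT 32; EINT 39 is a
         * wake-only source, EINT 33 is a normally unmasked line. */
        scan_status_word(32, BIT(1) | BIT(7), BIT(7), BIT(1));
        return 0;
    }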
In mtk_eint_do_suspend(), where eint is an argument:
    404  int mtk_eint_do_suspend(struct mtk_eint *eint)
    406          mtk_eint_chip_write_mask(eint, eint->base, eint->wake_mask);
In mtk_eint_do_resume(), where eint is an argument:
    412  int mtk_eint_do_resume(struct mtk_eint *eint)
    414          mtk_eint_chip_write_mask(eint, eint->base, eint->cur_mask);
In mtk_eint_set_debounce(), where eint is an argument:
    420  int mtk_eint_set_debounce(struct mtk_eint *eint, unsigned long eint_num,
    428          if (!eint->hw->db_time)
    431          virq = irq_find_mapping(eint->domain, eint_num);
    435          set_offset = (eint_num / 4) * 4 + eint->regs->dbnc_set;
    436          clr_offset = (eint_num / 4) * 4 + eint->regs->dbnc_clr;
    438          if (!mtk_eint_can_en_debounce(eint, eint_num))
    441          dbnc = eint->num_db_time;
    442          for (i = 0; i < eint->num_db_time; i++) {
    443                  if (debounce <= eint->hw->db_time[i]) {
    449          if (!mtk_eint_get_mask(eint, eint_num)) {
    457          writel(clr_bit, eint->base + clr_offset);
    462          writel(rst | bit, eint->base + set_offset);
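mtk_eint_set_debounce() quantizes the requested debounce time to the hardware's table: lines 441-443 pick the index of the smallest entry of hw->db_time[] that is at least the requested value, defaulting to num_db_time when the request is larger than every entry, and each group of four EINT lines shares one DBNC register, hence the (eint_num / 4) * 4 addressing above. A standalone sketch of the selection step, with a hypothetical db_time table:

    #include <stdio.h>

    /* Hypothetical debounce table in microseconds; the real values are per SoC. */
    static const unsigned int db_time[] = { 500, 1000, 16000, 32000, 64000,
                                            128000, 256000, 512000 };
    #define NUM_DB_TIME (sizeof(db_time) / sizeof(db_time[0]))

    /* Mirror of the selection loop in mtk_eint_set_debounce(): return the index
     * of the smallest table entry that can cover the requested time, or
     * NUM_DB_TIME if the request is larger than everything in the table. */
    static unsigned int pick_debounce_setting(unsigned int debounce_us)
    {
        unsigned int dbnc = NUM_DB_TIME;

        for (unsigned int i = 0; i < NUM_DB_TIME; i++) {
            if (debounce_us <= db_time[i]) {
                dbnc = i;
                break;
            }
        }
        return dbnc;
    }

    int main(void)
    {
        printf("700us   -> setting %u\n", pick_debounce_setting(700));
        printf("50000us -> setting %u\n", pick_debounce_setting(50000));
        return 0;
    }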
In mtk_eint_find_irq(), where eint is an argument:
    476  int mtk_eint_find_irq(struct mtk_eint *eint, unsigned long eint_n)
    480          irq = irq_find_mapping(eint->domain, eint_n);
In mtk_eint_do_init(), where eint is an argument:
    488  int mtk_eint_do_init(struct mtk_eint *eint)
    493          if (!eint->regs)
    494                  eint->regs = &mtk_generic_eint_regs;
    496          eint->wake_mask = devm_kcalloc(eint->dev, eint->hw->ports,
    497                                         sizeof(*eint->wake_mask), GFP_KERNEL);
    498          if (!eint->wake_mask)
    501          eint->cur_mask = devm_kcalloc(eint->dev, eint->hw->ports,
    502                                        sizeof(*eint->cur_mask), GFP_KERNEL);
    503          if (!eint->cur_mask)
    506          eint->dual_edge = devm_kcalloc(eint->dev, eint->hw->ap_num,
    508          if (!eint->dual_edge)
    511          eint->domain = irq_domain_add_linear(eint->dev->of_node,
    512                                               eint->hw->ap_num,
    514          if (!eint->domain)
    517          if (eint->hw->db_time) {
    519                  if (eint->hw->db_time[i] == 0)
    521                  eint->num_db_time = i;
    524          mtk_eint_hw_init(eint);
    525          for (i = 0; i < eint->hw->ap_num; i++) {
    526                  int virq = irq_create_mapping(eint->domain, i);
    530                  irq_set_chip_data(virq, eint);
    533          irq_set_chained_handler_and_data(eint->irq, mtk_eint_irq_handler,
    534                                           eint);