// SPDX-License-Identifier: (GPL-2.0-only OR BSD-2-Clause)
/*
 * Copyright (C) Sunplus Technology Co., Ltd.
 *       All rights reserved.
 */
#include <linux/irq.h>
#include <linux/irqdomain.h>
#include <linux/io.h>
#include <linux/irqchip.h>
#include <linux/irqchip/chained_irq.h>
#include <linux/of_address.h>
#include <linux/of_irq.h>

#define SP_INTC_HWIRQ_MIN   0
#define SP_INTC_HWIRQ_MAX   223

#define SP_INTC_NR_IRQS     (SP_INTC_HWIRQ_MAX - SP_INTC_HWIRQ_MIN + 1)
#define SP_INTC_NR_GROUPS   DIV_ROUND_UP(SP_INTC_NR_IRQS, 32)
#define SP_INTC_REG_SIZE    (SP_INTC_NR_GROUPS * 4)

/* REG_GROUP_0 regs */
#define REG_INTR_TYPE       (sp_intc.g0)
#define REG_INTR_POLARITY   (REG_INTR_TYPE     + SP_INTC_REG_SIZE)
#define REG_INTR_PRIORITY   (REG_INTR_POLARITY + SP_INTC_REG_SIZE)
#define REG_INTR_MASK       (REG_INTR_PRIORITY + SP_INTC_REG_SIZE)

/* REG_GROUP_1 regs */
#define REG_INTR_CLEAR      (sp_intc.g1)
#define REG_MASKED_EXT1     (REG_INTR_CLEAR    + SP_INTC_REG_SIZE)
#define REG_MASKED_EXT0     (REG_MASKED_EXT1   + SP_INTC_REG_SIZE)
#define REG_INTR_GROUP      (REG_INTR_CLEAR    + 31 * 4)

#define GROUP_MASK      (BIT(SP_INTC_NR_GROUPS) - 1)
#define GROUP_SHIFT_EXT1    (0)
#define GROUP_SHIFT_EXT0    (8)
/*
 * When GPIO_INT0~7 are set to edge trigger, they don't work properly.
 * WORKAROUND: change them to level trigger, and toggle the polarity
 * at ACK/Handler time to make the HW work.
 */
#define GPIO_INT0_HWIRQ     120
#define GPIO_INT7_HWIRQ     127
#define IS_GPIO_INT(irq)                    \
({                              \
    u32 i = irq;                        \
    (i >= GPIO_INT0_HWIRQ) && (i <= GPIO_INT7_HWIRQ);   \
})

/* index of states */
enum {
    _IS_EDGE = 0,
    _IS_LOW,
    _IS_ACTIVE
};

#define STATE_BIT(irq, idx)     (((irq) - GPIO_INT0_HWIRQ) * 3 + (idx))
#define ASSIGN_STATE(irq, idx, v)   assign_bit(STATE_BIT(irq, idx), sp_intc.states, v)
#define TEST_STATE(irq, idx)        test_bit(STATE_BIT(irq, idx), sp_intc.states)

static struct sp_intctl {
    /*
     * REG_GROUP_0: includes type/polarity/priority/mask regs.
     * REG_GROUP_1: includes clear/masked_ext0/masked_ext1/group regs.
     */
    void __iomem *g0; // REG_GROUP_0 base
    void __iomem *g1; // REG_GROUP_1 base

    struct irq_domain *domain;
    raw_spinlock_t lock;

    /*
     * Stores GPIO_INT states.
     * Each interrupt has 3 states: is_edge, is_low, is_active.
     */
    DECLARE_BITMAP(states, (GPIO_INT7_HWIRQ - GPIO_INT0_HWIRQ + 1) * 3);
} sp_intc;

static struct irq_chip sp_intc_chip;

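/* Set or clear the bit for one hwirq in the 32-bit register bank at 'base'. */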
static void sp_intc_assign_bit(u32 hwirq, void __iomem *base, bool value)
{
    u32 offset, mask;
    unsigned long flags;
    void __iomem *reg;

    offset = (hwirq / 32) * 4;
    reg = base + offset;

    raw_spin_lock_irqsave(&sp_intc.lock, flags);
    mask = readl_relaxed(reg);
    if (value)
        mask |= BIT(hwirq % 32);
    else
        mask &= ~BIT(hwirq % 32);
    writel_relaxed(mask, reg);
    raw_spin_unlock_irqrestore(&sp_intc.lock, flags);
}

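/*
 * For the GPIO_INT edge workaround, ACK inverts the programmed polarity and
 * marks the interrupt active; the cascade handler restores the polarity once
 * the line returns to its inactive level.
 */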
static void sp_intc_ack_irq(struct irq_data *d)
{
    u32 hwirq = d->hwirq;

    if (unlikely(IS_GPIO_INT(hwirq) && TEST_STATE(hwirq, _IS_EDGE))) { // WORKAROUND
        sp_intc_assign_bit(hwirq, REG_INTR_POLARITY, !TEST_STATE(hwirq, _IS_LOW));
        ASSIGN_STATE(hwirq, _IS_ACTIVE, true);
    }

    sp_intc_assign_bit(hwirq, REG_INTR_CLEAR, 1);
}

static void sp_intc_mask_irq(struct irq_data *d)
{
    sp_intc_assign_bit(d->hwirq, REG_INTR_MASK, 0);
}

static void sp_intc_unmask_irq(struct irq_data *d)
{
    sp_intc_assign_bit(d->hwirq, REG_INTR_MASK, 1);
}

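/*
 * For GPIO_INT0~7, an edge request is recorded in sp_intc.states and the
 * hardware is programmed as level trigger instead (see WORKAROUND above).
 */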
static int sp_intc_set_type(struct irq_data *d, unsigned int type)
{
    u32 hwirq = d->hwirq;
    bool is_edge = !(type & IRQ_TYPE_LEVEL_MASK);
    bool is_low = (type == IRQ_TYPE_LEVEL_LOW || type == IRQ_TYPE_EDGE_FALLING);

    irq_set_handler_locked(d, is_edge ? handle_edge_irq : handle_level_irq);

    if (unlikely(IS_GPIO_INT(hwirq) && is_edge)) { // WORKAROUND
        /* store states */
        ASSIGN_STATE(hwirq, _IS_EDGE, is_edge);
        ASSIGN_STATE(hwirq, _IS_LOW, is_low);
        ASSIGN_STATE(hwirq, _IS_ACTIVE, false);
        /* change to level */
        is_edge = false;
    }

    sp_intc_assign_bit(hwirq, REG_INTR_TYPE, is_edge);
    sp_intc_assign_bit(hwirq, REG_INTR_POLARITY, is_low);

    return 0;
}

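/*
 * Return the highest pending hwirq currently routed to EXT_INT<ext_num>,
 * or -1 if nothing is pending.
 */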
static int sp_intc_get_ext_irq(int ext_num)
{
    void __iomem *base = ext_num ? REG_MASKED_EXT1 : REG_MASKED_EXT0;
    u32 shift = ext_num ? GROUP_SHIFT_EXT1 : GROUP_SHIFT_EXT0;
    u32 groups;
    u32 pending_group;
    u32 group;
    u32 pending_irq;

    groups = readl_relaxed(REG_INTR_GROUP);
    pending_group = (groups >> shift) & GROUP_MASK;
    if (!pending_group)
        return -1;

    group = fls(pending_group) - 1;
    pending_irq = readl_relaxed(base + group * 4);
    if (!pending_irq)
        return -1;

    return (group * 32) + fls(pending_irq) - 1;
}

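/*
 * Chained handler for the parent EXT_INT lines: dispatch every pending hwirq,
 * except the deassertion event of a workaround GPIO_INT, which only restores
 * the original polarity and is not forwarded.
 */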
static void sp_intc_handle_ext_cascaded(struct irq_desc *desc)
{
    struct irq_chip *chip = irq_desc_get_chip(desc);
    int ext_num = (uintptr_t)irq_desc_get_handler_data(desc);
    int hwirq;

    chained_irq_enter(chip, desc);

    while ((hwirq = sp_intc_get_ext_irq(ext_num)) >= 0) {
        if (unlikely(IS_GPIO_INT(hwirq) && TEST_STATE(hwirq, _IS_ACTIVE))) { // WORKAROUND
            ASSIGN_STATE(hwirq, _IS_ACTIVE, false);
            sp_intc_assign_bit(hwirq, REG_INTR_POLARITY, TEST_STATE(hwirq, _IS_LOW));
        } else {
            generic_handle_domain_irq(sp_intc.domain, hwirq);
        }
    }

    chained_irq_exit(chip, desc);
}

static struct irq_chip sp_intc_chip = {
    .name = "sp_intc",
    .irq_ack = sp_intc_ack_irq,
    .irq_mask = sp_intc_mask_irq,
    .irq_unmask = sp_intc_unmask_irq,
    .irq_set_type = sp_intc_set_type,
};

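/* Bind each mapped hwirq to sp_intc_chip; a level handler is the default. */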
static int sp_intc_irq_domain_map(struct irq_domain *domain,
                  unsigned int irq, irq_hw_number_t hwirq)
{
    irq_set_chip_and_handler(irq, &sp_intc_chip, handle_level_irq);
    irq_set_chip_data(irq, &sp_intc_chip);
    irq_set_noprobe(irq);

    return 0;
}

static const struct irq_domain_ops sp_intc_dm_ops = {
    .xlate = irq_domain_xlate_twocell,
    .map = sp_intc_irq_domain_map,
};

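/*
 * Parse parent interrupt i (EXT_INT0 or EXT_INT1) and install the cascade
 * handler with the EXT_INT number as its handler data.
 */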
static int sp_intc_irq_map(struct device_node *node, int i)
{
    unsigned int irq;

    irq = irq_of_parse_and_map(node, i);
    if (!irq)
        return -ENOENT;

    irq_set_chained_handler_and_data(irq, sp_intc_handle_ext_cascaded, (void *)(uintptr_t)i);

    return 0;
}

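/*
 * Init: map the two register groups, hook both parent EXT_INT lines, mask
 * and clear every interrupt, then register the linear IRQ domain.
 */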
static int __init sp_intc_init_dt(struct device_node *node, struct device_node *parent)
{
    int i, ret;

    sp_intc.g0 = of_iomap(node, 0);
    if (!sp_intc.g0)
        return -ENXIO;

    sp_intc.g1 = of_iomap(node, 1);
    if (!sp_intc.g1) {
        ret = -ENXIO;
        goto out_unmap0;
    }

    ret = sp_intc_irq_map(node, 0); // EXT_INT0
    if (ret)
        goto out_unmap1;

    ret = sp_intc_irq_map(node, 1); // EXT_INT1
    if (ret)
        goto out_unmap1;

    /* initial regs */
    for (i = 0; i < SP_INTC_NR_GROUPS; i++) {
        /* all mask */
        writel_relaxed(0, REG_INTR_MASK + i * 4);
        /* all edge */
        writel_relaxed(~0, REG_INTR_TYPE + i * 4);
        /* all high-active */
        writel_relaxed(0, REG_INTR_POLARITY + i * 4);
        /* all EXT_INT0 */
        writel_relaxed(~0, REG_INTR_PRIORITY + i * 4);
        /* all clear */
        writel_relaxed(~0, REG_INTR_CLEAR + i * 4);
    }

    sp_intc.domain = irq_domain_add_linear(node, SP_INTC_NR_IRQS,
                           &sp_intc_dm_ops, &sp_intc);
    if (!sp_intc.domain) {
        ret = -ENOMEM;
        goto out_unmap1;
    }

    raw_spin_lock_init(&sp_intc.lock);

    return 0;

out_unmap1:
    iounmap(sp_intc.g1);
out_unmap0:
    iounmap(sp_intc.g0);

    return ret;
}

IRQCHIP_DECLARE(sp_intc, "sunplus,sp7021-intc", sp_intc_init_dt);