/*
 *  GT641xx IRQ routines.
 *
 *  Copyright (C) 2007	Yoichi Yuasa <yuasa@linux-mips.org>
 *
 *  This program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; either version 2 of the License, or
 *  (at your option) any later version.
 *
 *  This program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program; if not, write to the Free Software
 *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
 */
#include <linux/hardirq.h>
#include <linux/init.h>
#include <linux/irq.h>
#include <linux/spinlock.h>
#include <linux/types.h>

#include <asm/gt64120.h>

#define GT641XX_IRQ_TO_BIT(irq) (1U << (irq - GT641XX_IRQ_BASE))

static DEFINE_RAW_SPINLOCK(gt641xx_irq_lock);

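/* Acknowledge an interrupt by clearing its bit in the GT641xx cause register. */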
static void ack_gt641xx_irq(struct irq_data *d)
{
	unsigned long flags;
	u32 cause;

	raw_spin_lock_irqsave(&gt641xx_irq_lock, flags);
	cause = GT_READ(GT_INTRCAUSE_OFS);
	cause &= ~GT641XX_IRQ_TO_BIT(d->irq);
	GT_WRITE(GT_INTRCAUSE_OFS, cause);
	raw_spin_unlock_irqrestore(&gt641xx_irq_lock, flags);
}

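/* Mask an interrupt by clearing its bit in the GT641xx interrupt mask register. */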
static void mask_gt641xx_irq(struct irq_data *d)
{
	unsigned long flags;
	u32 mask;

	raw_spin_lock_irqsave(&gt641xx_irq_lock, flags);
	mask = GT_READ(GT_INTRMASK_OFS);
	mask &= ~GT641XX_IRQ_TO_BIT(d->irq);
	GT_WRITE(GT_INTRMASK_OFS, mask);
	raw_spin_unlock_irqrestore(&gt641xx_irq_lock, flags);
}

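/* Mask and acknowledge an interrupt in a single locked sequence. */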
static void mask_ack_gt641xx_irq(struct irq_data *d)
{
	unsigned long flags;
	u32 cause, mask;

	raw_spin_lock_irqsave(&gt641xx_irq_lock, flags);
	mask = GT_READ(GT_INTRMASK_OFS);
	mask &= ~GT641XX_IRQ_TO_BIT(d->irq);
	GT_WRITE(GT_INTRMASK_OFS, mask);

	cause = GT_READ(GT_INTRCAUSE_OFS);
	cause &= ~GT641XX_IRQ_TO_BIT(d->irq);
	GT_WRITE(GT_INTRCAUSE_OFS, cause);
	raw_spin_unlock_irqrestore(&gt641xx_irq_lock, flags);
}

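/* Unmask an interrupt by setting its bit in the GT641xx interrupt mask register. */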
static void unmask_gt641xx_irq(struct irq_data *d)
{
	unsigned long flags;
	u32 mask;

	raw_spin_lock_irqsave(&gt641xx_irq_lock, flags);
	mask = GT_READ(GT_INTRMASK_OFS);
	mask |= GT641XX_IRQ_TO_BIT(d->irq);
	GT_WRITE(GT_INTRMASK_OFS, mask);
	raw_spin_unlock_irqrestore(&gt641xx_irq_lock, flags);
}

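/* irq_chip implementation for interrupts routed through the GT641xx system controller. */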
static struct irq_chip gt641xx_irq_chip = {
	.name		= "GT641xx",
	.irq_ack	= ack_gt641xx_irq,
	.irq_mask	= mask_gt641xx_irq,
	.irq_mask_ack	= mask_ack_gt641xx_irq,
	.irq_unmask	= unmask_gt641xx_irq,
};

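/*
 * Dispatch the first pending, unmasked GT641xx interrupt (lowest bit first);
 * if none is found, count it as a spurious interrupt.
 */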
void gt641xx_irq_dispatch(void)
{
	u32 cause, mask;
	int i;

	cause = GT_READ(GT_INTRCAUSE_OFS);
	mask = GT_READ(GT_INTRMASK_OFS);
	cause &= mask;

	/*
	 * bit0 : logical or of all the interrupt bits.
	 * bit30: logical or of bits[29:26,20:1].
	 * bit31: logical or of bits[25:1].
	 */
	for (i = 1; i < 30; i++) {
		if (cause & (1U << i)) {
			do_IRQ(GT641XX_IRQ_BASE + i);
			return;
		}
	}

	atomic_inc(&irq_err_count);
}

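/*
 * Mask and clear all GT641xx interrupts, then register the irq_chip with a
 * level-type flow handler for each cascaded interrupt line.
 */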
void __init gt641xx_irq_init(void)
{
	int i;

	GT_WRITE(GT_INTRMASK_OFS, 0);
	GT_WRITE(GT_INTRCAUSE_OFS, 0);

	/*
	 * bit0 : logical or of all the interrupt bits.
	 * bit30: logical or of bits[29:26,20:1].
	 * bit31: logical or of bits[25:1].
	 */
	for (i = 1; i < 30; i++)
		irq_set_chip_and_handler(GT641XX_IRQ_BASE + i,
					 &gt641xx_irq_chip, handle_level_irq);
}