/*
 * Support for virtual IRQ subgroups.
 *
 * Copyright (C) 2010 Paul Mundt
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 */
#define pr_fmt(fmt) "intc: " fmt

#include <linux/slab.h>
#include <linux/irq.h>
#include <linux/list.h>
#include <linux/radix-tree.h>
#include <linux/spinlock.h>
#include <linux/export.h>
#include "internals.h"

static struct intc_map_entry intc_irq_xlate[INTC_NR_IRQS];

struct intc_virq_list {
	unsigned int irq;
	struct intc_virq_list *next;
};

#define for_each_virq(entry, head) \
	for (entry = head; entry; entry = entry->next)

/*
 * Tags for the radix tree
 */
#define INTC_TAG_VIRQ_NEEDS_ALLOC	0
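
/*
 * Record the enum ID and controller descriptor for an IRQ so it can
 * later be translated back via intc_irq_xlate_get(). Updates are
 * serialized with intc_big_lock, as the xlate table is shared across
 * all registered controllers.
 */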
void intc_irq_xlate_set(unsigned int irq, intc_enum id, struct intc_desc_int *d)
{
	unsigned long flags;

	raw_spin_lock_irqsave(&intc_big_lock, flags);
	intc_irq_xlate[irq].enum_id = id;
	intc_irq_xlate[irq].desc = d;
	raw_spin_unlock_irqrestore(&intc_big_lock, flags);
}
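
/*
 * Fetch the xlate table entry for an IRQ. No range checking is done
 * here; callers are expected to pass a valid IRQ number.
 */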
struct intc_map_entry *intc_irq_xlate_get(unsigned int irq)
{
	return intc_irq_xlate + irq;
}
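
/*
 * Look up the IRQ bound to a given enum ID on the named controller.
 * Returns -1 if no mapping exists, or if the enum ID refers to a
 * subgroup VIRQ that has not yet had an IRQ allocated for it.
 */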
int intc_irq_lookup(const char *chipname, intc_enum enum_id)
{
	struct intc_map_entry *ptr;
	struct intc_desc_int *d;
	int irq = -1;

	list_for_each_entry(d, &intc_list, list) {
		int tagged;

		if (strcmp(d->chip.name, chipname) != 0)
			continue;

		/*
		 * Catch early lookups for subgroup VIRQs that have not
		 * yet been allocated an IRQ. This already includes a
		 * fast-path out if the tree is untagged, so there is no
		 * need to explicitly test the root tree.
		 */
		tagged = radix_tree_tag_get(&d->tree, enum_id,
					    INTC_TAG_VIRQ_NEEDS_ALLOC);
		if (unlikely(tagged))
			break;

		ptr = radix_tree_lookup(&d->tree, enum_id);
		if (ptr) {
			irq = ptr - intc_irq_xlate;
			break;
		}
	}

	return irq;
}
EXPORT_SYMBOL_GPL(intc_irq_lookup);
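
/*
 * Link a VIRQ on to the parent IRQ's demux list, skipping entries that
 * are already present. The list head is kept in the parent's handler
 * data; subsequent entries are chained through ->next.
 */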
static int add_virq_to_pirq(unsigned int irq, unsigned int virq)
{
	struct intc_virq_list *entry;
	struct intc_virq_list **last = NULL;

	/* scan for duplicates */
	for_each_virq(entry, irq_get_handler_data(irq)) {
		if (entry->irq == virq)
			return 0;
		last = &entry->next;
	}

	entry = kzalloc(sizeof(struct intc_virq_list), GFP_ATOMIC);
	if (!entry)
		return -ENOMEM;

	entry->irq = virq;

	if (last)
		*last = entry;
	else
		irq_set_handler_data(irq, entry);

	return 0;
}
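
/*
 * Chained handler for the parent IRQ: mask and ack the parent, walk
 * the VIRQ list reading back each entry's source bit through its
 * packed register handle, and hand any pending entries off to the
 * generic IRQ layer before unmasking the parent again.
 */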
static void intc_virq_handler(struct irq_desc *desc)
{
	unsigned int irq = irq_desc_get_irq(desc);
	struct irq_data *data = irq_desc_get_irq_data(desc);
	struct irq_chip *chip = irq_data_get_irq_chip(data);
	struct intc_virq_list *entry, *vlist = irq_data_get_irq_handler_data(data);
	struct intc_desc_int *d = get_intc_desc(irq);

	chip->irq_mask_ack(data);

	for_each_virq(entry, vlist) {
		unsigned long addr, handle;
		struct irq_desc *vdesc = irq_to_desc(entry->irq);

		if (vdesc) {
			handle = (unsigned long)irq_desc_get_handler_data(vdesc);
			addr = INTC_REG(d, _INTC_ADDR_E(handle), 0);
			if (intc_reg_fns[_INTC_FN(handle)](addr, handle, 0))
				generic_handle_irq_desc(vdesc);
		}
	}

	chip->irq_unmask(data);
}
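
/*
 * Build the packed register handle for a subgroup member: a 1-bit
 * enable-mode field in the subgroup register, with the bit position
 * counted down from the most significant bit.
 */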
static unsigned long __init intc_subgroup_data(struct intc_subgroup *subgroup,
					       struct intc_desc_int *d,
					       unsigned int index)
{
	unsigned int fn = REG_FN_TEST_BASE + (subgroup->reg_width >> 3) - 1;

	return _INTC_MK(fn, MODE_ENABLE_REG, intc_get_reg(d, subgroup->reg),
			0, 1, (subgroup->reg_width - 1) - index);
}
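
/*
 * Register each member of a subgroup in the controller's radix tree,
 * keyed by enum ID and tagged as still needing a VIRQ allocation. The
 * parent IRQ is derived from the subgroup's parent_id mapping, which
 * must already be in the tree.
 */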
static void __init intc_subgroup_init_one(struct intc_desc *desc,
					  struct intc_desc_int *d,
					  struct intc_subgroup *subgroup)
{
	struct intc_map_entry *mapped;
	unsigned int pirq;
	unsigned long flags;
	int i;

	mapped = radix_tree_lookup(&d->tree, subgroup->parent_id);
	if (!mapped) {
		WARN_ON(1);
		return;
	}

	pirq = mapped - intc_irq_xlate;

	raw_spin_lock_irqsave(&d->lock, flags);

	for (i = 0; i < ARRAY_SIZE(subgroup->enum_ids); i++) {
		struct intc_subgroup_entry *entry;
		int err;

		if (!subgroup->enum_ids[i])
			continue;

		entry = kmalloc(sizeof(*entry), GFP_NOWAIT);
		if (!entry)
			break;

		entry->pirq = pirq;
		entry->enum_id = subgroup->enum_ids[i];
		entry->handle = intc_subgroup_data(subgroup, d, i);

		err = radix_tree_insert(&d->tree, entry->enum_id, entry);
		if (unlikely(err < 0))
			break;

		radix_tree_tag_set(&d->tree, entry->enum_id,
				   INTC_TAG_VIRQ_NEEDS_ALLOC);
	}

	raw_spin_unlock_irqrestore(&d->lock, flags);
}
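
/*
 * Walk the subgroup array, if the descriptor provides one, and set up
 * each subgroup in turn.
 */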
void __init intc_subgroup_init(struct intc_desc *desc, struct intc_desc_int *d)
{
	int i;

	if (!desc->hw.subgroups)
		return;

	for (i = 0; i < desc->hw.nr_subgroups; i++)
		intc_subgroup_init_one(desc, d, desc->hw.subgroups + i);
}
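
/*
 * Allocate and wire up a VIRQ for every tree entry still tagged as
 * needing one: copy the parent's chip and chip data, stash the packed
 * register handle in the handler data, hook the parent's chained
 * handler, then replace the placeholder slot with the xlate table
 * entry and clear the tag. Runs under d->lock and restarts the gang
 * lookup if a slot is caught mid-update.
 */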
static void __init intc_subgroup_map(struct intc_desc_int *d)
{
	struct intc_subgroup_entry *entries[32];
	unsigned long flags;
	unsigned int nr_found;
	int i;

	raw_spin_lock_irqsave(&d->lock, flags);

restart:
	nr_found = radix_tree_gang_lookup_tag_slot(&d->tree,
			(void ***)entries, 0, ARRAY_SIZE(entries),
			INTC_TAG_VIRQ_NEEDS_ALLOC);

	for (i = 0; i < nr_found; i++) {
		struct intc_subgroup_entry *entry;
		int irq;

		entry = radix_tree_deref_slot((void **)entries[i]);
		if (unlikely(!entry))
			continue;
		if (radix_tree_deref_retry(entry))
			goto restart;

		irq = irq_alloc_desc(numa_node_id());
		if (unlikely(irq < 0)) {
			pr_err("no more free IRQs, bailing..\n");
			break;
		}

		activate_irq(irq);

		pr_info("Setting up a chained VIRQ from %d -> %d\n",
			irq, entry->pirq);

		intc_irq_xlate_set(irq, entry->enum_id, d);

		irq_set_chip_and_handler_name(irq, irq_get_chip(entry->pirq),
					      handle_simple_irq, "virq");
		irq_set_chip_data(irq, irq_get_chip_data(entry->pirq));

		irq_set_handler_data(irq, (void *)entry->handle);

		/*
		 * Set the virtual IRQ as non-threadable.
		 */
		irq_set_nothread(irq);

		/* Set handler data before installing the handler */
		add_virq_to_pirq(entry->pirq, irq);
		irq_set_chained_handler(entry->pirq, intc_virq_handler);

		radix_tree_tag_clear(&d->tree, entry->enum_id,
				     INTC_TAG_VIRQ_NEEDS_ALLOC);
		radix_tree_replace_slot(&d->tree, (void **)entries[i],
					&intc_irq_xlate[irq]);
	}

	raw_spin_unlock_irqrestore(&d->lock, flags);
}
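
/*
 * Once all controllers have been registered, allocate VIRQs for any
 * subgroup entries still waiting on one.
 */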
void __init intc_finalize(void)
{
	struct intc_desc_int *d;

	list_for_each_entry(d, &intc_list, list)
		if (radix_tree_tagged(&d->tree, INTC_TAG_VIRQ_NEEDS_ALLOC))
			intc_subgroup_map(d);
}