/*
 * Machine vector for IA-64.
 *
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) Srinivasa Thirumalachar <sprasad@engr.sgi.com>
 * Copyright (C) Vijay Chander <vijay@engr.sgi.com>
 * Copyright (C) 1999-2001, 2003-2004 Hewlett-Packard Co.
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */
#ifndef _ASM_IA64_MACHVEC_H
#define _ASM_IA64_MACHVEC_H

#include <linux/types.h>
#include <linux/swiotlb.h>

/* forward declarations: */
struct device;
struct pt_regs;
struct scatterlist;
struct page;
struct mm_struct;
struct pci_bus;
struct task_struct;
struct pci_dev;
struct msi_desc;
struct dma_attrs;

typedef void ia64_mv_setup_t (char **);
typedef void ia64_mv_cpu_init_t (void);
typedef void ia64_mv_irq_init_t (void);
typedef void ia64_mv_send_ipi_t (int, int, int, int);
typedef void ia64_mv_timer_interrupt_t (int, void *);
typedef void ia64_mv_global_tlb_purge_t (struct mm_struct *, unsigned long, unsigned long, unsigned long);
typedef void ia64_mv_tlb_migrate_finish_t (struct mm_struct *);
typedef u8 ia64_mv_irq_to_vector (int);
typedef unsigned int ia64_mv_local_vector_to_irq (u8);
typedef char *ia64_mv_pci_get_legacy_mem_t (struct pci_bus *);
typedef int ia64_mv_pci_legacy_read_t (struct pci_bus *, u16 port, u32 *val,
				       u8 size);
typedef int ia64_mv_pci_legacy_write_t (struct pci_bus *, u16 port, u32 val,
					u8 size);
typedef void ia64_mv_migrate_t(struct task_struct * task);
typedef void ia64_mv_pci_fixup_bus_t (struct pci_bus *);
typedef void ia64_mv_kernel_launch_event_t(void);

/* DMA-mapping interface: */
typedef void ia64_mv_dma_init (void);
typedef void *ia64_mv_dma_alloc_coherent (struct device *, size_t, dma_addr_t *, gfp_t);
typedef void ia64_mv_dma_free_coherent (struct device *, size_t, void *, dma_addr_t);
typedef dma_addr_t ia64_mv_dma_map_single (struct device *, void *, size_t, int);
typedef void ia64_mv_dma_unmap_single (struct device *, dma_addr_t, size_t, int);
typedef int ia64_mv_dma_map_sg (struct device *, struct scatterlist *, int, int);
typedef void ia64_mv_dma_unmap_sg (struct device *, struct scatterlist *, int, int);
typedef void ia64_mv_dma_sync_single_for_cpu (struct device *, dma_addr_t, size_t, int);
typedef void ia64_mv_dma_sync_sg_for_cpu (struct device *, struct scatterlist *, int, int);
typedef void ia64_mv_dma_sync_single_for_device (struct device *, dma_addr_t, size_t, int);
typedef void ia64_mv_dma_sync_sg_for_device (struct device *, struct scatterlist *, int, int);
typedef int ia64_mv_dma_mapping_error(struct device *, dma_addr_t dma_addr);
typedef int ia64_mv_dma_supported (struct device *, u64);

typedef dma_addr_t ia64_mv_dma_map_single_attrs (struct device *, void *, size_t, int, struct dma_attrs *);
typedef void ia64_mv_dma_unmap_single_attrs (struct device *, dma_addr_t, size_t, int, struct dma_attrs *);
typedef int ia64_mv_dma_map_sg_attrs (struct device *, struct scatterlist *, int, int, struct dma_attrs *);
typedef void ia64_mv_dma_unmap_sg_attrs (struct device *, struct scatterlist *, int, int, struct dma_attrs *);
typedef u64 ia64_mv_dma_get_required_mask (struct device *);

/*
 * WARNING: The legacy I/O space is _architected_.  Platforms are
 * expected to follow this architected model (see Section 10.7 in the
 * IA-64 Architecture Software Developer's Manual).  Unfortunately,
 * some broken machines do not follow that model, which is why we have
 * to make the inX/outX operations part of the machine vector.
 * Platform designers should follow the architected model whenever
 * possible.
 */
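/*
 * Editor's illustration (a sketch, not part of the original header): on a
 * CONFIG_IA64_GENERIC kernel the platform_inb macro defined further down
 * expands to ia64_mv.inb, so a legacy port access such as
 *
 *	unsigned int v = platform_inb(0x60);	roughly ia64_mv.inb(0x60)
 *
 * dispatches through the machine vector at run time.  A platform-specific
 * build instead resolves platform_inb at compile time (by default to
 * __ia64_inb, see the fallback #defines at the end of this file), with no
 * indirection.
 */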
typedef unsigned int ia64_mv_inb_t (unsigned long);
typedef unsigned int ia64_mv_inw_t (unsigned long);
typedef unsigned int ia64_mv_inl_t (unsigned long);
typedef void ia64_mv_outb_t (unsigned char, unsigned long);
typedef void ia64_mv_outw_t (unsigned short, unsigned long);
typedef void ia64_mv_outl_t (unsigned int, unsigned long);
typedef void ia64_mv_mmiowb_t (void);
typedef unsigned char ia64_mv_readb_t (const volatile void __iomem *);
typedef unsigned short ia64_mv_readw_t (const volatile void __iomem *);
typedef unsigned int ia64_mv_readl_t (const volatile void __iomem *);
typedef unsigned long ia64_mv_readq_t (const volatile void __iomem *);
typedef unsigned char ia64_mv_readb_relaxed_t (const volatile void __iomem *);
typedef unsigned short ia64_mv_readw_relaxed_t (const volatile void __iomem *);
typedef unsigned int ia64_mv_readl_relaxed_t (const volatile void __iomem *);
typedef unsigned long ia64_mv_readq_relaxed_t (const volatile void __iomem *);

typedef int ia64_mv_setup_msi_irq_t (struct pci_dev *pdev, struct msi_desc *);
typedef void ia64_mv_teardown_msi_irq_t (unsigned int irq);

static inline void
machvec_noop (void)
{
}

static inline void
machvec_noop_mm (struct mm_struct *mm)
{
}

static inline void
machvec_noop_task (struct task_struct *task)
{
}

static inline void
machvec_noop_bus (struct pci_bus *bus)
{
}

extern void machvec_setup (char **);
extern void machvec_timer_interrupt (int, void *);
extern void machvec_dma_sync_single (struct device *, dma_addr_t, size_t, int);
extern void machvec_dma_sync_sg (struct device *, struct scatterlist *, int, int);
extern void machvec_tlb_migrate_finish (struct mm_struct *);

# if defined (CONFIG_IA64_HP_SIM)
#  include <asm/machvec_hpsim.h>
# elif defined (CONFIG_IA64_DIG)
#  include <asm/machvec_dig.h>
# elif defined(CONFIG_IA64_DIG_VTD)
#  include <asm/machvec_dig_vtd.h>
# elif defined (CONFIG_IA64_HP_ZX1)
#  include <asm/machvec_hpzx1.h>
# elif defined (CONFIG_IA64_HP_ZX1_SWIOTLB)
#  include <asm/machvec_hpzx1_swiotlb.h>
# elif defined (CONFIG_IA64_SGI_SN2)
#  include <asm/machvec_sn2.h>
# elif defined (CONFIG_IA64_SGI_UV)
#  include <asm/machvec_uv.h>
# elif defined (CONFIG_IA64_XEN_GUEST)
#  include <asm/machvec_xen.h>
# elif defined (CONFIG_IA64_GENERIC)

# ifdef MACHVEC_PLATFORM_HEADER
#  include MACHVEC_PLATFORM_HEADER
# else
#  define platform_name		ia64_mv.name
#  define platform_setup	ia64_mv.setup
#  define platform_cpu_init	ia64_mv.cpu_init
#  define platform_irq_init	ia64_mv.irq_init
#  define platform_send_ipi	ia64_mv.send_ipi
#  define platform_timer_interrupt	ia64_mv.timer_interrupt
#  define platform_global_tlb_purge	ia64_mv.global_tlb_purge
#  define platform_tlb_migrate_finish	ia64_mv.tlb_migrate_finish
#  define platform_dma_init		ia64_mv.dma_init
#  define platform_dma_alloc_coherent	ia64_mv.dma_alloc_coherent
#  define platform_dma_free_coherent	ia64_mv.dma_free_coherent
#  define platform_dma_map_single_attrs	ia64_mv.dma_map_single_attrs
#  define platform_dma_unmap_single_attrs	ia64_mv.dma_unmap_single_attrs
#  define platform_dma_map_sg_attrs	ia64_mv.dma_map_sg_attrs
#  define platform_dma_unmap_sg_attrs	ia64_mv.dma_unmap_sg_attrs
#  define platform_dma_sync_single_for_cpu	ia64_mv.dma_sync_single_for_cpu
#  define platform_dma_sync_sg_for_cpu	ia64_mv.dma_sync_sg_for_cpu
#  define platform_dma_sync_single_for_device	ia64_mv.dma_sync_single_for_device
#  define platform_dma_sync_sg_for_device	ia64_mv.dma_sync_sg_for_device
#  define platform_dma_mapping_error	ia64_mv.dma_mapping_error
#  define platform_dma_supported	ia64_mv.dma_supported
#  define platform_dma_get_required_mask	ia64_mv.dma_get_required_mask
#  define platform_irq_to_vector	ia64_mv.irq_to_vector
#  define platform_local_vector_to_irq	ia64_mv.local_vector_to_irq
#  define platform_pci_get_legacy_mem	ia64_mv.pci_get_legacy_mem
#  define platform_pci_legacy_read	ia64_mv.pci_legacy_read
#  define platform_pci_legacy_write	ia64_mv.pci_legacy_write
#  define platform_inb		ia64_mv.inb
#  define platform_inw		ia64_mv.inw
#  define platform_inl		ia64_mv.inl
#  define platform_outb		ia64_mv.outb
#  define platform_outw		ia64_mv.outw
#  define platform_outl		ia64_mv.outl
#  define platform_mmiowb	ia64_mv.mmiowb
#  define platform_readb	ia64_mv.readb
#  define platform_readw	ia64_mv.readw
#  define platform_readl	ia64_mv.readl
#  define platform_readq	ia64_mv.readq
#  define platform_readb_relaxed	ia64_mv.readb_relaxed
#  define platform_readw_relaxed	ia64_mv.readw_relaxed
#  define platform_readl_relaxed	ia64_mv.readl_relaxed
#  define platform_readq_relaxed	ia64_mv.readq_relaxed
#  define platform_migrate		ia64_mv.migrate
#  define platform_setup_msi_irq	ia64_mv.setup_msi_irq
#  define platform_teardown_msi_irq	ia64_mv.teardown_msi_irq
#  define platform_pci_fixup_bus	ia64_mv.pci_fixup_bus
#  define platform_kernel_launch_event	ia64_mv.kernel_launch_event
# endif

/*
 * __attribute__((__aligned__(16))) is required to make the size of the
 * structure a multiple of 16 bytes.  This fills up the holes created
 * because of section 3.3.1 in the Software Conventions guide.
 */
struct ia64_machine_vector {
	const char *name;
	ia64_mv_setup_t *setup;
	ia64_mv_cpu_init_t *cpu_init;
	ia64_mv_irq_init_t *irq_init;
	ia64_mv_send_ipi_t *send_ipi;
	ia64_mv_timer_interrupt_t *timer_interrupt;
	ia64_mv_global_tlb_purge_t *global_tlb_purge;
	ia64_mv_tlb_migrate_finish_t *tlb_migrate_finish;
	ia64_mv_dma_init *dma_init;
	ia64_mv_dma_alloc_coherent *dma_alloc_coherent;
	ia64_mv_dma_free_coherent *dma_free_coherent;
	ia64_mv_dma_map_single_attrs *dma_map_single_attrs;
	ia64_mv_dma_unmap_single_attrs *dma_unmap_single_attrs;
	ia64_mv_dma_map_sg_attrs *dma_map_sg_attrs;
	ia64_mv_dma_unmap_sg_attrs *dma_unmap_sg_attrs;
	ia64_mv_dma_sync_single_for_cpu *dma_sync_single_for_cpu;
	ia64_mv_dma_sync_sg_for_cpu *dma_sync_sg_for_cpu;
	ia64_mv_dma_sync_single_for_device *dma_sync_single_for_device;
	ia64_mv_dma_sync_sg_for_device *dma_sync_sg_for_device;
	ia64_mv_dma_mapping_error *dma_mapping_error;
	ia64_mv_dma_supported *dma_supported;
	ia64_mv_dma_get_required_mask *dma_get_required_mask;
	ia64_mv_irq_to_vector *irq_to_vector;
	ia64_mv_local_vector_to_irq *local_vector_to_irq;
	ia64_mv_pci_get_legacy_mem_t *pci_get_legacy_mem;
	ia64_mv_pci_legacy_read_t *pci_legacy_read;
	ia64_mv_pci_legacy_write_t *pci_legacy_write;
	ia64_mv_inb_t *inb;
	ia64_mv_inw_t *inw;
	ia64_mv_inl_t *inl;
	ia64_mv_outb_t *outb;
	ia64_mv_outw_t *outw;
	ia64_mv_outl_t *outl;
	ia64_mv_mmiowb_t *mmiowb;
	ia64_mv_readb_t *readb;
	ia64_mv_readw_t *readw;
	ia64_mv_readl_t *readl;
	ia64_mv_readq_t *readq;
	ia64_mv_readb_relaxed_t *readb_relaxed;
	ia64_mv_readw_relaxed_t *readw_relaxed;
	ia64_mv_readl_relaxed_t *readl_relaxed;
	ia64_mv_readq_relaxed_t *readq_relaxed;
	ia64_mv_migrate_t *migrate;
	ia64_mv_setup_msi_irq_t *setup_msi_irq;
	ia64_mv_teardown_msi_irq_t *teardown_msi_irq;
	ia64_mv_pci_fixup_bus_t *pci_fixup_bus;
	ia64_mv_kernel_launch_event_t *kernel_launch_event;
} __attribute__((__aligned__(16)));	/* for the aligned(16) attribute, see the comment above */

#define MACHVEC_INIT(name)			\
{						\
	#name,					\
	platform_setup,				\
	platform_cpu_init,			\
	platform_irq_init,			\
	platform_send_ipi,			\
	platform_timer_interrupt,		\
	platform_global_tlb_purge,		\
	platform_tlb_migrate_finish,		\
	platform_dma_init,			\
	platform_dma_alloc_coherent,		\
	platform_dma_free_coherent,		\
	platform_dma_map_single_attrs,		\
	platform_dma_unmap_single_attrs,	\
	platform_dma_map_sg_attrs,		\
	platform_dma_unmap_sg_attrs,		\
	platform_dma_sync_single_for_cpu,	\
	platform_dma_sync_sg_for_cpu,		\
	platform_dma_sync_single_for_device,	\
	platform_dma_sync_sg_for_device,	\
	platform_dma_mapping_error,		\
	platform_dma_supported,			\
	platform_dma_get_required_mask,		\
	platform_irq_to_vector,			\
	platform_local_vector_to_irq,		\
	platform_pci_get_legacy_mem,		\
	platform_pci_legacy_read,		\
	platform_pci_legacy_write,		\
	platform_inb,				\
	platform_inw,				\
	platform_inl,				\
	platform_outb,				\
	platform_outw,				\
	platform_outl,				\
	platform_mmiowb,			\
	platform_readb,				\
	platform_readw,				\
	platform_readl,				\
	platform_readq,				\
	platform_readb_relaxed,			\
	platform_readw_relaxed,			\
	platform_readl_relaxed,			\
	platform_readq_relaxed,			\
	platform_migrate,			\
	platform_setup_msi_irq,			\
	platform_teardown_msi_irq,		\
	platform_pci_fixup_bus,			\
	platform_kernel_launch_event		\
}
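
/*
 * Editor's usage sketch (illustrative only; the kernel's actual helper
 * macros live in the per-platform machvec files and machvec_init.h, so
 * the names below are assumptions, not quotes): a platform translation
 * unit includes this header with every platform_* token already #defined
 * (or left to the defaults below) and then instantiates its vector
 * roughly as
 *
 *	struct ia64_machine_vector machvec_myplat = MACHVEC_INIT(myplat);
 *
 * The designated initializer order must match the member order of
 * struct ia64_machine_vector above.
 */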

extern struct ia64_machine_vector ia64_mv;
extern void machvec_init (const char *name);
extern void machvec_init_from_cmdline(const char *cmdline);

# else
#  error Unknown configuration.  Update arch/ia64/include/asm/machvec.h.
# endif /* CONFIG_IA64_GENERIC */

/*
 * Define default versions so we can extend machvec for new platforms
 * without having to update the machvec files for all existing platforms.
 */
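/*
 * Editor's illustration (a hypothetical platform header, not an actual
 * kernel file): a machvec_<plat>.h only #defines the entries it wants to
 * override and inherits everything else from the defaults below, e.g.
 *
 *	extern ia64_mv_send_ipi_t myplat_send_ipi;
 *	#define platform_name		"myplat"
 *	#define platform_send_ipi	myplat_send_ipi
 */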
#ifndef platform_setup
# define platform_setup			machvec_setup
#endif
#ifndef platform_cpu_init
# define platform_cpu_init		machvec_noop
#endif
#ifndef platform_irq_init
# define platform_irq_init		machvec_noop
#endif

#ifndef platform_send_ipi
# define platform_send_ipi		ia64_send_ipi	/* default to architected version */
#endif
#ifndef platform_timer_interrupt
# define platform_timer_interrupt	machvec_timer_interrupt
#endif
#ifndef platform_global_tlb_purge
# define platform_global_tlb_purge	ia64_global_tlb_purge	/* default to architected version */
#endif
#ifndef platform_tlb_migrate_finish
# define platform_tlb_migrate_finish	machvec_noop_mm
#endif
#ifndef platform_kernel_launch_event
# define platform_kernel_launch_event	machvec_noop
#endif
#ifndef platform_dma_init
# define platform_dma_init		swiotlb_init
#endif
#ifndef platform_dma_alloc_coherent
# define platform_dma_alloc_coherent	swiotlb_alloc_coherent
#endif
#ifndef platform_dma_free_coherent
# define platform_dma_free_coherent	swiotlb_free_coherent
#endif
#ifndef platform_dma_map_single_attrs
# define platform_dma_map_single_attrs	swiotlb_map_single_attrs
#endif
#ifndef platform_dma_unmap_single_attrs
# define platform_dma_unmap_single_attrs	swiotlb_unmap_single_attrs
#endif
#ifndef platform_dma_map_sg_attrs
# define platform_dma_map_sg_attrs	swiotlb_map_sg_attrs
#endif
#ifndef platform_dma_unmap_sg_attrs
# define platform_dma_unmap_sg_attrs	swiotlb_unmap_sg_attrs
#endif
#ifndef platform_dma_sync_single_for_cpu
# define platform_dma_sync_single_for_cpu	swiotlb_sync_single_for_cpu
#endif
#ifndef platform_dma_sync_sg_for_cpu
# define platform_dma_sync_sg_for_cpu		swiotlb_sync_sg_for_cpu
#endif
#ifndef platform_dma_sync_single_for_device
# define platform_dma_sync_single_for_device	swiotlb_sync_single_for_device
#endif
#ifndef platform_dma_sync_sg_for_device
# define platform_dma_sync_sg_for_device	swiotlb_sync_sg_for_device
#endif
#ifndef platform_dma_mapping_error
# define platform_dma_mapping_error		swiotlb_dma_mapping_error
#endif
#ifndef platform_dma_supported
# define platform_dma_supported		swiotlb_dma_supported
#endif
#ifndef platform_dma_get_required_mask
# define platform_dma_get_required_mask	ia64_dma_get_required_mask
#endif
#ifndef platform_irq_to_vector
# define platform_irq_to_vector		__ia64_irq_to_vector
#endif
#ifndef platform_local_vector_to_irq
# define platform_local_vector_to_irq	__ia64_local_vector_to_irq
#endif
#ifndef platform_pci_get_legacy_mem
# define platform_pci_get_legacy_mem	ia64_pci_get_legacy_mem
#endif
#ifndef platform_pci_legacy_read
# define platform_pci_legacy_read	ia64_pci_legacy_read
extern int ia64_pci_legacy_read(struct pci_bus *bus, u16 port, u32 *val, u8 size);
#endif
#ifndef platform_pci_legacy_write
# define platform_pci_legacy_write	ia64_pci_legacy_write
extern int ia64_pci_legacy_write(struct pci_bus *bus, u16 port, u32 val, u8 size);
#endif
#ifndef platform_inb
# define platform_inb		__ia64_inb
#endif
#ifndef platform_inw
# define platform_inw		__ia64_inw
#endif
#ifndef platform_inl
# define platform_inl		__ia64_inl
#endif
#ifndef platform_outb
# define platform_outb		__ia64_outb
#endif
#ifndef platform_outw
# define platform_outw		__ia64_outw
#endif
#ifndef platform_outl
# define platform_outl		__ia64_outl
#endif
#ifndef platform_mmiowb
# define platform_mmiowb	__ia64_mmiowb
#endif
#ifndef platform_readb
# define platform_readb		__ia64_readb
#endif
#ifndef platform_readw
# define platform_readw		__ia64_readw
#endif
#ifndef platform_readl
# define platform_readl		__ia64_readl
#endif
#ifndef platform_readq
# define platform_readq		__ia64_readq
#endif
#ifndef platform_readb_relaxed
# define platform_readb_relaxed	__ia64_readb_relaxed
#endif
#ifndef platform_readw_relaxed
# define platform_readw_relaxed	__ia64_readw_relaxed
#endif
#ifndef platform_readl_relaxed
# define platform_readl_relaxed	__ia64_readl_relaxed
#endif
#ifndef platform_readq_relaxed
# define platform_readq_relaxed	__ia64_readq_relaxed
#endif
#ifndef platform_migrate
# define platform_migrate		machvec_noop_task
#endif
#ifndef platform_setup_msi_irq
# define platform_setup_msi_irq		((ia64_mv_setup_msi_irq_t*)NULL)
#endif
#ifndef platform_teardown_msi_irq
# define platform_teardown_msi_irq	((ia64_mv_teardown_msi_irq_t*)NULL)
#endif
#ifndef platform_pci_fixup_bus
# define platform_pci_fixup_bus		machvec_noop_bus
#endif

#endif /* _ASM_IA64_MACHVEC_H */