/*
 * Source: linux-2.6.22.y-op.git — include/asm-ia64/machvec.h
 * (blob 15b545a897a4b6a05843f12f4691c37acad46885; extracted alongside the
 * patch "stack overflow safe kdump: safe smp_send_nmi_allbutself()")
 */
/*
 * Machine vector for IA-64.
 *
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) Srinivasa Thirumalachar <sprasad@engr.sgi.com>
 * Copyright (C) Vijay Chander <vijay@engr.sgi.com>
 * Copyright (C) 1999-2001, 2003-2004 Hewlett-Packard Co.
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */
10 #ifndef _ASM_IA64_MACHVEC_H
11 #define _ASM_IA64_MACHVEC_H
13 #include <linux/types.h>
/* forward declarations: */
struct device;
struct pt_regs;
struct scatterlist;
struct page;
struct mm_struct;
struct pci_bus;
struct task_struct;
24 typedef void ia64_mv_setup_t (char **);
25 typedef void ia64_mv_cpu_init_t (void);
26 typedef void ia64_mv_irq_init_t (void);
27 typedef void ia64_mv_send_ipi_t (int, int, int, int);
28 typedef void ia64_mv_timer_interrupt_t (int, void *, struct pt_regs *);
29 typedef void ia64_mv_global_tlb_purge_t (struct mm_struct *, unsigned long, unsigned long, unsigned long);
30 typedef void ia64_mv_tlb_migrate_finish_t (struct mm_struct *);
31 typedef unsigned int ia64_mv_local_vector_to_irq (u8);
32 typedef char *ia64_mv_pci_get_legacy_mem_t (struct pci_bus *);
33 typedef int ia64_mv_pci_legacy_read_t (struct pci_bus *, u16 port, u32 *val,
34 u8 size);
35 typedef int ia64_mv_pci_legacy_write_t (struct pci_bus *, u16 port, u32 val,
36 u8 size);
37 typedef void ia64_mv_migrate_t(struct task_struct * task);
39 /* DMA-mapping interface: */
40 typedef void ia64_mv_dma_init (void);
41 typedef void *ia64_mv_dma_alloc_coherent (struct device *, size_t, dma_addr_t *, gfp_t);
42 typedef void ia64_mv_dma_free_coherent (struct device *, size_t, void *, dma_addr_t);
43 typedef dma_addr_t ia64_mv_dma_map_single (struct device *, void *, size_t, int);
44 typedef void ia64_mv_dma_unmap_single (struct device *, dma_addr_t, size_t, int);
45 typedef int ia64_mv_dma_map_sg (struct device *, struct scatterlist *, int, int);
46 typedef void ia64_mv_dma_unmap_sg (struct device *, struct scatterlist *, int, int);
47 typedef void ia64_mv_dma_sync_single_for_cpu (struct device *, dma_addr_t, size_t, int);
48 typedef void ia64_mv_dma_sync_sg_for_cpu (struct device *, struct scatterlist *, int, int);
49 typedef void ia64_mv_dma_sync_single_for_device (struct device *, dma_addr_t, size_t, int);
50 typedef void ia64_mv_dma_sync_sg_for_device (struct device *, struct scatterlist *, int, int);
51 typedef int ia64_mv_dma_mapping_error (dma_addr_t dma_addr);
52 typedef int ia64_mv_dma_supported (struct device *, u64);
/*
 * WARNING: The legacy I/O space is _architected_.  Platforms are
 * expected to follow this architected model (see Section 10.7 in the
 * IA-64 Architecture Software Developer's Manual).  Unfortunately,
 * some broken machines do not follow that model, which is why we have
 * to make the inX/outX operations part of the machine vector.
 * Platform designers should follow the architected model whenever
 * possible.
 */
63 typedef unsigned int ia64_mv_inb_t (unsigned long);
64 typedef unsigned int ia64_mv_inw_t (unsigned long);
65 typedef unsigned int ia64_mv_inl_t (unsigned long);
66 typedef void ia64_mv_outb_t (unsigned char, unsigned long);
67 typedef void ia64_mv_outw_t (unsigned short, unsigned long);
68 typedef void ia64_mv_outl_t (unsigned int, unsigned long);
69 typedef void ia64_mv_mmiowb_t (void);
70 typedef unsigned char ia64_mv_readb_t (const volatile void __iomem *);
71 typedef unsigned short ia64_mv_readw_t (const volatile void __iomem *);
72 typedef unsigned int ia64_mv_readl_t (const volatile void __iomem *);
73 typedef unsigned long ia64_mv_readq_t (const volatile void __iomem *);
74 typedef unsigned char ia64_mv_readb_relaxed_t (const volatile void __iomem *);
75 typedef unsigned short ia64_mv_readw_relaxed_t (const volatile void __iomem *);
76 typedef unsigned int ia64_mv_readl_relaxed_t (const volatile void __iomem *);
77 typedef unsigned long ia64_mv_readq_relaxed_t (const volatile void __iomem *);
78 typedef int ia64_mv_msi_init_t (void);
/* Do-nothing defaults for optional machine-vector hooks (empty bodies
 * reconstructed; the scrape dropped the original brace lines). */
static inline void
machvec_noop (void)
{
}

static inline void
machvec_noop_mm (struct mm_struct *mm)
{
}

static inline void
machvec_noop_task (struct task_struct *task)
{
}
95 extern void machvec_setup (char **);
96 extern void machvec_timer_interrupt (int, void *, struct pt_regs *);
97 extern void machvec_dma_sync_single (struct device *, dma_addr_t, size_t, int);
98 extern void machvec_dma_sync_sg (struct device *, struct scatterlist *, int, int);
99 extern void machvec_tlb_migrate_finish (struct mm_struct *);
101 # if defined (CONFIG_IA64_HP_SIM)
102 # include <asm/machvec_hpsim.h>
103 # elif defined (CONFIG_IA64_DIG)
104 # include <asm/machvec_dig.h>
105 # elif defined (CONFIG_IA64_HP_ZX1)
106 # include <asm/machvec_hpzx1.h>
107 # elif defined (CONFIG_IA64_HP_ZX1_SWIOTLB)
108 # include <asm/machvec_hpzx1_swiotlb.h>
109 # elif defined (CONFIG_IA64_SGI_SN2)
110 # include <asm/machvec_sn2.h>
111 # elif defined (CONFIG_IA64_GENERIC)
113 # ifdef MACHVEC_PLATFORM_HEADER
114 # include MACHVEC_PLATFORM_HEADER
115 # else
116 # define platform_name ia64_mv.name
117 # define platform_setup ia64_mv.setup
118 # define platform_cpu_init ia64_mv.cpu_init
119 # define platform_irq_init ia64_mv.irq_init
120 # define platform_send_ipi ia64_mv.send_ipi
121 # define platform_timer_interrupt ia64_mv.timer_interrupt
122 # define platform_global_tlb_purge ia64_mv.global_tlb_purge
123 # define platform_tlb_migrate_finish ia64_mv.tlb_migrate_finish
124 # define platform_dma_init ia64_mv.dma_init
125 # define platform_dma_alloc_coherent ia64_mv.dma_alloc_coherent
126 # define platform_dma_free_coherent ia64_mv.dma_free_coherent
127 # define platform_dma_map_single ia64_mv.dma_map_single
128 # define platform_dma_unmap_single ia64_mv.dma_unmap_single
129 # define platform_dma_map_sg ia64_mv.dma_map_sg
130 # define platform_dma_unmap_sg ia64_mv.dma_unmap_sg
131 # define platform_dma_sync_single_for_cpu ia64_mv.dma_sync_single_for_cpu
132 # define platform_dma_sync_sg_for_cpu ia64_mv.dma_sync_sg_for_cpu
133 # define platform_dma_sync_single_for_device ia64_mv.dma_sync_single_for_device
134 # define platform_dma_sync_sg_for_device ia64_mv.dma_sync_sg_for_device
135 # define platform_dma_mapping_error ia64_mv.dma_mapping_error
136 # define platform_dma_supported ia64_mv.dma_supported
137 # define platform_local_vector_to_irq ia64_mv.local_vector_to_irq
138 # define platform_pci_get_legacy_mem ia64_mv.pci_get_legacy_mem
139 # define platform_pci_legacy_read ia64_mv.pci_legacy_read
140 # define platform_pci_legacy_write ia64_mv.pci_legacy_write
141 # define platform_inb ia64_mv.inb
142 # define platform_inw ia64_mv.inw
143 # define platform_inl ia64_mv.inl
144 # define platform_outb ia64_mv.outb
145 # define platform_outw ia64_mv.outw
146 # define platform_outl ia64_mv.outl
147 # define platform_mmiowb ia64_mv.mmiowb
148 # define platform_readb ia64_mv.readb
149 # define platform_readw ia64_mv.readw
150 # define platform_readl ia64_mv.readl
151 # define platform_readq ia64_mv.readq
152 # define platform_readb_relaxed ia64_mv.readb_relaxed
153 # define platform_readw_relaxed ia64_mv.readw_relaxed
154 # define platform_readl_relaxed ia64_mv.readl_relaxed
155 # define platform_readq_relaxed ia64_mv.readq_relaxed
156 # define platform_migrate ia64_mv.migrate
157 # define platform_msi_init ia64_mv.msi_init
158 # endif
/* __attribute__((__aligned__(16))) is required to make size of the
 * structure multiple of 16 bytes.
 * This will fillup the holes created because of section 3.3.1 in
 * Software Conventions guide.
 */
165 struct ia64_machine_vector {
166 const char *name;
167 ia64_mv_setup_t *setup;
168 ia64_mv_cpu_init_t *cpu_init;
169 ia64_mv_irq_init_t *irq_init;
170 ia64_mv_send_ipi_t *send_ipi;
171 ia64_mv_timer_interrupt_t *timer_interrupt;
172 ia64_mv_global_tlb_purge_t *global_tlb_purge;
173 ia64_mv_tlb_migrate_finish_t *tlb_migrate_finish;
174 ia64_mv_dma_init *dma_init;
175 ia64_mv_dma_alloc_coherent *dma_alloc_coherent;
176 ia64_mv_dma_free_coherent *dma_free_coherent;
177 ia64_mv_dma_map_single *dma_map_single;
178 ia64_mv_dma_unmap_single *dma_unmap_single;
179 ia64_mv_dma_map_sg *dma_map_sg;
180 ia64_mv_dma_unmap_sg *dma_unmap_sg;
181 ia64_mv_dma_sync_single_for_cpu *dma_sync_single_for_cpu;
182 ia64_mv_dma_sync_sg_for_cpu *dma_sync_sg_for_cpu;
183 ia64_mv_dma_sync_single_for_device *dma_sync_single_for_device;
184 ia64_mv_dma_sync_sg_for_device *dma_sync_sg_for_device;
185 ia64_mv_dma_mapping_error *dma_mapping_error;
186 ia64_mv_dma_supported *dma_supported;
187 ia64_mv_local_vector_to_irq *local_vector_to_irq;
188 ia64_mv_pci_get_legacy_mem_t *pci_get_legacy_mem;
189 ia64_mv_pci_legacy_read_t *pci_legacy_read;
190 ia64_mv_pci_legacy_write_t *pci_legacy_write;
191 ia64_mv_inb_t *inb;
192 ia64_mv_inw_t *inw;
193 ia64_mv_inl_t *inl;
194 ia64_mv_outb_t *outb;
195 ia64_mv_outw_t *outw;
196 ia64_mv_outl_t *outl;
197 ia64_mv_mmiowb_t *mmiowb;
198 ia64_mv_readb_t *readb;
199 ia64_mv_readw_t *readw;
200 ia64_mv_readl_t *readl;
201 ia64_mv_readq_t *readq;
202 ia64_mv_readb_relaxed_t *readb_relaxed;
203 ia64_mv_readw_relaxed_t *readw_relaxed;
204 ia64_mv_readl_relaxed_t *readl_relaxed;
205 ia64_mv_readq_relaxed_t *readq_relaxed;
206 ia64_mv_migrate_t *migrate;
207 ia64_mv_msi_init_t *msi_init;
208 } __attribute__((__aligned__(16))); /* align attrib? see above comment */
/* Static initializer for struct ia64_machine_vector; the field order must
 * match the struct declaration above.  (The brace lines were lost in the
 * scrape and are reconstructed here.) */
#define MACHVEC_INIT(name)			\
{						\
	#name,					\
	platform_setup,				\
	platform_cpu_init,			\
	platform_irq_init,			\
	platform_send_ipi,			\
	platform_timer_interrupt,		\
	platform_global_tlb_purge,		\
	platform_tlb_migrate_finish,		\
	platform_dma_init,			\
	platform_dma_alloc_coherent,		\
	platform_dma_free_coherent,		\
	platform_dma_map_single,		\
	platform_dma_unmap_single,		\
	platform_dma_map_sg,			\
	platform_dma_unmap_sg,			\
	platform_dma_sync_single_for_cpu,	\
	platform_dma_sync_sg_for_cpu,		\
	platform_dma_sync_single_for_device,	\
	platform_dma_sync_sg_for_device,	\
	platform_dma_mapping_error,		\
	platform_dma_supported,			\
	platform_local_vector_to_irq,		\
	platform_pci_get_legacy_mem,		\
	platform_pci_legacy_read,		\
	platform_pci_legacy_write,		\
	platform_inb,				\
	platform_inw,				\
	platform_inl,				\
	platform_outb,				\
	platform_outw,				\
	platform_outl,				\
	platform_mmiowb,			\
	platform_readb,				\
	platform_readw,				\
	platform_readl,				\
	platform_readq,				\
	platform_readb_relaxed,			\
	platform_readw_relaxed,			\
	platform_readl_relaxed,			\
	platform_readq_relaxed,			\
	platform_migrate,			\
	platform_msi_init,			\
}
256 extern struct ia64_machine_vector ia64_mv;
257 extern void machvec_init (const char *name);
259 # else
260 # error Unknown configuration. Update asm-ia64/machvec.h.
261 # endif /* CONFIG_IA64_GENERIC */
/*
 * Declare default routines which aren't declared anywhere else:
 */
266 extern ia64_mv_dma_init swiotlb_init;
267 extern ia64_mv_dma_alloc_coherent swiotlb_alloc_coherent;
268 extern ia64_mv_dma_free_coherent swiotlb_free_coherent;
269 extern ia64_mv_dma_map_single swiotlb_map_single;
270 extern ia64_mv_dma_unmap_single swiotlb_unmap_single;
271 extern ia64_mv_dma_map_sg swiotlb_map_sg;
272 extern ia64_mv_dma_unmap_sg swiotlb_unmap_sg;
273 extern ia64_mv_dma_sync_single_for_cpu swiotlb_sync_single_for_cpu;
274 extern ia64_mv_dma_sync_sg_for_cpu swiotlb_sync_sg_for_cpu;
275 extern ia64_mv_dma_sync_single_for_device swiotlb_sync_single_for_device;
276 extern ia64_mv_dma_sync_sg_for_device swiotlb_sync_sg_for_device;
277 extern ia64_mv_dma_mapping_error swiotlb_dma_mapping_error;
278 extern ia64_mv_dma_supported swiotlb_dma_supported;
/*
 * Define default versions so we can extend machvec for new platforms without having
 * to update the machvec files for all existing platforms.
 */
284 #ifndef platform_setup
285 # define platform_setup machvec_setup
286 #endif
287 #ifndef platform_cpu_init
288 # define platform_cpu_init machvec_noop
289 #endif
290 #ifndef platform_irq_init
291 # define platform_irq_init machvec_noop
292 #endif
294 #ifndef platform_send_ipi
295 # define platform_send_ipi ia64_send_ipi /* default to architected version */
296 #endif
297 #ifndef platform_timer_interrupt
298 # define platform_timer_interrupt machvec_timer_interrupt
299 #endif
300 #ifndef platform_global_tlb_purge
301 # define platform_global_tlb_purge ia64_global_tlb_purge /* default to architected version */
302 #endif
303 #ifndef platform_tlb_migrate_finish
304 # define platform_tlb_migrate_finish machvec_noop_mm
305 #endif
306 #ifndef platform_dma_init
307 # define platform_dma_init swiotlb_init
308 #endif
309 #ifndef platform_dma_alloc_coherent
310 # define platform_dma_alloc_coherent swiotlb_alloc_coherent
311 #endif
312 #ifndef platform_dma_free_coherent
313 # define platform_dma_free_coherent swiotlb_free_coherent
314 #endif
315 #ifndef platform_dma_map_single
316 # define platform_dma_map_single swiotlb_map_single
317 #endif
318 #ifndef platform_dma_unmap_single
319 # define platform_dma_unmap_single swiotlb_unmap_single
320 #endif
321 #ifndef platform_dma_map_sg
322 # define platform_dma_map_sg swiotlb_map_sg
323 #endif
324 #ifndef platform_dma_unmap_sg
325 # define platform_dma_unmap_sg swiotlb_unmap_sg
326 #endif
327 #ifndef platform_dma_sync_single_for_cpu
328 # define platform_dma_sync_single_for_cpu swiotlb_sync_single_for_cpu
329 #endif
330 #ifndef platform_dma_sync_sg_for_cpu
331 # define platform_dma_sync_sg_for_cpu swiotlb_sync_sg_for_cpu
332 #endif
333 #ifndef platform_dma_sync_single_for_device
334 # define platform_dma_sync_single_for_device swiotlb_sync_single_for_device
335 #endif
336 #ifndef platform_dma_sync_sg_for_device
337 # define platform_dma_sync_sg_for_device swiotlb_sync_sg_for_device
338 #endif
339 #ifndef platform_dma_mapping_error
340 # define platform_dma_mapping_error swiotlb_dma_mapping_error
341 #endif
342 #ifndef platform_dma_supported
343 # define platform_dma_supported swiotlb_dma_supported
344 #endif
345 #ifndef platform_local_vector_to_irq
346 # define platform_local_vector_to_irq __ia64_local_vector_to_irq
347 #endif
348 #ifndef platform_pci_get_legacy_mem
349 # define platform_pci_get_legacy_mem ia64_pci_get_legacy_mem
350 #endif
351 #ifndef platform_pci_legacy_read
352 # define platform_pci_legacy_read ia64_pci_legacy_read
353 extern int ia64_pci_legacy_read(struct pci_bus *bus, u16 port, u32 *val, u8 size);
354 #endif
355 #ifndef platform_pci_legacy_write
356 # define platform_pci_legacy_write ia64_pci_legacy_write
357 extern int ia64_pci_legacy_write(struct pci_bus *bus, u16 port, u32 val, u8 size);
358 #endif
359 #ifndef platform_inb
360 # define platform_inb __ia64_inb
361 #endif
362 #ifndef platform_inw
363 # define platform_inw __ia64_inw
364 #endif
365 #ifndef platform_inl
366 # define platform_inl __ia64_inl
367 #endif
368 #ifndef platform_outb
369 # define platform_outb __ia64_outb
370 #endif
371 #ifndef platform_outw
372 # define platform_outw __ia64_outw
373 #endif
374 #ifndef platform_outl
375 # define platform_outl __ia64_outl
376 #endif
377 #ifndef platform_mmiowb
378 # define platform_mmiowb __ia64_mmiowb
379 #endif
380 #ifndef platform_readb
381 # define platform_readb __ia64_readb
382 #endif
383 #ifndef platform_readw
384 # define platform_readw __ia64_readw
385 #endif
386 #ifndef platform_readl
387 # define platform_readl __ia64_readl
388 #endif
389 #ifndef platform_readq
390 # define platform_readq __ia64_readq
391 #endif
392 #ifndef platform_readb_relaxed
393 # define platform_readb_relaxed __ia64_readb_relaxed
394 #endif
395 #ifndef platform_readw_relaxed
396 # define platform_readw_relaxed __ia64_readw_relaxed
397 #endif
398 #ifndef platform_readl_relaxed
399 # define platform_readl_relaxed __ia64_readl_relaxed
400 #endif
401 #ifndef platform_readq_relaxed
402 # define platform_readq_relaxed __ia64_readq_relaxed
403 #endif
404 #ifndef platform_migrate
405 # define platform_migrate machvec_noop_task
406 #endif
407 #ifndef platform_msi_init
408 # define platform_msi_init ((ia64_mv_msi_init_t*)NULL)
409 #endif
411 #endif /* _ASM_IA64_MACHVEC_H */