/*
 * Machine vector for IA-64.
 *
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) Srinivasa Thirumalachar <sprasad@engr.sgi.com>
 * Copyright (C) Vijay Chander <vijay@engr.sgi.com>
 * Copyright (C) 1999-2001, 2003-2004 Hewlett-Packard Co.
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */
10 #ifndef _ASM_IA64_MACHVEC_H
11 #define _ASM_IA64_MACHVEC_H
13 #include <linux/types.h>
15 /* forward declarations: */
26 typedef void ia64_mv_setup_t (char **);
27 typedef void ia64_mv_cpu_init_t (void);
28 typedef void ia64_mv_irq_init_t (void);
29 typedef void ia64_mv_send_ipi_t (int, int, int, int);
30 typedef void ia64_mv_timer_interrupt_t (int, void *);
31 typedef void ia64_mv_global_tlb_purge_t (struct mm_struct
*, unsigned long, unsigned long, unsigned long);
32 typedef void ia64_mv_tlb_migrate_finish_t (struct mm_struct
*);
33 typedef u8
ia64_mv_irq_to_vector (int);
34 typedef unsigned int ia64_mv_local_vector_to_irq (u8
);
35 typedef char *ia64_mv_pci_get_legacy_mem_t (struct pci_bus
*);
36 typedef int ia64_mv_pci_legacy_read_t (struct pci_bus
*, u16 port
, u32
*val
,
38 typedef int ia64_mv_pci_legacy_write_t (struct pci_bus
*, u16 port
, u32 val
,
40 typedef void ia64_mv_migrate_t(struct task_struct
* task
);
41 typedef void ia64_mv_pci_fixup_bus_t (struct pci_bus
*);
42 typedef void ia64_mv_kernel_launch_event_t(void);
44 /* DMA-mapping interface: */
45 typedef void ia64_mv_dma_init (void);
46 typedef void *ia64_mv_dma_alloc_coherent (struct device
*, size_t, dma_addr_t
*, gfp_t
);
47 typedef void ia64_mv_dma_free_coherent (struct device
*, size_t, void *, dma_addr_t
);
48 typedef dma_addr_t
ia64_mv_dma_map_single (struct device
*, void *, size_t, int);
49 typedef void ia64_mv_dma_unmap_single (struct device
*, dma_addr_t
, size_t, int);
50 typedef int ia64_mv_dma_map_sg (struct device
*, struct scatterlist
*, int, int);
51 typedef void ia64_mv_dma_unmap_sg (struct device
*, struct scatterlist
*, int, int);
52 typedef void ia64_mv_dma_sync_single_for_cpu (struct device
*, dma_addr_t
, size_t, int);
53 typedef void ia64_mv_dma_sync_sg_for_cpu (struct device
*, struct scatterlist
*, int, int);
54 typedef void ia64_mv_dma_sync_single_for_device (struct device
*, dma_addr_t
, size_t, int);
55 typedef void ia64_mv_dma_sync_sg_for_device (struct device
*, struct scatterlist
*, int, int);
56 typedef int ia64_mv_dma_mapping_error (dma_addr_t dma_addr
);
57 typedef int ia64_mv_dma_supported (struct device
*, u64
);
/*
 * WARNING: The legacy I/O space is _architected_.  Platforms are
 * expected to follow this architected model (see Section 10.7 in the
 * IA-64 Architecture Software Developer's Manual).  Unfortunately,
 * some broken machines do not follow that model, which is why we have
 * to make the inX/outX operations part of the machine vector.
 * Platform designers should follow the architected model whenever
 * possible.
 */
68 typedef unsigned int ia64_mv_inb_t (unsigned long);
69 typedef unsigned int ia64_mv_inw_t (unsigned long);
70 typedef unsigned int ia64_mv_inl_t (unsigned long);
71 typedef void ia64_mv_outb_t (unsigned char, unsigned long);
72 typedef void ia64_mv_outw_t (unsigned short, unsigned long);
73 typedef void ia64_mv_outl_t (unsigned int, unsigned long);
74 typedef void ia64_mv_mmiowb_t (void);
75 typedef unsigned char ia64_mv_readb_t (const volatile void __iomem
*);
76 typedef unsigned short ia64_mv_readw_t (const volatile void __iomem
*);
77 typedef unsigned int ia64_mv_readl_t (const volatile void __iomem
*);
78 typedef unsigned long ia64_mv_readq_t (const volatile void __iomem
*);
79 typedef unsigned char ia64_mv_readb_relaxed_t (const volatile void __iomem
*);
80 typedef unsigned short ia64_mv_readw_relaxed_t (const volatile void __iomem
*);
81 typedef unsigned int ia64_mv_readl_relaxed_t (const volatile void __iomem
*);
82 typedef unsigned long ia64_mv_readq_relaxed_t (const volatile void __iomem
*);
84 typedef int ia64_mv_setup_msi_irq_t (struct pci_dev
*pdev
, struct msi_desc
*);
85 typedef void ia64_mv_teardown_msi_irq_t (unsigned int irq
);
/*
 * Do-nothing stubs used as machine-vector defaults for hooks a
 * platform does not need (see the platform_* fallbacks below).
 */
static inline void
machvec_noop (void)
{
}

static inline void
machvec_noop_mm (struct mm_struct *mm)
{
}

static inline void
machvec_noop_task (struct task_struct *task)
{
}

static inline void
machvec_noop_bus (struct pci_bus *bus)
{
}
107 extern void machvec_setup (char **);
108 extern void machvec_timer_interrupt (int, void *);
109 extern void machvec_dma_sync_single (struct device
*, dma_addr_t
, size_t, int);
110 extern void machvec_dma_sync_sg (struct device
*, struct scatterlist
*, int, int);
111 extern void machvec_tlb_migrate_finish (struct mm_struct
*);
113 # if defined (CONFIG_IA64_HP_SIM)
114 # include <asm/machvec_hpsim.h>
115 # elif defined (CONFIG_IA64_DIG)
116 # include <asm/machvec_dig.h>
117 # elif defined (CONFIG_IA64_HP_ZX1)
118 # include <asm/machvec_hpzx1.h>
119 # elif defined (CONFIG_IA64_HP_ZX1_SWIOTLB)
120 # include <asm/machvec_hpzx1_swiotlb.h>
121 # elif defined (CONFIG_IA64_SGI_SN2)
122 # include <asm/machvec_sn2.h>
123 # elif defined (CONFIG_IA64_GENERIC)
/* With CONFIG_IA64_GENERIC every platform_* operation is routed through
 * the run-time selected ia64_mv vector, unless a platform header was
 * forced via MACHVEC_PLATFORM_HEADER. */
# ifdef MACHVEC_PLATFORM_HEADER
#  include MACHVEC_PLATFORM_HEADER
# else
#  define platform_name			ia64_mv.name
#  define platform_setup		ia64_mv.setup
#  define platform_cpu_init		ia64_mv.cpu_init
#  define platform_irq_init		ia64_mv.irq_init
#  define platform_send_ipi		ia64_mv.send_ipi
#  define platform_timer_interrupt	ia64_mv.timer_interrupt
#  define platform_global_tlb_purge	ia64_mv.global_tlb_purge
#  define platform_tlb_migrate_finish	ia64_mv.tlb_migrate_finish
#  define platform_dma_init		ia64_mv.dma_init
#  define platform_dma_alloc_coherent	ia64_mv.dma_alloc_coherent
#  define platform_dma_free_coherent	ia64_mv.dma_free_coherent
#  define platform_dma_map_single	ia64_mv.dma_map_single
#  define platform_dma_unmap_single	ia64_mv.dma_unmap_single
#  define platform_dma_map_sg		ia64_mv.dma_map_sg
#  define platform_dma_unmap_sg		ia64_mv.dma_unmap_sg
#  define platform_dma_sync_single_for_cpu	ia64_mv.dma_sync_single_for_cpu
#  define platform_dma_sync_sg_for_cpu		ia64_mv.dma_sync_sg_for_cpu
#  define platform_dma_sync_single_for_device	ia64_mv.dma_sync_single_for_device
#  define platform_dma_sync_sg_for_device	ia64_mv.dma_sync_sg_for_device
#  define platform_dma_mapping_error		ia64_mv.dma_mapping_error
#  define platform_dma_supported	ia64_mv.dma_supported
#  define platform_irq_to_vector	ia64_mv.irq_to_vector
#  define platform_local_vector_to_irq	ia64_mv.local_vector_to_irq
#  define platform_pci_get_legacy_mem	ia64_mv.pci_get_legacy_mem
#  define platform_pci_legacy_read	ia64_mv.pci_legacy_read
#  define platform_pci_legacy_write	ia64_mv.pci_legacy_write
#  define platform_inb			ia64_mv.inb
#  define platform_inw			ia64_mv.inw
#  define platform_inl			ia64_mv.inl
#  define platform_outb			ia64_mv.outb
#  define platform_outw			ia64_mv.outw
#  define platform_outl			ia64_mv.outl
#  define platform_mmiowb		ia64_mv.mmiowb
#  define platform_readb		ia64_mv.readb
#  define platform_readw		ia64_mv.readw
#  define platform_readl		ia64_mv.readl
#  define platform_readq		ia64_mv.readq
#  define platform_readb_relaxed	ia64_mv.readb_relaxed
#  define platform_readw_relaxed	ia64_mv.readw_relaxed
#  define platform_readl_relaxed	ia64_mv.readl_relaxed
#  define platform_readq_relaxed	ia64_mv.readq_relaxed
#  define platform_migrate		ia64_mv.migrate
#  define platform_setup_msi_irq	ia64_mv.setup_msi_irq
#  define platform_teardown_msi_irq	ia64_mv.teardown_msi_irq
#  define platform_pci_fixup_bus	ia64_mv.pci_fixup_bus
#  define platform_kernel_launch_event	ia64_mv.kernel_launch_event
# endif /* !MACHVEC_PLATFORM_HEADER */
/* __attribute__((__aligned__(16))) is required to make size of the
 * structure multiple of 16 bytes.
 * This will fillup the holes created because of section 3.3.1 in
 * Software Conventions guide.
 */
181 struct ia64_machine_vector
{
183 ia64_mv_setup_t
*setup
;
184 ia64_mv_cpu_init_t
*cpu_init
;
185 ia64_mv_irq_init_t
*irq_init
;
186 ia64_mv_send_ipi_t
*send_ipi
;
187 ia64_mv_timer_interrupt_t
*timer_interrupt
;
188 ia64_mv_global_tlb_purge_t
*global_tlb_purge
;
189 ia64_mv_tlb_migrate_finish_t
*tlb_migrate_finish
;
190 ia64_mv_dma_init
*dma_init
;
191 ia64_mv_dma_alloc_coherent
*dma_alloc_coherent
;
192 ia64_mv_dma_free_coherent
*dma_free_coherent
;
193 ia64_mv_dma_map_single
*dma_map_single
;
194 ia64_mv_dma_unmap_single
*dma_unmap_single
;
195 ia64_mv_dma_map_sg
*dma_map_sg
;
196 ia64_mv_dma_unmap_sg
*dma_unmap_sg
;
197 ia64_mv_dma_sync_single_for_cpu
*dma_sync_single_for_cpu
;
198 ia64_mv_dma_sync_sg_for_cpu
*dma_sync_sg_for_cpu
;
199 ia64_mv_dma_sync_single_for_device
*dma_sync_single_for_device
;
200 ia64_mv_dma_sync_sg_for_device
*dma_sync_sg_for_device
;
201 ia64_mv_dma_mapping_error
*dma_mapping_error
;
202 ia64_mv_dma_supported
*dma_supported
;
203 ia64_mv_irq_to_vector
*irq_to_vector
;
204 ia64_mv_local_vector_to_irq
*local_vector_to_irq
;
205 ia64_mv_pci_get_legacy_mem_t
*pci_get_legacy_mem
;
206 ia64_mv_pci_legacy_read_t
*pci_legacy_read
;
207 ia64_mv_pci_legacy_write_t
*pci_legacy_write
;
211 ia64_mv_outb_t
*outb
;
212 ia64_mv_outw_t
*outw
;
213 ia64_mv_outl_t
*outl
;
214 ia64_mv_mmiowb_t
*mmiowb
;
215 ia64_mv_readb_t
*readb
;
216 ia64_mv_readw_t
*readw
;
217 ia64_mv_readl_t
*readl
;
218 ia64_mv_readq_t
*readq
;
219 ia64_mv_readb_relaxed_t
*readb_relaxed
;
220 ia64_mv_readw_relaxed_t
*readw_relaxed
;
221 ia64_mv_readl_relaxed_t
*readl_relaxed
;
222 ia64_mv_readq_relaxed_t
*readq_relaxed
;
223 ia64_mv_migrate_t
*migrate
;
224 ia64_mv_setup_msi_irq_t
*setup_msi_irq
;
225 ia64_mv_teardown_msi_irq_t
*teardown_msi_irq
;
226 ia64_mv_pci_fixup_bus_t
*pci_fixup_bus
;
227 ia64_mv_kernel_launch_event_t
*kernel_launch_event
;
228 } __attribute__((__aligned__(16))); /* align attrib? see above comment */
/* Positional initializer for a struct ia64_machine_vector; entries must
 * stay in the same order as the struct members above. */
#define MACHVEC_INIT(name)			\
{						\
	#name,					\
	platform_setup,				\
	platform_cpu_init,			\
	platform_irq_init,			\
	platform_send_ipi,			\
	platform_timer_interrupt,		\
	platform_global_tlb_purge,		\
	platform_tlb_migrate_finish,		\
	platform_dma_init,			\
	platform_dma_alloc_coherent,		\
	platform_dma_free_coherent,		\
	platform_dma_map_single,		\
	platform_dma_unmap_single,		\
	platform_dma_map_sg,			\
	platform_dma_unmap_sg,			\
	platform_dma_sync_single_for_cpu,	\
	platform_dma_sync_sg_for_cpu,		\
	platform_dma_sync_single_for_device,	\
	platform_dma_sync_sg_for_device,	\
	platform_dma_mapping_error,		\
	platform_dma_supported,			\
	platform_irq_to_vector,			\
	platform_local_vector_to_irq,		\
	platform_pci_get_legacy_mem,		\
	platform_pci_legacy_read,		\
	platform_pci_legacy_write,		\
	platform_inb,				\
	platform_inw,				\
	platform_inl,				\
	platform_outb,				\
	platform_outw,				\
	platform_outl,				\
	platform_mmiowb,			\
	platform_readb,				\
	platform_readw,				\
	platform_readl,				\
	platform_readq,				\
	platform_readb_relaxed,			\
	platform_readw_relaxed,			\
	platform_readl_relaxed,			\
	platform_readq_relaxed,			\
	platform_migrate,			\
	platform_setup_msi_irq,			\
	platform_teardown_msi_irq,		\
	platform_pci_fixup_bus,			\
	platform_kernel_launch_event		\
}
/* the one run-time selected vector, plus its initializers
 * (defined in arch/ia64/kernel/machvec.c) */
extern struct ia64_machine_vector ia64_mv;
extern void machvec_init (const char *name);
extern void machvec_init_from_cmdline(const char *cmdline);
285 # error Unknown configuration. Update asm-ia64/machvec.h.
286 # endif /* CONFIG_IA64_GENERIC */
/*
 * Declare default routines which aren't declared anywhere else:
 */
291 extern ia64_mv_dma_init swiotlb_init
;
292 extern ia64_mv_dma_alloc_coherent swiotlb_alloc_coherent
;
293 extern ia64_mv_dma_free_coherent swiotlb_free_coherent
;
294 extern ia64_mv_dma_map_single swiotlb_map_single
;
295 extern ia64_mv_dma_unmap_single swiotlb_unmap_single
;
296 extern ia64_mv_dma_map_sg swiotlb_map_sg
;
297 extern ia64_mv_dma_unmap_sg swiotlb_unmap_sg
;
298 extern ia64_mv_dma_sync_single_for_cpu swiotlb_sync_single_for_cpu
;
299 extern ia64_mv_dma_sync_sg_for_cpu swiotlb_sync_sg_for_cpu
;
300 extern ia64_mv_dma_sync_single_for_device swiotlb_sync_single_for_device
;
301 extern ia64_mv_dma_sync_sg_for_device swiotlb_sync_sg_for_device
;
302 extern ia64_mv_dma_mapping_error swiotlb_dma_mapping_error
;
303 extern ia64_mv_dma_supported swiotlb_dma_supported
;
/*
 * Define default versions so we can extend machvec for new platforms without having
 * to update the machvec files for all existing platforms.
 */
309 #ifndef platform_setup
310 # define platform_setup machvec_setup
312 #ifndef platform_cpu_init
313 # define platform_cpu_init machvec_noop
315 #ifndef platform_irq_init
316 # define platform_irq_init machvec_noop
319 #ifndef platform_send_ipi
320 # define platform_send_ipi ia64_send_ipi /* default to architected version */
322 #ifndef platform_timer_interrupt
323 # define platform_timer_interrupt machvec_timer_interrupt
325 #ifndef platform_global_tlb_purge
326 # define platform_global_tlb_purge ia64_global_tlb_purge /* default to architected version */
328 #ifndef platform_tlb_migrate_finish
329 # define platform_tlb_migrate_finish machvec_noop_mm
331 #ifndef platform_kernel_launch_event
332 # define platform_kernel_launch_event machvec_noop
334 #ifndef platform_dma_init
335 # define platform_dma_init swiotlb_init
337 #ifndef platform_dma_alloc_coherent
338 # define platform_dma_alloc_coherent swiotlb_alloc_coherent
340 #ifndef platform_dma_free_coherent
341 # define platform_dma_free_coherent swiotlb_free_coherent
343 #ifndef platform_dma_map_single
344 # define platform_dma_map_single swiotlb_map_single
346 #ifndef platform_dma_unmap_single
347 # define platform_dma_unmap_single swiotlb_unmap_single
349 #ifndef platform_dma_map_sg
350 # define platform_dma_map_sg swiotlb_map_sg
352 #ifndef platform_dma_unmap_sg
353 # define platform_dma_unmap_sg swiotlb_unmap_sg
355 #ifndef platform_dma_sync_single_for_cpu
356 # define platform_dma_sync_single_for_cpu swiotlb_sync_single_for_cpu
358 #ifndef platform_dma_sync_sg_for_cpu
359 # define platform_dma_sync_sg_for_cpu swiotlb_sync_sg_for_cpu
361 #ifndef platform_dma_sync_single_for_device
362 # define platform_dma_sync_single_for_device swiotlb_sync_single_for_device
364 #ifndef platform_dma_sync_sg_for_device
365 # define platform_dma_sync_sg_for_device swiotlb_sync_sg_for_device
367 #ifndef platform_dma_mapping_error
368 # define platform_dma_mapping_error swiotlb_dma_mapping_error
370 #ifndef platform_dma_supported
371 # define platform_dma_supported swiotlb_dma_supported
373 #ifndef platform_irq_to_vector
374 # define platform_irq_to_vector __ia64_irq_to_vector
376 #ifndef platform_local_vector_to_irq
377 # define platform_local_vector_to_irq __ia64_local_vector_to_irq
379 #ifndef platform_pci_get_legacy_mem
380 # define platform_pci_get_legacy_mem ia64_pci_get_legacy_mem
382 #ifndef platform_pci_legacy_read
383 # define platform_pci_legacy_read ia64_pci_legacy_read
384 extern int ia64_pci_legacy_read(struct pci_bus
*bus
, u16 port
, u32
*val
, u8 size
);
386 #ifndef platform_pci_legacy_write
387 # define platform_pci_legacy_write ia64_pci_legacy_write
388 extern int ia64_pci_legacy_write(struct pci_bus
*bus
, u16 port
, u32 val
, u8 size
);
391 # define platform_inb __ia64_inb
394 # define platform_inw __ia64_inw
397 # define platform_inl __ia64_inl
399 #ifndef platform_outb
400 # define platform_outb __ia64_outb
402 #ifndef platform_outw
403 # define platform_outw __ia64_outw
405 #ifndef platform_outl
406 # define platform_outl __ia64_outl
408 #ifndef platform_mmiowb
409 # define platform_mmiowb __ia64_mmiowb
411 #ifndef platform_readb
412 # define platform_readb __ia64_readb
414 #ifndef platform_readw
415 # define platform_readw __ia64_readw
417 #ifndef platform_readl
418 # define platform_readl __ia64_readl
420 #ifndef platform_readq
421 # define platform_readq __ia64_readq
423 #ifndef platform_readb_relaxed
424 # define platform_readb_relaxed __ia64_readb_relaxed
426 #ifndef platform_readw_relaxed
427 # define platform_readw_relaxed __ia64_readw_relaxed
429 #ifndef platform_readl_relaxed
430 # define platform_readl_relaxed __ia64_readl_relaxed
432 #ifndef platform_readq_relaxed
433 # define platform_readq_relaxed __ia64_readq_relaxed
435 #ifndef platform_migrate
436 # define platform_migrate machvec_noop_task
438 #ifndef platform_setup_msi_irq
439 # define platform_setup_msi_irq ((ia64_mv_setup_msi_irq_t*)NULL)
441 #ifndef platform_teardown_msi_irq
442 # define platform_teardown_msi_irq ((ia64_mv_teardown_msi_irq_t*)NULL)
444 #ifndef platform_pci_fixup_bus
445 # define platform_pci_fixup_bus machvec_noop_bus
448 #endif /* _ASM_IA64_MACHVEC_H */