powerpc/cell: Add DMA_ATTR_WEAK_ORDERING dma attribute and use in Cell IOMMU code
[linux-2.6/linux-acpi-2.6/ibm-acpi-2.6.git] / include / asm-ia64 / machvec.h
blob0721a5e8271e7ffe2181a487689f155b7b85cb89
1 /*
2 * Machine vector for IA-64.
4 * Copyright (C) 1999 Silicon Graphics, Inc.
5 * Copyright (C) Srinivasa Thirumalachar <sprasad@engr.sgi.com>
6 * Copyright (C) Vijay Chander <vijay@engr.sgi.com>
7 * Copyright (C) 1999-2001, 2003-2004 Hewlett-Packard Co.
8 * David Mosberger-Tang <davidm@hpl.hp.com>
9 */
10 #ifndef _ASM_IA64_MACHVEC_H
11 #define _ASM_IA64_MACHVEC_H
13 #include <linux/types.h>
15 /* forward declarations: */
16 struct device;
17 struct pt_regs;
18 struct scatterlist;
19 struct page;
20 struct mm_struct;
21 struct pci_bus;
22 struct task_struct;
23 struct pci_dev;
24 struct msi_desc;
25 struct dma_attrs;
27 typedef void ia64_mv_setup_t (char **);
28 typedef void ia64_mv_cpu_init_t (void);
29 typedef void ia64_mv_irq_init_t (void);
30 typedef void ia64_mv_send_ipi_t (int, int, int, int);
31 typedef void ia64_mv_timer_interrupt_t (int, void *);
32 typedef void ia64_mv_global_tlb_purge_t (struct mm_struct *, unsigned long, unsigned long, unsigned long);
33 typedef void ia64_mv_tlb_migrate_finish_t (struct mm_struct *);
34 typedef u8 ia64_mv_irq_to_vector (int);
35 typedef unsigned int ia64_mv_local_vector_to_irq (u8);
36 typedef char *ia64_mv_pci_get_legacy_mem_t (struct pci_bus *);
37 typedef int ia64_mv_pci_legacy_read_t (struct pci_bus *, u16 port, u32 *val,
38 u8 size);
39 typedef int ia64_mv_pci_legacy_write_t (struct pci_bus *, u16 port, u32 val,
40 u8 size);
41 typedef void ia64_mv_migrate_t(struct task_struct * task);
42 typedef void ia64_mv_pci_fixup_bus_t (struct pci_bus *);
43 typedef void ia64_mv_kernel_launch_event_t(void);
45 /* DMA-mapping interface: */
46 typedef void ia64_mv_dma_init (void);
47 typedef void *ia64_mv_dma_alloc_coherent (struct device *, size_t, dma_addr_t *, gfp_t);
48 typedef void ia64_mv_dma_free_coherent (struct device *, size_t, void *, dma_addr_t);
49 typedef dma_addr_t ia64_mv_dma_map_single (struct device *, void *, size_t, int);
50 typedef void ia64_mv_dma_unmap_single (struct device *, dma_addr_t, size_t, int);
51 typedef int ia64_mv_dma_map_sg (struct device *, struct scatterlist *, int, int);
52 typedef void ia64_mv_dma_unmap_sg (struct device *, struct scatterlist *, int, int);
53 typedef void ia64_mv_dma_sync_single_for_cpu (struct device *, dma_addr_t, size_t, int);
54 typedef void ia64_mv_dma_sync_sg_for_cpu (struct device *, struct scatterlist *, int, int);
55 typedef void ia64_mv_dma_sync_single_for_device (struct device *, dma_addr_t, size_t, int);
56 typedef void ia64_mv_dma_sync_sg_for_device (struct device *, struct scatterlist *, int, int);
57 typedef int ia64_mv_dma_mapping_error (dma_addr_t dma_addr);
58 typedef int ia64_mv_dma_supported (struct device *, u64);
60 typedef dma_addr_t ia64_mv_dma_map_single_attrs (struct device *, void *, size_t, int, struct dma_attrs *);
61 typedef void ia64_mv_dma_unmap_single_attrs (struct device *, dma_addr_t, size_t, int, struct dma_attrs *);
62 typedef int ia64_mv_dma_map_sg_attrs (struct device *, struct scatterlist *, int, int, struct dma_attrs *);
63 typedef void ia64_mv_dma_unmap_sg_attrs (struct device *, struct scatterlist *, int, int, struct dma_attrs *);
66 * WARNING: The legacy I/O space is _architected_. Platforms are
67 * expected to follow this architected model (see Section 10.7 in the
68 * IA-64 Architecture Software Developer's Manual). Unfortunately,
69 * some broken machines do not follow that model, which is why we have
70 * to make the inX/outX operations part of the machine vector.
71 * Platform designers should follow the architected model whenever
72 * possible.
74 typedef unsigned int ia64_mv_inb_t (unsigned long);
75 typedef unsigned int ia64_mv_inw_t (unsigned long);
76 typedef unsigned int ia64_mv_inl_t (unsigned long);
77 typedef void ia64_mv_outb_t (unsigned char, unsigned long);
78 typedef void ia64_mv_outw_t (unsigned short, unsigned long);
79 typedef void ia64_mv_outl_t (unsigned int, unsigned long);
80 typedef void ia64_mv_mmiowb_t (void);
81 typedef unsigned char ia64_mv_readb_t (const volatile void __iomem *);
82 typedef unsigned short ia64_mv_readw_t (const volatile void __iomem *);
83 typedef unsigned int ia64_mv_readl_t (const volatile void __iomem *);
84 typedef unsigned long ia64_mv_readq_t (const volatile void __iomem *);
85 typedef unsigned char ia64_mv_readb_relaxed_t (const volatile void __iomem *);
86 typedef unsigned short ia64_mv_readw_relaxed_t (const volatile void __iomem *);
87 typedef unsigned int ia64_mv_readl_relaxed_t (const volatile void __iomem *);
88 typedef unsigned long ia64_mv_readq_relaxed_t (const volatile void __iomem *);
90 typedef int ia64_mv_setup_msi_irq_t (struct pci_dev *pdev, struct msi_desc *);
91 typedef void ia64_mv_teardown_msi_irq_t (unsigned int irq);
93 static inline void
94 machvec_noop (void)
98 static inline void
99 machvec_noop_mm (struct mm_struct *mm)
103 static inline void
104 machvec_noop_task (struct task_struct *task)
108 static inline void
109 machvec_noop_bus (struct pci_bus *bus)
113 extern void machvec_setup (char **);
114 extern void machvec_timer_interrupt (int, void *);
115 extern void machvec_dma_sync_single (struct device *, dma_addr_t, size_t, int);
116 extern void machvec_dma_sync_sg (struct device *, struct scatterlist *, int, int);
117 extern void machvec_tlb_migrate_finish (struct mm_struct *);
119 # if defined (CONFIG_IA64_HP_SIM)
120 # include <asm/machvec_hpsim.h>
121 # elif defined (CONFIG_IA64_DIG)
122 # include <asm/machvec_dig.h>
123 # elif defined (CONFIG_IA64_HP_ZX1)
124 # include <asm/machvec_hpzx1.h>
125 # elif defined (CONFIG_IA64_HP_ZX1_SWIOTLB)
126 # include <asm/machvec_hpzx1_swiotlb.h>
127 # elif defined (CONFIG_IA64_SGI_SN2)
128 # include <asm/machvec_sn2.h>
129 # elif defined (CONFIG_IA64_SGI_UV)
130 # include <asm/machvec_uv.h>
131 # elif defined (CONFIG_IA64_GENERIC)
133 # ifdef MACHVEC_PLATFORM_HEADER
134 # include MACHVEC_PLATFORM_HEADER
135 # else
136 # define platform_name ia64_mv.name
137 # define platform_setup ia64_mv.setup
138 # define platform_cpu_init ia64_mv.cpu_init
139 # define platform_irq_init ia64_mv.irq_init
140 # define platform_send_ipi ia64_mv.send_ipi
141 # define platform_timer_interrupt ia64_mv.timer_interrupt
142 # define platform_global_tlb_purge ia64_mv.global_tlb_purge
143 # define platform_tlb_migrate_finish ia64_mv.tlb_migrate_finish
144 # define platform_dma_init ia64_mv.dma_init
145 # define platform_dma_alloc_coherent ia64_mv.dma_alloc_coherent
146 # define platform_dma_free_coherent ia64_mv.dma_free_coherent
147 # define platform_dma_map_single_attrs ia64_mv.dma_map_single_attrs
148 # define platform_dma_unmap_single_attrs ia64_mv.dma_unmap_single_attrs
149 # define platform_dma_map_sg_attrs ia64_mv.dma_map_sg_attrs
150 # define platform_dma_unmap_sg_attrs ia64_mv.dma_unmap_sg_attrs
151 # define platform_dma_sync_single_for_cpu ia64_mv.dma_sync_single_for_cpu
152 # define platform_dma_sync_sg_for_cpu ia64_mv.dma_sync_sg_for_cpu
153 # define platform_dma_sync_single_for_device ia64_mv.dma_sync_single_for_device
154 # define platform_dma_sync_sg_for_device ia64_mv.dma_sync_sg_for_device
155 # define platform_dma_mapping_error ia64_mv.dma_mapping_error
156 # define platform_dma_supported ia64_mv.dma_supported
157 # define platform_irq_to_vector ia64_mv.irq_to_vector
158 # define platform_local_vector_to_irq ia64_mv.local_vector_to_irq
159 # define platform_pci_get_legacy_mem ia64_mv.pci_get_legacy_mem
160 # define platform_pci_legacy_read ia64_mv.pci_legacy_read
161 # define platform_pci_legacy_write ia64_mv.pci_legacy_write
162 # define platform_inb ia64_mv.inb
163 # define platform_inw ia64_mv.inw
164 # define platform_inl ia64_mv.inl
165 # define platform_outb ia64_mv.outb
166 # define platform_outw ia64_mv.outw
167 # define platform_outl ia64_mv.outl
168 # define platform_mmiowb ia64_mv.mmiowb
169 # define platform_readb ia64_mv.readb
170 # define platform_readw ia64_mv.readw
171 # define platform_readl ia64_mv.readl
172 # define platform_readq ia64_mv.readq
173 # define platform_readb_relaxed ia64_mv.readb_relaxed
174 # define platform_readw_relaxed ia64_mv.readw_relaxed
175 # define platform_readl_relaxed ia64_mv.readl_relaxed
176 # define platform_readq_relaxed ia64_mv.readq_relaxed
177 # define platform_migrate ia64_mv.migrate
178 # define platform_setup_msi_irq ia64_mv.setup_msi_irq
179 # define platform_teardown_msi_irq ia64_mv.teardown_msi_irq
180 # define platform_pci_fixup_bus ia64_mv.pci_fixup_bus
181 # define platform_kernel_launch_event ia64_mv.kernel_launch_event
182 # endif
184 /* __attribute__((__aligned__(16))) is required to make size of the
185 * structure multiple of 16 bytes.
186 * This will fillup the holes created because of section 3.3.1 in
187 * Software Conventions guide.
189 struct ia64_machine_vector {
190 const char *name;
191 ia64_mv_setup_t *setup;
192 ia64_mv_cpu_init_t *cpu_init;
193 ia64_mv_irq_init_t *irq_init;
194 ia64_mv_send_ipi_t *send_ipi;
195 ia64_mv_timer_interrupt_t *timer_interrupt;
196 ia64_mv_global_tlb_purge_t *global_tlb_purge;
197 ia64_mv_tlb_migrate_finish_t *tlb_migrate_finish;
198 ia64_mv_dma_init *dma_init;
199 ia64_mv_dma_alloc_coherent *dma_alloc_coherent;
200 ia64_mv_dma_free_coherent *dma_free_coherent;
201 ia64_mv_dma_map_single_attrs *dma_map_single_attrs;
202 ia64_mv_dma_unmap_single_attrs *dma_unmap_single_attrs;
203 ia64_mv_dma_map_sg_attrs *dma_map_sg_attrs;
204 ia64_mv_dma_unmap_sg_attrs *dma_unmap_sg_attrs;
205 ia64_mv_dma_sync_single_for_cpu *dma_sync_single_for_cpu;
206 ia64_mv_dma_sync_sg_for_cpu *dma_sync_sg_for_cpu;
207 ia64_mv_dma_sync_single_for_device *dma_sync_single_for_device;
208 ia64_mv_dma_sync_sg_for_device *dma_sync_sg_for_device;
209 ia64_mv_dma_mapping_error *dma_mapping_error;
210 ia64_mv_dma_supported *dma_supported;
211 ia64_mv_irq_to_vector *irq_to_vector;
212 ia64_mv_local_vector_to_irq *local_vector_to_irq;
213 ia64_mv_pci_get_legacy_mem_t *pci_get_legacy_mem;
214 ia64_mv_pci_legacy_read_t *pci_legacy_read;
215 ia64_mv_pci_legacy_write_t *pci_legacy_write;
216 ia64_mv_inb_t *inb;
217 ia64_mv_inw_t *inw;
218 ia64_mv_inl_t *inl;
219 ia64_mv_outb_t *outb;
220 ia64_mv_outw_t *outw;
221 ia64_mv_outl_t *outl;
222 ia64_mv_mmiowb_t *mmiowb;
223 ia64_mv_readb_t *readb;
224 ia64_mv_readw_t *readw;
225 ia64_mv_readl_t *readl;
226 ia64_mv_readq_t *readq;
227 ia64_mv_readb_relaxed_t *readb_relaxed;
228 ia64_mv_readw_relaxed_t *readw_relaxed;
229 ia64_mv_readl_relaxed_t *readl_relaxed;
230 ia64_mv_readq_relaxed_t *readq_relaxed;
231 ia64_mv_migrate_t *migrate;
232 ia64_mv_setup_msi_irq_t *setup_msi_irq;
233 ia64_mv_teardown_msi_irq_t *teardown_msi_irq;
234 ia64_mv_pci_fixup_bus_t *pci_fixup_bus;
235 ia64_mv_kernel_launch_event_t *kernel_launch_event;
236 } __attribute__((__aligned__(16))); /* align attrib? see above comment */
/* Static initializer for struct ia64_machine_vector: positional order
 * must match the member order of the struct above.  The surrounding
 * braces were dropped by the scrape and are restored here. */
#define MACHVEC_INIT(name)			\
{						\
	#name,					\
	platform_setup,				\
	platform_cpu_init,			\
	platform_irq_init,			\
	platform_send_ipi,			\
	platform_timer_interrupt,		\
	platform_global_tlb_purge,		\
	platform_tlb_migrate_finish,		\
	platform_dma_init,			\
	platform_dma_alloc_coherent,		\
	platform_dma_free_coherent,		\
	platform_dma_map_single_attrs,		\
	platform_dma_unmap_single_attrs,	\
	platform_dma_map_sg_attrs,		\
	platform_dma_unmap_sg_attrs,		\
	platform_dma_sync_single_for_cpu,	\
	platform_dma_sync_sg_for_cpu,		\
	platform_dma_sync_single_for_device,	\
	platform_dma_sync_sg_for_device,	\
	platform_dma_mapping_error,		\
	platform_dma_supported,			\
	platform_irq_to_vector,			\
	platform_local_vector_to_irq,		\
	platform_pci_get_legacy_mem,		\
	platform_pci_legacy_read,		\
	platform_pci_legacy_write,		\
	platform_inb,				\
	platform_inw,				\
	platform_inl,				\
	platform_outb,				\
	platform_outw,				\
	platform_outl,				\
	platform_mmiowb,			\
	platform_readb,				\
	platform_readw,				\
	platform_readl,				\
	platform_readq,				\
	platform_readb_relaxed,			\
	platform_readw_relaxed,			\
	platform_readl_relaxed,			\
	platform_readq_relaxed,			\
	platform_migrate,			\
	platform_setup_msi_irq,			\
	platform_teardown_msi_irq,		\
	platform_pci_fixup_bus,			\
	platform_kernel_launch_event		\
}
288 extern struct ia64_machine_vector ia64_mv;
289 extern void machvec_init (const char *name);
290 extern void machvec_init_from_cmdline(const char *cmdline);
292 # else
293 # error Unknown configuration. Update asm-ia64/machvec.h.
294 # endif /* CONFIG_IA64_GENERIC */
297 * Declare default routines which aren't declared anywhere else:
299 extern ia64_mv_dma_init swiotlb_init;
300 extern ia64_mv_dma_alloc_coherent swiotlb_alloc_coherent;
301 extern ia64_mv_dma_free_coherent swiotlb_free_coherent;
302 extern ia64_mv_dma_map_single swiotlb_map_single;
303 extern ia64_mv_dma_map_single_attrs swiotlb_map_single_attrs;
304 extern ia64_mv_dma_unmap_single swiotlb_unmap_single;
305 extern ia64_mv_dma_unmap_single_attrs swiotlb_unmap_single_attrs;
306 extern ia64_mv_dma_map_sg swiotlb_map_sg;
307 extern ia64_mv_dma_map_sg_attrs swiotlb_map_sg_attrs;
308 extern ia64_mv_dma_unmap_sg swiotlb_unmap_sg;
309 extern ia64_mv_dma_unmap_sg_attrs swiotlb_unmap_sg_attrs;
310 extern ia64_mv_dma_sync_single_for_cpu swiotlb_sync_single_for_cpu;
311 extern ia64_mv_dma_sync_sg_for_cpu swiotlb_sync_sg_for_cpu;
312 extern ia64_mv_dma_sync_single_for_device swiotlb_sync_single_for_device;
313 extern ia64_mv_dma_sync_sg_for_device swiotlb_sync_sg_for_device;
314 extern ia64_mv_dma_mapping_error swiotlb_dma_mapping_error;
315 extern ia64_mv_dma_supported swiotlb_dma_supported;
318 * Define default versions so we can extend machvec for new platforms without having
319 * to update the machvec files for all existing platforms.
321 #ifndef platform_setup
322 # define platform_setup machvec_setup
323 #endif
324 #ifndef platform_cpu_init
325 # define platform_cpu_init machvec_noop
326 #endif
327 #ifndef platform_irq_init
328 # define platform_irq_init machvec_noop
329 #endif
331 #ifndef platform_send_ipi
332 # define platform_send_ipi ia64_send_ipi /* default to architected version */
333 #endif
334 #ifndef platform_timer_interrupt
335 # define platform_timer_interrupt machvec_timer_interrupt
336 #endif
337 #ifndef platform_global_tlb_purge
338 # define platform_global_tlb_purge ia64_global_tlb_purge /* default to architected version */
339 #endif
340 #ifndef platform_tlb_migrate_finish
341 # define platform_tlb_migrate_finish machvec_noop_mm
342 #endif
343 #ifndef platform_kernel_launch_event
344 # define platform_kernel_launch_event machvec_noop
345 #endif
346 #ifndef platform_dma_init
347 # define platform_dma_init swiotlb_init
348 #endif
349 #ifndef platform_dma_alloc_coherent
350 # define platform_dma_alloc_coherent swiotlb_alloc_coherent
351 #endif
352 #ifndef platform_dma_free_coherent
353 # define platform_dma_free_coherent swiotlb_free_coherent
354 #endif
355 #ifndef platform_dma_map_single_attrs
356 # define platform_dma_map_single_attrs swiotlb_map_single_attrs
357 #endif
358 #ifndef platform_dma_unmap_single_attrs
359 # define platform_dma_unmap_single_attrs swiotlb_unmap_single_attrs
360 #endif
361 #ifndef platform_dma_map_sg_attrs
362 # define platform_dma_map_sg_attrs swiotlb_map_sg_attrs
363 #endif
364 #ifndef platform_dma_unmap_sg_attrs
365 # define platform_dma_unmap_sg_attrs swiotlb_unmap_sg_attrs
366 #endif
367 #ifndef platform_dma_sync_single_for_cpu
368 # define platform_dma_sync_single_for_cpu swiotlb_sync_single_for_cpu
369 #endif
370 #ifndef platform_dma_sync_sg_for_cpu
371 # define platform_dma_sync_sg_for_cpu swiotlb_sync_sg_for_cpu
372 #endif
373 #ifndef platform_dma_sync_single_for_device
374 # define platform_dma_sync_single_for_device swiotlb_sync_single_for_device
375 #endif
376 #ifndef platform_dma_sync_sg_for_device
377 # define platform_dma_sync_sg_for_device swiotlb_sync_sg_for_device
378 #endif
379 #ifndef platform_dma_mapping_error
380 # define platform_dma_mapping_error swiotlb_dma_mapping_error
381 #endif
382 #ifndef platform_dma_supported
383 # define platform_dma_supported swiotlb_dma_supported
384 #endif
385 #ifndef platform_irq_to_vector
386 # define platform_irq_to_vector __ia64_irq_to_vector
387 #endif
388 #ifndef platform_local_vector_to_irq
389 # define platform_local_vector_to_irq __ia64_local_vector_to_irq
390 #endif
391 #ifndef platform_pci_get_legacy_mem
392 # define platform_pci_get_legacy_mem ia64_pci_get_legacy_mem
393 #endif
394 #ifndef platform_pci_legacy_read
395 # define platform_pci_legacy_read ia64_pci_legacy_read
396 extern int ia64_pci_legacy_read(struct pci_bus *bus, u16 port, u32 *val, u8 size);
397 #endif
398 #ifndef platform_pci_legacy_write
399 # define platform_pci_legacy_write ia64_pci_legacy_write
400 extern int ia64_pci_legacy_write(struct pci_bus *bus, u16 port, u32 val, u8 size);
401 #endif
402 #ifndef platform_inb
403 # define platform_inb __ia64_inb
404 #endif
405 #ifndef platform_inw
406 # define platform_inw __ia64_inw
407 #endif
408 #ifndef platform_inl
409 # define platform_inl __ia64_inl
410 #endif
411 #ifndef platform_outb
412 # define platform_outb __ia64_outb
413 #endif
414 #ifndef platform_outw
415 # define platform_outw __ia64_outw
416 #endif
417 #ifndef platform_outl
418 # define platform_outl __ia64_outl
419 #endif
420 #ifndef platform_mmiowb
421 # define platform_mmiowb __ia64_mmiowb
422 #endif
423 #ifndef platform_readb
424 # define platform_readb __ia64_readb
425 #endif
426 #ifndef platform_readw
427 # define platform_readw __ia64_readw
428 #endif
429 #ifndef platform_readl
430 # define platform_readl __ia64_readl
431 #endif
432 #ifndef platform_readq
433 # define platform_readq __ia64_readq
434 #endif
435 #ifndef platform_readb_relaxed
436 # define platform_readb_relaxed __ia64_readb_relaxed
437 #endif
438 #ifndef platform_readw_relaxed
439 # define platform_readw_relaxed __ia64_readw_relaxed
440 #endif
441 #ifndef platform_readl_relaxed
442 # define platform_readl_relaxed __ia64_readl_relaxed
443 #endif
444 #ifndef platform_readq_relaxed
445 # define platform_readq_relaxed __ia64_readq_relaxed
446 #endif
447 #ifndef platform_migrate
448 # define platform_migrate machvec_noop_task
449 #endif
450 #ifndef platform_setup_msi_irq
451 # define platform_setup_msi_irq ((ia64_mv_setup_msi_irq_t*)NULL)
452 #endif
453 #ifndef platform_teardown_msi_irq
454 # define platform_teardown_msi_irq ((ia64_mv_teardown_msi_irq_t*)NULL)
455 #endif
456 #ifndef platform_pci_fixup_bus
457 # define platform_pci_fixup_bus machvec_noop_bus
458 #endif
460 #endif /* _ASM_IA64_MACHVEC_H */