1 /***************************************************************************
3 * Open \______ \ ____ ____ | | _\_ |__ _______ ___
4 * Source | _// _ \_/ ___\| |/ /| __ \ / _ \ \/ /
5 * Jukebox | | ( <_> ) \___| < | \_\ ( <_> > < <
6 * Firmware |____|_ /\____/ \___ >__|_ \|___ /\____/__/\_ \
10 * Copyright (C) 2002 by Alan Korr
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation; either version 2
15 * of the License, or (at your option) any later version.
17 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
18 * KIND, either express or implied.
20 ****************************************************************************/
28 #include "gcc_extensions.h" /* for LIKELY/UNLIKELY */
30 extern void system_reboot (void);
31 /* Called from any UIE handler and panicf - wait for a key and return
32 * to reboot system. */
33 extern void system_exception_wait(void);
34 extern void system_init(void);
36 extern long cpu_frequency
;
44 bool detect_flashed_romimage(void);
45 bool detect_flashed_ramimage(void);
46 bool detect_original_firmware(void);
48 #if defined(HAVE_ADJUSTABLE_CPU_FREQ) \
49 && defined(ROCKBOX_HAS_LOGF) && (NUM_CORES == 1)
50 #define CPU_BOOST_LOGGING
53 #ifdef HAVE_ADJUSTABLE_CPU_FREQ
55 extern struct spinlock boostctrl_spin
;
57 void cpu_boost_init(void);
58 #define FREQ cpu_frequency
59 void set_cpu_frequency(long frequency
);
60 #ifdef CPU_BOOST_LOGGING
61 char * cpu_boost_log_getlog_first(void);
62 char * cpu_boost_log_getlog_next(void);
63 int cpu_boost_log_getcount(void);
64 void cpu_boost_(bool on_off
, char* location
, int line
);
66 void cpu_boost(bool on_off
);
68 void cpu_idle_mode(bool on_off
);
69 int get_cpu_boost_counter(void);
70 #else /* ndef HAVE_ADJUSTABLE_CPU_FREQ */
74 #define set_cpu_frequency(frequency)
75 #define cpu_boost(on_off)
76 #define cpu_boost_id(on_off, id)
77 #define cpu_idle_mode(on_off)
78 #define get_cpu_boost_counter()
79 #define get_cpu_boost_tracker()
80 #endif /* HAVE_ADJUSTABLE_CPU_FREQ */
82 #ifdef CPU_BOOST_LOGGING
83 #define cpu_boost(on_off) cpu_boost_(on_off,__FILE__, __LINE__)
89 #define NULL ((void*)0)
93 #define MIN(a, b) (((a)<(b))?(a):(b))
97 #define MAX(a, b) (((a)>(b))?(a):(b))
100 /* return number of elements in array a */
101 #define ARRAYLEN(a) (sizeof(a)/sizeof((a)[0]))
103 /* return p incremented by specified number of bytes */
104 #define SKIPBYTES(p, count) ((typeof (p))((char *)(p) + (count)))
106 #define P2_M1(p2) ((1 << (p2))-1)
108 /* align up or down to nearest 2^p2 */
109 #define ALIGN_DOWN_P2(n, p2) ((n) & ~P2_M1(p2))
110 #define ALIGN_UP_P2(n, p2) ALIGN_DOWN_P2((n) + P2_M1(p2),p2)
112 /* align up or down to nearest integer multiple of a */
113 #define ALIGN_DOWN(n, a) ((typeof(n))((typeof(a))(n)/(a)*(a)))
114 #define ALIGN_UP(n, a) ALIGN_DOWN((n)+((a)-1),a)
116 /* align start and end of buffer to nearest integer multiple of a */
117 #define ALIGN_BUFFER(ptr,len,align) \
119 uintptr_t tmp_ptr1 = (uintptr_t)ptr; \
120 uintptr_t tmp_ptr2 = tmp_ptr1 + len;\
121 tmp_ptr1 = ALIGN_UP(tmp_ptr1,align); \
122 tmp_ptr2 = ALIGN_DOWN(tmp_ptr2,align); \
123 len = tmp_ptr2 - tmp_ptr1; \
124 ptr = (typeof(ptr))tmp_ptr1; \
127 #define PTR_ADD(ptr, x) ((typeof(ptr))((char*)(ptr) + (x)))
128 #define PTR_SUB(ptr, x) ((typeof(ptr))((char*)(ptr) - (x)))
130 /* newer? SDL includes endian.h, so we ignore it */
131 #if (CONFIG_PLATFORM & PLATFORM_HOSTED) || defined(__PCTOOL__)
142 /* Android NDK contains swap16 and swap32, ignore them */
143 #if (CONFIG_PLATFORM & PLATFORM_ANDROID)
148 /* Get the byte offset of a type's member */
149 #define OFFSETOF(type, membername) ((off_t)&((type *)0)->membername)
151 /* Get the type pointer from one of its members */
152 #define TYPE_FROM_MEMBER(type, memberptr, membername) \
153 ((type *)((intptr_t)(memberptr) - OFFSETOF(type, membername)))
155 /* returns index of first set bit or 32 if no bits are set */
156 int find_first_set_bit(uint32_t val
);
/* Return a value with only the lowest set bit of 'val' kept
 * (zero when 'val' is zero). ~val + 1 is the two's-complement
 * negation, so ANDing it with 'val' masks off every bit above
 * the least significant set bit. Well-defined for unsigned types. */
static inline __attribute__((always_inline))
uint32_t isolate_first_bit(uint32_t val)
{
    uint32_t lowest = val & (~val + 1u);
    return lowest;
}
162 /* Functions to set and clear register or variable bits atomically */
163 void bitmod16(volatile uint16_t *addr
, uint16_t bits
, uint16_t mask
);
164 void bitset16(volatile uint16_t *addr
, uint16_t mask
);
165 void bitclr16(volatile uint16_t *addr
, uint16_t mask
);
167 void bitmod32(volatile uint32_t *addr
, uint32_t bits
, uint32_t mask
);
168 void bitset32(volatile uint32_t *addr
, uint32_t mask
);
169 void bitclr32(volatile uint32_t *addr
, uint32_t mask
);
171 /* gcc 3.4 changed the format of the constraints */
172 #if (__GNUC__ >= 3) && (__GNUC_MINOR__ > 3) || (__GNUC__ >= 4)
173 #define I_CONSTRAINT "I08"
175 #define I_CONSTRAINT "I"
178 /* Utilize the user break controller to catch invalid memory accesses. */
179 int system_memory_guard(int newmode
);
182 MEMGUARD_KEEP
= -1, /* don't change the mode; for reading */
183 MEMGUARD_NONE
= 0, /* catch nothing */
184 MEMGUARD_FLASH_WRITES
, /* catch writes to area 02 (flash ROM) */
185 MEMGUARD_ZERO_AREA
, /* catch all accesses to areas 00 and 01 */
189 #if !defined(SIMULATOR) && !defined(__PCTOOL__)
190 #include "system-target.h"
191 #elif defined(HAVE_SDL) /* SDL build */
192 #include "system-sdl.h"
193 #define NEED_GENERIC_BYTESWAPS
194 #elif defined(__PCTOOL__)
195 #include "system-sdl.h"
196 #define NEED_GENERIC_BYTESWAPS
200 #ifdef NEED_GENERIC_BYTESWAPS
/* Byte-reverse a 16-bit value:
 *   result[15..8] = value[ 7..0]
 *   result[ 7..0] = value[15..8]
 */
static inline uint16_t swap16_hw(uint16_t value)
{
    uint16_t upper = (uint16_t)(value >> 8);
    uint16_t lower = (uint16_t)(value << 8);
    return (uint16_t)(upper | lower);
}
/* Byte-reverse a 32-bit value:
 *   result[31..24] = value[ 7.. 0]
 *   result[23..16] = value[15.. 8]
 *   result[15.. 8] = value[23..16]
 *   result[ 7.. 0] = value[31..24]
 * (16-bit swap of each half inlined into explicit byte moves.) */
static inline uint32_t swap32_hw(uint32_t value)
{
    return ((value & 0x000000ffu) << 24) |
           ((value & 0x0000ff00u) <<  8) |
           ((value & 0x00ff0000u) >>  8) |
           ((value & 0xff000000u) >> 24);
}
/* Swap the two bytes within each 16-bit half of 'value':
 *   result[31..24],[15.. 8] = value[23..16],[ 7.. 0]
 *   result[23..16],[ 7.. 0] = value[31..24],[15.. 8]
 */
static inline uint32_t swap_odd_even32_hw(uint32_t value)
{
    uint32_t odd_bytes  = value & 0xff00ff00u; /* bytes 3 and 1 */
    uint32_t even_bytes = value & 0x00ff00ffu; /* bytes 2 and 0 */
    return (odd_bytes >> 8) | (even_bytes << 8);
}
/* Swap the 16-bit halves (words) of 'value':
 *   result[31..16] = value[15.. 0]
 *   result[15.. 0] = value[31..16]
 */
static inline uint32_t swaw32_hw(uint32_t value)
{
    uint32_t moved_up   = value << 16;
    uint32_t moved_down = value >> 16;
    return moved_up | moved_down;
}
242 #endif /* NEED_GENERIC_BYTESWAPS */
244 /* static endianness conversion */
245 #define SWAP16_CONST(x) \
246 ((typeof(x))( ((uint16_t)(x) >> 8) | ((uint16_t)(x) << 8) ))
248 #define SWAP32_CONST(x) \
249 ((typeof(x))( ((uint32_t)(x) >> 24) | \
250 (((uint32_t)(x) & 0xff0000) >> 8) | \
251 (((uint32_t)(x) & 0xff00) << 8) | \
252 ((uint32_t)(x) << 24) ))
254 #define SWAP_ODD_EVEN32_CONST(x) \
255 ((typeof(x))( ((uint32_t)SWAP16_CONST((uint32_t)(x) >> 16) << 16) | \
256 SWAP16_CONST((uint32_t)(x))) )
258 #define SWAW32_CONST(x) \
259 ((typeof(x))( ((uint32_t)(x) << 16) | ((uint32_t)(x) >> 16) ))
261 /* Select best method based upon whether x is a constant expression */
263 ( __builtin_constant_p(x) ? SWAP16_CONST(x) : (typeof(x))swap16_hw(x) )
266 ( __builtin_constant_p(x) ? SWAP32_CONST(x) : (typeof(x))swap32_hw(x) )
268 #define swap_odd_even32(x) \
269 ( __builtin_constant_p(x) ? SWAP_ODD_EVEN32_CONST(x) : (typeof(x))swap_odd_even32_hw(x) )
272 ( __builtin_constant_p(x) ? SWAW32_CONST(x) : (typeof(x))swaw32_hw(x) )
275 #ifdef ROCKBOX_LITTLE_ENDIAN
276 #define letoh16(x) (x)
277 #define letoh32(x) (x)
278 #define htole16(x) (x)
279 #define htole32(x) (x)
280 #define betoh16(x) swap16(x)
281 #define betoh32(x) swap32(x)
282 #define htobe16(x) swap16(x)
283 #define htobe32(x) swap32(x)
284 #define swap_odd_even_be32(x) (x)
285 #define swap_odd_even_le32(x) swap_odd_even32(x)
287 #define letoh16(x) swap16(x)
288 #define letoh32(x) swap32(x)
289 #define htole16(x) swap16(x)
290 #define htole32(x) swap32(x)
291 #define betoh16(x) (x)
292 #define betoh32(x) (x)
293 #define htobe16(x) (x)
294 #define htobe32(x) (x)
295 #define swap_odd_even_be32(x) swap_odd_even32(x)
296 #define swap_odd_even_le32(x) (x)
300 #define BIT_N(n) (1U << (n))
304 /* Make a mask of n contiguous bits, shifted left by 'shift' */
305 #define MASK_N(type, n, shift) \
306 ((type)((((type)1 << (n)) - (type)1) << (shift)))
309 /* Declare this as HIGHEST_IRQ_LEVEL if they don't differ */
310 #ifndef DISABLE_INTERRUPTS
311 #define DISABLE_INTERRUPTS HIGHEST_IRQ_LEVEL
314 /* Define this, if the CPU may take advantage of cache alignment. Is enabled
315 * for all ARM CPUs. */
317 #define HAVE_CPU_CACHE_ALIGN
320 /* Calculate CACHEALIGN_SIZE from CACHEALIGN_BITS */
321 #ifdef CACHEALIGN_SIZE
322 /* undefine, if defined. always calculate from CACHEALIGN_BITS */
323 #undef CACHEALIGN_SIZE
325 #ifdef CACHEALIGN_BITS
326 /* CACHEALIGN_SIZE = 2 ^ CACHEALIGN_BITS */
327 #define CACHEALIGN_SIZE (1u << CACHEALIGN_BITS)
329 /* FIXME: set to maximum known cache alignment of supported CPUs */
330 #define CACHEALIGN_BITS 5
331 #define CACHEALIGN_SIZE 32
334 #ifdef HAVE_CPU_CACHE_ALIGN
335 /* Cache alignment attributes and sizes are enabled */
336 #define CACHEALIGN_ATTR __attribute__((aligned(CACHEALIGN_SIZE)))
337 /* Aligns x up to a CACHEALIGN_SIZE boundary */
338 #define CACHEALIGN_UP(x) \
339 ((typeof (x))ALIGN_UP_P2((uintptr_t)(x), CACHEALIGN_BITS))
340 /* Aligns x down to a CACHEALIGN_SIZE boundary */
341 #define CACHEALIGN_DOWN(x) \
342 ((typeof (x))ALIGN_DOWN_P2((uintptr_t)(x), CACHEALIGN_BITS))
343 /* Aligns at least to the greater of size x or CACHEALIGN_SIZE */
344 #define CACHEALIGN_AT_LEAST_ATTR(x) \
345 __attribute__((aligned(CACHEALIGN_UP(x))))
346 /* Aligns a buffer pointer and size to proper boundaries */
347 #define CACHEALIGN_BUFFER(start, size) \
348 ALIGN_BUFFER((start), (size), CACHEALIGN_SIZE)
350 /* Cache alignment attributes and sizes are not enabled */
351 #define CACHEALIGN_ATTR
352 #define CACHEALIGN_AT_LEAST_ATTR(x) __attribute__((aligned(x)))
353 #define CACHEALIGN_UP(x) (x)
354 #define CACHEALIGN_DOWN(x) (x)
355 /* Make no adjustments */
356 #define CACHEALIGN_BUFFER(start, size)
359 /* Define MEM_ALIGN_ATTR which may be used to align e.g. buffers for faster
362 /* Use ARMs cache alignment. */
363 #define MEM_ALIGN_ATTR CACHEALIGN_ATTR
364 #define MEM_ALIGN_SIZE CACHEALIGN_SIZE
365 #elif defined(CPU_COLDFIRE)
366 /* Use fixed alignment of 16 bytes. Speed up only for 'movem' in DRAM. */
367 #define MEM_ALIGN_ATTR __attribute__((aligned(16)))
368 #define MEM_ALIGN_SIZE 16
370 /* Align pointer size */
371 #define MEM_ALIGN_ATTR __attribute__((aligned(sizeof(intptr_t))))
372 #define MEM_ALIGN_SIZE sizeof(intptr_t)
375 #define MEM_ALIGN_UP(x) \
376 ((typeof (x))ALIGN_UP((uintptr_t)(x), MEM_ALIGN_SIZE))
377 #define MEM_ALIGN_DOWN(x) \
378 ((typeof (x))ALIGN_DOWN((uintptr_t)(x), MEM_ALIGN_SIZE))
380 #ifdef STORAGE_WANTS_ALIGN
381 #define STORAGE_ALIGN_ATTR __attribute__((aligned(CACHEALIGN_SIZE)))
382 #define STORAGE_ALIGN_DOWN(x) \
383 ((typeof (x))ALIGN_DOWN_P2((uintptr_t)(x), CACHEALIGN_BITS))
384 /* Pad a size so the buffer can be aligned later */
385 #define STORAGE_PAD(x) ((x) + CACHEALIGN_SIZE - 1)
386 /* Number of bytes in the last cacheline assuming buffer of size x is aligned */
387 #define STORAGE_OVERLAP(x) ((x) & (CACHEALIGN_SIZE - 1))
388 #define STORAGE_ALIGN_BUFFER(start, size) \
389 ALIGN_BUFFER((start), (size), CACHEALIGN_SIZE)
391 #define STORAGE_ALIGN_ATTR
392 #define STORAGE_ALIGN_DOWN(x) (x)
393 #define STORAGE_PAD(x) (x)
394 #define STORAGE_OVERLAP(x) 0
395 #define STORAGE_ALIGN_BUFFER(start, size)
398 /* Double-cast to avoid 'dereferencing type-punned pointer will
399 * break strict aliasing rules' B.S. */
400 #define PUN_PTR(type, p) ((type)(intptr_t)(p))
403 bool dbg_ports(void);
405 #if (CONFIG_PLATFORM & PLATFORM_NATIVE)
406 bool dbg_hw_info(void);
409 #endif /* __SYSTEM_H__ */