/* ----------------------------------------------------------------------- *
 *
 *   Copyright 1996-2017 The NASM Authors - All Rights Reserved
 *   See the file AUTHORS included with the NASM distribution for
 *   the specific copyright holders.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following
 *   conditions are met:
 *
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above
 *     copyright notice, this list of conditions and the following
 *     disclaimer in the documentation and/or other materials provided
 *     with the distribution.
 *
 *     THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
 *     CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
 *     INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 *     MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 *     DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
 *     CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *     SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 *     NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 *     LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 *     HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 *     CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 *     OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
 *     EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * ----------------------------------------------------------------------- */

/*
 * bytesex.h - byte order helper functions
 *
 * In this file, be careful about getting X86_MEMORY versus
 * LITTLE_ENDIAN correct: X86_MEMORY also means we are allowed to
 * do unaligned memory references, and is thus a strictly stronger
 * condition than LITTLE_ENDIAN alone.
 */
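
/*
 * Illustration of why that distinction matters (a sketch, not part of
 * the interface; "buf" and "p" are hypothetical names): the X86_MEMORY
 * forms of the WRITE* macros below store through a wider pointer that
 * may be misaligned, which x86 permits but many strict-alignment
 * architectures fault on; the byte-at-a-time forms are safe everywhere.
 *
 *     uint8_t buf[3];
 *     void *p = buf + 1;        // misaligned for uint16_t
 *     WRITESHORT(p, 0x1234);    // needs the portable branch unless
 *                               // unaligned stores are permitted
 */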

#ifndef NASM_BYTESEX_H
#define NASM_BYTESEX_H

#include "compiler.h"

/*
 * Some handy macros that will probably be of use in more than one
 * output format: convert integers into little-endian byte packed
 * format in memory.
 */

#define WRITECHAR(p,v)                          \
    do {                                        \
        uint8_t *_wc_p = (uint8_t *)(p);        \
        *_wc_p++ = (v);                         \
        (p) = (void *)_wc_p;                    \
    } while (0)

#if X86_MEMORY

#define WRITESHORT(p,v)                         \
    do {                                        \
        uint16_t *_ws_p = (uint16_t *)(p);      \
        *_ws_p++ = (v);                         \
        (p) = (void *)_ws_p;                    \
    } while (0)

#define WRITELONG(p,v)                          \
    do {                                        \
        uint32_t *_wl_p = (uint32_t *)(p);      \
        *_wl_p++ = (v);                         \
        (p) = (void *)_wl_p;                    \
    } while (0)

#define WRITEDLONG(p,v)                         \
    do {                                        \
        uint64_t *_wq_p = (uint64_t *)(p);      \
        *_wq_p++ = (v);                         \
        (p) = (void *)_wq_p;                    \
    } while (0)

#else /* !X86_MEMORY */

#define WRITESHORT(p,v)                         \
    do {                                        \
        uint8_t *_ws_p = (uint8_t *)(p);        \
        const uint16_t _ws_v = (v);             \
        WRITECHAR(_ws_p, _ws_v);                \
        WRITECHAR(_ws_p, _ws_v >> 8);           \
        (p) = (void *)_ws_p;                    \
    } while (0)

#define WRITELONG(p,v)                          \
    do {                                        \
        uint8_t *_wl_p = (uint8_t *)(p);        \
        const uint32_t _wl_v = (v);             \
        WRITESHORT(_wl_p, _wl_v);               \
        WRITESHORT(_wl_p, _wl_v >> 16);         \
        (p) = (void *)_wl_p;                    \
    } while (0)

#define WRITEDLONG(p,v)                         \
    do {                                        \
        uint8_t *_wq_p = (uint8_t *)(p);        \
        const uint64_t _wq_v = (v);             \
        WRITELONG(_wq_p, _wq_v);                \
        WRITELONG(_wq_p, _wq_v >> 32);          \
        (p) = (void *)_wq_p;                    \
    } while (0)

#endif /* X86_MEMORY */
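
/*
 * Usage sketch (illustrative only; "buf" and "p" are hypothetical):
 * each WRITE* macro stores its value little-endian and advances the
 * pointer by the number of bytes written, so calls can be chained.
 *
 *     uint8_t buf[6];
 *     void *p = buf;
 *     WRITESHORT(p, 0x1234);        // buf[0] = 0x34, buf[1] = 0x12
 *     WRITELONG(p, 0xdeadbeef);     // buf[2..5] = ef be ad de
 *     // p now points at buf + 6
 */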

/*
 * Endian control functions which work on a single integer
 */
#ifdef WORDS_LITTLEENDIAN

#ifndef HAVE_CPU_TO_LE16
# define cpu_to_le16(v) ((uint16_t)(v))
#endif
#ifndef HAVE_CPU_TO_LE32
# define cpu_to_le32(v) ((uint32_t)(v))
#endif
#ifndef HAVE_CPU_TO_LE64
# define cpu_to_le64(v) ((uint64_t)(v))
#endif

#elif defined(WORDS_BIGENDIAN)

#ifndef HAVE_CPU_TO_LE16
static inline uint16_t cpu_to_le16(uint16_t v)
{
# ifdef HAVE___CPU_TO_LE16
    return __cpu_to_le16(v);
# elif defined(HAVE_HTOLE16)
    return htole16(v);
# elif defined(HAVE___BSWAP_16)
    return __bswap_16(v);
# elif defined(HAVE___BUILTIN_BSWAP16)
    return __builtin_bswap16(v);
# elif defined(HAVE__BYTESWAP_USHORT) && (USHRT_MAX == 0xffffU)
    return _byteswap_ushort(v);
# else
    return (v << 8) | (v >> 8);
# endif
}
#endif
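
/*
 * For example (pure arithmetic, easy to check by hand): with
 * v = 0x1234 the portable fallback computes
 * (0x1234 << 8) | (0x1234 >> 8) = 0x3412, i.e. the two bytes
 * exchanged.
 */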

#ifndef HAVE_CPU_TO_LE32
static inline uint32_t cpu_to_le32(uint32_t v)
{
# ifdef HAVE___CPU_TO_LE32
    return __cpu_to_le32(v);
# elif defined(HAVE_HTOLE32)
    return htole32(v);
# elif defined(HAVE___BSWAP_32)
    return __bswap_32(v);
# elif defined(HAVE___BUILTIN_BSWAP32)
    return __builtin_bswap32(v);
# elif defined(HAVE__BYTESWAP_ULONG) && (ULONG_MAX == 0xffffffffUL)
    return _byteswap_ulong(v);
# else
    v = ((v << 8) & 0xff00ff00) |
        ((v >> 8) & 0x00ff00ff);
    return (v << 16) | (v >> 16);
# endif
}
#endif
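
/*
 * Worked example of the fallback path (hand-checkable arithmetic): for
 * v = 0x11223344, the first step swaps bytes within each 16-bit half,
 * giving 0x22114433, and the final 16-bit rotate yields 0x44332211,
 * the fully byte-reversed value. The 64-bit version below repeats the
 * same doubling step once more before its closing 32-bit rotate.
 */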

#ifndef HAVE_CPU_TO_LE64
static inline uint64_t cpu_to_le64(uint64_t v)
{
# ifdef HAVE___CPU_TO_LE64
    return __cpu_to_le64(v);
# elif defined(HAVE_HTOLE64)
    return htole64(v);
# elif defined(HAVE___BSWAP_64)
    return __bswap_64(v);
# elif defined(HAVE___BUILTIN_BSWAP64)
    return __builtin_bswap64(v);
# elif defined(HAVE__BYTESWAP_UINT64)
    return _byteswap_uint64(v);
# else
    v = ((v << 8) & 0xff00ff00ff00ff00ull) |
        ((v >> 8) & 0x00ff00ff00ff00ffull);
    v = ((v << 16) & 0xffff0000ffff0000ull) |
        ((v >> 16) & 0x0000ffff0000ffffull);
    return (v << 32) | (v >> 32);
# endif
}
#endif

#else /* not WORDS_LITTLEENDIAN or WORDS_BIGENDIAN */

/* Generic versions: build the little-endian byte pattern in memory */

static inline uint16_t cpu_to_le16(uint16_t v)
{
    union u16 {
        uint16_t v;
        uint8_t c[2];
    } x;
    uint8_t *cp = x.c;

    WRITESHORT(cp, v);
    return x.v;
}

static inline uint32_t cpu_to_le32(uint32_t v)
{
    union u32 {
        uint32_t v;
        uint8_t c[4];
    } x;
    uint8_t *cp = x.c;

    WRITELONG(cp, v);
    return x.v;
}

static inline uint64_t cpu_to_le64(uint64_t v)
{
    union u64 {
        uint64_t v;
        uint8_t c[8];
    } x;
    uint8_t *cp = x.c;

    WRITEDLONG(cp, v);
    return x.v;
}

#endif /* WORDS_LITTLEENDIAN */
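
/*
 * Quick self-check sketch (a hypothetical test harness, not part of
 * this header): regardless of which branch above was compiled in, the
 * result of cpu_to_le32() must have its least significant byte first
 * in memory.
 *
 *     uint32_t le = cpu_to_le32(0x11223344);
 *     const uint8_t *b = (const uint8_t *)&le;
 *     assert(b[0] == 0x44 && b[3] == 0x11);   // needs <assert.h>
 */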

/*
 * Write a little-endian value of a given byte size and advance the
 * pointer; sizes other than 1, 2, 4 or 8 bytes take the generic path.
 */
#define WRITEADDR(p,v,s)                                        \
    do {                                                        \
        switch (is_constant(s) ? (s) : 0) {                     \
        case 1:                                                 \
            WRITECHAR(p,v);                                     \
            break;                                              \
        case 2:                                                 \
            WRITESHORT(p,v);                                    \
            break;                                              \
        case 4:                                                 \
            WRITELONG(p,v);                                     \
            break;                                              \
        case 8:                                                 \
            WRITEDLONG(p,v);                                    \
            break;                                              \
        default:                                                \
        {                                                       \
            const uint64_t _wa_v = cpu_to_le64(v);              \
            const size_t _wa_s = (s);                           \
            uint8_t * const _wa_p = (uint8_t *)(p);             \
            memcpy(_wa_p, &_wa_v, _wa_s);                       \
            (p) = (void *)(_wa_p + _wa_s);                      \
        }                                                       \
        break;                                                  \
        }                                                       \
    } while (0)
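
/*
 * Usage sketch (illustrative; "out", "val" and "width" are
 * hypothetical, with width <= 8): when the size is a compile-time
 * constant the switch collapses to one of the WRITE* macros above;
 * otherwise the default branch emits the bytes via cpu_to_le64()
 * and memcpy().
 *
 *     uint8_t out[12];
 *     void *q = out;
 *     WRITEADDR(q, val, 4);        // constant size: becomes WRITELONG
 *     WRITEADDR(q, val, width);    // runtime size: generic memcpy path
 */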

#endif /* NASM_BYTESEX_H */