1 | /* ----------------------------------------------------------------------- * |
2 | * |
3 | * Copyright 1996-2017 The NASM Authors - All Rights Reserved |
4 | * See the file AUTHORS included with the NASM distribution for |
5 | * the specific copyright holders. |
6 | * |
7 | * Redistribution and use in source and binary forms, with or without |
8 | * modification, are permitted provided that the following |
9 | * conditions are met: |
10 | * |
11 | * * Redistributions of source code must retain the above copyright |
12 | * notice, this list of conditions and the following disclaimer. |
13 | * * Redistributions in binary form must reproduce the above |
14 | * copyright notice, this list of conditions and the following |
15 | * disclaimer in the documentation and/or other materials provided |
16 | * with the distribution. |
17 | * |
18 | * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND |
19 | * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, |
20 | * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF |
21 | * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE |
22 | * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR |
23 | * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
24 | * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
25 | * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; |
26 | * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) |
27 | * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN |
28 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR |
29 | * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, |
30 | * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
31 | * |
32 | * ----------------------------------------------------------------------- */ |
33 | |
34 | /* |
35 | * bytesex.h - byte order helper functions |
36 | * |
 * In this file, be careful about getting X86_MEMORY versus
 * LITTLE_ENDIAN correct: X86_MEMORY additionally means we are
 * allowed to do unaligned memory references, which LITTLE_ENDIAN
 * alone does not guarantee.
40 | */ |
41 | |
42 | #ifndef NASM_BYTEORD_H |
43 | #define NASM_BYTEORD_H |
44 | |
45 | #include "compiler.h" |
46 | |
47 | /* |
48 | * Some handy macros that will probably be of use in more than one |
49 | * output format: convert integers into little-endian byte packed |
50 | * format in memory. |
51 | */ |
52 | |
/*
 * Emit one byte at *p and step the pointer past it.  Wrapped in
 * do/while(0) so it acts as a single statement; note that the
 * pointer argument p is both read and updated.
 */
#define WRITECHAR(p,v)                          \
    do {                                        \
        uint8_t *_wc_ptr = (uint8_t *)(p);      \
        *_wc_ptr = (v);                         \
        (p) = (void *)(_wc_ptr + 1);            \
    } while (0)
59 | |
60 | #if X86_MEMORY |
61 | |
/*
 * Emit a 16-bit value at *p and advance p by 2.  Only compiled when
 * X86_MEMORY, i.e. the host is little-endian and tolerates unaligned
 * stores, so a single native store is correct.
 */
#define WRITESHORT(p,v)                         \
    do {                                        \
        uint16_t *_ws_ptr = (uint16_t *)(p);    \
        *_ws_ptr = (v);                         \
        (p) = (void *)(_ws_ptr + 1);            \
    } while (0)
68 | |
/*
 * Emit a 32-bit value at *p and advance p by 4.  X86_MEMORY variant:
 * host is little-endian and unaligned stores are permitted, so a
 * single native store suffices.
 */
#define WRITELONG(p,v)                          \
    do {                                        \
        uint32_t *_wl_ptr = (uint32_t *)(p);    \
        *_wl_ptr = (v);                         \
        (p) = (void *)(_wl_ptr + 1);            \
    } while (0)
75 | |
/*
 * Emit a 64-bit value at *p and advance p by 8.  X86_MEMORY variant:
 * host is little-endian and unaligned stores are permitted, so a
 * single native store suffices.
 */
#define WRITEDLONG(p,v)                         \
    do {                                        \
        uint64_t *_wq_ptr = (uint64_t *)(p);    \
        *_wq_ptr = (v);                         \
        (p) = (void *)(_wq_ptr + 1);            \
    } while (0)
82 | |
83 | #else /* !X86_MEMORY */ |
84 | |
/*
 * Portable variant: emit a 16-bit value at *p in little-endian byte
 * order, one byte at a time, and advance p by 2.  The value is
 * captured once in a local so the argument is evaluated exactly once.
 */
#define WRITESHORT(p,v)                                 \
    do {                                                \
        uint8_t *_ws_ptr = (uint8_t *)(p);              \
        const uint16_t _ws_val = (v);                   \
        _ws_ptr[0] = (uint8_t)_ws_val;                  \
        _ws_ptr[1] = (uint8_t)(_ws_val >> 8);           \
        (p) = (void *)(_ws_ptr + 2);                    \
    } while (0)
93 | |
/*
 * Portable variant: emit a 32-bit value at *p in little-endian byte
 * order, one byte at a time, and advance p by 4.  The value is
 * captured once in a local so the argument is evaluated exactly once.
 */
#define WRITELONG(p,v)                                  \
    do {                                                \
        uint8_t *_wl_ptr = (uint8_t *)(p);              \
        const uint32_t _wl_val = (v);                   \
        _wl_ptr[0] = (uint8_t)_wl_val;                  \
        _wl_ptr[1] = (uint8_t)(_wl_val >> 8);           \
        _wl_ptr[2] = (uint8_t)(_wl_val >> 16);          \
        _wl_ptr[3] = (uint8_t)(_wl_val >> 24);          \
        (p) = (void *)(_wl_ptr + 4);                    \
    } while (0)
102 | |
/*
 * Portable variant: emit a 64-bit value at *p in little-endian byte
 * order, one byte at a time, and advance p by 8.  The value is
 * captured once in a local so the argument is evaluated exactly once.
 */
#define WRITEDLONG(p,v)                                 \
    do {                                                \
        uint8_t *_wq_ptr = (uint8_t *)(p);              \
        const uint64_t _wq_val = (v);                   \
        _wq_ptr[0] = (uint8_t)_wq_val;                  \
        _wq_ptr[1] = (uint8_t)(_wq_val >> 8);           \
        _wq_ptr[2] = (uint8_t)(_wq_val >> 16);          \
        _wq_ptr[3] = (uint8_t)(_wq_val >> 24);          \
        _wq_ptr[4] = (uint8_t)(_wq_val >> 32);          \
        _wq_ptr[5] = (uint8_t)(_wq_val >> 40);          \
        _wq_ptr[6] = (uint8_t)(_wq_val >> 48);          \
        _wq_ptr[7] = (uint8_t)(_wq_val >> 56);          \
        (p) = (void *)(_wq_ptr + 8);                    \
    } while (0)
111 | |
112 | #endif /* X86_MEMORY */ |
113 | |
114 | /* |
115 | * Endian control functions which work on a single integer |
116 | */ |
117 | #ifdef WORDS_LITTLEENDIAN |
118 | |
/*
 * Little-endian host: conversion to little-endian byte order is the
 * identity, so just truncate to the requested width.  The HAVE_*
 * guards let a system-provided implementation take precedence
 * (presumably detected by configure — confirm against config.h).
 */
#ifndef HAVE_CPU_TO_LE16
# define cpu_to_le16(v) ((uint16_t)(v))
#endif
#ifndef HAVE_CPU_TO_LE32
# define cpu_to_le32(v) ((uint32_t)(v))
#endif
#ifndef HAVE_CPU_TO_LE64
# define cpu_to_le64(v) ((uint64_t)(v))
#endif
128 | |
129 | #elif defined(WORDS_BIGENDIAN) |
130 | |
#ifndef HAVE_CPU_TO_LE16
/*
 * Big-endian host: byte-swap a 16-bit value into little-endian
 * order.  Prefer whichever swap primitive was detected at configure
 * time (Linux __cpu_to_le16, <endian.h> htole16, __bswap_16, the
 * compiler builtin, or the MSVC intrinsic); the final branch is a
 * portable shift-based fallback.
 */
static inline uint16_t cpu_to_le16(uint16_t v)
{
# ifdef HAVE___CPU_TO_LE16
    return __cpu_to_le16(v);
# elif defined(HAVE_HTOLE16)
    return htole16(v);
# elif defined(HAVE___BSWAP_16)
    return __bswap_16(v);
# elif defined(HAVE___BUILTIN_BSWAP16)
    return __builtin_bswap16(v);
# elif defined(HAVE__BYTESWAP_USHORT) && (USHRT_MAX == 0xffffU)
    return _byteswap_ushort(v);
# else
    return (v << 8) | (v >> 8);
# endif
}
#endif
149 | |
#ifndef HAVE_CPU_TO_LE32
/*
 * Big-endian host: byte-swap a 32-bit value into little-endian
 * order.  Same cascade of configure-detected primitives as
 * cpu_to_le16; the fallback swaps adjacent bytes, then the two
 * 16-bit halves.
 */
static inline uint32_t cpu_to_le32(uint32_t v)
{
# ifdef HAVE___CPU_TO_LE32
    return __cpu_to_le32(v);
# elif defined(HAVE_HTOLE32)
    return htole32(v);
# elif defined(HAVE___BSWAP_32)
    return __bswap_32(v);
# elif defined(HAVE___BUILTIN_BSWAP32)
    return __builtin_bswap32(v);
# elif defined(HAVE__BYTESWAP_ULONG) && (ULONG_MAX == 0xffffffffUL)
    return _byteswap_ulong(v);
# else
    v = ((v << 8) & 0xff00ff00 ) |
        ((v >> 8) & 0x00ff00ff);
    return (v << 16) | (v >> 16);
# endif
}
#endif
170 | |
#ifndef HAVE_CPU_TO_LE64
/*
 * Big-endian host: byte-swap a 64-bit value into little-endian
 * order.  Same cascade of configure-detected primitives as the
 * 16/32-bit versions; the fallback swaps bytes, then 16-bit pairs,
 * then the two 32-bit halves.
 */
static inline uint64_t cpu_to_le64(uint64_t v)
{
# ifdef HAVE___CPU_TO_LE64
    return __cpu_to_le64(v);
# elif defined(HAVE_HTOLE64)
    return htole64(v);
# elif defined(HAVE___BSWAP_64)
    return __bswap_64(v);
# elif defined(HAVE___BUILTIN_BSWAP64)
    return __builtin_bswap64(v);
# elif defined(HAVE__BYTESWAP_UINT64)
    return _byteswap_uint64(v);
# else
    v = ((v << 8) & 0xff00ff00ff00ff00ull) |
        ((v >> 8) & 0x00ff00ff00ff00ffull);
    v = ((v << 16) & 0xffff0000ffff0000ull) |
        ((v >> 16) & 0x0000ffff0000ffffull);
    return (v << 32) | (v >> 32);
# endif
}
#endif
193 | |
194 | #else /* not WORDS_LITTLEENDIAN or WORDS_BIGENDIAN */ |
195 | |
/*
 * Convert a 16-bit value to little-endian when host endianness is
 * unknown at compile time: store the value through WRITESHORT (which
 * emits little-endian bytes, or a native store under X86_MEMORY,
 * which implies little-endian), then read the result back through
 * the union.
 */
static inline uint16_t cpu_to_le16(uint16_t v)
{
    union u16 {
        uint16_t v;
        uint8_t c[2];
    } x;
    uint8_t *cp = x.c;  /* fix: &x.c has type uint8_t (*)[2], not uint8_t * */

    WRITESHORT(cp, v);
    return x.v;
}
207 | |
/*
 * Convert a 32-bit value to little-endian when host endianness is
 * unknown at compile time: store through WRITELONG (little-endian
 * byte order, or a native store under X86_MEMORY, which implies
 * little-endian), then read back through the union.
 */
static inline uint32_t cpu_to_le32(uint32_t v)
{
    union u32 {
        uint32_t v;
        uint8_t c[4];
    } x;
    uint8_t *cp = x.c;  /* fix: &x.c has type uint8_t (*)[4], not uint8_t * */

    WRITELONG(cp, v);
    return x.v;
}
219 | |
/*
 * Convert a 64-bit value to little-endian when host endianness is
 * unknown at compile time: store through WRITEDLONG (little-endian
 * byte order, or a native store under X86_MEMORY, which implies
 * little-endian), then read back through the union.
 */
static inline uint64_t cpu_to_le64(uint64_t v)
{
    union u64 {
        uint64_t v;
        uint8_t c[8];
    } x;
    uint8_t *cp = x.c;  /* fix: &x.c has type uint8_t (*)[8], not uint8_t * */

    WRITEDLONG(cp, v);
    return x.v;
}
231 | |
232 | #endif |
233 | |
/*
 * WRITEADDR(p,v,s): write the low s bytes of integer v to *p in
 * little-endian byte order and advance p by s bytes.
 *
 * When s is a compile-time constant (is_constant() — presumably a
 * constant-folding probe from compiler.h; confirm), sizes 1/2/4/8
 * dispatch to the fixed-width macros above.  Otherwise the value is
 * converted to a little-endian 64-bit image and its low s bytes are
 * copied with memcpy.  NOTE(review): the generic path assumes
 * s <= 8 — confirm no caller passes a larger size.
 */
#define WRITEADDR(p,v,s)                                        \
    do {                                                        \
        switch (is_constant(s) ? (s) : 0) {                     \
        case 1:                                                 \
            WRITECHAR(p,v);                                     \
            break;                                              \
        case 2:                                                 \
            WRITESHORT(p,v);                                    \
            break;                                              \
        case 4:                                                 \
            WRITELONG(p,v);                                     \
            break;                                              \
        case 8:                                                 \
            WRITEDLONG(p,v);                                    \
            break;                                              \
        default:                                                \
        {                                                       \
            const uint64_t _wa_v = cpu_to_le64(v);              \
            const size_t _wa_s = (s);                           \
            uint8_t * const _wa_p = (uint8_t *)(p);             \
            memcpy(_wa_p, &_wa_v, _wa_s);                       \
            (p) = (void *)(_wa_p + _wa_s);                      \
        }                                                       \
            break;                                              \
        }                                                       \
    } while (0)
260 | |
#endif /* NASM_BYTEORD_H */
262 | |