/*
 * Copyright (c) 2016-present, Yann Collet, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under both the BSD-style license (found in the
 * LICENSE file in the root directory of this source tree) and the GPLv2 (found
 * in the COPYING file in the root directory of this source tree).
 * You may select, at your option, one of the above-listed licenses.
 */

#ifndef MEM_H_MODULE
#define MEM_H_MODULE

#if defined (__cplusplus)
extern "C" {
#endif

/*-****************************************
* Dependencies
******************************************/
#include <stddef.h>    /* size_t, ptrdiff_t */
#include <string.h>    /* memcpy */


/*-****************************************
* Compiler specifics
******************************************/
#if defined(_MSC_VER)    /* Visual Studio */
#  include <stdlib.h>    /* _byteswap_ulong */
#  include <intrin.h>    /* _byteswap_* */
#endif
#if defined(__GNUC__)
#  define MEM_STATIC static __inline __attribute__((unused))
#elif defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */)
#  define MEM_STATIC static inline
#elif defined(_MSC_VER)
#  define MEM_STATIC static __inline
#else
#  define MEM_STATIC static  /* this version may generate warnings for unused static functions; disable the relevant warning */
#endif

/* code only tested on 32- and 64-bit systems */
#define MEM_STATIC_ASSERT(c)   { enum { MEM_static_assert = 1/(int)(!!(c)) }; }   /* division by zero forces a compile-time error when the condition is false */
MEM_STATIC void MEM_check(void) { MEM_STATIC_ASSERT((sizeof(size_t)==4) || (sizeof(size_t)==8)); }


/*-**************************************************************
* Basic Types
*****************************************************************/
#if !defined (__VMS) && (defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */) )
# include <stdint.h>
  typedef  uint8_t BYTE;
  typedef uint16_t U16;
  typedef  int16_t S16;
  typedef uint32_t U32;
  typedef  int32_t S32;
  typedef uint64_t U64;
  typedef  int64_t S64;
#else
  typedef unsigned char      BYTE;
  typedef unsigned short     U16;
  typedef   signed short     S16;
  typedef unsigned int       U32;
  typedef   signed int       S32;
  typedef unsigned long long U64;
  typedef   signed long long S64;
#endif


/*-**************************************************************
* Memory I/O
*****************************************************************/
/* MEM_FORCE_MEMORY_ACCESS :
 * By default, access to unaligned memory is performed with `memcpy()`, which is safe and portable.
 * Unfortunately, on some target/compiler combinations, the generated assembly is sub-optimal.
 * The switch below selects a different access method, for improved performance.
 * Method 0 (default) : use `memcpy()`. Safe and portable.
 * Method 1 : `__packed` statement. Relies on a compiler extension (hence not portable).
 *            This method is safe if your compiler supports it, and *generally* as fast or faster than `memcpy`.
 * Method 2 : direct access. Does not depend on a compiler extension, but violates the C standard.
 *            It can generate buggy code on targets that require aligned accesses.
 *            In some circumstances, it's the only known way to get the best performance (e.g. GCC + ARMv6).
 * See http://fastcompression.blogspot.fr/2015/08/accessing-unaligned-memory.html for details.
 * Prefer these methods in priority order (0 > 1 > 2).
 */
#ifndef MEM_FORCE_MEMORY_ACCESS   /* can be defined externally, on command line for example */
#  if defined(__GNUC__) && ( defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || defined(__ARM_ARCH_6K__) || defined(__ARM_ARCH_6Z__) || defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_6T2__) )
#    define MEM_FORCE_MEMORY_ACCESS 2
#  elif defined(__INTEL_COMPILER) || defined(__GNUC__)
#    define MEM_FORCE_MEMORY_ACCESS 1
#  endif
#endif
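
/* Illustration only (not required by this header) : the access method can be
 * forced at build time by defining the macro on the compiler command line, e.g. :
 *     cc -DMEM_FORCE_MEMORY_ACCESS=1 ...
 */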

MEM_STATIC unsigned MEM_32bits(void) { return sizeof(size_t)==4; }
MEM_STATIC unsigned MEM_64bits(void) { return sizeof(size_t)==8; }

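/* run-time endianness detection :
 * the union overlays the U32 value 1 with its byte representation;
 * c[0] reads 1 on little-endian targets and 0 on big-endian targets */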
MEM_STATIC unsigned MEM_isLittleEndian(void)
{
    const union { U32 u; BYTE c[4]; } one = { 1 };   /* don't use static : performance detrimental */
    return one.c[0];
}

#if defined(MEM_FORCE_MEMORY_ACCESS) && (MEM_FORCE_MEMORY_ACCESS==2)

/* violates the C standard by ignoring alignment requirements.
   Only use when there is no other way to reach best performance on the target platform */
MEM_STATIC U16 MEM_read16(const void* memPtr) { return *(const U16*) memPtr; }
MEM_STATIC U32 MEM_read32(const void* memPtr) { return *(const U32*) memPtr; }
MEM_STATIC U64 MEM_read64(const void* memPtr) { return *(const U64*) memPtr; }
MEM_STATIC size_t MEM_readST(const void* memPtr) { return *(const size_t*) memPtr; }

MEM_STATIC void MEM_write16(void* memPtr, U16 value) { *(U16*)memPtr = value; }
MEM_STATIC void MEM_write32(void* memPtr, U32 value) { *(U32*)memPtr = value; }
MEM_STATIC void MEM_write64(void* memPtr, U64 value) { *(U64*)memPtr = value; }

#elif defined(MEM_FORCE_MEMORY_ACCESS) && (MEM_FORCE_MEMORY_ACCESS==1)

/* packed-struct accesses are safer than direct casts, but compiler-specific, hence potentially problematic for some compilers */
/* currently only defined for gcc and icc */
#if defined(_MSC_VER) || (defined(__INTEL_COMPILER) && defined(WIN32))
    __pragma( pack(push, 1) )
    typedef struct { U16 v; } unalign16;
    typedef struct { U32 v; } unalign32;
    typedef struct { U64 v; } unalign64;
    typedef struct { size_t v; } unalignArch;
    __pragma( pack(pop) )
#else
    typedef struct { U16 v; } __attribute__((packed)) unalign16;
    typedef struct { U32 v; } __attribute__((packed)) unalign32;
    typedef struct { U64 v; } __attribute__((packed)) unalign64;
    typedef struct { size_t v; } __attribute__((packed)) unalignArch;
#endif

MEM_STATIC U16 MEM_read16(const void* ptr) { return ((const unalign16*)ptr)->v; }
MEM_STATIC U32 MEM_read32(const void* ptr) { return ((const unalign32*)ptr)->v; }
MEM_STATIC U64 MEM_read64(const void* ptr) { return ((const unalign64*)ptr)->v; }
MEM_STATIC size_t MEM_readST(const void* ptr) { return ((const unalignArch*)ptr)->v; }

MEM_STATIC void MEM_write16(void* memPtr, U16 value) { ((unalign16*)memPtr)->v = value; }
MEM_STATIC void MEM_write32(void* memPtr, U32 value) { ((unalign32*)memPtr)->v = value; }
MEM_STATIC void MEM_write64(void* memPtr, U64 value) { ((unalign64*)memPtr)->v = value; }

#else

/* default method : safe and standard,
   but can sometimes prove slower */

MEM_STATIC U16 MEM_read16(const void* memPtr)
{
    U16 val; memcpy(&val, memPtr, sizeof(val)); return val;
}

MEM_STATIC U32 MEM_read32(const void* memPtr)
{
    U32 val; memcpy(&val, memPtr, sizeof(val)); return val;
}

MEM_STATIC U64 MEM_read64(const void* memPtr)
{
    U64 val; memcpy(&val, memPtr, sizeof(val)); return val;
}

MEM_STATIC size_t MEM_readST(const void* memPtr)
{
    size_t val; memcpy(&val, memPtr, sizeof(val)); return val;
}

MEM_STATIC void MEM_write16(void* memPtr, U16 value)
{
    memcpy(memPtr, &value, sizeof(value));
}

MEM_STATIC void MEM_write32(void* memPtr, U32 value)
{
    memcpy(memPtr, &value, sizeof(value));
}

MEM_STATIC void MEM_write64(void* memPtr, U64 value)
{
    memcpy(memPtr, &value, sizeof(value));
}

#endif /* MEM_FORCE_MEMORY_ACCESS */
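
/* Usage sketch (illustrative only) : whichever access method is selected above,
 * these helpers accept arbitrary, possibly unaligned addresses, e.g. :
 *     BYTE buffer[8];
 *     MEM_write32(buffer+1, 0x12345678);    <- unaligned store
 *     U32 const v = MEM_read32(buffer+1);   <- reads back 0x12345678
 */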

MEM_STATIC U32 MEM_swap32(U32 in)
{
#if defined(_MSC_VER)     /* Visual Studio */
    return _byteswap_ulong(in);
#elif defined (__GNUC__) && (__GNUC__ * 100 + __GNUC_MINOR__ >= 403)
    return __builtin_bswap32(in);
#else
    return  ((in << 24) & 0xff000000 ) |
            ((in <<  8) & 0x00ff0000 ) |
            ((in >>  8) & 0x0000ff00 ) |
            ((in >> 24) & 0x000000ff );
#endif
}
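
/* a worked example, independent of which branch is compiled in :
 *     MEM_swap32(0x11223344) == 0x44332211
 */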

MEM_STATIC U64 MEM_swap64(U64 in)
{
#if defined(_MSC_VER)     /* Visual Studio */
    return _byteswap_uint64(in);
#elif defined (__GNUC__) && (__GNUC__ * 100 + __GNUC_MINOR__ >= 403)
    return __builtin_bswap64(in);
#else
    return  ((in << 56) & 0xff00000000000000ULL) |
            ((in << 40) & 0x00ff000000000000ULL) |
            ((in << 24) & 0x0000ff0000000000ULL) |
            ((in <<  8) & 0x000000ff00000000ULL) |
            ((in >>  8) & 0x00000000ff000000ULL) |
            ((in >> 24) & 0x0000000000ff0000ULL) |
            ((in >> 40) & 0x000000000000ff00ULL) |
            ((in >> 56) & 0x00000000000000ffULL);
#endif
}

MEM_STATIC size_t MEM_swapST(size_t in)
{
    if (MEM_32bits())
        return (size_t)MEM_swap32((U32)in);
    else
        return (size_t)MEM_swap64((U64)in);
}

/*=== Little endian r/w ===*/

MEM_STATIC U16 MEM_readLE16(const void* memPtr)
{
    if (MEM_isLittleEndian())
        return MEM_read16(memPtr);
    else {
        const BYTE* p = (const BYTE*)memPtr;
        return (U16)(p[0] + (p[1]<<8));
    }
}

MEM_STATIC void MEM_writeLE16(void* memPtr, U16 val)
{
    if (MEM_isLittleEndian()) {
        MEM_write16(memPtr, val);
    } else {
        BYTE* p = (BYTE*)memPtr;
        p[0] = (BYTE)val;
        p[1] = (BYTE)(val>>8);
    }
}

MEM_STATIC U32 MEM_readLE24(const void* memPtr)
{
    return MEM_readLE16(memPtr) + (((const BYTE*)memPtr)[2] << 16);
}

MEM_STATIC void MEM_writeLE24(void* memPtr, U32 val)
{
    MEM_writeLE16(memPtr, (U16)val);
    ((BYTE*)memPtr)[2] = (BYTE)(val>>16);
}

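/* byte-layout example for the 24-bit helpers above (host endianness does not matter) :
 *     MEM_writeLE24(p, 0x00CCBBAA) stores the bytes AA BB CC at p[0..2],
 *     and MEM_readLE24(p) on those bytes returns 0x00CCBBAA
 */
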
MEM_STATIC U32 MEM_readLE32(const void* memPtr)
{
    if (MEM_isLittleEndian())
        return MEM_read32(memPtr);
    else
        return MEM_swap32(MEM_read32(memPtr));
}

MEM_STATIC void MEM_writeLE32(void* memPtr, U32 val32)
{
    if (MEM_isLittleEndian())
        MEM_write32(memPtr, val32);
    else
        MEM_write32(memPtr, MEM_swap32(val32));
}

MEM_STATIC U64 MEM_readLE64(const void* memPtr)
{
    if (MEM_isLittleEndian())
        return MEM_read64(memPtr);
    else
        return MEM_swap64(MEM_read64(memPtr));
}

MEM_STATIC void MEM_writeLE64(void* memPtr, U64 val64)
{
    if (MEM_isLittleEndian())
        MEM_write64(memPtr, val64);
    else
        MEM_write64(memPtr, MEM_swap64(val64));
}

MEM_STATIC size_t MEM_readLEST(const void* memPtr)
{
    if (MEM_32bits())
        return (size_t)MEM_readLE32(memPtr);
    else
        return (size_t)MEM_readLE64(memPtr);
}

MEM_STATIC void MEM_writeLEST(void* memPtr, size_t val)
{
    if (MEM_32bits())
        MEM_writeLE32(memPtr, (U32)val);
    else
        MEM_writeLE64(memPtr, (U64)val);
}

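/* illustration : the LE readers always interpret bytes in little-endian order,
 * regardless of the host. Reading the bytes 78 56 34 12 with MEM_readLE32()
 * yields 0x12345678 on both little- and big-endian machines.
 */
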
/*=== Big endian r/w ===*/

MEM_STATIC U32 MEM_readBE32(const void* memPtr)
{
    if (MEM_isLittleEndian())
        return MEM_swap32(MEM_read32(memPtr));
    else
        return MEM_read32(memPtr);
}

MEM_STATIC void MEM_writeBE32(void* memPtr, U32 val32)
{
    if (MEM_isLittleEndian())
        MEM_write32(memPtr, MEM_swap32(val32));
    else
        MEM_write32(memPtr, val32);
}

MEM_STATIC U64 MEM_readBE64(const void* memPtr)
{
    if (MEM_isLittleEndian())
        return MEM_swap64(MEM_read64(memPtr));
    else
        return MEM_read64(memPtr);
}

MEM_STATIC void MEM_writeBE64(void* memPtr, U64 val64)
{
    if (MEM_isLittleEndian())
        MEM_write64(memPtr, MEM_swap64(val64));
    else
        MEM_write64(memPtr, val64);
}

MEM_STATIC size_t MEM_readBEST(const void* memPtr)
{
    if (MEM_32bits())
        return (size_t)MEM_readBE32(memPtr);
    else
        return (size_t)MEM_readBE64(memPtr);
}

MEM_STATIC void MEM_writeBEST(void* memPtr, size_t val)
{
    if (MEM_32bits())
        MEM_writeBE32(memPtr, (U32)val);
    else
        MEM_writeBE64(memPtr, (U64)val);
}


#if defined (__cplusplus)
}
#endif

#endif /* MEM_H_MODULE */