/**
 * \file alignment.h
 *
 * \brief Utility code for dealing with unaligned memory accesses
 */
/*
 * Copyright The Mbed TLS Contributors
 * SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
 */

#ifndef MBEDTLS_LIBRARY_ALIGNMENT_H
#define MBEDTLS_LIBRARY_ALIGNMENT_H

#include <stdint.h>
#include <string.h>
#include <stdlib.h>

#if defined(__GNUC__) && !defined(__ARMCC_VERSION) && !defined(__clang__) \
    && !defined(__llvm__) && !defined(__INTEL_COMPILER)
/* Defined if the compiler really is gcc and not clang, etc */
#define MBEDTLS_COMPILER_IS_GCC
#define MBEDTLS_GCC_VERSION \
    (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__)
#endif

/*
 * Define MBEDTLS_EFFICIENT_UNALIGNED_ACCESS for architectures where unaligned memory
 * accesses are known to be efficient.
 *
 * All functions defined here will behave correctly regardless, but might be less
 * efficient when this is not defined.
 */
#if defined(__ARM_FEATURE_UNALIGNED) \
    || defined(MBEDTLS_ARCH_IS_X86) || defined(MBEDTLS_ARCH_IS_X64) \
    || defined(MBEDTLS_PLATFORM_IS_WINDOWS_ON_ARM64)
/*
 * __ARM_FEATURE_UNALIGNED is defined where appropriate by armcc, gcc 7, clang 9
 * (and later versions) for Arm v7 and later; all x86 platforms should have
 * efficient unaligned access.
 *
 * https://learn.microsoft.com/en-us/cpp/build/arm64-windows-abi-conventions?view=msvc-170#alignment
 * specifies that on Windows-on-Arm64, unaligned access is safe (except for uncached
 * device memory).
 */
#define MBEDTLS_EFFICIENT_UNALIGNED_ACCESS
#endif

/**
 * Read the unsigned 16-bit integer from the given address, which need not
 * be aligned.
 *
 * \param p pointer to 2 bytes of data
 * \return Data at the given address
 */
inline uint16_t mbedtls_get_unaligned_uint16(const void *p)
{
    uint16_t r;
    memcpy(&r, p, sizeof(r));
    return r;
}

/**
 * Write the unsigned 16-bit integer to the given address, which need not
 * be aligned.
 *
 * \param p pointer to 2 bytes of data
 * \param x data to write
 */
inline void mbedtls_put_unaligned_uint16(void *p, uint16_t x)
{
    memcpy(p, &x, sizeof(x));
}

/**
 * Read the unsigned 32-bit integer from the given address, which need not
 * be aligned.
 *
 * \param p pointer to 4 bytes of data
 * \return Data at the given address
 */
inline uint32_t mbedtls_get_unaligned_uint32(const void *p)
{
    uint32_t r;
    memcpy(&r, p, sizeof(r));
    return r;
}

/**
 * Write the unsigned 32-bit integer to the given address, which need not
 * be aligned.
 *
 * \param p pointer to 4 bytes of data
 * \param x data to write
 */
inline void mbedtls_put_unaligned_uint32(void *p, uint32_t x)
{
    memcpy(p, &x, sizeof(x));
}

/**
 * Read the unsigned 64-bit integer from the given address, which need not
 * be aligned.
 *
 * \param p pointer to 8 bytes of data
 * \return Data at the given address
 */
inline uint64_t mbedtls_get_unaligned_uint64(const void *p)
{
    uint64_t r;
    memcpy(&r, p, sizeof(r));
    return r;
}

/**
 * Write the unsigned 64-bit integer to the given address, which need not
 * be aligned.
 *
 * \param p pointer to 8 bytes of data
 * \param x data to write
 */
inline void mbedtls_put_unaligned_uint64(void *p, uint64_t x)
{
    memcpy(p, &x, sizeof(x));
}

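/*
 * Usage sketch (illustrative only, not part of the library API): the
 * accessors above copy through memcpy, so the pointer passed in need not
 * satisfy the alignment requirements of the integer type.
 *
 *     unsigned char buf[8] = { 0 };
 *     mbedtls_put_unaligned_uint32(buf + 1, 0x01020304u);   // odd address is fine
 *     uint32_t v = mbedtls_get_unaligned_uint32(buf + 1);   // v == 0x01020304
 */
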
/** Byte Reading Macros
 *
 * Given a multi-byte integer \p x, MBEDTLS_BYTE_n retrieves the n-th
 * byte from x, where byte 0 is the least significant byte.
 */
#define MBEDTLS_BYTE_0(x) ((uint8_t) ((x) & 0xff))
#define MBEDTLS_BYTE_1(x) ((uint8_t) (((x) >> 8) & 0xff))
#define MBEDTLS_BYTE_2(x) ((uint8_t) (((x) >> 16) & 0xff))
#define MBEDTLS_BYTE_3(x) ((uint8_t) (((x) >> 24) & 0xff))
#define MBEDTLS_BYTE_4(x) ((uint8_t) (((x) >> 32) & 0xff))
#define MBEDTLS_BYTE_5(x) ((uint8_t) (((x) >> 40) & 0xff))
#define MBEDTLS_BYTE_6(x) ((uint8_t) (((x) >> 48) & 0xff))
#define MBEDTLS_BYTE_7(x) ((uint8_t) (((x) >> 56) & 0xff))

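/*
 * Usage sketch (illustrative only): extracting individual bytes from a
 * 64-bit value.
 *
 *     uint64_t x = 0x1122334455667788ULL;
 *     uint8_t lsb = MBEDTLS_BYTE_0(x);   // 0x88
 *     uint8_t msb = MBEDTLS_BYTE_7(x);   // 0x11
 */
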
/*
 * Detect GCC built-in byteswap routines
 */
#if defined(__GNUC__) && defined(__GNUC_PREREQ)
#if __GNUC_PREREQ(4, 8)
#define MBEDTLS_BSWAP16 __builtin_bswap16
#endif /* __GNUC_PREREQ(4,8) */
#if __GNUC_PREREQ(4, 3)
#define MBEDTLS_BSWAP32 __builtin_bswap32
#define MBEDTLS_BSWAP64 __builtin_bswap64
#endif /* __GNUC_PREREQ(4,3) */
#endif /* defined(__GNUC__) && defined(__GNUC_PREREQ) */

/*
 * Detect Clang built-in byteswap routines
 */
#if defined(__clang__) && defined(__has_builtin)
#if __has_builtin(__builtin_bswap16) && !defined(MBEDTLS_BSWAP16)
#define MBEDTLS_BSWAP16 __builtin_bswap16
#endif /* __has_builtin(__builtin_bswap16) */
#if __has_builtin(__builtin_bswap32) && !defined(MBEDTLS_BSWAP32)
#define MBEDTLS_BSWAP32 __builtin_bswap32
#endif /* __has_builtin(__builtin_bswap32) */
#if __has_builtin(__builtin_bswap64) && !defined(MBEDTLS_BSWAP64)
#define MBEDTLS_BSWAP64 __builtin_bswap64
#endif /* __has_builtin(__builtin_bswap64) */
#endif /* defined(__clang__) && defined(__has_builtin) */

/*
 * Detect MSVC built-in byteswap routines
 */
#if defined(_MSC_VER)
#if !defined(MBEDTLS_BSWAP16)
#define MBEDTLS_BSWAP16 _byteswap_ushort
#endif
#if !defined(MBEDTLS_BSWAP32)
#define MBEDTLS_BSWAP32 _byteswap_ulong
#endif
#if !defined(MBEDTLS_BSWAP64)
#define MBEDTLS_BSWAP64 _byteswap_uint64
#endif
#endif /* defined(_MSC_VER) */

/* Detect armcc built-in byteswap routine */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 410000) && !defined(MBEDTLS_BSWAP32)
#if defined(__ARM_ACLE)  /* ARM Compiler 6 - earlier versions don't need a header */
#include <arm_acle.h>
#endif
#define MBEDTLS_BSWAP32 __rev
#endif

/* Detect IAR built-in byteswap routine */
#if defined(__IAR_SYSTEMS_ICC__)
#if defined(__ARM_ACLE)
#include <arm_acle.h>
#define MBEDTLS_BSWAP16(x) ((uint16_t) __rev16((uint32_t) (x)))
#define MBEDTLS_BSWAP32 __rev
#define MBEDTLS_BSWAP64 __revll
#endif
#endif

/*
 * Where compiler built-ins are not present, fall back to C code that the
 * compiler may be able to detect and transform into the relevant bswap or
 * similar instruction.
 */
#if !defined(MBEDTLS_BSWAP16)
static inline uint16_t mbedtls_bswap16(uint16_t x)
{
    return
        (x & 0x00ff) << 8 |
        (x & 0xff00) >> 8;
}
#define MBEDTLS_BSWAP16 mbedtls_bswap16
#endif /* !defined(MBEDTLS_BSWAP16) */

#if !defined(MBEDTLS_BSWAP32)
static inline uint32_t mbedtls_bswap32(uint32_t x)
{
    return
        (x & 0x000000ff) << 24 |
        (x & 0x0000ff00) << 8 |
        (x & 0x00ff0000) >> 8 |
        (x & 0xff000000) >> 24;
}
#define MBEDTLS_BSWAP32 mbedtls_bswap32
#endif /* !defined(MBEDTLS_BSWAP32) */

#if !defined(MBEDTLS_BSWAP64)
static inline uint64_t mbedtls_bswap64(uint64_t x)
{
    return
        (x & 0x00000000000000ffULL) << 56 |
        (x & 0x000000000000ff00ULL) << 40 |
        (x & 0x0000000000ff0000ULL) << 24 |
        (x & 0x00000000ff000000ULL) << 8 |
        (x & 0x000000ff00000000ULL) >> 8 |
        (x & 0x0000ff0000000000ULL) >> 24 |
        (x & 0x00ff000000000000ULL) >> 40 |
        (x & 0xff00000000000000ULL) >> 56;
}
#define MBEDTLS_BSWAP64 mbedtls_bswap64
#endif /* !defined(MBEDTLS_BSWAP64) */

#if !defined(__BYTE_ORDER__)

#if defined(__LITTLE_ENDIAN__)
/* IAR defines __xxx_ENDIAN__, but not __BYTE_ORDER__ */
#define MBEDTLS_IS_BIG_ENDIAN 0
#elif defined(__BIG_ENDIAN__)
#define MBEDTLS_IS_BIG_ENDIAN 1
#else
static const uint16_t mbedtls_byte_order_detector = { 0x100 };
#define MBEDTLS_IS_BIG_ENDIAN (*((unsigned char *) (&mbedtls_byte_order_detector)) == 0x01)
#endif

#else

#if (__BYTE_ORDER__) == (__ORDER_BIG_ENDIAN__)
#define MBEDTLS_IS_BIG_ENDIAN 1
#else
#define MBEDTLS_IS_BIG_ENDIAN 0
#endif

#endif /* !defined(__BYTE_ORDER__) */

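/*
 * Note (illustrative only): in the fallback branch above, MBEDTLS_IS_BIG_ENDIAN
 * is a runtime expression rather than a preprocessor constant, which is why
 * the GET/PUT macros below test it with ordinary C conditionals (if / ?:)
 * instead of #if. Compilers can usually fold the test to a constant.
 *
 *     if (MBEDTLS_IS_BIG_ENDIAN) { ... } else { ... }
 */
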
/**
 * Get the unsigned 32-bit integer corresponding to four bytes in
 * big-endian order (MSB first).
 *
 * \param data Base address of the memory to get the four bytes from.
 * \param offset Offset from \p data of the first and most significant
 * byte of the four bytes to build the 32-bit unsigned
 * integer from.
 */
#define MBEDTLS_GET_UINT32_BE(data, offset) \
    ((MBEDTLS_IS_BIG_ENDIAN) \
        ? mbedtls_get_unaligned_uint32((data) + (offset)) \
        : MBEDTLS_BSWAP32(mbedtls_get_unaligned_uint32((data) + (offset))) \
    )

/**
 * Put in memory a 32-bit unsigned integer in big-endian order.
 *
 * \param n 32-bit unsigned integer to put in memory.
 * \param data Base address of the memory where to put the 32-bit
 * unsigned integer.
 * \param offset Offset from \p data where to put the most significant
 * byte of the 32-bit unsigned integer \p n.
 */
#define MBEDTLS_PUT_UINT32_BE(n, data, offset) \
    { \
        if (MBEDTLS_IS_BIG_ENDIAN) \
        { \
            mbedtls_put_unaligned_uint32((data) + (offset), (uint32_t) (n)); \
        } \
        else \
        { \
            mbedtls_put_unaligned_uint32((data) + (offset), MBEDTLS_BSWAP32((uint32_t) (n))); \
        } \
    }

/**
 * Get the unsigned 32-bit integer corresponding to four bytes in
 * little-endian order (LSB first).
 *
 * \param data Base address of the memory to get the four bytes from.
 * \param offset Offset from \p data of the first and least significant
 * byte of the four bytes to build the 32-bit unsigned
 * integer from.
 */
#define MBEDTLS_GET_UINT32_LE(data, offset) \
    ((MBEDTLS_IS_BIG_ENDIAN) \
        ? MBEDTLS_BSWAP32(mbedtls_get_unaligned_uint32((data) + (offset))) \
        : mbedtls_get_unaligned_uint32((data) + (offset)) \
    )

/**
 * Put in memory a 32-bit unsigned integer in little-endian order.
 *
 * \param n 32-bit unsigned integer to put in memory.
 * \param data Base address of the memory where to put the 32-bit
 * unsigned integer.
 * \param offset Offset from \p data where to put the least significant
 * byte of the 32-bit unsigned integer \p n.
 */
#define MBEDTLS_PUT_UINT32_LE(n, data, offset) \
    { \
        if (MBEDTLS_IS_BIG_ENDIAN) \
        { \
            mbedtls_put_unaligned_uint32((data) + (offset), MBEDTLS_BSWAP32((uint32_t) (n))); \
        } \
        else \
        { \
            mbedtls_put_unaligned_uint32((data) + (offset), ((uint32_t) (n))); \
        } \
    }

/**
 * Get the unsigned 16-bit integer corresponding to two bytes in
 * little-endian order (LSB first).
 *
 * \param data Base address of the memory to get the two bytes from.
 * \param offset Offset from \p data of the first and least significant
 * byte of the two bytes to build the 16-bit unsigned
 * integer from.
 */
#define MBEDTLS_GET_UINT16_LE(data, offset) \
    ((MBEDTLS_IS_BIG_ENDIAN) \
        ? MBEDTLS_BSWAP16(mbedtls_get_unaligned_uint16((data) + (offset))) \
        : mbedtls_get_unaligned_uint16((data) + (offset)) \
    )

/**
 * Put in memory a 16-bit unsigned integer in little-endian order.
 *
 * \param n 16-bit unsigned integer to put in memory.
 * \param data Base address of the memory where to put the 16-bit
 * unsigned integer.
 * \param offset Offset from \p data where to put the least significant
 * byte of the 16-bit unsigned integer \p n.
 */
#define MBEDTLS_PUT_UINT16_LE(n, data, offset) \
    { \
        if (MBEDTLS_IS_BIG_ENDIAN) \
        { \
            mbedtls_put_unaligned_uint16((data) + (offset), MBEDTLS_BSWAP16((uint16_t) (n))); \
        } \
        else \
        { \
            mbedtls_put_unaligned_uint16((data) + (offset), (uint16_t) (n)); \
        } \
    }

/**
 * Get the unsigned 16-bit integer corresponding to two bytes in
 * big-endian order (MSB first).
 *
 * \param data Base address of the memory to get the two bytes from.
 * \param offset Offset from \p data of the first and most significant
 * byte of the two bytes to build the 16-bit unsigned
 * integer from.
 */
#define MBEDTLS_GET_UINT16_BE(data, offset) \
    ((MBEDTLS_IS_BIG_ENDIAN) \
        ? mbedtls_get_unaligned_uint16((data) + (offset)) \
        : MBEDTLS_BSWAP16(mbedtls_get_unaligned_uint16((data) + (offset))) \
    )

/**
 * Put in memory a 16-bit unsigned integer in big-endian order.
 *
 * \param n 16-bit unsigned integer to put in memory.
 * \param data Base address of the memory where to put the 16-bit
 * unsigned integer.
 * \param offset Offset from \p data where to put the most significant
 * byte of the 16-bit unsigned integer \p n.
 */
#define MBEDTLS_PUT_UINT16_BE(n, data, offset) \
    { \
        if (MBEDTLS_IS_BIG_ENDIAN) \
        { \
            mbedtls_put_unaligned_uint16((data) + (offset), (uint16_t) (n)); \
        } \
        else \
        { \
            mbedtls_put_unaligned_uint16((data) + (offset), MBEDTLS_BSWAP16((uint16_t) (n))); \
        } \
    }

/**
 * Get the unsigned 24-bit integer corresponding to three bytes in
 * big-endian order (MSB first).
 *
 * \param data Base address of the memory to get the three bytes from.
 * \param offset Offset from \p data of the first and most significant
 * byte of the three bytes to build the 24-bit unsigned
 * integer from.
 */
#define MBEDTLS_GET_UINT24_BE(data, offset) \
    ( \
        ((uint32_t) (data)[(offset)] << 16) \
        | ((uint32_t) (data)[(offset) + 1] << 8) \
        | ((uint32_t) (data)[(offset) + 2]) \
    )

/**
 * Put in memory a 24-bit unsigned integer in big-endian order.
 *
 * \param n 24-bit unsigned integer to put in memory.
 * \param data Base address of the memory where to put the 24-bit
 * unsigned integer.
 * \param offset Offset from \p data where to put the most significant
 * byte of the 24-bit unsigned integer \p n.
 */
#define MBEDTLS_PUT_UINT24_BE(n, data, offset) \
    { \
        (data)[(offset)] = MBEDTLS_BYTE_2(n); \
        (data)[(offset) + 1] = MBEDTLS_BYTE_1(n); \
        (data)[(offset) + 2] = MBEDTLS_BYTE_0(n); \
    }

/**
 * Get the unsigned 24-bit integer corresponding to three bytes in
 * little-endian order (LSB first).
 *
 * \param data Base address of the memory to get the three bytes from.
 * \param offset Offset from \p data of the first and least significant
 * byte of the three bytes to build the 24-bit unsigned
 * integer from.
 */
#define MBEDTLS_GET_UINT24_LE(data, offset) \
    ( \
        ((uint32_t) (data)[(offset)]) \
        | ((uint32_t) (data)[(offset) + 1] << 8) \
        | ((uint32_t) (data)[(offset) + 2] << 16) \
    )

/**
 * Put in memory a 24-bit unsigned integer in little-endian order.
 *
 * \param n 24-bit unsigned integer to put in memory.
 * \param data Base address of the memory where to put the 24-bit
 * unsigned integer.
 * \param offset Offset from \p data where to put the least significant
 * byte of the 24-bit unsigned integer \p n.
 */
#define MBEDTLS_PUT_UINT24_LE(n, data, offset) \
    { \
        (data)[(offset)] = MBEDTLS_BYTE_0(n); \
        (data)[(offset) + 1] = MBEDTLS_BYTE_1(n); \
        (data)[(offset) + 2] = MBEDTLS_BYTE_2(n); \
    }

/**
 * Get the unsigned 64-bit integer corresponding to eight bytes in
 * big-endian order (MSB first).
 *
 * \param data Base address of the memory to get the eight bytes from.
 * \param offset Offset from \p data of the first and most significant
 * byte of the eight bytes to build the 64-bit unsigned
 * integer from.
 */
#define MBEDTLS_GET_UINT64_BE(data, offset) \
    ((MBEDTLS_IS_BIG_ENDIAN) \
        ? mbedtls_get_unaligned_uint64((data) + (offset)) \
        : MBEDTLS_BSWAP64(mbedtls_get_unaligned_uint64((data) + (offset))) \
    )

/**
 * Put in memory a 64-bit unsigned integer in big-endian order.
 *
 * \param n 64-bit unsigned integer to put in memory.
 * \param data Base address of the memory where to put the 64-bit
 * unsigned integer.
 * \param offset Offset from \p data where to put the most significant
 * byte of the 64-bit unsigned integer \p n.
 */
#define MBEDTLS_PUT_UINT64_BE(n, data, offset) \
    { \
        if (MBEDTLS_IS_BIG_ENDIAN) \
        { \
            mbedtls_put_unaligned_uint64((data) + (offset), (uint64_t) (n)); \
        } \
        else \
        { \
            mbedtls_put_unaligned_uint64((data) + (offset), MBEDTLS_BSWAP64((uint64_t) (n))); \
        } \
    }

/**
 * Get the unsigned 64-bit integer corresponding to eight bytes in
 * little-endian order (LSB first).
 *
 * \param data Base address of the memory to get the eight bytes from.
 * \param offset Offset from \p data of the first and least significant
 * byte of the eight bytes to build the 64-bit unsigned
 * integer from.
 */
#define MBEDTLS_GET_UINT64_LE(data, offset) \
    ((MBEDTLS_IS_BIG_ENDIAN) \
        ? MBEDTLS_BSWAP64(mbedtls_get_unaligned_uint64((data) + (offset))) \
        : mbedtls_get_unaligned_uint64((data) + (offset)) \
    )

/**
 * Put in memory a 64-bit unsigned integer in little-endian order.
 *
 * \param n 64-bit unsigned integer to put in memory.
 * \param data Base address of the memory where to put the 64-bit
 * unsigned integer.
 * \param offset Offset from \p data where to put the least significant
 * byte of the 64-bit unsigned integer \p n.
 */
#define MBEDTLS_PUT_UINT64_LE(n, data, offset) \
    { \
        if (MBEDTLS_IS_BIG_ENDIAN) \
        { \
            mbedtls_put_unaligned_uint64((data) + (offset), MBEDTLS_BSWAP64((uint64_t) (n))); \
        } \
        else \
        { \
            mbedtls_put_unaligned_uint64((data) + (offset), (uint64_t) (n)); \
        } \
    }

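/*
 * Usage sketch (illustrative only; msg_len_bits is a hypothetical variable):
 * a common use of the 64-bit big-endian macro is writing a length field,
 * e.g. the 64-bit bit count appended in SHA-512-style padding.
 *
 *     unsigned char len_buf[8];
 *     MBEDTLS_PUT_UINT64_BE((uint64_t) msg_len_bits, len_buf, 0);
 */
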
#endif /* MBEDTLS_LIBRARY_ALIGNMENT_H */