66 * details.
77 */
88
9+ #pragma once
10+
911/**
1012 * @defgroup sys_atomic_utils Utility functions for atomic access
1113 * @ingroup sys
133135 * @author Marian Buschsieweke <[email protected] > 134136 */
135137
136- #ifndef ATOMIC_UTILS_H
137- #define ATOMIC_UTILS_H
138-
139138#include <stdint.h>
140139
141140#include "irq.h"
148147extern "C" {
149148#endif
150149
150+ /* NOLINTBEGIN(bugprone-macro-parentheses)
151+ *
152+ * The macros ATOMIC_LOAD_IMPL() and friends do not surround the argument used
153+ * to pass the type with parentheses. Suppressing the clang-tidy warning here,
154+ * as adding parentheses around a type would be a syntax error.
155+ */
156+
151157/* Declarations and documentation: */
152158
153159#if !defined(HAS_ATOMIC_BIT ) || defined(DOXYGEN )
@@ -239,6 +245,24 @@ static inline uint32_t atomic_load_u32(const volatile uint32_t *var);
239245 * @return The value stored in @p var
240246 */
241247static inline uint64_t atomic_load_u64 (const volatile uint64_t * var );
/**
 * @brief   Load an `unsigned int` atomically
 *
 * @param[in]       var     Variable to load atomically
 * @return          The value stored in @p var
 */
static inline unsigned atomic_load_unsigned(const volatile unsigned *var)
{
    /* Dispatch on the width of `unsigned` at compile time; the dead
     * branches are eliminated by the optimizer. Const-preserving casts are
     * used (matching atomic_load_uintptr()) so the `const` qualifier of
     * @p var is not cast away. */
    if (sizeof(uint64_t) == sizeof(unsigned)) {
        return atomic_load_u64((const volatile uint64_t *)(uintptr_t)var);
    }

    if (sizeof(uint32_t) == sizeof(unsigned)) {
        return atomic_load_u32((const volatile uint32_t *)(uintptr_t)var);
    }

    return atomic_load_u16((const volatile uint16_t *)(uintptr_t)var);
}
242266
243267/**
244268 * @brief Load an `uintptr_t` atomically
static inline uintptr_t atomic_load_uintptr(const volatile uintptr_t *var)
{
    /* uintptr_t is 2, 4, or 8 bytes wide depending on the platform; the
     * switch below is resolved entirely at compile time */
    switch (sizeof(uintptr_t)) {
    case 2:
        return atomic_load_u16((const volatile uint16_t *)var);
    case 4:
        return atomic_load_u32((const volatile uint32_t *)(uintptr_t)var);
    default:
        return atomic_load_u64((const volatile uint64_t *)(uintptr_t)var);
    }
}
260284/**
261285 * @brief Load an `void *` atomically
@@ -274,7 +298,7 @@ static inline void * atomic_load_ptr(void **ptr_addr) {
274298 */
static inline kernel_pid_t atomic_load_kernel_pid(const volatile kernel_pid_t *var)
{
    /* Delegates to the 16 bit atomic load — the cast assumes kernel_pid_t
     * is a 16 bit integer (NOTE(review): confirm against the kernel_pid_t
     * definition). The explicit cast back to kernel_pid_t avoids an
     * implicit-conversion warning. */
    return (kernel_pid_t)atomic_load_u16((const volatile uint16_t *)var);
}
279303/** @} */
280304
@@ -306,6 +330,23 @@ static inline void atomic_store_u32(volatile uint32_t *dest, uint32_t val);
306330 * @param[in] val Value to write
307331 */
308332static inline void atomic_store_u64 (volatile uint64_t * dest , uint64_t val );
/**
 * @brief   Store an `unsigned int` atomically
 *
 * @param[out]      dest    Location to atomically write the new value to
 * @param[in]       val     Value to write
 */
static inline void atomic_store_unsigned(volatile unsigned *dest, unsigned val)
{
    /* Compile-time dispatch on the width of `unsigned`; only one branch
     * survives optimization. (Doc fix: the @brief previously claimed this
     * stores an `uint64_t`, copy-pasted from atomic_store_u64().) */
    if (sizeof(uint64_t) == sizeof(unsigned)) {
        atomic_store_u64((volatile void *)dest, val);
    }
    else if (sizeof(uint32_t) == sizeof(unsigned)) {
        atomic_store_u32((volatile void *)dest, val);
    }
    else {
        atomic_store_u16((volatile void *)dest, val);
    }
}
309350
310351/**
311352 * @brief Store an `uintptr_t` atomically
@@ -383,6 +424,25 @@ static inline uint32_t atomic_fetch_add_u32(volatile uint32_t *dest,
383424 */
384425static inline uint64_t atomic_fetch_add_u64 (volatile uint64_t * dest ,
385426 uint64_t summand );
/**
 * @brief   Atomically add a value onto a given value
 * @param[in,out]   dest        Add @p summand onto this value atomically
 *                              in-place
 * @param[in]       summand     Value to add onto @p dest
 * @return  The value previously stored in @p dest
 */
static inline unsigned atomic_fetch_add_unsigned(volatile unsigned *dest,
                                                 unsigned summand)
{
    /* Select the fixed-width helper matching the width of `unsigned`;
     * resolved at compile time */
    if (sizeof(unsigned) == sizeof(uint16_t)) {
        return atomic_fetch_add_u16((volatile void *)dest, summand);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return atomic_fetch_add_u32((volatile void *)dest, summand);
    }

    return atomic_fetch_add_u64((volatile void *)dest, summand);
}
386446/** @} */
387447
388448/**
@@ -425,6 +485,26 @@ static inline uint32_t atomic_fetch_sub_u32(volatile uint32_t *dest,
425485 */
426486static inline uint64_t atomic_fetch_sub_u64 (volatile uint64_t * dest ,
427487 uint64_t subtrahend );
/**
 * @brief   Atomically subtract a value from a given value
 * @param[in,out]   dest        Subtract @p subtrahend from this value
 *                              atomically in-place
 * @param[in]       subtrahend  Value to subtract from @p dest
 * @return  The value previously stored in @p dest
 */
static inline unsigned atomic_fetch_sub_unsigned(volatile unsigned *dest,
                                                 unsigned subtrahend)
{
    /* Parameter renamed from `summand` to `subtrahend` so the Doxygen
     * @p reference above resolves; C callers are unaffected by parameter
     * names. Width dispatch is resolved at compile time. */
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return atomic_fetch_sub_u64((volatile void *)dest, subtrahend);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return atomic_fetch_sub_u32((volatile void *)dest, subtrahend);
    }

    return atomic_fetch_sub_u16((volatile void *)dest, subtrahend);
}
428508/** @} */
429509
430510/**
@@ -466,6 +546,26 @@ static inline uint32_t atomic_fetch_or_u32(volatile uint32_t *dest,
466546 */
467547static inline uint64_t atomic_fetch_or_u64 (volatile uint64_t * dest ,
468548 uint64_t val );
/**
 * @brief   Atomic version of `*dest |= val`
 * @param[in,out]   dest    Replace this value with the result of
 *                          `*dest | val`
 * @param[in]       val     Value to bitwise or into @p dest in-place
 * @return  The value previously stored in @p dest
 */
static inline unsigned atomic_fetch_or_unsigned(volatile unsigned *dest,
                                                unsigned val)
{
    /* Pick the helper matching the width of `unsigned` at compile time */
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return atomic_fetch_or_u64((volatile void *)dest, val);
    }
    else if (sizeof(unsigned) == sizeof(uint32_t)) {
        return atomic_fetch_or_u32((volatile void *)dest, val);
    }
    else {
        return atomic_fetch_or_u16((volatile void *)dest, val);
    }
}
469569/** @} */
470570
471571/**
@@ -507,6 +607,26 @@ static inline uint32_t atomic_fetch_xor_u32(volatile uint32_t *dest,
507607 */
508608static inline uint64_t atomic_fetch_xor_u64 (volatile uint64_t * dest ,
509609 uint64_t val );
/**
 * @brief   Atomic version of `*dest ^= val`
 * @param[in,out]   dest    Replace this value with the result of
 *                          `*dest ^ val`
 * @param[in]       val     Value to bitwise xor into @p dest in-place
 * @return  The value previously stored in @p dest
 */
static inline unsigned atomic_fetch_xor_unsigned(volatile unsigned *dest,
                                                 unsigned val)
{
    /* Pick the helper matching the width of `unsigned` at compile time */
    if (sizeof(unsigned) == sizeof(uint16_t)) {
        return atomic_fetch_xor_u16((volatile void *)dest, val);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return atomic_fetch_xor_u32((volatile void *)dest, val);
    }

    return atomic_fetch_xor_u64((volatile void *)dest, val);
}
510630/** @} */
511631
512632/**
@@ -548,6 +668,26 @@ static inline uint32_t atomic_fetch_and_u32(volatile uint32_t *dest,
548668 */
549669static inline uint64_t atomic_fetch_and_u64 (volatile uint64_t * dest ,
550670 uint64_t val );
/**
 * @brief   Atomic version of `*dest &= val`
 * @param[in,out]   dest    Replace this value with the result of
 *                          `*dest & val`
 * @param[in]       val     Value to bitwise and into @p dest in-place
 * @return  The value previously stored in @p dest
 */
static inline unsigned atomic_fetch_and_unsigned(volatile unsigned *dest,
                                                 unsigned val)
{
    /* Pick the helper matching the width of `unsigned` at compile time */
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return atomic_fetch_and_u64((volatile void *)dest, val);
    }
    else if (sizeof(unsigned) == sizeof(uint32_t)) {
        return atomic_fetch_and_u32((volatile void *)dest, val);
    }
    else {
        return atomic_fetch_and_u16((volatile void *)dest, val);
    }
}
551691/** @} */
552692
553693/**
@@ -679,6 +819,26 @@ static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
679819 */
680820static inline uint64_t semi_atomic_fetch_add_u64 (volatile uint64_t * dest ,
681821 uint64_t summand );
/**
 * @brief   Semi-atomically add a value onto a given value
 * @param[in,out]   dest        Add @p summand onto this value
 *                              semi-atomically in-place
 * @param[in]       summand     Value to add onto @p dest
 * @return  The value previously stored in @p dest
 */
static inline unsigned semi_atomic_fetch_add_unsigned(volatile unsigned *dest,
                                                      unsigned summand)
{
    /* Select the fixed-width helper matching the width of `unsigned`;
     * resolved at compile time */
    if (sizeof(unsigned) == sizeof(uint16_t)) {
        return semi_atomic_fetch_add_u16((volatile void *)dest, summand);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return semi_atomic_fetch_add_u32((volatile void *)dest, summand);
    }

    return semi_atomic_fetch_add_u64((volatile void *)dest, summand);
}
682842/** @} */
683843
684844/**
@@ -721,6 +881,26 @@ static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
721881 */
722882static inline uint64_t semi_atomic_fetch_sub_u64 (volatile uint64_t * dest ,
723883 uint64_t subtrahend );
/**
 * @brief   Semi-atomically subtract a value from a given value
 * @param[in,out]   dest        Subtract @p subtrahend from this value
 *                              semi-atomically in-place
 * @param[in]       subtrahend  Value to subtract from @p dest
 * @return  The value previously stored in @p dest
 */
static inline unsigned semi_atomic_fetch_sub_unsigned(volatile unsigned *dest,
                                                      unsigned subtrahend)
{
    /* Parameter renamed from `summand` to `subtrahend` so the Doxygen
     * @p reference above resolves; C callers are unaffected by parameter
     * names. Width dispatch is resolved at compile time. */
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return semi_atomic_fetch_sub_u64((volatile void *)dest, subtrahend);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return semi_atomic_fetch_sub_u32((volatile void *)dest, subtrahend);
    }

    return semi_atomic_fetch_sub_u16((volatile void *)dest, subtrahend);
}
724904/** @} */
725905
726906/**
@@ -762,6 +942,26 @@ static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
762942 */
763943static inline uint64_t semi_atomic_fetch_or_u64 (volatile uint64_t * dest ,
764944 uint64_t val );
/**
 * @brief   Semi-atomic version of `*dest |= val`
 * @param[in,out]   dest    Replace this value with the result of
 *                          `*dest | val`
 * @param[in]       val     Value to bitwise or into @p dest in-place
 * @return  The value previously stored in @p dest
 */
static inline unsigned semi_atomic_fetch_or_unsigned(volatile unsigned *dest,
                                                     unsigned val)
{
    /* Parameter renamed from `summand` to `val` so the Doxygen @p
     * reference above resolves and the name matches the bitwise-or
     * semantics; C callers are unaffected by parameter names. */
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return semi_atomic_fetch_or_u64((volatile void *)dest, val);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return semi_atomic_fetch_or_u32((volatile void *)dest, val);
    }

    return semi_atomic_fetch_or_u16((volatile void *)dest, val);
}
765965/** @} */
766966
767967/**
@@ -804,6 +1004,26 @@ static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
8041004 */
8051005static inline uint64_t semi_atomic_fetch_xor_u64 (volatile uint64_t * dest ,
8061006 uint64_t val );
/**
 * @brief   Semi-atomic version of `*dest ^= val`
 * @param[in,out]   dest    Replace this value with the result of
 *                          `*dest ^ val`
 * @param[in]       val     Value to bitwise xor into @p dest in-place
 * @return  The value previously stored in @p dest
 */
static inline unsigned semi_atomic_fetch_xor_unsigned(volatile unsigned *dest,
                                                      unsigned val)
{
    /* Parameter renamed from `summand` to `val` so the Doxygen @p
     * reference above resolves and the name matches the bitwise-xor
     * semantics; C callers are unaffected by parameter names. */
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return semi_atomic_fetch_xor_u64((volatile void *)dest, val);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return semi_atomic_fetch_xor_u32((volatile void *)dest, val);
    }

    return semi_atomic_fetch_xor_u16((volatile void *)dest, val);
}
8071027/** @} */
8081028
8091029/**
@@ -846,6 +1066,26 @@ static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
8461066 */
8471067static inline uint64_t semi_atomic_fetch_and_u64 (volatile uint64_t * dest ,
8481068 uint64_t val );
/**
 * @brief   Semi-atomic version of `*dest &= val`
 * @param[in,out]   dest    Replace this value with the result of
 *                          `*dest & val`
 * @param[in]       val     Value to bitwise and into @p dest in-place
 * @return  The value previously stored in @p dest
 */
static inline unsigned semi_atomic_fetch_and_unsigned(volatile unsigned *dest,
                                                      unsigned val)
{
    /* Parameter renamed from `summand` to `val` so the Doxygen @p
     * reference above resolves and the name matches the bitwise-and
     * semantics; C callers are unaffected by parameter names. */
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return semi_atomic_fetch_and_u64((volatile void *)dest, val);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return semi_atomic_fetch_and_u32((volatile void *)dest, val);
    }

    return semi_atomic_fetch_and_u16((volatile void *)dest, val);
}
8491089/** @} */
8501090
8511091/* Fallback implementations of atomic utility functions: */
@@ -1391,5 +1631,5 @@ static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
13911631}
13921632#endif
13931633
1394- #endif /* ATOMIC_UTILS_H */
1634+ /* NOLINTEND(bugprone-macro-parentheses) */
13951635/** @} */
0 commit comments