mirror of https://github.com/ARMmbed/mbed-os.git
				
				
				
Merge pull request #9600 from kjbracey-arm/atomic_exchange_64
Atomic extensions: 64-bit, bool, exchange
commit 709e6ff795

@@ -71,6 +71,22 @@ bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentV
}


uint8_t core_util_atomic_exchange_u8(volatile uint8_t *ptr, uint8_t desiredValue)
{
    return 0;
}

uint16_t core_util_atomic_exchange_u16(volatile uint16_t *ptr, uint16_t desiredValue)
{
    return 0;
}

uint32_t core_util_atomic_exchange_u32(volatile uint32_t *ptr, uint32_t desiredValue)
{
    return 0;
}


uint8_t core_util_atomic_incr_u8(volatile uint8_t *valuePtr, uint8_t delta)
{
    return 0;

@@ -103,11 +119,46 @@ uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta)
}


uint64_t core_util_atomic_load_u64(const volatile uint64_t *valuePtr)
{
    return 0;
}

void core_util_atomic_store_u64(volatile uint64_t *valuePtr, uint64_t desiredValue)
{
}

uint64_t core_util_atomic_exchange_u64(volatile uint64_t *valuePtr, uint64_t desiredValue)
{
    return 0;
}

bool core_util_atomic_cas_u64(volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue)
{
    return false;
}

uint64_t core_util_atomic_incr_u64(volatile uint64_t *valuePtr, uint64_t delta)
{
    return 0;
}

uint64_t core_util_atomic_decr_u64(volatile uint64_t *valuePtr, uint64_t delta)
{
    return 0;
}


bool core_util_atomic_cas_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue)
{
    return false;
}

void *core_util_atomic_exchange_ptr(void *volatile *valuePtr, void *desiredValue)
{
    return NULL;
}

void *core_util_atomic_incr_ptr(void *volatile *valuePtr, ptrdiff_t delta)
{
    return NULL;

@@ -100,6 +100,9 @@ void core_util_critical_section_exit(void)
    }
}

/* Inline bool implementations in the header use uint8_t versions to manipulate the bool */
MBED_STATIC_ASSERT(sizeof(bool) == sizeof(uint8_t), "Surely bool is a byte");

#if MBED_EXCLUSIVE_ACCESS

/* Suppress __ldrex and __strex deprecated warnings - "#3731-D: intrinsic is deprecated" */

@@ -109,8 +112,8 @@ void core_util_critical_section_exit(void)

bool core_util_atomic_flag_test_and_set(volatile core_util_atomic_flag *flagPtr)
{
    MBED_BARRIER();
    uint8_t currentValue;
    do {
        currentValue = __LDREXB(&flagPtr->_flag);
    } while (__STREXB(true, &flagPtr->_flag));
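
As a usage sketch (illustrative, not part of this diff): the test-and-set primitive above is enough to build a minimal busy-wait lock. The lock name and the work callback are placeholders, and the CORE_UTIL_ATOMIC_FLAG_INIT initializer is assumed to come from the same header.

#include "platform/mbed_critical.h"

static core_util_atomic_flag my_lock = CORE_UTIL_ATOMIC_FLAG_INIT;  /* hypothetical lock */

void run_locked(void (*work)(void))
{
    /* test_and_set returns the previous value: spin until it was clear, i.e. until we took the lock */
    while (core_util_atomic_flag_test_and_set(&my_lock)) {
        /* busy-wait; real code would yield or sleep here */
    }
    work();
    core_util_atomic_flag_clear(&my_lock);  /* release */
}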
				
			
@@ -164,6 +167,39 @@ bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentV
    return true;
}

uint8_t core_util_atomic_exchange_u8(volatile uint8_t *valuePtr, uint8_t desiredValue)
{
    MBED_BARRIER();
    uint8_t currentValue;
    do {
        currentValue = __LDREXB(valuePtr);
    } while (__STREXB(desiredValue, valuePtr));
    MBED_BARRIER();
    return currentValue;
}

uint16_t core_util_atomic_exchange_u16(volatile uint16_t *valuePtr, uint16_t desiredValue)
{
    MBED_BARRIER();
    uint16_t currentValue;
    do {
        currentValue = __LDREXH(valuePtr);
    } while (__STREXH(desiredValue, valuePtr));
    MBED_BARRIER();
    return currentValue;
}

uint32_t core_util_atomic_exchange_u32(volatile uint32_t *valuePtr, uint32_t desiredValue)
{
    MBED_BARRIER();
    uint32_t currentValue;
    do {
        currentValue = __LDREXW(valuePtr);
    } while (__STREXW(desiredValue, valuePtr));
    MBED_BARRIER();
    return currentValue;
}

uint8_t core_util_atomic_incr_u8(volatile uint8_t *valuePtr, uint8_t delta)
{
    MBED_BARRIER();
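
A usage sketch for the new exchange primitive (illustrative, not part of the diff): exchange gives a race-free read-and-clear, handy for a single-byte event flag shared between an interrupt handler and a thread. The handler and flag names below are hypothetical.

#include <stdint.h>
#include "platform/mbed_critical.h"

static volatile uint8_t event_pending;   /* hypothetical flag shared with an interrupt handler */

void my_irq_handler(void)                /* hypothetical ISR: record that an event happened */
{
    core_util_atomic_exchange_u8(&event_pending, 1);
}

void poll_events(void)                   /* thread context */
{
    /* Fetch and clear in one atomic step, so an event set between a separate
       read and write cannot be lost. */
    if (core_util_atomic_exchange_u8(&event_pending, 0)) {
        /* handle the event */
    }
}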
				
			
@@ -188,8 +224,8 @@ uint16_t core_util_atomic_incr_u16(volatile uint16_t *valuePtr, uint16_t delta)

uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t delta)
{
    MBED_BARRIER();
    uint32_t newValue;
    do {
        newValue = __LDREXW(valuePtr) + delta;
    } while (__STREXW(newValue, valuePtr));

@@ -200,8 +236,8 @@ uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t delta)

uint8_t core_util_atomic_decr_u8(volatile uint8_t *valuePtr, uint8_t delta)
{
    MBED_BARRIER();
    uint8_t newValue;
    do {
        newValue = __LDREXB(valuePtr) - delta;
    } while (__STREXB(newValue, valuePtr));

@@ -211,8 +247,8 @@ uint8_t core_util_atomic_decr_u8(volatile uint8_t *valuePtr, uint8_t delta)

uint16_t core_util_atomic_decr_u16(volatile uint16_t *valuePtr, uint16_t delta)
{
    MBED_BARRIER();
    uint16_t newValue;
    do {
        newValue = __LDREXH(valuePtr) - delta;
    } while (__STREXH(newValue, valuePtr));

@@ -222,8 +258,8 @@ uint16_t core_util_atomic_decr_u16(volatile uint16_t *valuePtr, uint16_t delta)

uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta)
{
    MBED_BARRIER();
    uint32_t newValue;
    do {
        newValue = __LDREXW(valuePtr) - delta;
    } while (__STREXW(newValue, valuePtr));

@@ -295,6 +331,34 @@ bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentV
}


uint8_t core_util_atomic_exchange_u8(volatile uint8_t *ptr, uint8_t desiredValue)
{
    core_util_critical_section_enter();
    uint8_t currentValue = *ptr;
    *ptr = desiredValue;
    core_util_critical_section_exit();
    return currentValue;
}

uint16_t core_util_atomic_exchange_u16(volatile uint16_t *ptr, uint16_t desiredValue)
{
    core_util_critical_section_enter();
    uint16_t currentValue = *ptr;
    *ptr = desiredValue;
    core_util_critical_section_exit();
    return currentValue;
}

uint32_t core_util_atomic_exchange_u32(volatile uint32_t *ptr, uint32_t desiredValue)
{
    core_util_critical_section_enter();
    uint32_t currentValue = *ptr;
    *ptr = desiredValue;
    core_util_critical_section_exit();
    return currentValue;
}


uint8_t core_util_atomic_incr_u8(volatile uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;

@@ -358,6 +422,69 @@ uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta)

#endif

/* No architecture we support has LDREXD/STREXD, so must always disable IRQs for 64-bit operations */
uint64_t core_util_atomic_load_u64(const volatile uint64_t *valuePtr)
{
    core_util_critical_section_enter();
    uint64_t currentValue = *valuePtr;
    core_util_critical_section_exit();
    return currentValue;
}

void core_util_atomic_store_u64(volatile uint64_t *valuePtr, uint64_t desiredValue)
{
    core_util_critical_section_enter();
    *valuePtr = desiredValue;
    core_util_critical_section_exit();
}

uint64_t core_util_atomic_exchange_u64(volatile uint64_t *valuePtr, uint64_t desiredValue)
{
    core_util_critical_section_enter();
    uint64_t currentValue = *valuePtr;
    *valuePtr = desiredValue;
    core_util_critical_section_exit();
    return currentValue;
}

bool core_util_atomic_cas_u64(volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue)
{
    bool success;
    uint64_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}

uint64_t core_util_atomic_incr_u64(volatile uint64_t *valuePtr, uint64_t delta)
{
    uint64_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint64_t core_util_atomic_decr_u64(volatile uint64_t *valuePtr, uint64_t delta)
{
    uint64_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

MBED_STATIC_ASSERT(sizeof(void *) == sizeof(uint32_t), "Alas, pointers must be 32-bit");

bool core_util_atomic_cas_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue)
{
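
A usage sketch for the new 64-bit operations (illustrative, not part of the diff): a byte counter updated from interrupt context can now live in a single uint64_t and be read coherently from thread context, since both sides go through the critical-section-protected helpers. The names below are hypothetical.

#include <stdint.h>
#include "platform/mbed_critical.h"

static volatile uint64_t total_bytes;    /* hypothetical 64-bit counter */

void on_rx_chunk(uint32_t len)           /* e.g. called from an interrupt handler */
{
    core_util_atomic_incr_u64(&total_bytes, len);
}

uint64_t bytes_so_far(void)              /* called from thread context */
{
    /* A plain 64-bit read can tear on a 32-bit core; the atomic load cannot. */
    return core_util_atomic_load_u64(&total_bytes);
}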
				
			
@@ -367,6 +494,11 @@ bool core_util_atomic_cas_ptr(void *volatile *ptr, void **expectedCurrentValue,
               (uint32_t)desiredValue);
}

void *core_util_atomic_exchange_ptr(void *volatile *valuePtr, void *desiredValue)
{
    return (void *)core_util_atomic_exchange_u32((volatile uint32_t *)valuePtr, (uint32_t)desiredValue);
}

void *core_util_atomic_incr_ptr(void *volatile *valuePtr, ptrdiff_t delta)
{
    return (void *)core_util_atomic_incr_u32((volatile uint32_t *)valuePtr, (uint32_t)delta);

@@ -202,175 +202,45 @@ MBED_FORCEINLINE void core_util_atomic_flag_clear(volatile core_util_atomic_flag
 */
bool core_util_atomic_cas_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue);

/** \copydoc core_util_atomic_cas_u8 */
bool core_util_atomic_cas_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue);

/** \copydoc core_util_atomic_cas_u8 */
bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue);

/** \copydoc core_util_atomic_cas_u8 */
bool core_util_atomic_cas_u64(volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue);

/** \copydoc core_util_atomic_cas_u8 */
MBED_FORCEINLINE int8_t core_util_atomic_cas_s8(volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue)
{
    return (int8_t)core_util_atomic_cas_u8((volatile uint8_t *)ptr, (uint8_t *)expectedCurrentValue, (uint8_t)desiredValue);
}

/** \copydoc core_util_atomic_cas_u8 */
MBED_FORCEINLINE int16_t core_util_atomic_cas_s16(volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue)
{
    return (int16_t)core_util_atomic_cas_u16((volatile uint16_t *)ptr, (uint16_t *)expectedCurrentValue, (uint16_t)desiredValue);
}

/** \copydoc core_util_atomic_cas_u8 */
MBED_FORCEINLINE int32_t core_util_atomic_cas_s32(volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue)
{
    return (int32_t)core_util_atomic_cas_u32((volatile uint32_t *)ptr, (uint32_t *)expectedCurrentValue, (uint32_t)desiredValue);
}

/** \copydoc core_util_atomic_cas_u8 */
MBED_FORCEINLINE int64_t core_util_atomic_cas_s64(volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue)
{
    return (int64_t)core_util_atomic_cas_u64((volatile uint64_t *)ptr, (uint64_t *)expectedCurrentValue, (uint64_t)desiredValue);
}

/** \copydoc core_util_atomic_cas_u8 */
MBED_FORCEINLINE bool core_util_atomic_cas_bool(volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue)
{
    return (bool)core_util_atomic_cas_u8((volatile uint8_t *)ptr, (uint8_t *)expectedCurrentValue, desiredValue);
}

/** \copydoc core_util_atomic_cas_u8 */
bool core_util_atomic_cas_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue);

/**
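
The incr() pseudocode in the core_util_atomic_cas_u8 documentation translates directly into C. The sketch below is illustrative (not part of the diff) and the helper name is hypothetical; it relies on the documented behaviour that a failed CAS writes the observed value back through expectedCurrentValue, so the retry loop needs no separate re-read.

#include <stdint.h>
#include "platform/mbed_critical.h"

/* Hypothetical helper: add 'a' to *p atomically using only CAS, following the
   incr() pseudocode from the core_util_atomic_cas_u8 documentation. */
static uint32_t cas_incr_u32(volatile uint32_t *p, uint32_t a)
{
    uint32_t value = *p;  /* initial fetch need not be atomic */
    while (!core_util_atomic_cas_u32(p, &value, value + a)) {
        /* CAS failed: 'value' now holds the freshly observed contents of *p */
    }
    return value + a;
}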
				
			
@@ -409,6 +279,71 @@ MBED_FORCEINLINE uint32_t core_util_atomic_load_u32(const volatile uint32_t *val
    return value;
}

/**
 * Atomic load.
 * @param  valuePtr Target memory location.
 * @return          The loaded value.
 */
uint64_t core_util_atomic_load_u64(const volatile uint64_t *valuePtr);

/**
 * Atomic load.
 * @param  valuePtr Target memory location.
 * @return          The loaded value.
 */
MBED_FORCEINLINE int8_t core_util_atomic_load_s8(const volatile int8_t *valuePtr)
{
    int8_t value = *valuePtr;
    MBED_BARRIER();
    return value;
}

/**
 * Atomic load.
 * @param  valuePtr Target memory location.
 * @return          The loaded value.
 */
MBED_FORCEINLINE int16_t core_util_atomic_load_s16(const volatile int16_t *valuePtr)
{
    int16_t value = *valuePtr;
    MBED_BARRIER();
    return value;
}

/**
 * Atomic load.
 * @param  valuePtr Target memory location.
 * @return          The loaded value.
 */
MBED_FORCEINLINE int32_t core_util_atomic_load_s32(const volatile int32_t *valuePtr)
{
    int32_t value = *valuePtr;
    MBED_BARRIER();
    return value;
}

/**
 * Atomic load.
 * @param  valuePtr Target memory location.
 * @return          The loaded value.
 */
MBED_FORCEINLINE int64_t core_util_atomic_load_s64(const volatile int64_t *valuePtr)
{
    return (int64_t)core_util_atomic_load_u64((const volatile uint64_t *)valuePtr);
}

/**
 * Atomic load.
 * @param  valuePtr Target memory location.
 * @return          The loaded value.
 */
MBED_FORCEINLINE bool core_util_atomic_load_bool(const volatile bool *valuePtr)
{
    bool value = *valuePtr;
    MBED_BARRIER();
    return value;
}

/**
 * Atomic load.
 * @param  valuePtr Target memory location.

@@ -457,6 +392,71 @@ MBED_FORCEINLINE void core_util_atomic_store_u32(volatile uint32_t *valuePtr, ui
    MBED_BARRIER();
}

/**
 * Atomic store.
 * @param  valuePtr     Target memory location.
 * @param  desiredValue The value to store.
 */
void core_util_atomic_store_u64(volatile uint64_t *valuePtr, uint64_t desiredValue);

/**
 * Atomic store.
 * @param  valuePtr     Target memory location.
 * @param  desiredValue The value to store.
 */
MBED_FORCEINLINE void core_util_atomic_store_s8(volatile int8_t *valuePtr, int8_t desiredValue)
{
    MBED_BARRIER();
    *valuePtr = desiredValue;
    MBED_BARRIER();
}

/**
 * Atomic store.
 * @param  valuePtr     Target memory location.
 * @param  desiredValue The value to store.
 */
MBED_FORCEINLINE void core_util_atomic_store_s16(volatile int16_t *valuePtr, int16_t desiredValue)
{
    MBED_BARRIER();
    *valuePtr = desiredValue;
    MBED_BARRIER();
}

/**
 * Atomic store.
 * @param  valuePtr     Target memory location.
 * @param  desiredValue The value to store.
 */
MBED_FORCEINLINE void core_util_atomic_store_s32(volatile int32_t *valuePtr, int32_t desiredValue)
{
    MBED_BARRIER();
    *valuePtr = desiredValue;
    MBED_BARRIER();
}

/**
 * Atomic store.
 * @param  valuePtr     Target memory location.
 * @param  desiredValue The value to store.
 */
MBED_FORCEINLINE void core_util_atomic_store_s64(volatile int64_t *valuePtr, int64_t desiredValue)
{
    core_util_atomic_store_u64((volatile uint64_t *)valuePtr, (uint64_t)desiredValue);
}

/**
 * Atomic store.
 * @param  valuePtr     Target memory location.
 * @param  desiredValue The value to store.
 */
MBED_FORCEINLINE void core_util_atomic_store_bool(volatile bool *valuePtr, bool desiredValue)
{
    MBED_BARRIER();
    *valuePtr = desiredValue;
    MBED_BARRIER();
}

/**
 * Atomic store.
 * @param  valuePtr     Target memory location.

@@ -469,6 +469,101 @@ MBED_FORCEINLINE void core_util_atomic_store_ptr(void *volatile *valuePtr, void
    MBED_BARRIER();
}

/**
 * Atomic exchange.
 * @param  valuePtr     Target memory location.
 * @param  desiredValue The value to store.
 * @return              The previous value.
 */
uint8_t core_util_atomic_exchange_u8(volatile uint8_t *valuePtr, uint8_t desiredValue);

/**
 * Atomic exchange.
 * @param  valuePtr     Target memory location.
 * @param  desiredValue The value to store.
 * @return              The previous value.
 */
uint16_t core_util_atomic_exchange_u16(volatile uint16_t *valuePtr, uint16_t desiredValue);

/**
 * Atomic exchange.
 * @param  valuePtr     Target memory location.
 * @param  desiredValue The value to store.
 * @return              The previous value.
 */
uint32_t core_util_atomic_exchange_u32(volatile uint32_t *valuePtr, uint32_t desiredValue);

/**
 * Atomic exchange.
 * @param  valuePtr     Target memory location.
 * @param  desiredValue The value to store.
 * @return              The previous value.
 */
uint64_t core_util_atomic_exchange_u64(volatile uint64_t *valuePtr, uint64_t desiredValue);

/**
 * Atomic exchange.
 * @param  valuePtr     Target memory location.
 * @param  desiredValue The value to store.
 * @return              The previous value.
 */
MBED_FORCEINLINE int8_t core_util_atomic_exchange_s8(volatile int8_t *valuePtr, int8_t desiredValue)
{
    return (int8_t)core_util_atomic_exchange_u8((volatile uint8_t *)valuePtr, (uint8_t)desiredValue);
}

/**
 * Atomic exchange.
 * @param  valuePtr     Target memory location.
 * @param  desiredValue The value to store.
 * @return              The previous value.
 */
MBED_FORCEINLINE int16_t core_util_atomic_exchange_s16(volatile int16_t *valuePtr, int16_t desiredValue)
{
    return (int16_t)core_util_atomic_exchange_u16((volatile uint16_t *)valuePtr, (uint16_t)desiredValue);
}

/**
 * Atomic exchange.
 * @param  valuePtr     Target memory location.
 * @param  desiredValue The value to store.
 * @return              The previous value.
 */
MBED_FORCEINLINE int32_t core_util_atomic_exchange_s32(volatile int32_t *valuePtr, int32_t desiredValue)
{
    return (int32_t)core_util_atomic_exchange_u32((volatile uint32_t *)valuePtr, (uint32_t)desiredValue);
}

/**
 * Atomic exchange.
 * @param  valuePtr     Target memory location.
 * @param  desiredValue The value to store.
 * @return              The previous value.
 */
MBED_FORCEINLINE int64_t core_util_atomic_exchange_s64(volatile int64_t *valuePtr, int64_t desiredValue)
{
    return (int64_t)core_util_atomic_exchange_u64((volatile uint64_t *)valuePtr, (uint64_t)desiredValue);
}

/**
 * Atomic exchange.
 * @param  valuePtr     Target memory location.
 * @param  desiredValue The value to store.
 * @return              The previous value.
 */
MBED_FORCEINLINE bool core_util_atomic_exchange_bool(volatile bool *valuePtr, bool desiredValue)
{
    return (bool)core_util_atomic_exchange_u8((volatile uint8_t *)valuePtr, desiredValue);
}

/**
 * Atomic exchange.
 * @param  valuePtr     Target memory location.
 * @param  desiredValue The value to store.
 * @return              The previous value.
 */
void *core_util_atomic_exchange_ptr(void *volatile *valuePtr, void *desiredValue);

/**
 * Atomic increment.
 * @param  valuePtr Target memory location being incremented.
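
A usage sketch for the pointer exchange (illustrative, not part of the diff): a producer can publish a freshly filled buffer and take back the previously published one in a single atomic step. The slot and function names are hypothetical.

#include <stddef.h>
#include "platform/mbed_critical.h"

static void *volatile latest_sample;     /* hypothetical shared slot, NULL when empty */

void *publish_buffer(void *fresh)        /* producer: swap in a filled buffer, get the old one back */
{
    return core_util_atomic_exchange_ptr(&latest_sample, fresh);
}

void *take_buffer(void)                  /* consumer: take the latest buffer, leave the slot empty */
{
    return core_util_atomic_exchange_ptr(&latest_sample, NULL);
}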
				
			
@@ -493,6 +588,58 @@ uint16_t core_util_atomic_incr_u16(volatile uint16_t *valuePtr, uint16_t delta);
 */
uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t delta);

/**
 * Atomic increment.
 * @param  valuePtr Target memory location being incremented.
 * @param  delta    The amount being incremented.
 * @return          The new incremented value.
 */
uint64_t core_util_atomic_incr_u64(volatile uint64_t *valuePtr, uint64_t delta);

/**
 * Atomic increment.
 * @param  valuePtr Target memory location being incremented.
 * @param  delta    The amount being incremented.
 * @return          The new incremented value.
 */
MBED_FORCEINLINE int8_t core_util_atomic_incr_s8(volatile int8_t *valuePtr, int8_t delta)
{
    return (int8_t)core_util_atomic_incr_u8((volatile uint8_t *)valuePtr, (uint8_t)delta);
}

/**
 * Atomic increment.
 * @param  valuePtr Target memory location being incremented.
 * @param  delta    The amount being incremented.
 * @return          The new incremented value.
 */
MBED_FORCEINLINE int16_t core_util_atomic_incr_s16(volatile int16_t *valuePtr, int16_t delta)
{
    return (int16_t)core_util_atomic_incr_u16((volatile uint16_t *)valuePtr, (uint16_t)delta);
}

/**
 * Atomic increment.
 * @param  valuePtr Target memory location being incremented.
 * @param  delta    The amount being incremented.
 * @return          The new incremented value.
 */
MBED_FORCEINLINE int32_t core_util_atomic_incr_s32(volatile int32_t *valuePtr, int32_t delta)
{
    return (int32_t)core_util_atomic_incr_u32((volatile uint32_t *)valuePtr, (uint32_t)delta);
}

/**
 * Atomic increment.
 * @param  valuePtr Target memory location being incremented.
 * @param  delta    The amount being incremented.
 * @return          The new incremented value.
 */
MBED_FORCEINLINE int64_t core_util_atomic_incr_s64(volatile int64_t *valuePtr, int64_t delta)
{
    return (int64_t)core_util_atomic_incr_u64((volatile uint64_t *)valuePtr, (uint64_t)delta);
}

/**
 * Atomic increment.
 * @param  valuePtr Target memory location being incremented.

@@ -528,6 +675,58 @@ uint16_t core_util_atomic_decr_u16(volatile uint16_t *valuePtr, uint16_t delta);
 */
uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta);

/**
 * Atomic decrement.
 * @param  valuePtr Target memory location being decremented.
 * @param  delta    The amount being decremented.
 * @return          The new decremented value.
 */
uint64_t core_util_atomic_decr_u64(volatile uint64_t *valuePtr, uint64_t delta);

/**
 * Atomic decrement.
 * @param  valuePtr Target memory location being decremented.
 * @param  delta    The amount being decremented.
 * @return          The new decremented value.
 */
MBED_FORCEINLINE int8_t core_util_atomic_decr_s8(volatile int8_t *valuePtr, int8_t delta)
{
    return (int8_t)core_util_atomic_decr_u8((volatile uint8_t *)valuePtr, (uint8_t)delta);
}

/**
 * Atomic decrement.
 * @param  valuePtr Target memory location being decremented.
 * @param  delta    The amount being decremented.
 * @return          The new decremented value.
 */
MBED_FORCEINLINE int16_t core_util_atomic_decr_s16(volatile int16_t *valuePtr, int16_t delta)
{
    return (int16_t)core_util_atomic_decr_u16((volatile uint16_t *)valuePtr, (uint16_t)delta);
}

/**
 * Atomic decrement.
 * @param  valuePtr Target memory location being decremented.
 * @param  delta    The amount being decremented.
 * @return          The new decremented value.
 */
MBED_FORCEINLINE int32_t core_util_atomic_decr_s32(volatile int32_t *valuePtr, int32_t delta)
{
    return (int32_t)core_util_atomic_decr_u32((volatile uint32_t *)valuePtr, (uint32_t)delta);
}

/**
 * Atomic decrement.
 * @param  valuePtr Target memory location being decremented.
 * @param  delta    The amount being decremented.
 * @return          The new decremented value.
 */
MBED_FORCEINLINE int64_t core_util_atomic_decr_s64(volatile int64_t *valuePtr, int64_t delta)
{
    return (int64_t)core_util_atomic_decr_u64((volatile uint64_t *)valuePtr, (uint64_t)delta);
}

/**
 * Atomic decrement.
 * @param  valuePtr Target memory location being decremented.
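
A usage sketch for the increment/decrement pair (illustrative, not part of the diff): a simple reference count, relying on the documented behaviour that both calls return the new value. The object type and release policy are placeholders.

#include <stdint.h>
#include <stdlib.h>
#include "platform/mbed_critical.h"

typedef struct {                         /* placeholder reference-counted object */
    uint32_t refs;
    /* payload */
} shared_t;

void shared_ref(shared_t *obj)
{
    core_util_atomic_incr_u32(&obj->refs, 1);
}

void shared_unref(shared_t *obj)
{
    /* decr returns the new value, so exactly one caller observes it reach zero */
    if (core_util_atomic_decr_u32(&obj->refs, 1) == 0) {
        free(obj);
    }
}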
				
			
			