2016-02-24 19:35:16 +00:00
|
|
|
/*
|
|
|
|
* Copyright (c) 2015-2016, ARM Limited, All Rights Reserved
|
|
|
|
* SPDX-License-Identifier: Apache-2.0
|
|
|
|
*
|
|
|
|
* Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
|
|
* not use this file except in compliance with the License.
|
|
|
|
* You may obtain a copy of the License at
|
|
|
|
*
|
|
|
|
* http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
*
|
|
|
|
* Unless required by applicable law or agreed to in writing, software
|
|
|
|
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
|
|
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
* See the License for the specific language governing permissions and
|
|
|
|
* limitations under the License.
|
|
|
|
*/
|
|
|
|
|
2016-11-21 16:59:45 +00:00
|
|
|
/* Declare __STDC_LIMIT_MACROS so stdint.h defines UINT32_MAX when using C++ */
|
|
|
|
#define __STDC_LIMIT_MACROS
|
2017-10-19 12:32:18 +00:00
|
|
|
#include "hal/critical_section_api.h"
|
2016-07-05 16:44:06 +00:00
|
|
|
|
2016-02-24 19:35:16 +00:00
|
|
|
#include "cmsis.h"
|
2016-10-01 07:11:36 +00:00
|
|
|
#include "platform/mbed_assert.h"
|
2017-10-19 12:32:18 +00:00
|
|
|
#include "platform/mbed_critical.h"
|
2017-01-27 11:10:28 +00:00
|
|
|
#include "platform/mbed_toolchain.h"
|
2016-02-24 19:35:16 +00:00
|
|
|
|
2017-11-28 10:32:07 +00:00
|
|
|
// if __EXCLUSIVE_ACCESS rtx macro not defined, we need to get this via own-set architecture macros
#ifndef MBED_EXCLUSIVE_ACCESS
#ifndef __EXCLUSIVE_ACCESS
// Cores with LDREX/STREX support get lock-free atomics; M0/M0+ (ARMv6-M)
// must fall back to critical sections.
#if ((__ARM_ARCH_7M__ == 1U) || \
    (__ARM_ARCH_7EM__ == 1U) || \
    (__ARM_ARCH_8M_BASE__ == 1U) || \
    (__ARM_ARCH_8M_MAIN__ == 1U)) || \
    (__ARM_ARCH_7A__ == 1U)
#define MBED_EXCLUSIVE_ACCESS 1U
#elif (__ARM_ARCH_6M__ == 1U)
#define MBED_EXCLUSIVE_ACCESS 0U
#else
#error "Unknown architecture for exclusive access"
#endif
#else
// Toolchain/RTX already told us - just mirror its setting.
#define MBED_EXCLUSIVE_ACCESS __EXCLUSIVE_ACCESS
#endif
#endif
|
|
|
|
|
2019-01-03 08:54:31 +00:00
|
|
|
static uint32_t critical_section_reentrancy_counter = 0;
|
2016-06-06 10:52:14 +00:00
|
|
|
|
2016-07-01 14:08:31 +00:00
|
|
|
/** Report whether interrupts are currently enabled on this core. */
bool core_util_are_interrupts_enabled(void)
{
#if defined(__CORTEX_A9)
    // Cortex-A: bit 7 of CPSR is the IRQ-disable (I) bit - clear means enabled.
    return ((__get_CPSR() & 0x80) == 0);
#else
    // Cortex-M: PRIMASK bit 0 set means all configurable interrupts masked.
    return ((__get_PRIMASK() & 0x1) == 0);
#endif
}
|
2016-02-24 19:35:16 +00:00
|
|
|
|
2017-05-24 21:29:11 +00:00
|
|
|
/** Report whether the caller is executing in interrupt/exception context. */
bool core_util_is_isr_active(void)
{
#if defined(__CORTEX_A9)
    // Cortex-A: decode the CPSR mode field (bits [4:0]).
    switch (__get_CPSR() & 0x1FU) {
        case CPSR_M_USR:
        case CPSR_M_SYS:
            // User/System mode - normal thread execution.
            return false;
        case CPSR_M_SVC:
        default:
            // SVC and every other exception mode (IRQ/FIQ/ABT/UND) count as ISR.
            return true;
    }
#else
    // Cortex-M: a non-zero IPSR means an exception handler is active.
    return (__get_IPSR() != 0U);
#endif
}
|
|
|
|
|
2017-10-19 12:32:18 +00:00
|
|
|
/** Report whether we are inside a critical section; delegates to the HAL. */
bool core_util_in_critical_section(void)
{
    return hal_in_critical_section();
}
|
2016-02-24 19:35:16 +00:00
|
|
|
|
2017-10-19 12:32:18 +00:00
|
|
|
/** Enter a (re-entrant) critical section, disabling interrupts via the HAL.
 *
 * The HAL section is taken first so the counter update below cannot race.
 */
void core_util_critical_section_enter(void)
{
    hal_critical_section_enter();

    // If the reentrancy counter overflows something has gone badly wrong.
    MBED_ASSERT(critical_section_reentrancy_counter < UINT32_MAX);

    ++critical_section_reentrancy_counter;
}
|
|
|
|
|
2017-10-19 12:32:18 +00:00
|
|
|
/** Leave one nesting level of the critical section; interrupts are only
 * restored when the outermost level exits.
 */
void core_util_critical_section_exit(void)
{
    // If critical_section_enter has not previously been called, do nothing
    if (critical_section_reentrancy_counter == 0) {
        return;
    }

    --critical_section_reentrancy_counter;

    // Outermost exit - hand back to the HAL to re-enable interrupts.
    if (critical_section_reentrancy_counter == 0) {
        hal_critical_section_exit();
    }
}
|
2016-06-01 23:30:09 +00:00
|
|
|
|
2019-02-04 11:12:55 +00:00
|
|
|
/* Inline bool implementations in the header use uint8_t versions to manipulate the bool */
|
|
|
|
MBED_STATIC_ASSERT(sizeof(bool) == sizeof(uint8_t), "Surely bool is a byte");
|
|
|
|
|
2017-11-28 10:32:07 +00:00
|
|
|
#if MBED_EXCLUSIVE_ACCESS
|
2016-06-01 23:30:09 +00:00
|
|
|
|
2016-08-31 18:10:01 +00:00
|
|
|
/* Supress __ldrex and __strex deprecated warnings - "#3731-D: intrinsic is deprecated" */
|
2018-06-27 14:09:15 +00:00
|
|
|
#if defined (__CC_ARM)
|
2016-08-31 18:10:01 +00:00
|
|
|
#pragma diag_suppress 3731
|
2016-08-31 19:30:45 +00:00
|
|
|
#endif
|
2016-08-31 18:10:01 +00:00
|
|
|
|
2018-10-05 12:55:29 +00:00
|
|
|
/** Atomically set the flag and return its previous value (lock-free,
 * LDREX/STREX retry loop). Barriers give acquire/release semantics.
 */
bool core_util_atomic_flag_test_and_set(volatile core_util_atomic_flag *flagPtr)
{
    MBED_BARRIER();
    uint8_t currentValue;
    do {
        currentValue = __LDREXB(&flagPtr->_flag);
        // Retry until the store-exclusive succeeds (returns 0).
    } while (__STREXB(true, &flagPtr->_flag));
    MBED_BARRIER();
    return currentValue;
}
|
|
|
|
|
2017-11-28 10:45:50 +00:00
|
|
|
/** Lock-free 8-bit compare-and-swap. On mismatch, writes the observed value
 * back to *expectedCurrentValue and returns false.
 */
bool core_util_atomic_cas_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
{
    MBED_BARRIER();
    do {
        uint8_t currentValue = __LDREXB(ptr);
        if (currentValue != *expectedCurrentValue) {
            *expectedCurrentValue = currentValue;
            // Abandon the exclusive access before bailing out.
            __CLREX();
            return false;
        }
        // Store fails (non-zero) if another access broke the exclusive monitor.
    } while (__STREXB(desiredValue, ptr));
    MBED_BARRIER();
    return true;
}
|
|
|
|
|
2017-11-28 10:45:50 +00:00
|
|
|
/** Lock-free 16-bit compare-and-swap. On mismatch, writes the observed value
 * back to *expectedCurrentValue and returns false.
 */
bool core_util_atomic_cas_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
{
    MBED_BARRIER();
    do {
        uint16_t currentValue = __LDREXH(ptr);
        if (currentValue != *expectedCurrentValue) {
            *expectedCurrentValue = currentValue;
            // Abandon the exclusive access before bailing out.
            __CLREX();
            return false;
        }
        // Store fails (non-zero) if another access broke the exclusive monitor.
    } while (__STREXH(desiredValue, ptr));
    MBED_BARRIER();
    return true;
}
|
|
|
|
|
|
|
|
|
2017-11-28 10:45:50 +00:00
|
|
|
/** Lock-free 32-bit compare-and-swap. On mismatch, writes the observed value
 * back to *expectedCurrentValue and returns false.
 */
bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
{
    MBED_BARRIER();
    do {
        uint32_t currentValue = __LDREXW(ptr);
        if (currentValue != *expectedCurrentValue) {
            *expectedCurrentValue = currentValue;
            // Abandon the exclusive access before bailing out.
            __CLREX();
            return false;
        }
        // Store fails (non-zero) if another access broke the exclusive monitor.
    } while (__STREXW(desiredValue, ptr));
    MBED_BARRIER();
    return true;
}
|
|
|
|
|
2019-02-04 11:14:07 +00:00
|
|
|
/** Atomically swap in desiredValue and return the previous 8-bit value. */
uint8_t core_util_atomic_exchange_u8(volatile uint8_t *valuePtr, uint8_t desiredValue)
{
    MBED_BARRIER();
    uint8_t currentValue;
    do {
        currentValue = __LDREXB(valuePtr);
        // Retry until the store-exclusive succeeds (returns 0).
    } while (__STREXB(desiredValue, valuePtr));
    MBED_BARRIER();
    return currentValue;
}
|
|
|
|
|
|
|
|
/** Atomically swap in desiredValue and return the previous 16-bit value. */
uint16_t core_util_atomic_exchange_u16(volatile uint16_t *valuePtr, uint16_t desiredValue)
{
    MBED_BARRIER();
    uint16_t currentValue;
    do {
        currentValue = __LDREXH(valuePtr);
        // Retry until the store-exclusive succeeds (returns 0).
    } while (__STREXH(desiredValue, valuePtr));
    MBED_BARRIER();
    return currentValue;
}
|
|
|
|
|
|
|
|
/** Atomically swap in desiredValue and return the previous 32-bit value. */
uint32_t core_util_atomic_exchange_u32(volatile uint32_t *valuePtr, uint32_t desiredValue)
{
    MBED_BARRIER();
    uint32_t currentValue;
    do {
        currentValue = __LDREXW(valuePtr);
        // Retry until the store-exclusive succeeds (returns 0).
    } while (__STREXW(desiredValue, valuePtr));
    MBED_BARRIER();
    return currentValue;
}
|
|
|
|
|
2017-11-28 10:45:50 +00:00
|
|
|
/** Atomically add delta to *valuePtr and return the NEW 8-bit value
 * (wraps modulo 2^8, like plain uint8_t arithmetic).
 */
uint8_t core_util_atomic_incr_u8(volatile uint8_t *valuePtr, uint8_t delta)
{
    MBED_BARRIER();
    uint8_t newValue;
    do {
        newValue = __LDREXB(valuePtr) + delta;
        // Retry until the store-exclusive succeeds (returns 0).
    } while (__STREXB(newValue, valuePtr));
    MBED_BARRIER();
    return newValue;
}
|
|
|
|
|
2017-11-28 10:45:50 +00:00
|
|
|
/** Atomically add delta to *valuePtr and return the NEW 16-bit value. */
uint16_t core_util_atomic_incr_u16(volatile uint16_t *valuePtr, uint16_t delta)
{
    MBED_BARRIER();
    uint16_t newValue;
    do {
        newValue = __LDREXH(valuePtr) + delta;
        // Retry until the store-exclusive succeeds (returns 0).
    } while (__STREXH(newValue, valuePtr));
    MBED_BARRIER();
    return newValue;
}
|
|
|
|
|
2017-11-28 10:45:50 +00:00
|
|
|
/** Atomically add delta to *valuePtr and return the NEW 32-bit value. */
uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t delta)
{
    MBED_BARRIER();
    uint32_t newValue;
    do {
        newValue = __LDREXW(valuePtr) + delta;
        // Retry until the store-exclusive succeeds (returns 0).
    } while (__STREXW(newValue, valuePtr));
    MBED_BARRIER();
    return newValue;
}
|
|
|
|
|
|
|
|
|
2017-11-28 10:45:50 +00:00
|
|
|
/** Atomically subtract delta from *valuePtr and return the NEW 8-bit value. */
uint8_t core_util_atomic_decr_u8(volatile uint8_t *valuePtr, uint8_t delta)
{
    MBED_BARRIER();
    uint8_t newValue;
    do {
        newValue = __LDREXB(valuePtr) - delta;
        // Retry until the store-exclusive succeeds (returns 0).
    } while (__STREXB(newValue, valuePtr));
    MBED_BARRIER();
    return newValue;
}
|
|
|
|
|
2017-11-28 10:45:50 +00:00
|
|
|
/** Atomically subtract delta from *valuePtr and return the NEW 16-bit value. */
uint16_t core_util_atomic_decr_u16(volatile uint16_t *valuePtr, uint16_t delta)
{
    MBED_BARRIER();
    uint16_t newValue;
    do {
        newValue = __LDREXH(valuePtr) - delta;
        // Retry until the store-exclusive succeeds (returns 0).
    } while (__STREXH(newValue, valuePtr));
    MBED_BARRIER();
    return newValue;
}
|
|
|
|
|
2017-11-28 10:45:50 +00:00
|
|
|
/** Atomically subtract delta from *valuePtr and return the NEW 32-bit value. */
uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta)
{
    MBED_BARRIER();
    uint32_t newValue;
    do {
        newValue = __LDREXW(valuePtr) - delta;
        // Retry until the store-exclusive succeeds (returns 0).
    } while (__STREXW(newValue, valuePtr));
    MBED_BARRIER();
    return newValue;
}
|
|
|
|
|
|
|
|
#else
|
|
|
|
|
2018-10-05 12:55:29 +00:00
|
|
|
bool core_util_atomic_flag_test_and_set(volatile core_util_atomic_flag *flagPtr)
|
|
|
|
{
|
|
|
|
core_util_critical_section_enter();
|
|
|
|
uint8_t currentValue = flagPtr->_flag;
|
|
|
|
flagPtr->_flag = true;
|
|
|
|
core_util_critical_section_exit();
|
|
|
|
return currentValue;
|
|
|
|
}
|
|
|
|
|
2017-11-28 10:45:50 +00:00
|
|
|
/** 8-bit compare-and-swap via critical section. On mismatch, the observed
 * value is written back to *expectedCurrentValue and false is returned.
 */
bool core_util_atomic_cas_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
{
    core_util_critical_section_enter();
    const uint8_t readValue = *ptr;
    const bool matched = (readValue == *expectedCurrentValue);
    if (matched) {
        *ptr = desiredValue;
    } else {
        *expectedCurrentValue = readValue;
    }
    core_util_critical_section_exit();
    return matched;
}
|
|
|
|
|
2017-11-28 10:45:50 +00:00
|
|
|
/** 16-bit compare-and-swap via critical section. On mismatch, the observed
 * value is written back to *expectedCurrentValue and false is returned.
 */
bool core_util_atomic_cas_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
{
    core_util_critical_section_enter();
    const uint16_t readValue = *ptr;
    const bool matched = (readValue == *expectedCurrentValue);
    if (matched) {
        *ptr = desiredValue;
    } else {
        *expectedCurrentValue = readValue;
    }
    core_util_critical_section_exit();
    return matched;
}
|
|
|
|
|
|
|
|
|
2017-11-28 10:45:50 +00:00
|
|
|
/** 32-bit compare-and-swap via critical section. On mismatch, the observed
 * value is written back to *expectedCurrentValue and false is returned.
 */
bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
{
    core_util_critical_section_enter();
    const uint32_t readValue = *ptr;
    const bool matched = (readValue == *expectedCurrentValue);
    if (matched) {
        *ptr = desiredValue;
    } else {
        *expectedCurrentValue = readValue;
    }
    core_util_critical_section_exit();
    return matched;
}
|
|
|
|
|
2016-06-25 15:58:08 +00:00
|
|
|
|
2019-02-04 11:14:07 +00:00
|
|
|
/** Swap in desiredValue under a critical section; returns the old value. */
uint8_t core_util_atomic_exchange_u8(volatile uint8_t *ptr, uint8_t desiredValue)
{
    uint8_t oldValue;
    core_util_critical_section_enter();
    oldValue = *ptr;
    *ptr = desiredValue;
    core_util_critical_section_exit();
    return oldValue;
}
|
|
|
|
|
|
|
|
/** Swap in desiredValue under a critical section; returns the old value. */
uint16_t core_util_atomic_exchange_u16(volatile uint16_t *ptr, uint16_t desiredValue)
{
    uint16_t oldValue;
    core_util_critical_section_enter();
    oldValue = *ptr;
    *ptr = desiredValue;
    core_util_critical_section_exit();
    return oldValue;
}
|
|
|
|
|
|
|
|
/** Swap in desiredValue under a critical section; returns the old value. */
uint32_t core_util_atomic_exchange_u32(volatile uint32_t *ptr, uint32_t desiredValue)
{
    uint32_t oldValue;
    core_util_critical_section_enter();
    oldValue = *ptr;
    *ptr = desiredValue;
    core_util_critical_section_exit();
    return oldValue;
}
|
|
|
|
|
|
|
|
|
2017-11-28 10:45:50 +00:00
|
|
|
/** Add delta under a critical section; returns the NEW 8-bit value. */
uint8_t core_util_atomic_incr_u8(volatile uint8_t *valuePtr, uint8_t delta)
{
    core_util_critical_section_enter();
    const uint8_t result = *valuePtr + delta;
    *valuePtr = result;
    core_util_critical_section_exit();
    return result;
}
|
|
|
|
|
2017-11-28 10:45:50 +00:00
|
|
|
/** Add delta under a critical section; returns the NEW 16-bit value. */
uint16_t core_util_atomic_incr_u16(volatile uint16_t *valuePtr, uint16_t delta)
{
    core_util_critical_section_enter();
    const uint16_t result = *valuePtr + delta;
    *valuePtr = result;
    core_util_critical_section_exit();
    return result;
}
|
|
|
|
|
2017-11-28 10:45:50 +00:00
|
|
|
/** Add delta under a critical section; returns the NEW 32-bit value. */
uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t delta)
{
    core_util_critical_section_enter();
    const uint32_t result = *valuePtr + delta;
    *valuePtr = result;
    core_util_critical_section_exit();
    return result;
}
|
|
|
|
|
|
|
|
|
2017-11-28 10:45:50 +00:00
|
|
|
/** Subtract delta under a critical section; returns the NEW 8-bit value. */
uint8_t core_util_atomic_decr_u8(volatile uint8_t *valuePtr, uint8_t delta)
{
    core_util_critical_section_enter();
    const uint8_t result = *valuePtr - delta;
    *valuePtr = result;
    core_util_critical_section_exit();
    return result;
}
|
|
|
|
|
2017-11-28 10:45:50 +00:00
|
|
|
/** Subtract delta under a critical section; returns the NEW 16-bit value. */
uint16_t core_util_atomic_decr_u16(volatile uint16_t *valuePtr, uint16_t delta)
{
    core_util_critical_section_enter();
    const uint16_t result = *valuePtr - delta;
    *valuePtr = result;
    core_util_critical_section_exit();
    return result;
}
|
|
|
|
|
2017-11-28 10:45:50 +00:00
|
|
|
/** Subtract delta under a critical section; returns the NEW 32-bit value. */
uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta)
{
    core_util_critical_section_enter();
    const uint32_t result = *valuePtr - delta;
    *valuePtr = result;
    core_util_critical_section_exit();
    return result;
}
|
|
|
|
|
2016-07-08 19:50:03 +00:00
|
|
|
#endif
|
|
|
|
|
2019-02-04 11:06:58 +00:00
|
|
|
/* No architecture we support has LDREXD/STREXD, so must always disable IRQs for 64-bit operations */

/** Atomically read a 64-bit value (critical section guards the two-word access). */
uint64_t core_util_atomic_load_u64(const volatile uint64_t *valuePtr)
{
    uint64_t value;
    core_util_critical_section_enter();
    value = *valuePtr;
    core_util_critical_section_exit();
    return value;
}
|
|
|
|
|
|
|
|
/** Atomically write a 64-bit value (critical section guards the two-word store). */
void core_util_atomic_store_u64(volatile uint64_t *valuePtr, uint64_t desiredValue)
{
    core_util_critical_section_enter();
    *valuePtr = desiredValue;
    core_util_critical_section_exit();
}
|
|
|
|
|
2019-02-04 11:14:07 +00:00
|
|
|
/** Swap in desiredValue under a critical section; returns the old 64-bit value. */
uint64_t core_util_atomic_exchange_u64(volatile uint64_t *valuePtr, uint64_t desiredValue)
{
    uint64_t oldValue;
    core_util_critical_section_enter();
    oldValue = *valuePtr;
    *valuePtr = desiredValue;
    core_util_critical_section_exit();
    return oldValue;
}
|
|
|
|
|
2019-02-04 11:06:58 +00:00
|
|
|
/** 64-bit compare-and-swap via critical section. On mismatch, the observed
 * value is written back to *expectedCurrentValue and false is returned.
 */
bool core_util_atomic_cas_u64(volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue)
{
    core_util_critical_section_enter();
    const uint64_t readValue = *ptr;
    const bool matched = (readValue == *expectedCurrentValue);
    if (matched) {
        *ptr = desiredValue;
    } else {
        *expectedCurrentValue = readValue;
    }
    core_util_critical_section_exit();
    return matched;
}
|
|
|
|
|
|
|
|
/** Add delta under a critical section; returns the NEW 64-bit value. */
uint64_t core_util_atomic_incr_u64(volatile uint64_t *valuePtr, uint64_t delta)
{
    core_util_critical_section_enter();
    const uint64_t result = *valuePtr + delta;
    *valuePtr = result;
    core_util_critical_section_exit();
    return result;
}
|
|
|
|
|
|
|
|
/** Subtract delta under a critical section; returns the NEW 64-bit value. */
uint64_t core_util_atomic_decr_u64(volatile uint64_t *valuePtr, uint64_t delta)
{
    core_util_critical_section_enter();
    const uint64_t result = *valuePtr - delta;
    *valuePtr = result;
    core_util_critical_section_exit();
    return result;
}
|
2016-07-08 19:50:03 +00:00
|
|
|
|
2019-02-04 11:14:07 +00:00
|
|
|
MBED_STATIC_ASSERT(sizeof(void *) == sizeof(uint32_t), "Alas, pointers must be 32-bit");
|
|
|
|
|
2018-06-27 14:09:15 +00:00
|
|
|
/** Pointer compare-and-swap; thin cast-through wrapper over the u32 CAS. */
bool core_util_atomic_cas_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue)
{
    volatile uint32_t *wordPtr = (volatile uint32_t *)ptr;
    uint32_t *wordExpected = (uint32_t *)expectedCurrentValue;
    return core_util_atomic_cas_u32(wordPtr, wordExpected, (uint32_t)desiredValue);
}
|
|
|
|
|
2019-02-04 11:14:07 +00:00
|
|
|
/** Pointer exchange; returns the previous pointer via the u32 exchange. */
void *core_util_atomic_exchange_ptr(void *volatile *valuePtr, void *desiredValue)
{
    uint32_t previous = core_util_atomic_exchange_u32((volatile uint32_t *)valuePtr, (uint32_t)desiredValue);
    return (void *)previous;
}
|
|
|
|
|
2018-06-27 14:09:15 +00:00
|
|
|
/** Pointer increment by delta BYTES (no element scaling); returns the new pointer. */
void *core_util_atomic_incr_ptr(void *volatile *valuePtr, ptrdiff_t delta)
{
    uint32_t updated = core_util_atomic_incr_u32((volatile uint32_t *)valuePtr, (uint32_t)delta);
    return (void *)updated;
}
|
2016-07-08 03:51:31 +00:00
|
|
|
|
2018-06-27 14:09:15 +00:00
|
|
|
/** Pointer decrement by delta BYTES (no element scaling); returns the new pointer. */
void *core_util_atomic_decr_ptr(void *volatile *valuePtr, ptrdiff_t delta)
{
    uint32_t updated = core_util_atomic_decr_u32((volatile uint32_t *)valuePtr, (uint32_t)delta);
    return (void *)updated;
}
|
2016-06-01 23:30:09 +00:00
|
|
|
|