Add Critical Section HAL API specification

- Define header functions for Critical Section HAL API
  - hal_critical_section_enter()
  - hal_critical_section_exit()

- Add a weak default implementation of the HAL API. The default implementation
  matches the previous behaviour implemented in mbed_critical:
  - The first call to enter a critical section stores the state of interrupts
    before disabling them, and each successive (nested) call re-disables interrupts.
  - The last (outermost) exit restores the IRQ state that was saved on first
    entry to the critical section; exits from nested calls are ignored.

- Add function 'core_util_in_critical_section' to the user-facing API to
  determine whether the program is currently in a critical section, instead of
  depending on 'core_util_are_interrupts_enabled' (see the usage sketch below).
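
A minimal usage sketch of the user-facing API (hypothetical application code,
not part of this change; the shared counter and helper functions are made up
for illustration, only the core_util_* calls come from this API):

#include <stdbool.h>
#include <stdint.h>
#include "platform/mbed_critical.h"

/* Hypothetical data shared with an interrupt handler. */
static volatile uint32_t shared_counter = 0;

void shared_counter_increment(void)
{
    core_util_critical_section_enter();   /* outermost call saves and disables IRQs */
    shared_counter++;                      /* cannot be preempted by an interrupt    */
    core_util_critical_section_exit();    /* outermost exit restores the IRQ state  */
}

bool shared_counter_locked(void)
{
    /* True while any enter() has not yet been matched by an exit(). */
    return core_util_in_critical_section();
}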
pull/5346/head
Steven Cartmell 2017-10-19 13:32:18 +01:00
parent af9e07357a
commit 07a394ee8b
4 changed files with 165 additions and 42 deletions


@@ -0,0 +1,70 @@
/** \addtogroup hal */
/** @{*/
/* mbed Microcontroller Library
* Copyright (c) 2006-2017 ARM Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MBED_CRITICAL_SECTION_API_H
#define MBED_CRITICAL_SECTION_API_H
#ifdef __cplusplus
extern "C" {
#endif
/**
* \defgroup hal_critical Critical Section HAL functions
* @{
*/
/**
* Mark the start of a critical section
*
* This function is called directly by core_util_critical_section_enter on
* the first entry into a critical section.
*
* The default behavior of this function is to save the current state of
* interrupts before disabling them.
*
* The function is only called once per critical section by
* core_util_critical_section_enter. When implementing this function for a
* platform, you must store any state that this function alters so that it
* can be restored when exiting the critical section.
*
*/
void hal_critical_section_enter(void);
/** Mark the end of a critical section
*
* This function is called directly by core_util_critical_section_exit on the
* final exit from a critical section.
*
* The default behavior of this function is to restore the state of interrupts
* as they were prior to entering this critical section.
*
* This function is only called once per critical section. When implemented
* for a specific platform it must restore any state that was saved upon
* entering the current critical section.
*
*/
void hal_critical_section_exit(void);
/**@}*/
#ifdef __cplusplus
}
#endif
#endif // MBED_CRITICAL_SECTION_API_H
/** @}*/
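
Because the default implementation that follows is declared MBED_WEAK, a target
can provide its own version of these hooks. A rough sketch of such an override
(hypothetical target code, assuming a Cortex-M style PRIMASK save/restore
through CMSIS; not part of this change):

#include <stdint.h>
#include "hal/critical_section_api.h"
#include "cmsis.h"

static uint32_t target_saved_primask;

void hal_critical_section_enter(void)
{
    /* Called only on the outermost enter: save the interrupt mask, then disable IRQs. */
    target_saved_primask = __get_PRIMASK();
    __disable_irq();
}

void hal_critical_section_exit(void)
{
    /* Called only on the outermost exit: restore the interrupt mask saved on entry. */
    __set_PRIMASK(target_saved_primask);
}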


@@ -0,0 +1,65 @@
/* mbed Microcontroller Library
* Copyright (c) 2017 ARM Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cmsis.h"
#include "hal/critical_section_api.h"
#include "platform/mbed_assert.h"
#include "platform/mbed_toolchain.h"
#include <stdbool.h>
static volatile bool critical_interrupts_enabled = false;

static bool are_interrupts_enabled(void)
{
#if defined(__CORTEX_A9)
    // CPSR bit 7 (0x80) is the I bit: IRQs are enabled while it is clear.
    return ((__get_CPSR() & 0x80) == 0);
#else
    // PRIMASK bit 0 set masks all configurable-priority exceptions (IRQs disabled).
    return ((__get_PRIMASK() & 0x1) == 0);
#endif
}

MBED_WEAK void hal_critical_section_enter(void)
{
    critical_interrupts_enabled = are_interrupts_enabled();

#ifndef FEATURE_UVISOR
    // If we are in a nested critical section and interrupts are still enabled
    // something has gone wrong.
    MBED_ASSERT(!are_interrupts_enabled());
#else
#warning "core_util_critical_section_enter needs fixing to work from unprivileged code"
#endif /* FEATURE_UVISOR */

    __disable_irq();
}

MBED_WEAK void hal_critical_section_exit(void)
{
    // FIXME
#ifndef FEATURE_UVISOR
    // Interrupts must be disabled on invoking an exit from a critical section
    MBED_ASSERT(!are_interrupts_enabled());
#else
#warning "core_util_critical_section_exit needs fixing to work from unprivileged code"
#endif /* FEATURE_UVISOR */

    // Restore the IRQs to their state prior to entering the critical section
    if (critical_interrupts_enabled == true) {
        __enable_irq();
    }
}


@@ -17,14 +17,14 @@
/* Declare __STDC_LIMIT_MACROS so stdint.h defines UINT32_MAX when using C++ */
#define __STDC_LIMIT_MACROS
#include "platform/mbed_critical.h"
#include "hal/critical_section_api.h"
#include "cmsis.h"
#include "platform/mbed_assert.h"
#include "platform/mbed_critical.h"
#include "platform/mbed_toolchain.h"
static volatile uint32_t interrupt_enable_counter = 0;
static volatile bool critical_interrupts_disabled = false;
static volatile uint32_t critical_section_reentrancy_counter = 0;
bool core_util_are_interrupts_enabled(void)
{
@@ -51,53 +51,34 @@ bool core_util_is_isr_active(void)
#endif
}
MBED_WEAK void core_util_critical_section_enter(void)
bool core_util_in_critical_section(void)
{
bool interrupts_disabled = !core_util_are_interrupts_enabled();
__disable_irq();
/* Save the interrupt disabled state as it was prior to any nested critical section lock use */
if (!interrupt_enable_counter) {
critical_interrupts_disabled = interrupts_disabled;
}
/* If the interrupt_enable_counter overflows or we are in a nested critical section and interrupts
are enabled, then something has gone badly wrong thus assert an error.
*/
MBED_ASSERT(interrupt_enable_counter < UINT32_MAX);
// FIXME
#ifndef FEATURE_UVISOR
if (interrupt_enable_counter > 0) {
MBED_ASSERT(interrupts_disabled);
}
#else
#warning "core_util_critical_section_enter needs fixing to work from unprivileged code"
#endif /* FEATURE_UVISOR */
interrupt_enable_counter++;
return (critical_section_reentrancy_counter != 0);
}
MBED_WEAK void core_util_critical_section_exit(void)
void core_util_critical_section_enter(void)
{
/* If critical_section_enter has not previously been called, do nothing */
if (interrupt_enable_counter) {
// If the reentrancy counter overflows something has gone badly wrong.
MBED_ASSERT(critical_section_reentrancy_counter < UINT32_MAX);
// FIXME
#ifndef FEATURE_UVISOR
bool interrupts_disabled = !core_util_are_interrupts_enabled(); /* get the current interrupt disabled state */
if (critical_section_reentrancy_counter == 0) {
hal_critical_section_enter();
}
MBED_ASSERT(interrupts_disabled); /* Interrupts must be disabled on invoking an exit from a critical section */
#else
#warning "core_util_critical_section_exit needs fixing to work from unprivileged code"
#endif /* FEATURE_UVISOR */
critical_section_reentrancy_counter++;
}
interrupt_enable_counter--;
void core_util_critical_section_exit(void)
{
// If critical_section_enter has not previously been called, do nothing
if (critical_section_reentrancy_counter == 0) {
return;
}
/* Only re-enable interrupts if we are exiting the last of the nested critical sections and
interrupts were enabled on entry to the first critical section.
*/
if (!interrupt_enable_counter && !critical_interrupts_disabled) {
__enable_irq();
}
critical_section_reentrancy_counter--;
if (critical_section_reentrancy_counter == 0) {
hal_critical_section_exit();
}
}
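
To make the reentrancy behaviour above concrete, a short hypothetical sequence
(the wrapper function is made up; the counter transitions and HAL calls follow
the code above):

void nested_critical_sections(void)
{
    core_util_critical_section_enter();   /* counter 0 -> 1: hal_critical_section_enter() runs */
    core_util_critical_section_enter();   /* counter 1 -> 2: HAL is not called again           */

    /* core_util_in_critical_section() returns true anywhere in here. */

    core_util_critical_section_exit();    /* counter 2 -> 1: HAL is not called                 */
    core_util_critical_section_exit();    /* counter 1 -> 0: hal_critical_section_exit() runs  */
}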


@@ -82,6 +82,13 @@ void core_util_critical_section_enter(void);
*/
void core_util_critical_section_exit(void);
/**
* Determine if we are currently in a critical section
*
* @return true if in a critical section, false otherwise.
*/
bool core_util_in_critical_section(void);
/**
* Atomic compare and set. It compares the contents of a memory location to a
* given value and, only if they are the same, modifies the contents of that