[352] | 1 | /*
|
---|
| 2 | * Copyright (c) 2015-2016, ARM Limited, All Rights Reserved
|
---|
| 3 | * SPDX-License-Identifier: Apache-2.0
|
---|
| 4 | *
|
---|
| 5 | * Licensed under the Apache License, Version 2.0 (the "License"); you may
|
---|
| 6 | * not use this file except in compliance with the License.
|
---|
| 7 | * You may obtain a copy of the License at
|
---|
| 8 | *
|
---|
| 9 | * http://www.apache.org/licenses/LICENSE-2.0
|
---|
| 10 | *
|
---|
| 11 | * Unless required by applicable law or agreed to in writing, software
|
---|
| 12 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
---|
| 13 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
---|
| 14 | * See the License for the specific language governing permissions and
|
---|
| 15 | * limitations under the License.
|
---|
| 16 | */
|
---|
| 17 |
|
---|
| 18 | /* Declare __STDC_LIMIT_MACROS so stdint.h defines UINT32_MAX when using C++ */
|
---|
| 19 | #define __STDC_LIMIT_MACROS
|
---|
[374] | 20 | #include "hal/critical_section_api.h"
|
---|
[352] | 21 |
|
---|
| 22 | #include "cmsis.h"
|
---|
| 23 | #include "platform/mbed_assert.h"
|
---|
[374] | 24 | #include "platform/mbed_critical.h"
|
---|
[352] | 25 | #include "platform/mbed_toolchain.h"
|
---|
| 26 |
|
---|
[374] | 27 | // if __EXCLUSIVE_ACCESS rtx macro not defined, we need to get this via own-set architecture macros
|
---|
/* Decide whether LDREX/STREX exclusive-access primitives are available.
 * Preference order: an externally supplied MBED_EXCLUSIVE_ACCESS, then the
 * RTX-provided __EXCLUSIVE_ACCESS, then a guess from the architecture macros. */
#ifndef MBED_EXCLUSIVE_ACCESS
#ifndef __EXCLUSIVE_ACCESS
/* v7-M, v7E-M, v8-M (base and main) and v7-A all implement LDREX/STREX. */
#if ((__ARM_ARCH_7M__ == 1U) || \
     (__ARM_ARCH_7EM__ == 1U) || \
     (__ARM_ARCH_8M_BASE__ == 1U) || \
     (__ARM_ARCH_8M_MAIN__ == 1U)) || \
     (__ARM_ARCH_7A__ == 1U)
#define MBED_EXCLUSIVE_ACCESS 1U
/* v6-M (Cortex-M0/M0+) has no exclusive-access instructions. */
#elif (__ARM_ARCH_6M__ == 1U)
#define MBED_EXCLUSIVE_ACCESS 0U
#else
#error "Unknown architecture for exclusive access"
#endif
#else /* __EXCLUSIVE_ACCESS defined by RTX */
#define MBED_EXCLUSIVE_ACCESS __EXCLUSIVE_ACCESS
#endif
#endif /* MBED_EXCLUSIVE_ACCESS */
|
---|
[352] | 45 |
|
---|
// Number of nested core_util_critical_section_enter() calls not yet balanced
// by core_util_critical_section_exit(); only modified with the section held.
static volatile uint32_t critical_section_reentrancy_counter = 0;
|
---|
[352] | 47 |
|
---|
/** Report whether interrupts are currently enabled on this core. */
bool core_util_are_interrupts_enabled(void)
{
#if defined(__CORTEX_A9)
    // CPSR I bit (0x80) set means IRQs are masked.
    return !(__get_CPSR() & 0x80);
#else
    // PRIMASK bit 0 set means configurable exceptions are masked.
    return !(__get_PRIMASK() & 0x1);
#endif
}
|
---|
| 56 |
|
---|
/** Report whether the core is executing in interrupt/exception context. */
bool core_util_is_isr_active(void)
{
#if defined(__CORTEX_A9)
    // Bits [4:0] of the CPSR hold the processor mode; only User and System
    // modes count as thread context — every other mode (SVC, IRQ, ...) is
    // treated as exception context.
    uint32_t mode = __get_CPSR() & 0x1FU;
    return (mode != CPSR_M_USR) && (mode != CPSR_M_SYS);
#else
    // A non-zero IPSR means an exception handler is running.
    return (__get_IPSR() != 0U);
#endif
}
|
---|
| 72 |
|
---|
/** Report whether the HAL-level critical section is currently held.
 *  Thin wrapper so callers need not depend on the HAL header directly. */
bool core_util_in_critical_section(void)
{
    return hal_in_critical_section();
}
|
---|
[352] | 77 |
|
---|
/** Enter the critical section, supporting nested (reentrant) calls.
 *  Interrupts are disabled via the HAL on every call; the depth is tracked in
 *  critical_section_reentrancy_counter. */
void core_util_critical_section_enter(void)
{
    // If the reentrancy counter overflows something has gone badly wrong.
    MBED_ASSERT(critical_section_reentrancy_counter < UINT32_MAX);

    // Enter the HAL critical section first so the counter increment below
    // cannot be preempted — the counter is only touched with the section held.
    hal_critical_section_enter();

    ++critical_section_reentrancy_counter;
}
|
---|
[352] | 87 |
|
---|
/** Leave one level of the critical section; interrupts are restored via the
 *  HAL only when the outermost enter has been balanced. An unmatched exit
 *  (counter already zero) is silently ignored. */
void core_util_critical_section_exit(void)
{

    // If critical_section_enter has not previously been called, do nothing
    if (critical_section_reentrancy_counter == 0) {
        return;
    }

    --critical_section_reentrancy_counter;

    // Only the outermost exit actually re-enables interrupts.
    if (critical_section_reentrancy_counter == 0) {
        hal_critical_section_exit();
    }
}
|
---|
| 102 |
|
---|
[374] | 103 | #if MBED_EXCLUSIVE_ACCESS
|
---|
[352] | 104 |
|
---|
| 105 | /* Supress __ldrex and __strex deprecated warnings - "#3731-D: intrinsic is deprecated" */
|
---|
[374] | 106 | #if defined (__CC_ARM)
|
---|
[352] | 107 | #pragma diag_suppress 3731
|
---|
| 108 | #endif
|
---|
| 109 |
|
---|
/** Atomic compare-and-swap of a uint8_t using LDREX/STREX.
 *  On mismatch, *expectedCurrentValue is updated with the observed value.
 *  @return true if the swap was performed.
 *  FIX: LDREX/STREX imply no memory ordering on ARMv7-M/v8-M, so an explicit
 *  __DMB() is required for this to act as a synchronisation primitive
 *  (e.g. lock acquisition). Barriers added on entry and on both exit paths. */
bool core_util_atomic_cas_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
{
    __DMB(); // order earlier accesses before the atomic operation
    do {
        uint8_t currentValue = __LDREXB(ptr);
        if (currentValue != *expectedCurrentValue) {
            *expectedCurrentValue = currentValue;
            __CLREX();
            __DMB(); // order the observing load before later accesses
            return false;
        }
    } while (__STREXB(desiredValue, ptr));
    __DMB(); // order the successful swap before later accesses
    return true;
}
|
---|
| 122 |
|
---|
/** Atomic compare-and-swap of a uint16_t using LDREX/STREX.
 *  On mismatch, *expectedCurrentValue is updated with the observed value.
 *  @return true if the swap was performed.
 *  FIX: added __DMB() barriers — LDREX/STREX provide no ordering by themselves. */
bool core_util_atomic_cas_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
{
    __DMB(); // order earlier accesses before the atomic operation
    do {
        uint16_t currentValue = __LDREXH(ptr);
        if (currentValue != *expectedCurrentValue) {
            *expectedCurrentValue = currentValue;
            __CLREX();
            __DMB(); // order the observing load before later accesses
            return false;
        }
    } while (__STREXH(desiredValue, ptr));
    __DMB(); // order the successful swap before later accesses
    return true;
}
|
---|
| 135 |
|
---|
| 136 |
|
---|
/** Atomic compare-and-swap of a uint32_t using LDREX/STREX.
 *  On mismatch, *expectedCurrentValue is updated with the observed value.
 *  @return true if the swap was performed.
 *  FIX: added __DMB() barriers — LDREX/STREX provide no ordering by themselves. */
bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
{
    __DMB(); // order earlier accesses before the atomic operation
    do {
        uint32_t currentValue = __LDREXW(ptr);
        if (currentValue != *expectedCurrentValue) {
            *expectedCurrentValue = currentValue;
            __CLREX();
            __DMB(); // order the observing load before later accesses
            return false;
        }
    } while (__STREXW(desiredValue, ptr));
    __DMB(); // order the successful swap before later accesses
    return true;
}
|
---|
| 149 |
|
---|
/** Atomically add delta to *valuePtr (uint8_t) and return the new value.
 *  FIX: added __DMB() barriers — LDREX/STREX provide no ordering by themselves. */
uint8_t core_util_atomic_incr_u8(volatile uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    __DMB(); // order earlier accesses before the atomic update
    do {
        newValue = __LDREXB(valuePtr) + delta;
    } while (__STREXB(newValue, valuePtr));
    __DMB(); // order the update before later accesses
    return newValue;
}
|
---|
| 158 |
|
---|
/** Atomically add delta to *valuePtr (uint16_t) and return the new value.
 *  FIX: added __DMB() barriers — LDREX/STREX provide no ordering by themselves. */
uint16_t core_util_atomic_incr_u16(volatile uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    __DMB(); // order earlier accesses before the atomic update
    do {
        newValue = __LDREXH(valuePtr) + delta;
    } while (__STREXH(newValue, valuePtr));
    __DMB(); // order the update before later accesses
    return newValue;
}
|
---|
| 167 |
|
---|
/** Atomically add delta to *valuePtr (uint32_t) and return the new value.
 *  FIX: added __DMB() barriers — LDREX/STREX provide no ordering by themselves. */
uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    __DMB(); // order earlier accesses before the atomic update
    do {
        newValue = __LDREXW(valuePtr) + delta;
    } while (__STREXW(newValue, valuePtr));
    __DMB(); // order the update before later accesses
    return newValue;
}
|
---|
| 176 |
|
---|
| 177 |
|
---|
/** Atomically subtract delta from *valuePtr (uint8_t) and return the new value.
 *  FIX: added __DMB() barriers — LDREX/STREX provide no ordering by themselves. */
uint8_t core_util_atomic_decr_u8(volatile uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    __DMB(); // order earlier accesses before the atomic update
    do {
        newValue = __LDREXB(valuePtr) - delta;
    } while (__STREXB(newValue, valuePtr));
    __DMB(); // order the update before later accesses
    return newValue;
}
|
---|
| 186 |
|
---|
/** Atomically subtract delta from *valuePtr (uint16_t) and return the new value.
 *  FIX: added __DMB() barriers — LDREX/STREX provide no ordering by themselves. */
uint16_t core_util_atomic_decr_u16(volatile uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    __DMB(); // order earlier accesses before the atomic update
    do {
        newValue = __LDREXH(valuePtr) - delta;
    } while (__STREXH(newValue, valuePtr));
    __DMB(); // order the update before later accesses
    return newValue;
}
|
---|
| 195 |
|
---|
/** Atomically subtract delta from *valuePtr (uint32_t) and return the new value.
 *  FIX: added __DMB() barriers — LDREX/STREX provide no ordering by themselves. */
uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    __DMB(); // order earlier accesses before the atomic update
    do {
        newValue = __LDREXW(valuePtr) - delta;
    } while (__STREXW(newValue, valuePtr));
    __DMB(); // order the update before later accesses
    return newValue;
}
|
---|
| 204 |
|
---|
| 205 | #else
|
---|
| 206 |
|
---|
/** Compare-and-swap of a uint8_t, emulated with the global critical section
 *  (no exclusive-access instructions on this core). On mismatch,
 *  *expectedCurrentValue receives the observed value.
 *  @return true if the swap was performed. */
bool core_util_atomic_cas_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
{
    core_util_critical_section_enter();
    uint8_t observed = *ptr;
    bool matched = (observed == *expectedCurrentValue);
    if (matched) {
        *ptr = desiredValue;
    } else {
        *expectedCurrentValue = observed;
    }
    core_util_critical_section_exit();
    return matched;
}
|
---|
| 223 |
|
---|
/** Compare-and-swap of a uint16_t, emulated with the global critical section.
 *  On mismatch, *expectedCurrentValue receives the observed value.
 *  @return true if the swap was performed. */
bool core_util_atomic_cas_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
{
    core_util_critical_section_enter();
    uint16_t observed = *ptr;
    bool matched = (observed == *expectedCurrentValue);
    if (matched) {
        *ptr = desiredValue;
    } else {
        *expectedCurrentValue = observed;
    }
    core_util_critical_section_exit();
    return matched;
}
|
---|
| 240 |
|
---|
| 241 |
|
---|
/** Compare-and-swap of a uint32_t, emulated with the global critical section.
 *  On mismatch, *expectedCurrentValue receives the observed value.
 *  @return true if the swap was performed. */
bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
{
    core_util_critical_section_enter();
    uint32_t observed = *ptr;
    bool matched = (observed == *expectedCurrentValue);
    if (matched) {
        *ptr = desiredValue;
    } else {
        *expectedCurrentValue = observed;
    }
    core_util_critical_section_exit();
    return matched;
}
|
---|
| 258 |
|
---|
| 259 |
|
---|
/** Atomically add delta to *valuePtr (uint8_t) under the global critical
 *  section and return the new value. */
uint8_t core_util_atomic_incr_u8(volatile uint8_t *valuePtr, uint8_t delta)
{
    core_util_critical_section_enter();
    uint8_t result = (uint8_t)(*valuePtr + delta);
    *valuePtr = result;
    core_util_critical_section_exit();
    return result;
}
|
---|
| 269 |
|
---|
/** Atomically add delta to *valuePtr (uint16_t) under the global critical
 *  section and return the new value. */
uint16_t core_util_atomic_incr_u16(volatile uint16_t *valuePtr, uint16_t delta)
{
    core_util_critical_section_enter();
    uint16_t result = (uint16_t)(*valuePtr + delta);
    *valuePtr = result;
    core_util_critical_section_exit();
    return result;
}
|
---|
| 279 |
|
---|
/** Atomically add delta to *valuePtr (uint32_t) under the global critical
 *  section and return the new value. */
uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t delta)
{
    core_util_critical_section_enter();
    uint32_t result = *valuePtr + delta;
    *valuePtr = result;
    core_util_critical_section_exit();
    return result;
}
|
---|
| 289 |
|
---|
| 290 |
|
---|
/** Atomically subtract delta from *valuePtr (uint8_t) under the global
 *  critical section and return the new value. */
uint8_t core_util_atomic_decr_u8(volatile uint8_t *valuePtr, uint8_t delta)
{
    core_util_critical_section_enter();
    uint8_t result = (uint8_t)(*valuePtr - delta);
    *valuePtr = result;
    core_util_critical_section_exit();
    return result;
}
|
---|
| 300 |
|
---|
/** Atomically subtract delta from *valuePtr (uint16_t) under the global
 *  critical section and return the new value. */
uint16_t core_util_atomic_decr_u16(volatile uint16_t *valuePtr, uint16_t delta)
{
    core_util_critical_section_enter();
    uint16_t result = (uint16_t)(*valuePtr - delta);
    *valuePtr = result;
    core_util_critical_section_exit();
    return result;
}
|
---|
| 310 |
|
---|
/** Atomically subtract delta from *valuePtr (uint32_t) under the global
 *  critical section and return the new value. */
uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta)
{
    core_util_critical_section_enter();
    uint32_t result = *valuePtr - delta;
    *valuePtr = result;
    core_util_critical_section_exit();
    return result;
}
|
---|
| 320 |
|
---|
| 321 | #endif
|
---|
| 322 |
|
---|
| 323 |
|
---|
/** Compare-and-swap on a pointer, delegating to the 32-bit word variant.
 *  NOTE(review): the uint32_t casts assume 32-bit pointers (Cortex-M/A32
 *  targets) — they would truncate on a 64-bit platform. */
bool core_util_atomic_cas_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue)
{
    return core_util_atomic_cas_u32(
               (volatile uint32_t *)ptr,
               (uint32_t *)expectedCurrentValue,
               (uint32_t)desiredValue);
}
|
---|
| 331 |
|
---|
/** Atomically add delta (in bytes, not elements) to a pointer and return the
 *  new value. NOTE(review): assumes 32-bit pointers — casts via uint32_t. */
void *core_util_atomic_incr_ptr(void *volatile *valuePtr, ptrdiff_t delta)
{
    return (void *)core_util_atomic_incr_u32((volatile uint32_t *)valuePtr, (uint32_t)delta);
}
|
---|
| 336 |
|
---|
/** Atomically subtract delta (in bytes, not elements) from a pointer and
 *  return the new value. NOTE(review): assumes 32-bit pointers — casts via
 *  uint32_t. */
void *core_util_atomic_decr_ptr(void *volatile *valuePtr, ptrdiff_t delta)
{
    return (void *)core_util_atomic_decr_u32((volatile uint32_t *)valuePtr, (uint32_t)delta);
}
|
---|
| 341 |
|
---|