/*
 * Copyright (c) 2015-2016, ARM Limited, All Rights Reserved
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

17 |
|
---|
18 | /* Declare __STDC_LIMIT_MACROS so stdint.h defines UINT32_MAX when using C++ */
|
---|
19 | #define __STDC_LIMIT_MACROS
|
---|
20 | #include "platform/mbed_critical.h"
|
---|
21 |
|
---|
22 | #include "cmsis.h"
|
---|
23 | #include "platform/mbed_assert.h"
|
---|
24 | #include "platform/mbed_toolchain.h"
|
---|
25 |
|
---|
/* True when the core supports the LDREX/STREX exclusive-access instructions
 * (every Cortex-M except M0/M0+, which fall back to critical sections).
 *
 * NOTE: the previous definition expanded to
 * "(!defined (__CORTEX_M0) && !defined (__CORTEX_M0PLUS))"; having "defined"
 * appear as the result of macro expansion inside an #if is undefined behavior
 * (C11 6.10.1p4), so expand to a plain 0/1 here instead.  Consumers still use
 * "#if EXCLUSIVE_ACCESS" unchanged.
 */
#if defined (__CORTEX_M0) || defined (__CORTEX_M0PLUS)
#define EXCLUSIVE_ACCESS 0
#else
#define EXCLUSIVE_ACCESS 1
#endif

/* Depth of critical-section nesting; 0 means "not inside a critical section". */
static volatile uint32_t interrupt_enable_counter = 0;
/* Interrupt-disabled state sampled on entry to the OUTERMOST critical section,
 * used by core_util_critical_section_exit() to restore the original state. */
static volatile bool critical_interrupts_disabled = false;

/* Report whether interrupts are currently enabled on this core. */
bool core_util_are_interrupts_enabled(void)
{
#if defined(__CORTEX_A9)
    /* Cortex-A9: IRQs are masked when the CPSR I bit (0x80) is set. */
    return !(__get_CPSR() & 0x80);
#else
    /* Cortex-M: configurable interrupts are masked when PRIMASK bit 0 is set. */
    return !(__get_PRIMASK() & 0x1);
#endif
}

/* Enter a (possibly nested) critical section by disabling interrupts.
 * On the outermost entry only, records whether interrupts were already
 * disabled so that core_util_critical_section_exit() can restore the
 * original state when the last nested section exits.
 */
MBED_WEAK void core_util_critical_section_enter(void)
{
    /* Sample the interrupt state BEFORE disabling, so we know what to restore later. */
    bool interrupts_disabled = !core_util_are_interrupts_enabled();
    __disable_irq();

    /* Save the interrupt disabled state as it was prior to any nested critical section lock use */
    if (!interrupt_enable_counter) {
        critical_interrupts_disabled = interrupts_disabled;
    }

    /* If the interrupt_enable_counter overflows or we are in a nested critical section and interrupts
       are enabled, then something has gone badly wrong thus assert an error.
    */
    MBED_ASSERT(interrupt_enable_counter < UINT32_MAX);
    // FIXME
#ifndef FEATURE_UVISOR
    /* Inside a nested section, interrupts must already have been disabled by the outer entry. */
    if (interrupt_enable_counter > 0) {
        MBED_ASSERT(interrupts_disabled);
    }
#else
#warning "core_util_critical_section_enter needs fixing to work from unprivileged code"
#endif /* FEATURE_UVISOR */
    interrupt_enable_counter++;
}

/* Exit a (possibly nested) critical section.  Interrupts are re-enabled only
 * when leaving the outermost section AND they were enabled when that outermost
 * section was entered.  Calling this without a matching enter is a no-op.
 */
MBED_WEAK void core_util_critical_section_exit(void)
{
    /* If critical_section_enter has not previously been called, do nothing */
    if (interrupt_enable_counter) {

        // FIXME
#ifndef FEATURE_UVISOR
        bool interrupts_disabled = !core_util_are_interrupts_enabled(); /* get the current interrupt disabled state */

        MBED_ASSERT(interrupts_disabled); /* Interrupts must be disabled on invoking an exit from a critical section */
#else
#warning "core_util_critical_section_exit needs fixing to work from unprivileged code"
#endif /* FEATURE_UVISOR */

        interrupt_enable_counter--;

        /* Only re-enable interrupts if we are exiting the last of the nested critical sections and
           interrupts were enabled on entry to the first critical section.
        */
        if (!interrupt_enable_counter && !critical_interrupts_disabled) {
            __enable_irq();
        }
    }
}

90 | #if EXCLUSIVE_ACCESS
|
---|
91 |
|
---|
92 | /* Supress __ldrex and __strex deprecated warnings - "#3731-D: intrinsic is deprecated" */
|
---|
93 | #if defined (__CC_ARM)
|
---|
94 | #pragma diag_suppress 3731
|
---|
95 | #endif
|
---|
96 |
|
---|
/* Atomic compare-and-swap (8-bit) via exclusive access.  On mismatch, the
 * observed value is written back through expectedCurrentValue and false is
 * returned; the store may also fail spuriously (caller is expected to retry). */
bool core_util_atomic_cas_u8(uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
{
    uint8_t observed = __LDREXB((volatile uint8_t *)ptr);
    if (observed == *expectedCurrentValue) {
        /* The exclusive store returns 0 only if nothing intervened. */
        return __STREXB(desiredValue, (volatile uint8_t *)ptr) == 0;
    }
    *expectedCurrentValue = observed;
    __CLREX(); /* drop the exclusive monitor we no longer need */
    return false;
}

/* Atomic compare-and-swap (16-bit) via exclusive access.  On mismatch, the
 * observed value is written back through expectedCurrentValue and false is
 * returned; the store may also fail spuriously (caller is expected to retry). */
bool core_util_atomic_cas_u16(uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
{
    uint16_t observed = __LDREXH((volatile uint16_t *)ptr);
    if (observed == *expectedCurrentValue) {
        /* The exclusive store returns 0 only if nothing intervened. */
        return __STREXH(desiredValue, (volatile uint16_t *)ptr) == 0;
    }
    *expectedCurrentValue = observed;
    __CLREX(); /* drop the exclusive monitor we no longer need */
    return false;
}

/* Atomic compare-and-swap (32-bit) via exclusive access.  On mismatch, the
 * observed value is written back through expectedCurrentValue and false is
 * returned; the store may also fail spuriously (caller is expected to retry). */
bool core_util_atomic_cas_u32(uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
{
    uint32_t observed = __LDREXW((volatile uint32_t *)ptr);
    if (observed == *expectedCurrentValue) {
        /* The exclusive store returns 0 only if nothing intervened. */
        return __STREXW(desiredValue, (volatile uint32_t *)ptr) == 0;
    }
    *expectedCurrentValue = observed;
    __CLREX(); /* drop the exclusive monitor we no longer need */
    return false;
}

/* Atomically add delta to *valuePtr (8-bit); returns the updated value. */
uint8_t core_util_atomic_incr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t updated;
    /* Retry until the exclusive store succeeds (STREX returns 0). */
    do {
        updated = (uint8_t)(__LDREXB((volatile uint8_t *)valuePtr) + delta);
    } while (__STREXB(updated, (volatile uint8_t *)valuePtr) != 0);
    return updated;
}

/* Atomically add delta to *valuePtr (16-bit); returns the updated value. */
uint16_t core_util_atomic_incr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t updated;
    /* Retry until the exclusive store succeeds (STREX returns 0). */
    do {
        updated = (uint16_t)(__LDREXH((volatile uint16_t *)valuePtr) + delta);
    } while (__STREXH(updated, (volatile uint16_t *)valuePtr) != 0);
    return updated;
}

/* Atomically add delta to *valuePtr (32-bit); returns the updated value. */
uint32_t core_util_atomic_incr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t updated;
    /* Retry until the exclusive store succeeds (STREX returns 0). */
    do {
        updated = __LDREXW((volatile uint32_t *)valuePtr) + delta;
    } while (__STREXW(updated, (volatile uint32_t *)valuePtr) != 0);
    return updated;
}

/* Atomically subtract delta from *valuePtr (8-bit); returns the updated value. */
uint8_t core_util_atomic_decr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t updated;
    /* Retry until the exclusive store succeeds (STREX returns 0). */
    do {
        updated = (uint8_t)(__LDREXB((volatile uint8_t *)valuePtr) - delta);
    } while (__STREXB(updated, (volatile uint8_t *)valuePtr) != 0);
    return updated;
}

/* Atomically subtract delta from *valuePtr (16-bit); returns the updated value. */
uint16_t core_util_atomic_decr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t updated;
    /* Retry until the exclusive store succeeds (STREX returns 0). */
    do {
        updated = (uint16_t)(__LDREXH((volatile uint16_t *)valuePtr) - delta);
    } while (__STREXH(updated, (volatile uint16_t *)valuePtr) != 0);
    return updated;
}

/* Atomically subtract delta from *valuePtr (32-bit); returns the updated value. */
uint32_t core_util_atomic_decr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t updated;
    /* Retry until the exclusive store succeeds (STREX returns 0). */
    do {
        updated = __LDREXW((volatile uint32_t *)valuePtr) - delta;
    } while (__STREXW(updated, (volatile uint32_t *)valuePtr) != 0);
    return updated;
}

189 | #else
|
---|
190 |
|
---|
/* Atomic compare-and-swap (8-bit) implemented with a critical section for
 * cores without exclusive-access instructions.  On mismatch, the observed
 * value is written back through expectedCurrentValue. */
bool core_util_atomic_cas_u8(uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
{
    core_util_critical_section_enter();
    uint8_t observed = *ptr;
    bool matched = (observed == *expectedCurrentValue);
    if (matched) {
        *ptr = desiredValue;
    } else {
        *expectedCurrentValue = observed;
    }
    core_util_critical_section_exit();
    return matched;
}

/* Atomic compare-and-swap (16-bit) implemented with a critical section for
 * cores without exclusive-access instructions.  On mismatch, the observed
 * value is written back through expectedCurrentValue. */
bool core_util_atomic_cas_u16(uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
{
    core_util_critical_section_enter();
    uint16_t observed = *ptr;
    bool matched = (observed == *expectedCurrentValue);
    if (matched) {
        *ptr = desiredValue;
    } else {
        *expectedCurrentValue = observed;
    }
    core_util_critical_section_exit();
    return matched;
}

/* Atomic compare-and-swap (32-bit) implemented with a critical section for
 * cores without exclusive-access instructions.  On mismatch, the observed
 * value is written back through expectedCurrentValue. */
bool core_util_atomic_cas_u32(uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
{
    core_util_critical_section_enter();
    uint32_t observed = *ptr;
    bool matched = (observed == *expectedCurrentValue);
    if (matched) {
        *ptr = desiredValue;
    } else {
        *expectedCurrentValue = observed;
    }
    core_util_critical_section_exit();
    return matched;
}

/* Atomically add delta to *valuePtr (8-bit) under a critical section;
 * returns the updated value. */
uint8_t core_util_atomic_incr_u8(uint8_t *valuePtr, uint8_t delta)
{
    core_util_critical_section_enter();
    uint8_t updated = (uint8_t)(*valuePtr + delta);
    *valuePtr = updated;
    core_util_critical_section_exit();
    return updated;
}

/* Atomically add delta to *valuePtr (16-bit) under a critical section;
 * returns the updated value. */
uint16_t core_util_atomic_incr_u16(uint16_t *valuePtr, uint16_t delta)
{
    core_util_critical_section_enter();
    uint16_t updated = (uint16_t)(*valuePtr + delta);
    *valuePtr = updated;
    core_util_critical_section_exit();
    return updated;
}

/* Atomically add delta to *valuePtr (32-bit) under a critical section;
 * returns the updated value. */
uint32_t core_util_atomic_incr_u32(uint32_t *valuePtr, uint32_t delta)
{
    core_util_critical_section_enter();
    uint32_t updated = *valuePtr + delta;
    *valuePtr = updated;
    core_util_critical_section_exit();
    return updated;
}

/* Atomically subtract delta from *valuePtr (8-bit) under a critical section;
 * returns the updated value. */
uint8_t core_util_atomic_decr_u8(uint8_t *valuePtr, uint8_t delta)
{
    core_util_critical_section_enter();
    uint8_t updated = (uint8_t)(*valuePtr - delta);
    *valuePtr = updated;
    core_util_critical_section_exit();
    return updated;
}

/* Atomically subtract delta from *valuePtr (16-bit) under a critical section;
 * returns the updated value. */
uint16_t core_util_atomic_decr_u16(uint16_t *valuePtr, uint16_t delta)
{
    core_util_critical_section_enter();
    uint16_t updated = (uint16_t)(*valuePtr - delta);
    *valuePtr = updated;
    core_util_critical_section_exit();
    return updated;
}

/* Atomically subtract delta from *valuePtr (32-bit) under a critical section;
 * returns the updated value. */
uint32_t core_util_atomic_decr_u32(uint32_t *valuePtr, uint32_t delta)
{
    core_util_critical_section_enter();
    uint32_t updated = *valuePtr - delta;
    *valuePtr = updated;
    core_util_critical_section_exit();
    return updated;
}

305 | #endif
|
---|
306 |
|
---|
307 |
|
---|
/* Pointer-sized compare-and-swap, delegating to the 32-bit implementation.
 * NOTE(review): assumes pointers are 32 bits wide — true on the Cortex-M/A
 * targets this file supports; confirm before reusing on a 64-bit platform. */
bool core_util_atomic_cas_ptr(void **ptr, void **expectedCurrentValue, void *desiredValue) {
    uint32_t *current = (uint32_t *)ptr;
    uint32_t *expected = (uint32_t *)expectedCurrentValue;
    uint32_t desired = (uint32_t)desiredValue;
    return core_util_atomic_cas_u32(current, expected, desired);
}

/* Atomically advance a pointer by delta (raw byte offset) and return the
 * updated pointer.  NOTE(review): assumes 32-bit pointers — confirm before
 * reusing on a 64-bit platform. */
void *core_util_atomic_incr_ptr(void **valuePtr, ptrdiff_t delta) {
    uint32_t updated = core_util_atomic_incr_u32((uint32_t *)valuePtr, (uint32_t)delta);
    return (void *)updated;
}

/* Atomically move a pointer back by delta (raw byte offset) and return the
 * updated pointer.  NOTE(review): assumes 32-bit pointers — confirm before
 * reusing on a 64-bit platform. */
void *core_util_atomic_decr_ptr(void **valuePtr, ptrdiff_t delta) {
    uint32_t updated = core_util_atomic_decr_u32((uint32_t *)valuePtr, (uint32_t)delta);
    return (void *)updated;
}
