/**************************************************************************//**
 * @file     core_cmInstr.h
 * @brief    CMSIS Cortex-M Core Instruction Access Header File
 * @version  V4.10
 * @date     18. March 2015
 *
 * @note
 *
 ******************************************************************************/
/* Copyright (c) 2009 - 2014 ARM LIMITED

   All rights reserved.
   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions are met:
   - Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.
   - Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in the
     documentation and/or other materials provided with the distribution.
   - Neither the name of ARM nor the names of its contributors may be used
     to endorse or promote products derived from this software without
     specific prior written permission.
   *
   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
   AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
   LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
   CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
   CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
   POSSIBILITY OF SUCH DAMAGE.
   ---------------------------------------------------------------------------*/

#ifndef __CORE_CMINSTR_H
#define __CORE_CMINSTR_H


/* ##########################  Core Instruction Access  ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  Access to dedicated instructions
  @{
*/

#if   defined ( __CC_ARM ) /*------------------ RealView Compiler -----------------*/
/* ARM armcc specific functions */

#if (__ARMCC_VERSION < 400677)
  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
#endif

/** \brief  No Operation

    No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP                             __nop


/** \brief  Wait For Interrupt

    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
#define __WFI                             __wfi


/** \brief  Wait For Event

    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
#define __WFE                             __wfe


/** \brief  Send Event

    Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV                             __sev

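/* Illustrative usage sketch (assumption, not part of the original header):
   sleep between events instead of busy-polling while waiting for a flag
   that a hypothetical interrupt handler sets.

     volatile uint32_t dataReady = 0U;        // set to 1 in an ISR

     while (dataReady == 0U)
     {
       __WFE();                               // wake on an interrupt/event, then re-check
     }
*/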

/** \brief  Instruction Synchronization Barrier

    Instruction Synchronization Barrier flushes the pipeline in the processor,
    so that all instructions following the ISB are fetched from cache or
    memory, after the instruction has been completed.
 */
#define __ISB() do {\
                   __schedule_barrier();\
                   __isb(0xF);\
                   __schedule_barrier();\
                } while (0)

/** \brief  Data Synchronization Barrier

    This function acts as a special kind of Data Memory Barrier.
    It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB() do {\
                   __schedule_barrier();\
                   __dsb(0xF);\
                   __schedule_barrier();\
                } while (0)

/** \brief  Data Memory Barrier

    This function ensures the apparent order of the explicit memory operations before
    and after the instruction, without ensuring their completion.
 */
#define __DMB() do {\
                   __schedule_barrier();\
                   __dmb(0xF);\
                   __schedule_barrier();\
                } while (0)

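/* Illustrative ordering sketch (assumption, not part of the original header):
   publish data to an interrupt handler or another bus master. The DMB keeps
   the payload write observable before the flag write; the names are hypothetical.

     buffer[0]  = sample;                     // 1. write the payload
     __DMB();                                 // 2. order the payload before the flag
     bufferFull = 1U;                         // 3. then publish the flag
*/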
/** \brief  Reverse byte order (32 bit)

    This function reverses the byte order in an integer value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#define __REV                             __rev


/** \brief  Reverse byte order (16 bit)

    This function reverses the byte order within each halfword (16 bit) of the value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
{
  rev16 r0, r0
  bx lr
}
#endif

/** \brief  Reverse byte order in signed short value

    This function reverses the byte order in a signed short value with sign extension to integer.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
{
  revsh r0, r0
  bx lr
}
#endif

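/* Illustrative values (assumption, not part of the original header):
     __REV  (0x12345678)      == 0x78563412            // full 32-bit byte swap
     __REV16(0x12345678)      == 0x34127856            // byte swap within each halfword
     __REVSH((int16_t)0x0080) == (int32_t)0xFFFF8000   // byte swap, then sign extension
*/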

/** \brief  Rotate Right in unsigned value (32 bit)

    This function rotates a value right by a specified number of bits.

    \param [in]    value  Value to rotate
    \param [in]    shift  Number of bits to rotate
    \return               Rotated value
 */
#define __ROR                             __ror

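/* Illustrative value (assumption, not part of the original header):
     __ROR(0x000000F1, 4) == 0x1000000F   // the low four bits wrap around to the top
*/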

/** \brief  Breakpoint

    This function causes the processor to enter Debug state.
    Debug tools can use this to investigate system state when the instruction at a particular address is reached.

    \param [in]    value  is ignored by the processor.
                   If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)                     __breakpoint(value)

/** \brief  Reverse bit order of value

    This function reverses the bit order of the given value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#if (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300)
  #define __RBIT                          __rbit
#else
__attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;
  int32_t s = 4 /*sizeof(v)*/ * 8 - 1; // extra shift needed at end

  result = value;                      // r will be reversed bits of v; first get LSB of v
  for (value >>= 1; value; value >>= 1)
  {
    result <<= 1;
    result |= value & 1;
    s--;
  }
  result <<= s;                        // shift when v's highest bits are zero
  return(result);
}
#endif

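/* Illustrative value (assumption, not part of the original header):
     __RBIT(0x00000001) == 0x80000000   // bit 0 moves to bit 31
*/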

/** \brief  Count leading zeros

    This function counts the number of leading zeros of a data value.

    \param [in]  value  Value to count the leading zeros
    \return             number of leading zeros in value
 */
#define __CLZ                             __clz

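/* Illustrative value (assumption, not part of the original header):
     __CLZ(0x00010000) == 15   // the CLZ instruction returns 32 for an input of 0
*/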

#if (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300)

/** \brief  LDR Exclusive (8 bit)

    This function executes an exclusive LDR instruction for 8 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
#define __LDREXB(ptr)                     ((uint8_t ) __ldrex(ptr))


/** \brief  LDR Exclusive (16 bit)

    This function executes an exclusive LDR instruction for 16 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint16_t at (*ptr)
 */
#define __LDREXH(ptr)                     ((uint16_t) __ldrex(ptr))


/** \brief  LDR Exclusive (32 bit)

    This function executes an exclusive LDR instruction for 32 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint32_t at (*ptr)
 */
#define __LDREXW(ptr)                     ((uint32_t ) __ldrex(ptr))


/** \brief  STR Exclusive (8 bit)

    This function executes an exclusive STR instruction for 8 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXB(value, ptr)              __strex(value, ptr)


/** \brief  STR Exclusive (16 bit)

    This function executes an exclusive STR instruction for 16 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXH(value, ptr)              __strex(value, ptr)


/** \brief  STR Exclusive (32 bit)

    This function executes an exclusive STR instruction for 32 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXW(value, ptr)              __strex(value, ptr)


/** \brief  Remove the exclusive lock

    This function removes the exclusive lock which is created by LDREX.

 */
#define __CLREX                           __clrex

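/* Illustrative usage sketch (assumption, not part of the original header):
   atomically increment a counter that is also updated from interrupt handlers,
   retrying while the exclusive store reports failure. 'counter' is hypothetical.

     volatile uint32_t counter;

     static void increment_counter(void)
     {
       uint32_t tmp;
       do
       {
         tmp = __LDREXW(&counter);                     // load and set the exclusive monitor
       } while (__STREXW(tmp + 1U, &counter) != 0U);   // retry if the monitor was lost
       __DMB();                                        // order the update against later accesses
     }
*/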

/** \brief  Signed Saturate

    This function saturates a signed value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (1..32)
    \return             Saturated value
 */
#define __SSAT                            __ssat


/** \brief  Unsigned Saturate

    This function saturates an unsigned value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (0..31)
    \return             Saturated value
 */
#define __USAT                            __usat

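/* Illustrative values (assumption, not part of the original header):
     __SSAT( 200, 8) ==  127   // clamp to the signed  8-bit range [-128, 127]
     __SSAT(-200, 8) == -128
     __USAT( 300, 8) ==  255   // clamp to the unsigned 8-bit range [0, 255]
     __USAT( -17, 8) ==    0
*/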

/** \brief  Rotate Right with Extend (32 bit)

    This function moves each bit of a bitstring right by one bit.
    The carry input is shifted in at the left end of the bitstring.

    \param [in]    value  Value to rotate
    \return               Rotated value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rrx_text"))) __STATIC_INLINE __ASM uint32_t __RRX(uint32_t value)
{
  rrx r0, r0
  bx lr
}
#endif


/** \brief  LDRT Unprivileged (8 bit)

    This function executes an unprivileged LDRT instruction for 8 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
#define __LDRBT(ptr)                      ((uint8_t )  __ldrt(ptr))


/** \brief  LDRT Unprivileged (16 bit)

    This function executes an unprivileged LDRT instruction for 16 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint16_t at (*ptr)
 */
#define __LDRHT(ptr)                      ((uint16_t)  __ldrt(ptr))


/** \brief  LDRT Unprivileged (32 bit)

    This function executes an unprivileged LDRT instruction for 32 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint32_t at (*ptr)
 */
#define __LDRT(ptr)                       ((uint32_t ) __ldrt(ptr))


/** \brief  STRT Unprivileged (8 bit)

    This function executes an unprivileged STRT instruction for 8 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
 */
#define __STRBT(value, ptr)               __strt(value, ptr)


/** \brief  STRT Unprivileged (16 bit)

    This function executes an unprivileged STRT instruction for 16 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
 */
#define __STRHT(value, ptr)               __strt(value, ptr)


/** \brief  STRT Unprivileged (32 bit)

    This function executes an unprivileged STRT instruction for 32 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
 */
#define __STRT(value, ptr)                __strt(value, ptr)

#endif /* (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300) */


#elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
/* GNU gcc specific functions */

/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low registers (r0-r7), specified by constraint "l".
 * Otherwise, use general registers, specified by constraint "r". */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

/** \brief  No Operation

    No Operation does nothing. This instruction can be used for code alignment purposes.
 */
__attribute__((always_inline)) __STATIC_INLINE void __NOP(void)
{
  __ASM volatile ("nop");
}


/** \brief  Wait For Interrupt

    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
__attribute__((always_inline)) __STATIC_INLINE void __WFI(void)
{
  __ASM volatile ("wfi");
}


/** \brief  Wait For Event

    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
__attribute__((always_inline)) __STATIC_INLINE void __WFE(void)
{
  __ASM volatile ("wfe");
}


/** \brief  Send Event

    Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
__attribute__((always_inline)) __STATIC_INLINE void __SEV(void)
{
  __ASM volatile ("sev");
}


/** \brief  Instruction Synchronization Barrier

    Instruction Synchronization Barrier flushes the pipeline in the processor,
    so that all instructions following the ISB are fetched from cache or
    memory, after the instruction has been completed.
 */
__attribute__((always_inline)) __STATIC_INLINE void __ISB(void)
{
  __ASM volatile ("isb 0xF":::"memory");
}


/** \brief  Data Synchronization Barrier

    This function acts as a special kind of Data Memory Barrier.
    It completes when all explicit memory accesses before this instruction complete.
 */
__attribute__((always_inline)) __STATIC_INLINE void __DSB(void)
{
  __ASM volatile ("dsb 0xF":::"memory");
}


/** \brief  Data Memory Barrier

    This function ensures the apparent order of the explicit memory operations before
    and after the instruction, without ensuring their completion.
 */
__attribute__((always_inline)) __STATIC_INLINE void __DMB(void)
{
  __ASM volatile ("dmb 0xF":::"memory");
}


/** \brief  Reverse byte order (32 bit)

    This function reverses the byte order in an integer value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __REV(uint32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;

  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
#endif
}


/** \brief  Reverse byte order (16 bit)

    This function reverses the byte order within each halfword (16 bit) of the value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


/** \brief  Reverse byte order in signed short value

    This function reverses the byte order in a signed short value with sign extension to integer.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__((always_inline)) __STATIC_INLINE int32_t __REVSH(int32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (short)__builtin_bswap16(value);
#else
  uint32_t result;

  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
#endif
}


/** \brief  Rotate Right in unsigned value (32 bit)

    This function rotates a value right by a specified number of bits.

    \param [in]    op1  Value to rotate
    \param [in]    op2  Number of bits to rotate
    \return             Rotated value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  return (op1 >> op2) | (op1 << (32 - op2));
}


/** \brief  Breakpoint

    This function causes the processor to enter Debug state.
    Debug tools can use this to investigate system state when the instruction at a particular address is reached.

    \param [in]    value  is ignored by the processor.
                   If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)                     __ASM volatile ("bkpt "#value)


/** \brief  Reverse bit order of value

    This function reverses the bit order of the given value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

#if (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300)
  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
#else
  int32_t s = 4 /*sizeof(v)*/ * 8 - 1; // extra shift needed at end

  result = value;                      // r will be reversed bits of v; first get LSB of v
  for (value >>= 1; value; value >>= 1)
  {
    result <<= 1;
    result |= value & 1;
    s--;
  }
  result <<= s;                        // shift when v's highest bits are zero
#endif
  return(result);
}


/** \brief  Count leading zeros

    This function counts the number of leading zeros of a data value.

    \param [in]  value  Value to count the leading zeros
    \return             number of leading zeros in value
 */
#define __CLZ                             __builtin_clz

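/* Note: __builtin_clz() has an undefined result for an input of 0, whereas the
   CLZ instruction itself returns 32, so avoid relying on __CLZ(0) with this
   GCC mapping. */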

#if (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300)

/** \brief  LDR Exclusive (8 bit)

    This function executes an exclusive LDR instruction for 8 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
__attribute__((always_inline)) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by the assembler, so the following less efficient pattern has to be used.
   */
  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint8_t) result);    /* Add explicit type cast here */
}


/** \brief  LDR Exclusive (16 bit)

    This function executes an exclusive LDR instruction for 16 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint16_t at (*ptr)
 */
__attribute__((always_inline)) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by the assembler, so the following less efficient pattern has to be used.
   */
  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint16_t) result);    /* Add explicit type cast here */
}


/** \brief  LDR Exclusive (32 bit)

    This function executes an exclusive LDR instruction for 32 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint32_t at (*ptr)
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
  return(result);
}


/** \brief  STR Exclusive (8 bit)

    This function executes an exclusive STR instruction for 8 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}


/** \brief  STR Exclusive (16 bit)

    This function executes an exclusive STR instruction for 16 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}


/** \brief  STR Exclusive (32 bit)

    This function executes an exclusive STR instruction for 32 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return(result);
}


/** \brief  Remove the exclusive lock

    This function removes the exclusive lock which is created by LDREX.

 */
__attribute__((always_inline)) __STATIC_INLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}

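/* Illustrative usage sketch (assumption, not part of the original header):
   a minimal test-and-set lock built on the 8-bit exclusives; 'lock' is a
   hypothetical variable shared with another context.

     volatile uint8_t lock = 0U;

     static void lock_acquire(void)
     {
       do
       {
         while (__LDREXB(&lock) != 0U) { }    // spin while the lock appears taken
       } while (__STREXB(1U, &lock) != 0U);   // claim it; retry if the store fails
       __DMB();                               // order later accesses after acquisition
     }

     static void lock_release(void)
     {
       __DMB();                               // complete prior accesses first
       lock = 0U;                             // then release the lock
     }
*/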

/** \brief  Signed Saturate

    This function saturates a signed value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (1..32)
    \return             Saturated value
 */
#define __SSAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/** \brief  Unsigned Saturate

    This function saturates an unsigned value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (0..31)
    \return             Saturated value
 */
#define __USAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/** \brief  Rotate Right with Extend (32 bit)

    This function moves each bit of a bitstring right by one bit.
    The carry input is shifted in at the left end of the bitstring.

    \param [in]    value  Value to rotate
    \return               Rotated value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


/** \brief  LDRT Unprivileged (8 bit)

    This function executes an unprivileged LDRT instruction for 8 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
__attribute__((always_inline)) __STATIC_INLINE uint8_t __LDRBT(volatile uint8_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by the assembler, so the following less efficient pattern has to be used.
   */
  __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint8_t) result);    /* Add explicit type cast here */
}


/** \brief  LDRT Unprivileged (16 bit)

    This function executes an unprivileged LDRT instruction for 16 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint16_t at (*ptr)
 */
__attribute__((always_inline)) __STATIC_INLINE uint16_t __LDRHT(volatile uint16_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by the assembler, so the following less efficient pattern has to be used.
   */
  __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint16_t) result);    /* Add explicit type cast here */
}


/** \brief  LDRT Unprivileged (32 bit)

    This function executes an unprivileged LDRT instruction for 32 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint32_t at (*ptr)
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __LDRT(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*addr) );
  return(result);
}


/** \brief  STRT Unprivileged (8 bit)

    This function executes an unprivileged STRT instruction for 8 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
 */
__attribute__((always_inline)) __STATIC_INLINE void __STRBT(uint8_t value, volatile uint8_t *addr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*addr) : "r" ((uint32_t)value) );
}


/** \brief  STRT Unprivileged (16 bit)

    This function executes an unprivileged STRT instruction for 16 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
 */
__attribute__((always_inline)) __STATIC_INLINE void __STRHT(uint16_t value, volatile uint16_t *addr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*addr) : "r" ((uint32_t)value) );
}


/** \brief  STRT Unprivileged (32 bit)

    This function executes an unprivileged STRT instruction for 32 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
 */
__attribute__((always_inline)) __STATIC_INLINE void __STRT(uint32_t value, volatile uint32_t *addr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*addr) : "r" (value) );
}

#endif /* (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300) */


#elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
/* IAR iccarm specific functions */
#include <cmsis_iar.h>


#elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
/* TI CCS specific functions */
#include <cmsis_ccs.h>


#elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
/* TASKING carm specific functions */
/*
 * The CMSIS functions have been implemented as intrinsics in the compiler.
 * Please use "carm -?i" to get an up-to-date list of all intrinsics,
 * including the CMSIS ones.
 */


#elif defined ( __CSMC__ ) /*------------------ COSMIC Compiler -------------------*/
/* Cosmic specific functions */
#include <cmsis_csm.h>

#endif

/*@}*/ /* end of group CMSIS_Core_InstructionInterface */

#endif /* __CORE_CMINSTR_H */