cmsis_armcc.h: diff of Rev 2 against Rev 9

--- cmsis_armcc.h   (Rev 2)
+++ cmsis_armcc.h   (Rev 9)
@@ Line 1 (Rev 2) | Line 1 (Rev 9) @@
 /**************************************************************************//**
  * @file     cmsis_armcc.h
- * @brief    CMSIS Cortex-M Core Function/Instruction Header File
- * @version  V4.30
- * @date     20. October 2015
+ * @brief    CMSIS compiler ARMCC (Arm Compiler 5) header file
+ * @version  V5.0.4
+ * @date     10. January 2018
  ******************************************************************************/
-/* Copyright (c) 2009 - 2015 ARM LIMITED
-
-   All rights reserved.
-   Redistribution and use in source and binary forms, with or without
-   modification, are permitted provided that the following conditions are met:
-   - Redistributions of source code must retain the above copyright
-     notice, this list of conditions and the following disclaimer.
-   - Redistributions in binary form must reproduce the above copyright
-     notice, this list of conditions and the following disclaimer in the
-     documentation and/or other materials provided with the distribution.
-   - Neither the name of ARM nor the names of its contributors may be used
-     to endorse or promote products derived from this software without
-     specific prior written permission.
-   *
-   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-   AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-   ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
-   LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-   CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-   CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-   POSSIBILITY OF SUCH DAMAGE.
-   ---------------------------------------------------------------------------*/
+/*
+ * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Licensed under the Apache License, Version 2.0 (the License); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
 #ifndef __CMSIS_ARMCC_H
 #define __CMSIS_ARMCC_H
 
 
 #if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 400677)
-  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
+  #error "Please use Arm Compiler Toolchain V4.0.677 or later!"
+#endif
+
+/* CMSIS compiler control architecture macros */
+#if ((defined (__TARGET_ARCH_6_M ) && (__TARGET_ARCH_6_M  == 1)) || \
+     (defined (__TARGET_ARCH_6S_M) && (__TARGET_ARCH_6S_M == 1))   )
+  #define __ARM_ARCH_6M__           1
+#endif
+
+#if (defined (__TARGET_ARCH_7_M ) && (__TARGET_ARCH_7_M  == 1))
+  #define __ARM_ARCH_7M__           1
+#endif
+
+#if (defined (__TARGET_ARCH_7E_M) && (__TARGET_ARCH_7E_M == 1))
+  #define __ARM_ARCH_7EM__          1
+#endif
+
+/* __ARM_ARCH_8M_BASE__  not applicable */
+/* __ARM_ARCH_8M_MAIN__  not applicable */
+
+
+/* CMSIS compiler specific defines */
+#ifndef   __ASM
+  #define __ASM                     __asm
+#endif
+#ifndef   __INLINE
+  #define __INLINE                  __inline
+#endif
+#ifndef   __STATIC_INLINE
+  #define __STATIC_INLINE           static __inline
+#endif
+#ifndef   __STATIC_FORCEINLINE
+  #define __STATIC_FORCEINLINE      static __forceinline
+#endif
+#ifndef   __NO_RETURN
+  #define __NO_RETURN               __declspec(noreturn)
+#endif
+#ifndef   __USED
+  #define __USED                    __attribute__((used))
+#endif
+#ifndef   __WEAK
+  #define __WEAK                    __attribute__((weak))
+#endif
+#ifndef   __PACKED
+  #define __PACKED                  __attribute__((packed))
+#endif
+#ifndef   __PACKED_STRUCT
+  #define __PACKED_STRUCT           __packed struct
+#endif
+#ifndef   __PACKED_UNION
+  #define __PACKED_UNION            __packed union
+#endif
+#ifndef   __UNALIGNED_UINT32        /* deprecated */
+  #define __UNALIGNED_UINT32(x)     (*((__packed uint32_t *)(x)))
+#endif
+#ifndef   __UNALIGNED_UINT16_WRITE
+  #define __UNALIGNED_UINT16_WRITE(addr, val)    ((*((__packed uint16_t *)(addr))) = (val))
+#endif
+#ifndef   __UNALIGNED_UINT16_READ
+  #define __UNALIGNED_UINT16_READ(addr)          (*((const __packed uint16_t *)(addr)))
+#endif
+#ifndef   __UNALIGNED_UINT32_WRITE
+  #define __UNALIGNED_UINT32_WRITE(addr, val)    ((*((__packed uint32_t *)(addr))) = (val))
+#endif
+#ifndef   __UNALIGNED_UINT32_READ
+  #define __UNALIGNED_UINT32_READ(addr)          (*((const __packed uint32_t *)(addr)))
+#endif
+#ifndef   __ALIGNED
+  #define __ALIGNED(x)              __attribute__((aligned(x)))
+#endif
+#ifndef   __RESTRICT
+  #define __RESTRICT                __restrict
 #endif
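A note on the unaligned-access macros added in this hunk: under Arm Compiler 5 they expand to __packed pointer dereferences, so the compiler emits byte-wise accesses where the core cannot load or store unaligned words directly. A minimal sketch, assuming this header is in effect via the device's CMSIS core header; the function and buffer names are illustrative, not from the file:

    #include <stdint.h>

    /* Read a little-endian 32-bit length field that sits at an odd
       offset inside a received byte stream. frame + 1 need not be
       4-byte aligned; the macro makes the access safe. */
    static uint32_t parse_len(const uint8_t *frame)
    {
      return __UNALIGNED_UINT32_READ(frame + 1);
    }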
 
 /* ###########################  Core Function Access  ########################### */
 /** \ingroup  CMSIS_Core_FunctionInterface
     \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
   @{
  */
 
+/**
+  \brief   Enable IRQ Interrupts
+  \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
+           Can only be executed in Privileged modes.
+ */
 /* intrinsic void __enable_irq();  */
+
+
+/**
+  \brief   Disable IRQ Interrupts
+  \details Disables IRQ interrupts by setting the I-bit in the CPSR.
+           Can only be executed in Privileged modes.
+ */
 /* intrinsic void __disable_irq(); */
 
 /**
   \brief   Get Control Register
   \details Returns the content of the Control Register.
@@ Line 179 (Rev 2) | Line 252 (Rev 9) @@
   register uint32_t __regPriMask         __ASM("primask");
   __regPriMask = (priMask);
 }
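The PRIMASK accessors touched here are typically paired to build a nestable critical section. A minimal sketch, assuming the __get_PRIMASK companion defined earlier in this file; the function and variable names are illustrative:

    #include <stdint.h>

    /* Save PRIMASK, mask IRQs, do the critical work, then restore the
       caller's interrupt state so nested critical sections compose. */
    static void update_shared_counter(volatile uint32_t *counter)
    {
      uint32_t primask = __get_PRIMASK();  /* remember current mask state */
      __disable_irq();                     /* set PRIMASK, block IRQs */
      *counter += 1U;                      /* critical section */
      __set_PRIMASK(primask);              /* re-enables IRQs only if they
                                              were enabled on entry */
    }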
 
 
-#if       (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)
+#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
+     (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))   )
 
 /**
   \brief   Enable FIQ
   \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
            Can only be executed in Privileged modes.
@@ Line 254 (Rev 2) | Line 328 (Rev 9) @@
   \param [in]    faultMask  Fault Mask value to set
  */
 __STATIC_INLINE void __set_FAULTMASK(uint32_t faultMask)
 {
   register uint32_t __regFaultMask       __ASM("faultmask");
-  __regFaultMask = (faultMask & (uint32_t)1);
+  __regFaultMask = (faultMask & (uint32_t)1U);
 }
 
-#endif /* (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U) */
+#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
+           (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))   ) */
-
 
-#if       (__CORTEX_M == 0x04U) || (__CORTEX_M == 0x07U)
 
 /**
   \brief   Get FPSCR
   \details Returns the current value of the Floating Point Status/Control register.
   \return               Floating Point Status/Control register value
  */
 __STATIC_INLINE uint32_t __get_FPSCR(void)
 {
-#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
+#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
+     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))   )
   register uint32_t __regfpscr         __ASM("fpscr");
   return(__regfpscr);
 #else
   return(0U);
 #endif
@@ Line 285 (Rev 2) | Line 359 (Rev 9) @@
   \details Assigns the given value to the Floating Point Status/Control register.
   \param [in]    fpscr  Floating Point Status/Control value to set
  */
 __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
 {
-#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
+#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
+     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))   )
   register uint32_t __regfpscr         __ASM("fpscr");
   __regfpscr = (fpscr);
+#else
+  (void)fpscr;
 #endif
 }
 
-#endif /* (__CORTEX_M == 0x04U) || (__CORTEX_M == 0x07U) */
-
-
 
 /*@} end of CMSIS_Core_RegAccFunctions */
 
 
 /* ##########################  Core Instruction Access  ######################### */
@@ Line 367 (Rev 2) | Line 441 (Rev 9) @@
     __schedule_barrier();\
     __dmb(0xF);\
     __schedule_barrier();\
  } while (0U)
 
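The barrier macro whose tail appears above (by its __dmb(0xF) body, the __DMB() macro) brackets the hardware barrier with __schedule_barrier() so Arm Compiler 5 will not reorder surrounding memory accesses across it at compile time. A minimal usage sketch, assuming this header is included; the producer and flag names are illustrative:

    #include <stdint.h>

    volatile uint32_t g_data;
    volatile uint32_t g_ready;

    /* Publish data before raising a flag observed by an IRQ handler:
       the data store must be ordered before the flag store. */
    static void publish(uint32_t value)
    {
      g_data = value;
      __DMB();        /* data memory barrier: order the two stores */
      g_ready = 1U;
    }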
+
 /**
   \brief   Reverse byte order (32 bit)
-  \details Reverses the byte order in integer value.
+  \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
   \param [in]    value  Value to reverse
   \return               Reversed value
  */
 #define __REV                             __rev
 
 
 /**
   \brief   Reverse byte order (16 bit)
-  \details Reverses the byte order in two unsigned short values.
+  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
   \param [in]    value  Value to reverse
   \return               Reversed value
  */
 #ifndef __NO_EMBEDDED_ASM
 __attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
@@ Line 390 (Rev 2) | Line 465 (Rev 9) @@
   rev16 r0, r0
   bx lr
 }
 #endif
 
+
 /**
-  \brief   Reverse byte order in signed short value
-  \details Reverses the byte order in a signed short value with sign extension to integer.
+  \brief   Reverse byte order (16 bit)
+  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
   \param [in]    value  Value to reverse
   \return               Reversed value
  */
 #ifndef __NO_EMBEDDED_ASM
-__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
+__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int16_t __REVSH(int16_t value)
 {
   revsh r0, r0
   bx lr
 }
 #endif
 
 
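To make the three reversal intrinsics above concrete, a small self-checking sketch; the expected values come from the updated doc comments, while the assert-based wrapper is my addition:

    #include <assert.h>
    #include <stdint.h>

    static void check_byte_reversal(void)
    {
      assert(__REV(0x12345678U)   == 0x78563412U);  /* whole-word reverse   */
      assert(__REV16(0x12345678U) == 0x34127856U);  /* per-halfword reverse */
      assert(__REVSH((int16_t)0x0080) == (int16_t)0x8000); /* signed result */
    }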
 /**
   \brief   Rotate Right in unsigned value (32 bit)
   \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
-  \param [in]    value  Value to rotate
-  \param [in]    value  Number of Bits to rotate
+  \param [in]    op1  Value to rotate
+  \param [in]    op2  Number of Bits to rotate
   \return               Rotated value
  */
 #define __ROR                             __ror
 
 
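The corrected \param names in this hunk also make the argument order explicit: value first, rotate count second. A worked value of my own, not from the file:

    #include <stdint.h>

    static uint32_t rotate_demo(void)
    {
      /* Rotate right by 8: the low byte 0x78 wraps into the top byte. */
      return __ROR(0x12345678U, 8U);   /* yields 0x78123456U */
    }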
@@ Line 431 (Rev 2) | Line 507 (Rev 9) @@
   \brief   Reverse bit order of value
   \details Reverses the bit order of the given value.
   \param [in]    value  Value to reverse
   \return               Reversed value
  */
-#if       (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)
+#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
+     (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))   )
   #define __RBIT                          __rbit
 #else
 __attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
 {
   uint32_t result;
-  int32_t s = 4 /*sizeof(v)*/ * 8 - 1; /* extra shift needed at end */
+  uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */
 
   result = value;                      /* r will be reversed bits of v; first get LSB of v */
-  for (value >>= 1U; value; value >>= 1U)
+  for (value >>= 1U; value != 0U; value >>= 1U)
   {
     result <<= 1U;
     result |= value & 1U;
     s--;
   }
   result <<= s;                        /* shift when v's highest bits are zero */
-  return(result);
+  return result;
 }
 #endif
 
 
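A quick trace of the software fallback above, with my own test values: for value = 1 the loop body never runs, s stays 31, and the final shift moves the lone LSB to the MSB.

    #include <assert.h>
    #include <stdint.h>

    static void check_rbit(void)
    {
      /* Single LSB: loop never iterates, result <<= 31 does the work. */
      assert(__RBIT(0x00000001U) == 0x80000000U);
      /* A block of high bits reverses end-for-end. */
      assert(__RBIT(0xF0000000U) == 0x0000000FU);
    }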
 /**
@@ Line 461 (Rev 2) | Line 538 (Rev 9) @@
   \return             number of leading zeros in value
  */
 #define __CLZ                             __clz
 
 
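For reference, __CLZ counts the zero bits above the most significant set bit; the worked values below are mine, matching the CLZ instruction's definition (which returns 32 for a zero input):

    #include <stdint.h>

    static void clz_demo(void)
    {
      uint32_t a = __CLZ(0x00010000U);  /* bit 16 set -> 15 leading zeros */
      uint32_t b = __CLZ(0x00000000U);  /* no bits set -> 32 by definition */
      (void)a; (void)b;
    }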
-#if       (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)
+#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
+     (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))   )
 
 /**
   \brief   LDR Exclusive (8 bit)
   \details Executes a exclusive LDR instruction for 8 bit value.
   \param [in]    ptr  Pointer to data
@@ Line 643 (Rev 2) | Line 721 (Rev 9) @@
   \param [in]  value  Value to store
   \param [in]    ptr  Pointer to location
  */
 #define __STRT(value, ptr)                __strt(value, ptr)
 
+#else  /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
+           (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))   ) */
+
+/**
+  \brief   Signed Saturate
+  \details Saturates a signed value.
+  \param [in]  value  Value to be saturated
+  \param [in]    sat  Bit position to saturate to (1..32)
+  \return             Saturated value
+ */
+__attribute__((always_inline)) __STATIC_INLINE int32_t __SSAT(int32_t val, uint32_t sat)
+{
+  if ((sat >= 1U) && (sat <= 32U))
+  {
+    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
+    const int32_t min = -1 - max ;
+    if (val > max)
+    {
+      return max;
+    }
+    else if (val < min)
+    {
+      return min;
+    }
+  }
+  return val;
+}
+
+/**
+  \brief   Unsigned Saturate
+  \details Saturates an unsigned value.
+  \param [in]  value  Value to be saturated
+  \param [in]    sat  Bit position to saturate to (0..31)
+  \return             Saturated value
+ */
+__attribute__((always_inline)) __STATIC_INLINE uint32_t __USAT(int32_t val, uint32_t sat)
+{
+  if (sat <= 31U)
+  {
+    const uint32_t max = ((1U << sat) - 1U);
+    if (val > (int32_t)max)
+    {
+      return max;
+    }
+    else if (val < 0)
+    {
+      return 0U;
+    }
+  }
+  return (uint32_t)val;
+}
+
-#endif /* (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U) */
+#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
+           (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))   ) */
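The software __SSAT/__USAT fallbacks added in this hunk give Armv6-M targets the same clamping semantics the SSAT/USAT instructions provide on Armv7-M. A few worked values of my own:

    #include <assert.h>
    #include <stdint.h>

    static void check_saturation(void)
    {
      /* 8-bit signed range is -128..127. */
      assert(__SSAT(200, 8U)  == 127);
      assert(__SSAT(-200, 8U) == -128);
      /* 8-bit unsigned range is 0..255. */
      assert(__USAT(300, 8U) == 255U);
      assert(__USAT(-5, 8U)  == 0U);
    }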
 
 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
 
 
 /* ###################  Compiler specific Intrinsics  ########################### */
 /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
   Access to dedicated SIMD instructions
   @{
 */
 
-#if (__CORTEX_M >= 0x04U)  /* only for Cortex-M4 and above */
+#if ((defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))   )
 
 #define __SADD8                           __sadd8
 #define __QADD8                           __qadd8
 #define __SHADD8                          __shadd8
 #define __UADD8                           __uadd8
@@ Line 725 (Rev 2) | Line 856 (Rev 9) @@
                                                    ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )
 
 #define __SMMLA(ARG1,ARG2,ARG3)          ( (int32_t)((((int64_t)(ARG1) * (ARG2)) + \
                                                       ((int64_t)(ARG3) << 32U)     ) >> 32U))
 
-#endif /* (__CORTEX_M >= 0x04) */
+#endif /* ((defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))   ) */
 /*@} end of group CMSIS_SIMD_intrinsics */
 
 
 #endif /* __CMSIS_ARMCC_H */
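For context on the __SMMLA macro in the last hunk: it models the SMMLA instruction, a 32x32-bit signed multiply whose 64-bit product is added to the accumulator shifted into the high word, returning the top 32 bits. A worked example with my own values, valid only where the macro is defined (Armv7E-M targets):

    #include <stdint.h>

    static int32_t smmla_demo(void)
    {
      /* 0x40000000 * 4 = 2^32; add (5 << 32) and keep the high word,
         so the result is the accumulator plus one. */
      return __SMMLA(0x40000000, 4, 5);   /* yields 6 */
    }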