Subversion Repositories FuelGauge

Rev

Blame | Last modification | View Log | Download | RSS feed

  1. /**************************************************************************//**
  2.  * @file     cmsis_armclang.h
  3.  * @brief    CMSIS compiler specific macros, functions, instructions
  4.  * @version  V1.0.2
  5.  * @date     10. January 2018
  6.  ******************************************************************************/
  7. /*
  8.  * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
  9.  *
  10.  * SPDX-License-Identifier: Apache-2.0
  11.  *
  12.  * Licensed under the Apache License, Version 2.0 (the License); you may
  13.  * not use this file except in compliance with the License.
  14.  * You may obtain a copy of the License at
  15.  *
  16.  * www.apache.org/licenses/LICENSE-2.0
  17.  *
  18.  * Unless required by applicable law or agreed to in writing, software
  19.  * distributed under the License is distributed on an AS IS BASIS, WITHOUT
  20.  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  21.  * See the License for the specific language governing permissions and
  22.  * limitations under the License.
  23.  */
  24.  
  25. #ifndef __CMSIS_ARMCLANG_H
  26. #define __CMSIS_ARMCLANG_H
  27.  
  28. #pragma clang system_header   /* treat file as system include file */
  29.  
  30. #ifndef __ARM_COMPAT_H
  31. #include <arm_compat.h>    /* Compatibility header for Arm Compiler 5 intrinsics */
  32. #endif
  33.  
  34. /* CMSIS compiler specific defines */
  35. #ifndef   __ASM
  36.   #define __ASM                                  __asm
  37. #endif
  38. #ifndef   __INLINE
  39.   #define __INLINE                               __inline
  40. #endif
  41. #ifndef   __FORCEINLINE
  42.   #define __FORCEINLINE                          __attribute__((always_inline))
  43. #endif
  44. #ifndef   __STATIC_INLINE
  45.   #define __STATIC_INLINE                        static __inline
  46. #endif
  47. #ifndef   __STATIC_FORCEINLINE
  48.   #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static __inline
  49. #endif
  50. #ifndef   __NO_RETURN
  51.   #define __NO_RETURN                            __attribute__((__noreturn__))
  52. #endif
  53. #ifndef   CMSIS_DEPRECATED
  54.   #define CMSIS_DEPRECATED                       __attribute__((deprecated))
  55. #endif
  56. #ifndef   __USED
  57.   #define __USED                                 __attribute__((used))
  58. #endif
  59. #ifndef   __WEAK
  60.   #define __WEAK                                 __attribute__((weak))
  61. #endif
  62. #ifndef   __PACKED
  63.   #define __PACKED                               __attribute__((packed, aligned(1)))
  64. #endif
  65. #ifndef   __PACKED_STRUCT
  66.   #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
  67. #endif
  68. #ifndef   __UNALIGNED_UINT16_WRITE
  69.   #pragma clang diagnostic push
  70.   #pragma clang diagnostic ignored "-Wpacked"
  71. /*lint -esym(9058, T_UINT16_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_WRITE */
  72.   __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  73.   #pragma clang diagnostic pop
  74.   #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
  75. #endif
  76. #ifndef   __UNALIGNED_UINT16_READ
  77.   #pragma clang diagnostic push
  78.   #pragma clang diagnostic ignored "-Wpacked"
  79. /*lint -esym(9058, T_UINT16_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_READ */
  80.   __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  81.   #pragma clang diagnostic pop
  82.   #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
  83. #endif
  84. #ifndef   __UNALIGNED_UINT32_WRITE
  85.   #pragma clang diagnostic push
  86.   #pragma clang diagnostic ignored "-Wpacked"
  87. /*lint -esym(9058, T_UINT32_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_WRITE */
  88.   __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  89.   #pragma clang diagnostic pop
  90.   #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
  91. #endif
  92. #ifndef   __UNALIGNED_UINT32_READ
  93.   #pragma clang diagnostic push
  94.   #pragma clang diagnostic ignored "-Wpacked"
  95.   __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  96.   #pragma clang diagnostic pop
  97.   #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
  98. #endif
  99. #ifndef   __ALIGNED
  100.   #define __ALIGNED(x)                           __attribute__((aligned(x)))
  101. #endif
  102. #ifndef   __PACKED
  103.   #define __PACKED                               __attribute__((packed))
  104. #endif
  105.  
/* ##########################  Core Instruction Access  ######################### */
/**
  \brief   No Operation
  \details Maps directly to the compiler builtin; emits a NOP instruction.
 */
#define __NOP                             __builtin_arm_nop

/**
  \brief   Wait For Interrupt
  \details Suspends execution until an interrupt (or debug event) occurs.
 */
#define __WFI                             __builtin_arm_wfi

/**
  \brief   Wait For Event
  \details Suspends execution until an event, interrupt, or debug event occurs.
 */
#define __WFE                             __builtin_arm_wfe

/**
  \brief   Send Event
  \details Signals an event to all cores in the system.
 */
#define __SEV                             __builtin_arm_sev

/**
  \brief   Instruction Synchronization Barrier
  \details Flushes the pipeline so that all following instructions are
           re-fetched. __schedule_barrier() (Arm Compiler compatibility
           intrinsic from arm_compat.h) prevents the compiler from moving
           instructions across the barrier; do/while(0) makes the macro a
           single statement. 0xF = full-system barrier option.
 */
#define __ISB() do {\
                   __schedule_barrier();\
                   __builtin_arm_isb(0xF);\
                   __schedule_barrier();\
                } while (0U)

/**
  \brief   Data Synchronization Barrier
  \details Completes all outstanding memory accesses before continuing.
 */
#define __DSB() do {\
                   __schedule_barrier();\
                   __builtin_arm_dsb(0xF);\
                   __schedule_barrier();\
                } while (0U)

/**
  \brief   Data Memory Barrier
  \details Orders memory accesses before the barrier against those after it.
 */
#define __DMB() do {\
                   __schedule_barrier();\
                   __builtin_arm_dmb(0xF);\
                   __schedule_barrier();\
                } while (0U)

/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV(value)   __builtin_bswap32(value)

/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
           Implemented as a full byte swap followed by a 16-bit rotate
           (uses __ROR, defined later in this file; fine because macros
           expand at the point of use).
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV16(value) __ROR(__REV(value), 16)


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REVSH(value) (int16_t)__builtin_bswap16(value)
  178.  
  179.  
  180. /**
  181.   \brief   Rotate Right in unsigned value (32 bit)
  182.   \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
  183.   \param [in]    op1  Value to rotate
  184.   \param [in]    op2  Number of Bits to rotate
  185.   \return               Rotated value
  186.  */
  187. __STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
  188. {
  189.   op2 %= 32U;
  190.   if (op2 == 0U)
  191.   {
  192.     return op1;
  193.   }
  194.   return (op1 >> op2) | (op1 << (32U - op2));
  195. }
  196.  
  197.  
/**
  \brief   Breakpoint
  \details Inserts a BKPT instruction; #value is stringized into the
           instruction's immediate field.
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)   __ASM volatile ("bkpt "#value)

/**
  \brief   Reverse bit order of value
  \details Object-like macro aliasing the compiler builtin; used as
           __RBIT(value) at the call site.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __RBIT          __builtin_arm_rbit
  211.  
  212. /**
  213.   \brief   Count leading zeros
  214.   \param [in]  value  Value to count the leading zeros
  215.   \return             number of leading zeros in value
  216.  */
  217. #define __CLZ           (uint8_t)__builtin_clz
  218.  
/* NOTE: the macros below are object-like aliases of compiler builtins; the
   (uintN_t) cast applies to the builtin's result at each call site, e.g.
   __LDREXB(ptr) expands to (uint8_t)__builtin_arm_ldrex(ptr). */
/**
  \brief   LDR Exclusive (8 bit)
  \details Executes a exclusive LDR instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#define __LDREXB        (uint8_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (16 bit)
  \details Executes a exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
#define __LDREXH        (uint16_t)__builtin_arm_ldrex

/**
  \brief   LDR Exclusive (32 bit)
  \details Executes a exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
#define __LDREXW        (uint32_t)__builtin_arm_ldrex

/**
  \brief   STR Exclusive (8 bit)
  \details Executes a exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXB        (uint32_t)__builtin_arm_strex

/**
  \brief   STR Exclusive (16 bit)
  \details Executes a exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXH        (uint32_t)__builtin_arm_strex

/**
  \brief   STR Exclusive (32 bit)
  \details Executes a exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXW        (uint32_t)__builtin_arm_strex

/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
#define __CLREX             __builtin_arm_clrex

/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
#define __SSAT             __builtin_arm_ssat

/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT             __builtin_arm_usat
  297.  
  298.  
/* ###########################  Core Function Access  ########################### */

/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
           Object-like alias of the compiler builtin; used as __get_FPSCR().
  \return               Floating Point Status/Control register value
 */
#define __get_FPSCR      __builtin_arm_get_fpscr

/**
  \brief   Set FPSCR
  \details Assigns the given value to the Floating Point Status/Control register.
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
#define __set_FPSCR      __builtin_arm_set_fpscr
  314.  
  315. /** \brief  Get CPSR Register
  316.     \return               CPSR Register value
  317.  */
  318. __STATIC_FORCEINLINE uint32_t __get_CPSR(void)
  319. {
  320.   uint32_t result;
  321.   __ASM volatile("MRS %0, cpsr" : "=r" (result) );
  322.   return(result);
  323. }
  324.  
  325. /** \brief  Set CPSR Register
  326.     \param [in]    cpsr  CPSR value to set
  327.  */
  328. __STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr)
  329. {
  330. __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "memory");
  331. }
  332.  
  333. /** \brief  Get Mode
  334.     \return                Processor Mode
  335.  */
  336. __STATIC_FORCEINLINE uint32_t __get_mode(void)
  337. {
  338.         return (__get_CPSR() & 0x1FU);
  339. }
  340.  
/** \brief  Set Mode
    \details Writes only the CPSR control field (cpsr_c), which contains the
             mode bits, leaving the condition flags untouched.
    \param [in]    mode  Mode value to set
 */
__STATIC_FORCEINLINE void __set_mode(uint32_t mode)
{
  __ASM volatile("MSR  cpsr_c, %0" : : "r" (mode) : "memory");
}
  348.  
  349. /** \brief  Get Stack Pointer
  350.     \return Stack Pointer value
  351.  */
  352. __STATIC_FORCEINLINE uint32_t __get_SP()
  353. {
  354.   uint32_t result;
  355.   __ASM volatile("MOV  %0, sp" : "=r" (result) : : "memory");
  356.   return result;
  357. }
  358.  
/** \brief  Set Stack Pointer
    \details Writes the stack pointer of the current mode; the "memory"
             clobber stops the compiler caching stack addresses across the
             switch.
    \param [in]    stack  Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP(uint32_t stack)
{
  __ASM volatile("MOV  sp, %0" : : "r" (stack) : "memory");
}
  366.  
  367. /** \brief  Get USR/SYS Stack Pointer
  368.     \return USR/SYS Stack Pointer value
  369.  */
  370. __STATIC_FORCEINLINE uint32_t __get_SP_usr()
  371. {
  372.   uint32_t cpsr;
  373.   uint32_t result;
  374.   __ASM volatile(
  375.     "MRS     %0, cpsr   \n"
  376.     "CPS     #0x1F      \n" // no effect in USR mode
  377.     "MOV     %1, sp     \n"
  378.     "MSR     cpsr_c, %2 \n" // no effect in USR mode
  379.     "ISB" :  "=r"(cpsr), "=r"(result) : "r"(cpsr) : "memory"
  380.    );
  381.   return result;
  382. }
  383.  
  384. /** \brief  Set USR/SYS Stack Pointer
  385.     \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
  386.  */
  387. __STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack)
  388. {
  389.   uint32_t cpsr;
  390.   __ASM volatile(
  391.     "MRS     %0, cpsr   \n"
  392.     "CPS     #0x1F      \n" // no effect in USR mode
  393.     "MOV     sp, %1     \n"
  394.     "MSR     cpsr_c, %2 \n" // no effect in USR mode
  395.     "ISB" : "=r"(cpsr) : "r" (topOfProcStack), "r"(cpsr) : "memory"
  396.    );
  397. }
  398.  
  399. /** \brief  Get FPEXC
  400.     \return               Floating Point Exception Control register value
  401.  */
  402. __STATIC_FORCEINLINE uint32_t __get_FPEXC(void)
  403. {
  404. #if (__FPU_PRESENT == 1)
  405.   uint32_t result;
  406.   __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory");
  407.   return(result);
  408. #else
  409.   return(0);
  410. #endif
  411. }
  412.  
  413. /** \brief  Set FPEXC
  414.     \param [in]    fpexc  Floating Point Exception Control value to set
  415.  */
  416. __STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc)
  417. {
  418. #if (__FPU_PRESENT == 1)
  419.   __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");
  420. #endif
  421. }
  422.  
/*
 * Include common core functions to access Coprocessor 15 registers.
 * The macros stringize their arguments into the MRC/MCR (32-bit) and
 * MRRC/MCRR (64-bit) instruction templates; Rt must be an lvalue for the
 * read variants. %Q0/%R0 select the low/high halves of a 64-bit operand.
 */

#define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" )
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" )
#define __get_CP64(cp, op1, Rt, CRm)         __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm  : "=r" (Rt) : : "memory" )
#define __set_CP64(cp, op1, Rt, CRm)         __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm  : : "r" (Rt) : "memory" )
  431.  
  432. #include "cmsis_cp15.h"
  433.  
/** \brief  Enable Floating Point Unit

  Critical section, called from undef handler, so systick is disabled.

  Sequence: grant VFP/NEON access in CPACR, ISB, set FPEXC.EN, then zero all
  D registers and scrub FPSCR so the FPU starts from a known state.

  NOTE(review): the asm uses R1-R3 without declaring them as clobbers; this
  appears to rely on the call context (undef handler) — confirm before
  reusing elsewhere.
 */
__STATIC_INLINE void __FPU_Enable(void)
{
  __ASM volatile(
    //Permit access to VFP/NEON, registers by modifying CPACR
    "        MRC     p15,0,R1,c1,c0,2  \n"
    "        ORR     R1,R1,#0x00F00000 \n"
    "        MCR     p15,0,R1,c1,c0,2  \n"

    //Ensure that subsequent instructions occur in the context of VFP/NEON access permitted
    "        ISB                       \n"

    //Enable VFP/NEON
    "        VMRS    R1,FPEXC          \n"
    "        ORR     R1,R1,#0x40000000 \n"
    "        VMSR    FPEXC,R1          \n"

    //Initialise VFP/NEON registers to 0
    "        MOV     R2,#0             \n"

    //Initialise D16 registers to 0
    "        VMOV    D0, R2,R2         \n"
    "        VMOV    D1, R2,R2         \n"
    "        VMOV    D2, R2,R2         \n"
    "        VMOV    D3, R2,R2         \n"
    "        VMOV    D4, R2,R2         \n"
    "        VMOV    D5, R2,R2         \n"
    "        VMOV    D6, R2,R2         \n"
    "        VMOV    D7, R2,R2         \n"
    "        VMOV    D8, R2,R2         \n"
    "        VMOV    D9, R2,R2         \n"
    "        VMOV    D10,R2,R2         \n"
    "        VMOV    D11,R2,R2         \n"
    "        VMOV    D12,R2,R2         \n"
    "        VMOV    D13,R2,R2         \n"
    "        VMOV    D14,R2,R2         \n"
    "        VMOV    D15,R2,R2         \n"

#if __ARM_NEON == 1
    //Initialise D32 registers to 0 (upper bank only exists with Advanced SIMD)
    "        VMOV    D16,R2,R2         \n"
    "        VMOV    D17,R2,R2         \n"
    "        VMOV    D18,R2,R2         \n"
    "        VMOV    D19,R2,R2         \n"
    "        VMOV    D20,R2,R2         \n"
    "        VMOV    D21,R2,R2         \n"
    "        VMOV    D22,R2,R2         \n"
    "        VMOV    D23,R2,R2         \n"
    "        VMOV    D24,R2,R2         \n"
    "        VMOV    D25,R2,R2         \n"
    "        VMOV    D26,R2,R2         \n"
    "        VMOV    D27,R2,R2         \n"
    "        VMOV    D28,R2,R2         \n"
    "        VMOV    D29,R2,R2         \n"
    "        VMOV    D30,R2,R2         \n"
    "        VMOV    D31,R2,R2         \n"
#endif

    //Initialise FPSCR to a known state
    "        VMRS    R2,FPSCR          \n"
    "        LDR     R3,=0x00086060    \n" //Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero.
    "        AND     R2,R2,R3          \n"
    "        VMSR    FPSCR,R2            "
  );
}
  502.  
  503. #endif /* __CMSIS_ARMCLANG_H */
  504.