Subversion Repositories: DashDisplay, cmsis_gcc.h (diff of Rev 49 against Rev 50)


Rev 49 Rev 50
Line 1... Line 1...
1
/**************************************************************************//**
1
/**************************************************************************//**
2
 * @file     cmsis_gcc.h
2
 * @file     cmsis_gcc.h
3
 * @brief    CMSIS Cortex-M Core Function/Instruction Header File
3
 * @brief    CMSIS compiler GCC header file
4
 * @version  V4.30
4
 * @version  V5.0.4
5
 * @date     20. October 2015
5
 * @date     09. April 2018
6
 ******************************************************************************/
6
 ******************************************************************************/
-
 
7
/*
7
/* Copyright (c) 2009 - 2015 ARM LIMITED
8
 * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
8
 
9
 *
9
   All rights reserved.
10
 * SPDX-License-Identifier: Apache-2.0
10
   Redistribution and use in source and binary forms, with or without
-
 
11
   modification, are permitted provided that the following conditions are met:
-
 
-
 
11
 *
12
   - Redistributions of source code must retain the above copyright
12
 * Licensed under the Apache License, Version 2.0 (the License); you may
13
     notice, this list of conditions and the following disclaimer.
13
 * not use this file except in compliance with the License.
14
   - Redistributions in binary form must reproduce the above copyright
14
 * You may obtain a copy of the License at
15
     notice, this list of conditions and the following disclaimer in the
-
 
16
     documentation and/or other materials provided with the distribution.
-
 
17
   - Neither the name of ARM nor the names of its contributors may be used
-
 
18
     to endorse or promote products derived from this software without
-
 
-
 
15
 *
19
     specific prior written permission.
16
 * www.apache.org/licenses/LICENSE-2.0
20
   *
17
 *
21
   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-
 
22
   AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-
 
23
   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-
 
24
   ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
-
 
25
   LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-
 
26
   CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-
 
27
   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
18
 * Unless required by applicable law or agreed to in writing, software
28
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
19
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
29
   CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
20
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
30
   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
21
 * See the License for the specific language governing permissions and
31
   POSSIBILITY OF SUCH DAMAGE.
22
 * limitations under the License.
32
   ---------------------------------------------------------------------------*/
-
 
33
 
23
 */
34
 
24
 
35
#ifndef __CMSIS_GCC_H
25
#ifndef __CMSIS_GCC_H
36
#define __CMSIS_GCC_H
26
#define __CMSIS_GCC_H
37
 
27
 
38
/* ignore some GCC warnings */
28
/* ignore some GCC warnings */
39
#if defined ( __GNUC__ )
-
 
40
#pragma GCC diagnostic push
29
#pragma GCC diagnostic push
41
#pragma GCC diagnostic ignored "-Wsign-conversion"
30
#pragma GCC diagnostic ignored "-Wsign-conversion"
42
#pragma GCC diagnostic ignored "-Wconversion"
31
#pragma GCC diagnostic ignored "-Wconversion"
43
#pragma GCC diagnostic ignored "-Wunused-parameter"
32
#pragma GCC diagnostic ignored "-Wunused-parameter"
-
 
33
 
-
 
34
/* Fallback for __has_builtin */
-
 
35
#ifndef __has_builtin
-
 
36
  #define __has_builtin(x) (0)
-
 
37
#endif
-
 
38
 
-
 
39
/* CMSIS compiler specific defines */
-
 
40
#ifndef   __ASM
-
 
41
  #define __ASM                                  __asm
-
 
42
#endif
-
 
43
#ifndef   __INLINE
-
 
44
  #define __INLINE                               inline
-
 
45
#endif
-
 
46
#ifndef   __STATIC_INLINE
-
 
47
  #define __STATIC_INLINE                        static inline
-
 
48
#endif
-
 
49
#ifndef   __STATIC_FORCEINLINE                 
-
 
50
  #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static inline
-
 
51
#endif                                           
-
 
52
#ifndef   __NO_RETURN
-
 
53
  #define __NO_RETURN                            __attribute__((__noreturn__))
-
 
54
#endif
-
 
55
#ifndef   __USED
-
 
56
  #define __USED                                 __attribute__((used))
-
 
57
#endif
-
 
58
#ifndef   __WEAK
-
 
59
  #define __WEAK                                 __attribute__((weak))
-
 
60
#endif
-
 
61
#ifndef   __PACKED
-
 
62
  #define __PACKED                               __attribute__((packed, aligned(1)))
-
 
63
#endif
-
 
64
#ifndef   __PACKED_STRUCT
-
 
65
  #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
-
 
66
#endif
-
 
67
#ifndef   __PACKED_UNION
-
 
68
  #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
-
 
69
#endif
-
 
70
#ifndef   __UNALIGNED_UINT32        /* deprecated */
-
 
71
  #pragma GCC diagnostic push
-
 
72
  #pragma GCC diagnostic ignored "-Wpacked"
-
 
73
  #pragma GCC diagnostic ignored "-Wattributes"
-
 
74
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
-
 
75
  #pragma GCC diagnostic pop
-
 
76
  #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
-
 
77
#endif
-
 
78
#ifndef   __UNALIGNED_UINT16_WRITE
-
 
79
  #pragma GCC diagnostic push
-
 
80
  #pragma GCC diagnostic ignored "-Wpacked"
-
 
81
  #pragma GCC diagnostic ignored "-Wattributes"
-
 
82
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
-
 
83
  #pragma GCC diagnostic pop
-
 
84
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
-
 
85
#endif
-
 
86
#ifndef   __UNALIGNED_UINT16_READ
-
 
87
  #pragma GCC diagnostic push
-
 
88
  #pragma GCC diagnostic ignored "-Wpacked"
-
 
89
  #pragma GCC diagnostic ignored "-Wattributes"
-
 
90
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
-
 
91
  #pragma GCC diagnostic pop
-
 
92
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
-
 
93
#endif
-
 
94
#ifndef   __UNALIGNED_UINT32_WRITE
-
 
95
  #pragma GCC diagnostic push
-
 
96
  #pragma GCC diagnostic ignored "-Wpacked"
-
 
97
  #pragma GCC diagnostic ignored "-Wattributes"
-
 
98
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
-
 
99
  #pragma GCC diagnostic pop
-
 
100
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
-
 
101
#endif
-
 
102
#ifndef   __UNALIGNED_UINT32_READ
-
 
103
  #pragma GCC diagnostic push
-
 
104
  #pragma GCC diagnostic ignored "-Wpacked"
-
 
105
  #pragma GCC diagnostic ignored "-Wattributes"
-
 
106
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
-
 
107
  #pragma GCC diagnostic pop
-
 
108
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
-
 
109
#endif
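
Usage sketch (not part of either revision): the packed-struct helpers above make halfword and word access legal at any alignment, because the compiler emits byte accesses (or unaligned-capable loads) instead of assuming natural alignment. A minimal example, assuming this header is already included via the device header; the packet layout is hypothetical:

#include <stdint.h>

/* Field offsets 1 and 3 are deliberately unaligned. */
static uint16_t packet_length(const uint8_t *pkt)
{
  return __UNALIGNED_UINT16_READ(&pkt[1]);
}

static void packet_set_crc(uint8_t *pkt, uint32_t crc)
{
  __UNALIGNED_UINT32_WRITE(&pkt[3], crc);
}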
-
 
110
#ifndef   __ALIGNED
-
 
111
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
-
 
112
#endif
-
 
113
#ifndef   __RESTRICT
-
 
114
  #define __RESTRICT                             __restrict
44
#endif
115
#endif
45
 
116
 
46
 
117
 
47
/* ###########################  Core Function Access  ########################### */
118
/* ###########################  Core Function Access  ########################### */
48
/** \ingroup  CMSIS_Core_FunctionInterface
119
/** \ingroup  CMSIS_Core_FunctionInterface
Line 53... Line 124...
53
/**
124
/**
54
  \brief   Enable IRQ Interrupts
125
  \brief   Enable IRQ Interrupts
55
  \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
126
  \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
56
           Can only be executed in Privileged modes.
127
           Can only be executed in Privileged modes.
57
 */
128
 */
58
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_irq(void)
129
__STATIC_FORCEINLINE void __enable_irq(void)
59
{
130
{
60
  __ASM volatile ("cpsie i" : : : "memory");
131
  __ASM volatile ("cpsie i" : : : "memory");
61
}
132
}
62
 
133
 
63
 
134
 
64
/**
135
/**
65
  \brief   Disable IRQ Interrupts
136
  \brief   Disable IRQ Interrupts
66
  \details Disables IRQ interrupts by setting the I-bit in the CPSR.
137
  \details Disables IRQ interrupts by setting the I-bit in the CPSR.
67
  Can only be executed in Privileged modes.
138
           Can only be executed in Privileged modes.
68
 */
139
 */
69
__attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_irq(void)
140
__STATIC_FORCEINLINE void __disable_irq(void)
70
{
141
{
71
  __ASM volatile ("cpsid i" : : : "memory");
142
  __ASM volatile ("cpsid i" : : : "memory");
72
}
143
}
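
Usage sketch (not part of either revision): __disable_irq/__enable_irq set and clear PRIMASK, masking all configurable-priority exceptions. A minimal, non-nestable critical section guarding a list that an ISR also updates (names hypothetical); for sections that may nest, save and restore PRIMASK instead, as shown after the PRIMASK accessors further down:

#include <stdint.h>

struct node { struct node *next; uint32_t data; };
static struct node *head;          /* also updated from an ISR */

static void list_push(struct node *n)
{
  __disable_irq();                 /* cpsid i */
  n->next = head;
  head = n;
  __enable_irq();                  /* cpsie i: assumes IRQs were enabled on entry */
}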
73
 
144
 
74
 
145
 
75
/**
146
/**
76
  \brief   Get Control Register
147
  \brief   Get Control Register
77
  \details Returns the content of the Control Register.
148
  \details Returns the content of the Control Register.
78
  \return               Control Register value
149
  \return               Control Register value
79
 */
150
 */
80
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CONTROL(void)
151
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
81
{
152
{
82
  uint32_t result;
153
  uint32_t result;
83
 
154
 
84
  __ASM volatile ("MRS %0, control" : "=r" (result) );
155
  __ASM volatile ("MRS %0, control" : "=r" (result) );
85
  return(result);
156
  return(result);
86
}
157
}
87
 
158
 
88
 
159
 
-
 
160
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
-
 
161
/**
-
 
162
  \brief   Get Control Register (non-secure)
-
 
163
  \details Returns the content of the non-secure Control Register when in secure mode.
-
 
164
  \return               non-secure Control Register value
-
 
165
 */
-
 
166
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
-
 
167
{
-
 
168
  uint32_t result;
-
 
169
 
-
 
170
  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
-
 
171
  return(result);
-
 
172
}
-
 
173
#endif
-
 
174
 
-
 
175
 
89
/**
176
/**
90
  \brief   Set Control Register
177
  \brief   Set Control Register
91
  \details Writes the given value to the Control Register.
178
  \details Writes the given value to the Control Register.
92
  \param [in]    control  Control Register value to set
179
  \param [in]    control  Control Register value to set
93
 */
180
 */
94
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CONTROL(uint32_t control)
181
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
95
{
182
{
96
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
183
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
97
}
184
}
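
Usage sketch (not part of either revision): a common use of the CONTROL accessors is switching thread mode to unprivileged execution; an ISB after writing CONTROL is the recommended barrier so that following instructions see the new state:

static void drop_to_unprivileged(void)
{
  __set_CONTROL(__get_CONTROL() | 1U);   /* CONTROL.nPRIV = 1 */
  __ISB();                               /* __ISB is defined later in this header */
}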
98
 
185
 
99
 
186
 
-
 
187
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
-
 
188
/**
-
 
189
  \brief   Set Control Register (non-secure)
-
 
190
  \details Writes the given value to the non-secure Control Register when in secure state.
-
 
191
  \param [in]    control  Control Register value to set
-
 
192
 */
-
 
193
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
-
 
194
{
-
 
195
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
-
 
196
}
-
 
197
#endif
-
 
198
 
-
 
199
 
100
/**
200
/**
101
  \brief   Get IPSR Register
201
  \brief   Get IPSR Register
102
  \details Returns the content of the IPSR Register.
202
  \details Returns the content of the IPSR Register.
103
  \return               IPSR Register value
203
  \return               IPSR Register value
104
 */
204
 */
105
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_IPSR(void)
205
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
106
{
206
{
107
  uint32_t result;
207
  uint32_t result;
108
 
208
 
109
  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
209
  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
110
  return(result);
210
  return(result);
Line 114... Line 214...
114
/**
214
/**
115
  \brief   Get APSR Register
215
  \brief   Get APSR Register
116
  \details Returns the content of the APSR Register.
216
  \details Returns the content of the APSR Register.
117
  \return               APSR Register value
217
  \return               APSR Register value
118
 */
218
 */
119
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_APSR(void)
219
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
120
{
220
{
121
  uint32_t result;
221
  uint32_t result;
122
 
222
 
123
  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
223
  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
124
  return(result);
224
  return(result);
Line 126... Line 226...
126
 
226
 
127
 
227
 
128
/**
228
/**
129
  \brief   Get xPSR Register
229
  \brief   Get xPSR Register
130
  \details Returns the content of the xPSR Register.
230
  \details Returns the content of the xPSR Register.
131
 
-
 
132
    \return               xPSR Register value
231
  \return               xPSR Register value
133
 */
232
 */
134
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_xPSR(void)
233
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
135
{
234
{
136
  uint32_t result;
235
  uint32_t result;
137
 
236
 
138
  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
237
  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
139
  return(result);
238
  return(result);
Line 143... Line 242...
143
/**
242
/**
144
  \brief   Get Process Stack Pointer
243
  \brief   Get Process Stack Pointer
145
  \details Returns the current value of the Process Stack Pointer (PSP).
244
  \details Returns the current value of the Process Stack Pointer (PSP).
146
  \return               PSP Register value
245
  \return               PSP Register value
147
 */
246
 */
148
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_PSP(void)
247
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
149
{
248
{
150
  register uint32_t result;
249
  uint32_t result;
151
 
250
 
152
  __ASM volatile ("MRS %0, psp\n"  : "=r" (result) );
251
  __ASM volatile ("MRS %0, psp"  : "=r" (result) );
153
  return(result);
252
  return(result);
154
}
253
}
155
 
254
 
156
 
255
 
-
 
256
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
-
 
257
/**
-
 
258
  \brief   Get Process Stack Pointer (non-secure)
-
 
259
  \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
-
 
260
  \return               PSP Register value
-
 
261
 */
-
 
262
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
-
 
263
{
-
 
264
  uint32_t result;
-
 
265
 
-
 
266
  __ASM volatile ("MRS %0, psp_ns"  : "=r" (result) );
-
 
267
  return(result);
-
 
268
}
-
 
269
#endif
-
 
270
 
-
 
271
 
157
/**
272
/**
158
  \brief   Set Process Stack Pointer
273
  \brief   Set Process Stack Pointer
159
  \details Assigns the given value to the Process Stack Pointer (PSP).
274
  \details Assigns the given value to the Process Stack Pointer (PSP).
160
  \param [in]    topOfProcStack  Process Stack Pointer value to set
275
  \param [in]    topOfProcStack  Process Stack Pointer value to set
161
 */
276
 */
162
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
277
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
163
{
278
{
164
  __ASM volatile ("MSR psp, %0\n" : : "r" (topOfProcStack) : "sp");
279
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
165
}
280
}
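
Usage sketch (not part of either revision): note that rev 50 also drops the "sp" clobber from the MSR. An RTOS-style fragment that points PSP at a thread stack and then selects PSP for thread mode (stack size hypothetical):

#include <stdint.h>

static uint32_t thread_stack[256];

static void use_process_stack(void)
{
  __set_PSP((uint32_t)&thread_stack[256]);   /* top of a full-descending stack */
  __set_CONTROL(__get_CONTROL() | 2U);       /* CONTROL.SPSEL = 1: thread mode uses PSP */
  __ISB();
}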
166
 
281
 
167
 
282
 
-
 
283
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
-
 
284
/**
-
 
285
  \brief   Set Process Stack Pointer (non-secure)
-
 
286
  \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
-
 
287
  \param [in]    topOfProcStack  Process Stack Pointer value to set
-
 
288
 */
-
 
289
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
-
 
290
{
-
 
291
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
-
 
292
}
-
 
293
#endif
-
 
294
 
-
 
295
 
168
/**
296
/**
169
  \brief   Get Main Stack Pointer
297
  \brief   Get Main Stack Pointer
170
  \details Returns the current value of the Main Stack Pointer (MSP).
298
  \details Returns the current value of the Main Stack Pointer (MSP).
171
  \return               MSP Register value
299
  \return               MSP Register value
172
 */
300
 */
173
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_MSP(void)
301
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
-
 
302
{
-
 
303
  uint32_t result;
-
 
304
 
-
 
305
  __ASM volatile ("MRS %0, msp" : "=r" (result) );
-
 
306
  return(result);
-
 
307
}
-
 
308
 
-
 
309
 
-
 
310
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
-
 
311
/**
-
 
312
  \brief   Get Main Stack Pointer (non-secure)
-
 
313
  \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
-
 
314
  \return               MSP Register value
-
 
315
 */
-
 
316
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
174
{
317
{
175
  register uint32_t result;
318
  uint32_t result;
176
 
319
 
177
  __ASM volatile ("MRS %0, msp\n" : "=r" (result) );
320
  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
178
  return(result);
321
  return(result);
179
}
322
}
-
 
323
#endif
180
 
324
 
181
 
325
 
182
/**
326
/**
183
  \brief   Set Main Stack Pointer
327
  \brief   Set Main Stack Pointer
184
  \details Assigns the given value to the Main Stack Pointer (MSP).
328
  \details Assigns the given value to the Main Stack Pointer (MSP).
-
 
329
  \param [in]    topOfMainStack  Main Stack Pointer value to set
-
 
330
 */
-
 
331
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
-
 
332
{
-
 
333
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
-
 
334
}
-
 
335
 
185
 
336
 
-
 
337
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
-
 
338
/**
-
 
339
  \brief   Set Main Stack Pointer (non-secure)
-
 
340
  \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
186
    \param [in]    topOfMainStack  Main Stack Pointer value to set
341
  \param [in]    topOfMainStack  Main Stack Pointer value to set
187
 */
342
 */
188
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_MSP(uint32_t topOfMainStack)
343
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
189
{
344
{
190
  __ASM volatile ("MSR msp, %0\n" : : "r" (topOfMainStack) : "sp");
345
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
191
}
346
}
-
 
347
#endif
-
 
348
 
-
 
349
 
-
 
350
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
-
 
351
/**
-
 
352
  \brief   Get Stack Pointer (non-secure)
-
 
353
  \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
-
 
354
  \return               SP Register value
-
 
355
 */
-
 
356
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
-
 
357
{
-
 
358
  uint32_t result;
-
 
359
 
-
 
360
  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
-
 
361
  return(result);
-
 
362
}
-
 
363
 
-
 
364
 
-
 
365
/**
-
 
366
  \brief   Set Stack Pointer (non-secure)
-
 
367
  \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
-
 
368
  \param [in]    topOfStack  Stack Pointer value to set
-
 
369
 */
-
 
370
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
-
 
371
{
-
 
372
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
-
 
373
}
-
 
374
#endif
192
 
375
 
193
 
376
 
194
/**
377
/**
195
  \brief   Get Priority Mask
378
  \brief   Get Priority Mask
196
  \details Returns the current state of the priority mask bit from the Priority Mask Register.
379
  \details Returns the current state of the priority mask bit from the Priority Mask Register.
197
  \return               Priority Mask value
380
  \return               Priority Mask value
198
 */
381
 */
199
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_PRIMASK(void)
382
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
200
{
383
{
201
  uint32_t result;
384
  uint32_t result;
202
 
385
 
203
  __ASM volatile ("MRS %0, primask" : "=r" (result) );
386
  __ASM volatile ("MRS %0, primask" : "=r" (result) :: "memory");
204
  return(result);
387
  return(result);
205
}
388
}
206
 
389
 
207
 
390
 
-
 
391
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
-
 
392
/**
-
 
393
  \brief   Get Priority Mask (non-secure)
-
 
394
  \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
-
 
395
  \return               Priority Mask value
-
 
396
 */
-
 
397
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
-
 
398
{
-
 
399
  uint32_t result;
-
 
400
 
-
 
401
  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) :: "memory");
-
 
402
  return(result);
-
 
403
}
-
 
404
#endif
-
 
405
 
-
 
406
 
208
/**
407
/**
209
  \brief   Set Priority Mask
408
  \brief   Set Priority Mask
210
  \details Assigns the given value to the Priority Mask Register.
409
  \details Assigns the given value to the Priority Mask Register.
211
  \param [in]    priMask  Priority Mask
410
  \param [in]    priMask  Priority Mask
212
 */
411
 */
213
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_PRIMASK(uint32_t priMask)
412
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
214
{
413
{
215
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
414
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
216
}
415
}
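
Usage sketch (not part of either revision): the PRIMASK pair gives the canonical nestable critical section, restoring the previous mask instead of blindly re-enabling:

#include <stdint.h>

static inline uint32_t critical_enter(void)
{
  uint32_t primask = __get_PRIMASK();   /* remember the current state */
  __disable_irq();
  return primask;
}

static inline void critical_exit(uint32_t primask)
{
  __set_PRIMASK(primask);               /* safe even if IRQs were already masked */
}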
217
 
416
 
218
 
417
 
-
 
418
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
-
 
419
/**
219
#if       (__CORTEX_M >= 0x03U)
420
  \brief   Set Priority Mask (non-secure)
-
 
421
  \details Assigns the given value to the non-secure Priority Mask Register when in secure state.
-
 
422
  \param [in]    priMask  Priority Mask
-
 
423
 */
-
 
424
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
-
 
425
{
-
 
426
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
-
 
427
}
-
 
428
#endif
-
 
429
 
220
 
430
 
-
 
431
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
-
 
432
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
-
 
433
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
221
/**
434
/**
222
  \brief   Enable FIQ
435
  \brief   Enable FIQ
223
  \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
436
  \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
224
           Can only be executed in Privileged modes.
437
           Can only be executed in Privileged modes.
225
 */
438
 */
226
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_fault_irq(void)
439
__STATIC_FORCEINLINE void __enable_fault_irq(void)
227
{
440
{
228
  __ASM volatile ("cpsie f" : : : "memory");
441
  __ASM volatile ("cpsie f" : : : "memory");
229
}
442
}
230
 
443
 
231
 
444
 
232
/**
445
/**
233
  \brief   Disable FIQ
446
  \brief   Disable FIQ
234
  \details Disables FIQ interrupts by setting the F-bit in the CPSR.
447
  \details Disables FIQ interrupts by setting the F-bit in the CPSR.
235
           Can only be executed in Privileged modes.
448
           Can only be executed in Privileged modes.
236
 */
449
 */
237
__attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_fault_irq(void)
450
__STATIC_FORCEINLINE void __disable_fault_irq(void)
238
{
451
{
239
  __ASM volatile ("cpsid f" : : : "memory");
452
  __ASM volatile ("cpsid f" : : : "memory");
240
}
453
}
241
 
454
 
242
 
455
 
243
/**
456
/**
244
  \brief   Get Base Priority
457
  \brief   Get Base Priority
245
  \details Returns the current value of the Base Priority register.
458
  \details Returns the current value of the Base Priority register.
246
  \return               Base Priority register value
459
  \return               Base Priority register value
247
 */
460
 */
248
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_BASEPRI(void)
461
__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
249
{
462
{
250
  uint32_t result;
463
  uint32_t result;
251
 
464
 
252
  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
465
  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
253
  return(result);
466
  return(result);
254
}
467
}
255
 
468
 
256
 
469
 
-
 
470
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
-
 
471
/**
-
 
472
  \brief   Get Base Priority (non-secure)
-
 
473
  \details Returns the current value of the non-secure Base Priority register when in secure state.
-
 
474
  \return               Base Priority register value
-
 
475
 */
-
 
476
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
-
 
477
{
-
 
478
  uint32_t result;
-
 
479
 
-
 
480
  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
-
 
481
  return(result);
-
 
482
}
-
 
483
#endif
-
 
484
 
-
 
485
 
257
/**
486
/**
258
  \brief   Set Base Priority
487
  \brief   Set Base Priority
259
  \details Assigns the given value to the Base Priority register.
488
  \details Assigns the given value to the Base Priority register.
260
  \param [in]    basePri  Base Priority value to set
489
  \param [in]    basePri  Base Priority value to set
261
 */
490
 */
262
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_BASEPRI(uint32_t value)
491
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
263
{
492
{
264
  __ASM volatile ("MSR basepri, %0" : : "r" (value) : "memory");
493
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
265
}
494
}
266
 
495
 
267
 
496
 
-
 
497
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
-
 
498
/**
-
 
499
  \brief   Set Base Priority (non-secure)
-
 
500
  \details Assigns the given value to the non-secure Base Priority register when in secure state.
-
 
501
  \param [in]    basePri  Base Priority value to set
-
 
502
 */
-
 
503
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
-
 
504
{
-
 
505
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
-
 
506
}
-
 
507
#endif
-
 
508
 
-
 
509
 
268
/**
510
/**
269
  \brief   Set Base Priority with condition
511
  \brief   Set Base Priority with condition
270
  \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
512
  \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
271
           or the new value increases the BASEPRI priority level.
513
           or the new value increases the BASEPRI priority level.
272
  \param [in]    basePri  Base Priority value to set
514
  \param [in]    basePri  Base Priority value to set
273
 */
515
 */
274
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_BASEPRI_MAX(uint32_t value)
516
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
275
{
517
{
276
  __ASM volatile ("MSR basepri_max, %0" : : "r" (value) : "memory");
518
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
277
}
519
}
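
Usage sketch (not part of either revision): BASEPRI masks only exceptions whose priority value is at or numerically above the threshold, and the basepri_max form raises the mask without ever lowering it, so it is safe in nested contexts. A sketch assuming 4 implemented priority bits, so priorities occupy the top nibble (device specific):

#include <stdint.h>

static void low_prio_lockout(volatile uint32_t *reg, uint32_t v)
{
  uint32_t old = __get_BASEPRI();
  __set_BASEPRI_MAX(4U << 4);   /* mask priority values >= 0x40 (less urgent) */
  *reg = v;                     /* more urgent interrupts stay live */
  __set_BASEPRI(old);
}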
278
 
520
 
279
 
521
 
280
/**
522
/**
281
  \brief   Get Fault Mask
523
  \brief   Get Fault Mask
282
  \details Returns the current value of the Fault Mask register.
524
  \details Returns the current value of the Fault Mask register.
283
  \return               Fault Mask register value
525
  \return               Fault Mask register value
284
 */
526
 */
285
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FAULTMASK(void)
527
__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
286
{
528
{
287
  uint32_t result;
529
  uint32_t result;
288
 
530
 
289
  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
531
  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
290
  return(result);
532
  return(result);
291
}
533
}
292
 
534
 
293
 
535
 
-
 
536
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
-
 
537
/**
-
 
538
  \brief   Get Fault Mask (non-secure)
-
 
539
  \details Returns the current value of the non-secure Fault Mask register when in secure state.
-
 
540
  \return               Fault Mask register value
-
 
541
 */
-
 
542
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
-
 
543
{
-
 
544
  uint32_t result;
-
 
545
 
-
 
546
  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
-
 
547
  return(result);
-
 
548
}
-
 
549
#endif
-
 
550
 
-
 
551
 
294
/**
552
/**
295
  \brief   Set Fault Mask
553
  \brief   Set Fault Mask
296
  \details Assigns the given value to the Fault Mask register.
554
  \details Assigns the given value to the Fault Mask register.
297
  \param [in]    faultMask  Fault Mask value to set
555
  \param [in]    faultMask  Fault Mask value to set
298
 */
556
 */
299
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FAULTMASK(uint32_t faultMask)
557
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
300
{
558
{
301
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
559
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
302
}
560
}
303
 
561
 
304
#endif /* (__CORTEX_M >= 0x03U) */
-
 
305
 
562
 
-
 
563
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
-
 
564
/**
-
 
565
  \brief   Set Fault Mask (non-secure)
-
 
566
  \details Assigns the given value to the non-secure Fault Mask register when in secure state.
-
 
567
  \param [in]    faultMask  Fault Mask value to set
-
 
568
 */
-
 
569
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
-
 
570
{
-
 
571
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
-
 
572
}
-
 
573
#endif
-
 
574
 
-
 
575
#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
-
 
576
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
-
 
577
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
-
 
578
 
-
 
579
 
-
 
580
#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
-
 
581
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
-
 
582
 
-
 
583
/**
-
 
584
  \brief   Get Process Stack Pointer Limit
-
 
585
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
-
 
586
  Stack Pointer Limit register hence zero is returned always in non-secure
-
 
587
  mode.
-
 
588
 
-
 
589
  \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
-
 
590
  \return               PSPLIM Register value
-
 
591
 */
-
 
592
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
-
 
593
{
-
 
594
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
-
 
595
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
-
 
596
    // without main extensions, the non-secure PSPLIM is RAZ/WI
-
 
597
  return 0U;
-
 
598
#else
-
 
599
  uint32_t result;
-
 
600
  __ASM volatile ("MRS %0, psplim"  : "=r" (result) );
-
 
601
  return result;
-
 
602
#endif
-
 
603
}
-
 
604
 
-
 
605
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
-
 
606
/**
-
 
607
  \brief   Get Process Stack Pointer Limit (non-secure)
-
 
608
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
-
 
609
  Stack Pointer Limit register hence zero is returned always.
-
 
610
 
-
 
611
  \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
-
 
612
  \return               PSPLIM Register value
-
 
613
 */
-
 
614
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
-
 
615
{
-
 
616
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
-
 
617
  // without main extensions, the non-secure PSPLIM is RAZ/WI
-
 
618
  return 0U;
-
 
619
#else
-
 
620
  uint32_t result;
-
 
621
  __ASM volatile ("MRS %0, psplim_ns"  : "=r" (result) );
-
 
622
  return result;
-
 
623
#endif
-
 
624
}
-
 
625
#endif
-
 
626
 
-
 
627
 
-
 
628
/**
-
 
629
  \brief   Set Process Stack Pointer Limit
-
 
630
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
-
 
631
  Stack Pointer Limit register hence the write is silently ignored in non-secure
-
 
632
  mode.
-
 
633
 
-
 
634
  \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
-
 
635
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
-
 
636
 */
-
 
637
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
-
 
638
{
-
 
639
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
-
 
640
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
-
 
641
  // without main extensions, the non-secure PSPLIM is RAZ/WI
-
 
642
  (void)ProcStackPtrLimit;
-
 
643
#else
-
 
644
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
-
 
645
#endif
-
 
646
}
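
Usage sketch (not part of either revision): on ARMv8-M (the only targets this block is compiled for), PSPLIM turns a thread-stack overflow into a stack-overflow fault (UFSR.STKOF) instead of silent memory corruption:

#include <stdint.h>

static uint32_t task_stack[256];

static void arm_stack_guard(void)
{
  __set_PSPLIM((uint32_t)&task_stack[0]);   /* lowest address PSP may reach */
  __set_PSP((uint32_t)&task_stack[256]);    /* initial top of stack */
}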
-
 
647
 
-
 
648
 
-
 
649
#if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
-
 
650
/**
-
 
651
  \brief   Set Process Stack Pointer (non-secure)
-
 
652
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
-
 
653
  Stack Pointer Limit register hence the write is silently ignored.
-
 
654
 
-
 
655
  \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
-
 
656
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
-
 
657
 */
-
 
658
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
-
 
659
{
-
 
660
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
-
 
661
  // without main extensions, the non-secure PSPLIM is RAZ/WI
-
 
662
  (void)ProcStackPtrLimit;
-
 
663
#else
-
 
664
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
-
 
665
#endif
-
 
666
}
-
 
667
#endif
-
 
668
 
-
 
669
 
-
 
670
/**
-
 
671
  \brief   Get Main Stack Pointer Limit
-
 
672
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
-
 
673
  Stack Pointer Limit register hence zero is returned always in non-secure
-
 
674
  mode.
-
 
675
 
-
 
676
  \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
-
 
677
  \return               MSPLIM Register value
-
 
678
 */
-
 
679
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
-
 
680
{
-
 
681
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
-
 
682
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
-
 
683
  // without main extensions, the non-secure MSPLIM is RAZ/WI
-
 
684
  return 0U;
-
 
685
#else
-
 
686
  uint32_t result;
-
 
687
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
-
 
688
  return result;
-
 
689
#endif
-
 
690
}
-
 
691
 
-
 
692
 
-
 
693
#if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
-
 
694
/**
-
 
695
  \brief   Get Main Stack Pointer Limit (non-secure)
-
 
696
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
-
 
697
  Stack Pointer Limit register hence zero is returned always.
-
 
698
 
-
 
699
  \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state.
-
 
700
  \return               MSPLIM Register value
-
 
701
 */
-
 
702
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
-
 
703
{
-
 
704
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
-
 
705
  // without main extensions, the non-secure MSPLIM is RAZ/WI
-
 
706
  return 0U;
-
 
707
#else
-
 
708
  uint32_t result;
-
 
709
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
-
 
710
  return result;
-
 
711
#endif
-
 
712
}
-
 
713
#endif
-
 
714
 
-
 
715
 
-
 
716
/**
-
 
717
  \brief   Set Main Stack Pointer Limit
-
 
718
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
-
 
719
  Stack Pointer Limit register hence the write is silently ignored in non-secure
-
 
720
  mode.
-
 
721
 
-
 
722
  \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
-
 
723
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
-
 
724
 */
-
 
725
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
-
 
726
{
-
 
727
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
-
 
728
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
-
 
729
  // without main extensions, the non-secure MSPLIM is RAZ/WI
-
 
730
  (void)MainStackPtrLimit;
-
 
731
#else
-
 
732
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
-
 
733
#endif
-
 
734
}
-
 
735
 
-
 
736
 
-
 
737
#if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
-
 
738
/**
-
 
739
  \brief   Set Main Stack Pointer Limit (non-secure)
-
 
740
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
-
 
741
  Stack Pointer Limit register hence the write is silently ignored.
-
 
742
 
-
 
743
  \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
-
 
744
  \param [in]    MainStackPtrLimit  Main Stack Pointer value to set
-
 
745
 */
-
 
746
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
-
 
747
{
-
 
748
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
-
 
749
  // without main extensions, the non-secure MSPLIM is RAZ/WI
-
 
750
  (void)MainStackPtrLimit;
-
 
751
#else
-
 
752
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
-
 
753
#endif
-
 
754
}
-
 
755
#endif
-
 
756
 
-
 
757
#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
-
 
758
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
306
 
759
 
307
#if       (__CORTEX_M == 0x04U) || (__CORTEX_M == 0x07U)
-
 
308
 
760
 
309
/**
761
/**
310
  \brief   Get FPSCR
762
  \brief   Get FPSCR
311
  \details Returns the current value of the Floating Point Status/Control register.
763
  \details Returns the current value of the Floating Point Status/Control register.
312
  \return               Floating Point Status/Control register value
764
  \return               Floating Point Status/Control register value
313
 */
765
 */
314
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPSCR(void)
766
__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
315
{
767
{
316
#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
768
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
-
 
769
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
-
 
770
#if __has_builtin(__builtin_arm_get_fpscr) 
-
 
771
// Re-enable using built-in when GCC has been fixed
-
 
772
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
-
 
773
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
-
 
774
  return __builtin_arm_get_fpscr();
-
 
775
#else
317
  uint32_t result;
776
  uint32_t result;
318
 
777
 
319
  /* Empty asm statement works as a scheduling barrier */
-
 
320
  __ASM volatile ("");
-
 
321
  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
778
  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
322
  __ASM volatile ("");
-
 
323
  return(result);
779
  return(result);
-
 
780
#endif
324
#else
781
#else
325
   return(0);
782
  return(0U);
326
#endif
783
#endif
327
}
784
}
328
 
785
 
329
 
786
 
330
/**
787
/**
331
  \brief   Set FPSCR
788
  \brief   Set FPSCR
332
  \details Assigns the given value to the Floating Point Status/Control register.
789
  \details Assigns the given value to the Floating Point Status/Control register.
333
  \param [in]    fpscr  Floating Point Status/Control value to set
790
  \param [in]    fpscr  Floating Point Status/Control value to set
334
 */
791
 */
335
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
792
__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
336
{
793
{
337
#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
794
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
-
 
795
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
-
 
796
#if __has_builtin(__builtin_arm_set_fpscr)
-
 
797
// Re-enable using built-in when GCC has been fixed
-
 
798
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
338
  /* Empty asm statement works as a scheduling barrier */
799
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
339
  __ASM volatile ("");
800
  __builtin_arm_set_fpscr(fpscr);
-
 
801
#else
340
  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc");
802
  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory");
-
 
803
#endif
-
 
804
#else
341
  __ASM volatile ("");
805
  (void)fpscr;
342
#endif
806
#endif
343
}
807
}
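
Usage sketch (not part of either revision): the FPSCR pair is typically used to poll and clear the cumulative exception flags (IOC/DZC/OFC/UFC/IXC in bits 0-4, IDC in bit 7):

#include <stdint.h>

static int fp_invalid_occurred(void)
{
  uint32_t fpscr = __get_FPSCR();
  __set_FPSCR(fpscr & ~0x9FU);    /* clear all cumulative flags */
  return (fpscr & 0x1U) != 0;     /* IOC: invalid operation */
}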
344
 
808
 
345
#endif /* (__CORTEX_M == 0x04U) || (__CORTEX_M == 0x07U) */
-
 
346
 
-
 
347
 
-
 
348
 
809
 
349
/*@} end of CMSIS_Core_RegAccFunctions */
810
/*@} end of CMSIS_Core_RegAccFunctions */
350
 
811
 
351
 
812
 
352
/* ##########################  Core Instruction Access  ######################### */
813
/* ##########################  Core Instruction Access  ######################### */
Line 358... Line 819...
358
/* Define macros for porting to both thumb1 and thumb2.
819
/* Define macros for porting to both thumb1 and thumb2.
359
 * For thumb1, use low register (r0-r7), specified by constraint "l"
820
 * For thumb1, use low register (r0-r7), specified by constraint "l"
360
 * Otherwise, use general registers, specified by constraint "r" */
821
 * Otherwise, use general registers, specified by constraint "r" */
361
#if defined (__thumb__) && !defined (__thumb2__)
822
#if defined (__thumb__) && !defined (__thumb2__)
362
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
823
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
-
 
824
#define __CMSIS_GCC_RW_REG(r) "+l" (r)
363
#define __CMSIS_GCC_USE_REG(r) "l" (r)
825
#define __CMSIS_GCC_USE_REG(r) "l" (r)
364
#else
826
#else
365
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
827
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
-
 
828
#define __CMSIS_GCC_RW_REG(r) "+r" (r)
366
#define __CMSIS_GCC_USE_REG(r) "r" (r)
829
#define __CMSIS_GCC_USE_REG(r) "r" (r)
367
#endif
830
#endif
368
 
831
 
369
/**
832
/**
370
  \brief   No Operation
833
  \brief   No Operation
371
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
834
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
372
 */
835
 */
373
__attribute__((always_inline)) __STATIC_INLINE void __NOP(void)
836
#define __NOP()                             __ASM volatile ("nop")
374
{
-
 
375
  __ASM volatile ("nop");
-
 
376
}
-
 
377
 
-
 
378
 
837
 
379
/**
838
/**
380
  \brief   Wait For Interrupt
839
  \brief   Wait For Interrupt
381
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
840
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
382
 */
841
 */
383
__attribute__((always_inline)) __STATIC_INLINE void __WFI(void)
842
#define __WFI()                             __ASM volatile ("wfi")
384
{
-
 
385
  __ASM volatile ("wfi");
-
 
386
}
-
 
387
 
843
 
388
 
844
 
389
/**
845
/**
390
  \brief   Wait For Event
846
  \brief   Wait For Event
391
  \details Wait For Event is a hint instruction that permits the processor to enter
847
  \details Wait For Event is a hint instruction that permits the processor to enter
392
    a low-power state until one of a number of events occurs.
848
           a low-power state until one of a number of events occurs.
393
 */
849
 */
394
__attribute__((always_inline)) __STATIC_INLINE void __WFE(void)
850
#define __WFE()                             __ASM volatile ("wfe")
395
{
-
 
396
  __ASM volatile ("wfe");
-
 
397
}
-
 
398
 
851
 
399
 
852
 
400
/**
853
/**
401
  \brief   Send Event
854
  \brief   Send Event
402
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
855
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
403
 */
856
 */
404
__attribute__((always_inline)) __STATIC_INLINE void __SEV(void)
857
#define __SEV()                             __ASM volatile ("sev")
405
{
-
 
406
  __ASM volatile ("sev");
-
 
407
}
-
 
408
 
858
 
409
 
859
 
410
/**
860
/**
411
  \brief   Instruction Synchronization Barrier
861
  \brief   Instruction Synchronization Barrier
412
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
862
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
413
           so that all instructions following the ISB are fetched from cache or memory,
863
           so that all instructions following the ISB are fetched from cache or memory,
414
           after the instruction has been completed.
864
           after the instruction has been completed.
415
 */
865
 */
416
__attribute__((always_inline)) __STATIC_INLINE void __ISB(void)
866
__STATIC_FORCEINLINE void __ISB(void)
417
{
867
{
418
  __ASM volatile ("isb 0xF":::"memory");
868
  __ASM volatile ("isb 0xF":::"memory");
419
}
869
}
420
 
870
 
421
 
871
 
422
/**
872
/**
423
  \brief   Data Synchronization Barrier
873
  \brief   Data Synchronization Barrier
424
  \details Acts as a special kind of Data Memory Barrier.
874
  \details Acts as a special kind of Data Memory Barrier.
425
           It completes when all explicit memory accesses before this instruction complete.
875
           It completes when all explicit memory accesses before this instruction complete.
426
 */
876
 */
427
__attribute__((always_inline)) __STATIC_INLINE void __DSB(void)
877
__STATIC_FORCEINLINE void __DSB(void)
428
{
878
{
429
  __ASM volatile ("dsb 0xF":::"memory");
879
  __ASM volatile ("dsb 0xF":::"memory");
430
}
880
}
431
 
881
 
432
 
882
 
433
/**
883
/**
434
  \brief   Data Memory Barrier
884
  \brief   Data Memory Barrier
435
  \details Ensures the apparent order of the explicit memory operations before
885
  \details Ensures the apparent order of the explicit memory operations before
436
           and after the instruction, without ensuring their completion.
886
           and after the instruction, without ensuring their completion.
437
 */
887
 */
438
__attribute__((always_inline)) __STATIC_INLINE void __DMB(void)
888
__STATIC_FORCEINLINE void __DMB(void)
439
{
889
{
440
  __ASM volatile ("dmb 0xF":::"memory");
890
  __ASM volatile ("dmb 0xF":::"memory");
441
}
891
}
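
Usage sketch (not part of either revision): the classic DSB+ISB pairing after a system-control write, here relocating the vector table; SCB and VTOR come from the matching core_cm<n>.h, not from this file:

#include <stdint.h>

static void relocate_vector_table(uint32_t new_base)
{
  SCB->VTOR = new_base;   /* new_base must satisfy the VTOR alignment rules */
  __DSB();                /* ensure the store has completed */
  __ISB();                /* refetch so later code uses the new table */
}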
442
 
892
 
443
 
893
 
444
/**
894
/**
445
  \brief   Reverse byte order (32 bit)
895
  \brief   Reverse byte order (32 bit)
446
  \details Reverses the byte order in integer value.
896
  \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
447
  \param [in]    value  Value to reverse
897
  \param [in]    value  Value to reverse
448
  \return               Reversed value
898
  \return               Reversed value
449
 */
899
 */
450
__attribute__((always_inline)) __STATIC_INLINE uint32_t __REV(uint32_t value)
900
__STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
451
{
901
{
452
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
902
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
453
  return __builtin_bswap32(value);
903
  return __builtin_bswap32(value);
454
#else
904
#else
455
  uint32_t result;
905
  uint32_t result;
456
 
906
 
457
  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
907
  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
458
  return(result);
908
  return result;
459
#endif
909
#endif
460
}
910
}
461
 
911
 
462
 
912
 
463
/**
913
/**
464
  \brief   Reverse byte order (16 bit)
914
  \brief   Reverse byte order (16 bit)
465
  \details Reverses the byte order in two unsigned short values.
915
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
466
  \param [in]    value  Value to reverse
916
  \param [in]    value  Value to reverse
467
  \return               Reversed value
917
  \return               Reversed value
468
 */
918
 */
469
__attribute__((always_inline)) __STATIC_INLINE uint32_t __REV16(uint32_t value)
919
__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
470
{
920
{
471
  uint32_t result;
921
  uint32_t result;
472
 
922
 
473
  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
923
  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
474
  return(result);
924
  return result;
475
}
925
}
476
 
926
 
477
 
927
 
478
/**
928
/**
479
  \brief   Reverse byte order in signed short value
929
  \brief   Reverse byte order (16 bit)
480
  \details Reverses the byte order in a signed short value with sign extension to integer.
930
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
481
  \param [in]    value  Value to reverse
931
  \param [in]    value  Value to reverse
482
  \return               Reversed value
932
  \return               Reversed value
483
 */
933
 */
484
__attribute__((always_inline)) __STATIC_INLINE int32_t __REVSH(int32_t value)
934
__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
485
{
935
{
486
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
936
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
487
  return (short)__builtin_bswap16(value);
937
  return (int16_t)__builtin_bswap16(value);
488
#else
938
#else
489
  int32_t result;
939
  int16_t result;
490
 
940
 
491
  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
941
  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
492
  return(result);
942
  return result;
493
#endif
943
#endif
494
}
944
}
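
Usage sketch (not part of either revision): together the three reverse instructions cover the common endianness conversions on a little-endian Cortex-M:

#include <stdint.h>

static uint32_t be32_to_cpu(uint32_t v)    { return __REV(v);   }  /* 0x12345678 -> 0x78563412 */
static uint32_t swap_halfwords(uint32_t v) { return __REV16(v); }  /* 0x12345678 -> 0x34127856 */
static int16_t  be16_to_cpu(int16_t v)     { return __REVSH(v); }  /* sign-preserving 16-bit swap */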
495
 
945
 
496
 
946
 
497
/**
947
/**
498
  \brief   Rotate Right in unsigned value (32 bit)
948
  \brief   Rotate Right in unsigned value (32 bit)
499
  \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
949
  \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
500
  \param [in]    value  Value to rotate
950
  \param [in]    op1  Value to rotate
501
  \param [in]    value  Number of Bits to rotate
951
  \param [in]    op2  Number of Bits to rotate
502
  \return               Rotated value
952
  \return               Rotated value
503
 */
953
 */
504
__attribute__((always_inline)) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
954
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
505
{
955
{
-
 
956
  op2 %= 32U;
-
 
957
  if (op2 == 0U)
-
 
958
  {
-
 
959
    return op1;
-
 
960
  }
506
  return (op1 >> op2) | (op1 << (32U - op2));
961
  return (op1 >> op2) | (op1 << (32U - op2));
507
}
962
}
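
Usage sketch (not part of either revision): the new op2 %= 32U guard matters in C, since a rotate count that is a multiple of 32 would otherwise evaluate op1 << 32, which is undefined behaviour. Example use in a simple hash mixing step:

#include <stdint.h>

static uint32_t hash_step(uint32_t h, uint32_t word)
{
  return __ROR(h ^ word, 13U) * 5U;   /* any rotate count is now safe */
}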
508
 
963
 
509
 
964
 
510
/**
965
/**
Line 521... Line 976...
521
  \brief   Reverse bit order of value
976
  \brief   Reverse bit order of value
522
  \details Reverses the bit order of the given value.
977
  \details Reverses the bit order of the given value.
523
  \param [in]    value  Value to reverse
978
  \param [in]    value  Value to reverse
524
  \return               Reversed value
979
  \return               Reversed value
525
 */
980
 */
526
__attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
981
__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
527
{
982
{
528
  uint32_t result;
983
  uint32_t result;
529
 
984
 
530
#if       (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)
985
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
-
 
986
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
-
 
987
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
531
   __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
988
   __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
532
#else
989
#else
533
  int32_t s = 4 /*sizeof(v)*/ * 8 - 1; /* extra shift needed at end */
990
  uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */
534
 
991
 
535
  result = value;                      /* r will be reversed bits of v; first get LSB of v */
992
  result = value;                      /* r will be reversed bits of v; first get LSB of v */
536
  for (value >>= 1U; value; value >>= 1U)
993
  for (value >>= 1U; value != 0U; value >>= 1U)
537
  {
994
  {
538
    result <<= 1U;
995
    result <<= 1U;
539
    result |= value & 1U;
996
    result |= value & 1U;
540
    s--;
997
    s--;
541
  }
998
  }
542
  result <<= s;                        /* shift when v's highest bits are zero */
999
  result <<= s;                        /* shift when v's highest bits are zero */
543
#endif
1000
#endif
544
  return(result);
1001
  return result;
545
}
1002
}
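
Usage sketch (not part of either revision): bit reversal shows up in reflected CRCs and FFT index permutations; on cores without the rbit instruction the loop fallback above is used instead:

#include <stdint.h>

static uint8_t reflect8(uint8_t b)
{
  return (uint8_t)(__RBIT(b) >> 24);   /* mirror of the low byte */
}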
546
 
1003
 
547
 
1004
 
548
/**
1005
/**
549
  \brief   Count leading zeros
1006
  \brief   Count leading zeros
550
  \details Counts the number of leading zeros of a data value.
1007
  \details Counts the number of leading zeros of a data value.
551
  \param [in]  value  Value to count the leading zeros
1008
  \param [in]  value  Value to count the leading zeros
552
  \return             number of leading zeros in value
1009
  \return             number of leading zeros in value
553
 */
1010
 */
554
#define __CLZ             __builtin_clz
1011
#define __CLZ             (uint8_t)__builtin_clz
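
Usage sketch (not part of either revision): GCC leaves __builtin_clz(0) undefined, so guard the zero case before using __CLZ, for example in an integer log2:

#include <stdint.h>

static uint32_t ilog2(uint32_t v)
{
  return (v != 0U) ? (31U - __CLZ(v)) : 0U;   /* __CLZ(0) is undefined */
}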
555
 
-
 
556
 
1012
 
557
#if       (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)
-
 
558
 
1013
 
-
 
1014
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
-
 
1015
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
-
 
1016
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
-
 
1017
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
559
/**
1018
/**
560
  \brief   LDR Exclusive (8 bit)
1019
  \brief   LDR Exclusive (8 bit)
561
  \details Executes an exclusive LDR instruction for 8 bit value.
1020
  \details Executes an exclusive LDR instruction for 8 bit value.
562
  \param [in]    ptr  Pointer to data
1021
  \param [in]    ptr  Pointer to data
563
  \return             value of type uint8_t at (*ptr)
1022
  \return             value of type uint8_t at (*ptr)
564
 */
1023
 */
565
__attribute__((always_inline)) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
1024
__STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
566
{
1025
{
567
    uint32_t result;
1026
    uint32_t result;
568
 
1027
 
569
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1028
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
570
   __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
1029
   __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
Line 582... Line 1041...
582
  \brief   LDR Exclusive (16 bit)
1041
  \brief   LDR Exclusive (16 bit)
583
  \details Executes an exclusive LDR instruction for 16 bit values.
1042
  \details Executes an exclusive LDR instruction for 16 bit values.
584
  \param [in]    ptr  Pointer to data
1043
  \param [in]    ptr  Pointer to data
585
  \return        value of type uint16_t at (*ptr)
1044
  \return        value of type uint16_t at (*ptr)
586
 */
1045
 */
587
__attribute__((always_inline)) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
1046
__STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
588
{
1047
{
589
    uint32_t result;
1048
    uint32_t result;
590
 
1049
 
591
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1050
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
592
   __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
1051
   __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
Line 604... Line 1063...
604
  \brief   LDR Exclusive (32 bit)
1063
  \brief   LDR Exclusive (32 bit)
605
  \details Executes an exclusive LDR instruction for 32 bit values.
1064
  \details Executes an exclusive LDR instruction for 32 bit values.
606
  \param [in]    ptr  Pointer to data
1065
  \param [in]    ptr  Pointer to data
607
  \return        value of type uint32_t at (*ptr)
1066
  \return        value of type uint32_t at (*ptr)
608
 */
1067
 */
609
__attribute__((always_inline)) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
1068
__STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
610
{
1069
{
611
    uint32_t result;
1070
    uint32_t result;
612
 
1071
 
613
   __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
1072
   __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
614
   return(result);
1073
   return(result);
Line 621... Line 1080...
621
  \param [in]  value  Value to store
1080
  \param [in]  value  Value to store
622
  \param [in]    ptr  Pointer to location
1081
  \param [in]    ptr  Pointer to location
623
  \return          0  Function succeeded
1082
  \return          0  Function succeeded
624
  \return          1  Function failed
1083
  \return          1  Function failed
625
 */
1084
 */
626
__attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
1085
__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
627
{
1086
{
628
   uint32_t result;
1087
   uint32_t result;
629
 
1088
 
630
   __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
1089
   __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
631
   return(result);
1090
   return(result);
Line 638... Line 1097...
638
  \param [in]  value  Value to store
1097
  \param [in]  value  Value to store
639
  \param [in]    ptr  Pointer to location
1098
  \param [in]    ptr  Pointer to location
640
  \return          0  Function succeeded
1099
  \return          0  Function succeeded
641
  \return          1  Function failed
1100
  \return          1  Function failed
642
 */
1101
 */
643
__attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
1102
__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
644
{
1103
{
645
   uint32_t result;
1104
   uint32_t result;
646
 
1105
 
647
   __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
1106
   __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
648
   return(result);
1107
   return(result);
Line 655... Line 1114...
655
  \param [in]  value  Value to store
1114
  \param [in]  value  Value to store
656
  \param [in]    ptr  Pointer to location
1115
  \param [in]    ptr  Pointer to location
657
  \return          0  Function succeeded
1116
  \return          0  Function succeeded
658
  \return          1  Function failed
1117
  \return          1  Function failed
659
 */
1118
 */
660
__attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
1119
__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
661
{
1120
{
662
   uint32_t result;
1121
   uint32_t result;
663
 
1122
 
664
   __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
1123
   __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
665
   return(result);
1124
   return(result);
Line 668... Line 1127...
668
 
1127
 
669
/**
1128
/**
670
  \brief   Remove the exclusive lock
1129
  \brief   Remove the exclusive lock
671
  \details Removes the exclusive lock which is created by LDREX.
1130
  \details Removes the exclusive lock which is created by LDREX.
672
 */
1131
 */
673
__attribute__((always_inline)) __STATIC_INLINE void __CLREX(void)
1132
__STATIC_FORCEINLINE void __CLREX(void)
674
{
1133
{
675
  __ASM volatile ("clrex" ::: "memory");
1134
  __ASM volatile ("clrex" ::: "memory");
676
}
1135
}
677
 
1136
 
-
 
1137
#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
-
 
1138
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
-
 
1139
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
-
 
1140
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
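/* Usage sketch: a lock-free read-modify-write built from the exclusive pair
   above. `atomic_inc` and `counter` are illustrative names, not CMSIS APIs. */
#if 0
static uint32_t atomic_inc(volatile uint32_t *counter)
{
  uint32_t v;
  do {
    v = __LDREXW(counter) + 1U;          /* load and open exclusive monitor */
  } while (__STREXW(v, counter) != 0U);  /* 1 = lost exclusivity, so retry  */
  return v;
}
#endif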


#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  ARG1  Value to be saturated
  \param [in]  ARG2  Bit position to saturate to (1..32)
  \return             Saturated value
 */
#define __SSAT(ARG1,ARG2) \
__extension__ \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  ARG1  Value to be saturated
  \param [in]  ARG2  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT(ARG1,ARG2) \
 __extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
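/* Usage sketch: clamping a 32-bit intermediate into narrower ranges with the
   saturating macros above; __SSAT(x, 16) clamps to [-32768, 32767] and
   __USAT(x, 8) to [0, 255]. Function names are illustrative. */
#if 0
static int16_t clamp_q15(int32_t acc)
{
  return (int16_t)__SSAT(acc, 16);   /* e.g.  40000 -> 32767 */
}

static uint8_t clamp_u8(int32_t acc)
{
  return (uint8_t)__USAT(acc, 8);    /* e.g.  300 -> 255, -5 -> 0 */
}
#endif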


/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]    value  Value to rotate
  \return               Rotated value
 */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes an unprivileged LDRT instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not
       accepted by the assembler, so the following less efficient pattern
       has to be used.
    */
   __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
   return ((uint8_t) result);    /* Add explicit type cast here */
}

/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes an unprivileged LDRT instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not
       accepted by the assembler, so the following less efficient pattern
       has to be used.
    */
   __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
   return ((uint16_t) result);    /* Add explicit type cast here */
}

/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes an unprivileged LDRT instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
    uint32_t result;

   __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
   return(result);
}

/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes an unprivileged STRT instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
   __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes an unprivileged STRT instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
   __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an unprivileged STRT instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
   __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}

#else  /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */

/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat >= 1U) && (sat <= 32U))
  {
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max ;
    if (val > max)
    {
      return max;
    }
    else if (val < min)
    {
      return min;
    }
  }
  return val;
}

/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  if (sat <= 31U)
  {
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max)
    {
      return max;
    }
    else if (val < 0)
    {
      return 0U;
    }
  }
  return (uint32_t)val;
}

#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */


#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/**
  \brief   Load-Acquire (8 bit)
  \details Executes a LDAB instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
    uint32_t result;

   __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
   return ((uint8_t) result);
}


/**
  \brief   Load-Acquire (16 bit)
  \details Executes a LDAH instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
    uint32_t result;

   __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
   return ((uint16_t) result);
}


/**
  \brief   Load-Acquire (32 bit)
  \details Executes a LDA instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
    uint32_t result;

   __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
   return(result);
}


/**
  \brief   Store-Release (8 bit)
  \details Executes a STLB instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
   __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   Store-Release (16 bit)
  \details Executes a STLH instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
   __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   Store-Release (32 bit)
  \details Executes a STL instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
   __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   Load-Acquire Exclusive (8 bit)
  \details Executes a LDAB exclusive instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAEXB(volatile uint8_t *ptr)
{
    uint32_t result;

   __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) );
   return ((uint8_t) result);
}


/**
  \brief   Load-Acquire Exclusive (16 bit)
  \details Executes a LDAH exclusive instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAEXH(volatile uint16_t *ptr)
{
    uint32_t result;

   __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) );
   return ((uint16_t) result);
}


/**
  \brief   Load-Acquire Exclusive (32 bit)
  \details Executes a LDA exclusive instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDAEX(volatile uint32_t *ptr)
{
    uint32_t result;

   __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) );
   return(result);
}


/**
  \brief   Store-Release Exclusive (8 bit)
  \details Executes a STLB exclusive instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
{
   uint32_t result;

   __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
   return(result);
}


/**
  \brief   Store-Release Exclusive (16 bit)
  \details Executes a STLH exclusive instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
{
   uint32_t result;

   __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
   return(result);
}


/**
  \brief   Store-Release Exclusive (32 bit)
  \details Executes a STL exclusive instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
{
   uint32_t result;

   __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
   return(result);
}

#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
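/* Usage sketch: a minimal spinlock built from the Armv8-M load-acquire /
   store-release exclusives defined above. `spin_lock` and `spin_unlock` are
   illustrative names, not CMSIS APIs; no fairness or priority handling. */
#if 0
static void spin_lock(volatile uint32_t *lock)
{
  do {
    while (__LDAEX(lock) != 0U) { }     /* spin until the lock reads free     */
  } while (__STLEX(1U, lock) != 0U);    /* claim it; retry if the store fails */
}

static void spin_unlock(volatile uint32_t *lock)
{
  __STL(0U, lock);                      /* store-release publishes the unlock */
}
#endif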

/*@}*/ /* end of group CMSIS_Core_InstructionInterface */


/* ###################  Compiler specific Intrinsics  ########################### */
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
  Access to dedicated SIMD instructions
  @{
*/

#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))

__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
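
/* Usage sketch: the halving add above averages four packed unsigned bytes in
   one instruction, per lane (a[i] + b[i]) >> 1 with no overflow. `avg4` is an
   illustrative name, not a CMSIS API. */
#if 0
static uint32_t avg4(uint32_t a, uint32_t b)
{
  return __UHADD8(a, b);
}
#endif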


__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUAD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
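
/* Usage sketch: __SMLAD performs a dual 16 x 16 multiply-accumulate, so one
   call advances a Q15 dot product by two samples. `dot2` and its arguments
   are illustrative; x and y each pack two int16_t values. */
#if 0
static int32_t dot2(uint32_t x, uint32_t y, int32_t acc)
{
  /* acc + (int16_t)x * (int16_t)y + (int16_t)(x >> 16) * (int16_t)(y >> 16) */
  return (int32_t)__SMLAD(x, y, (uint32_t)acc);
}
#endif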

__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint32_t __SMUSD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint32_t __SEL  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
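
/* Usage sketch: a per-byte unsigned maximum. __USUB8 sets the APSR.GE flag
   for each byte lane where op1 >= op2, and __SEL then picks bytes from its
   first operand where GE is set. `max4` is an illustrative name. */
#if 0
static uint32_t max4(uint32_t a, uint32_t b)
{
  (void)__USUB8(a, b);   /* executed only for its GE flag side effect */
  return __SEL(a, b);
}
#endif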

__STATIC_FORCEINLINE  int32_t __QADD( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE  int32_t __QSUB( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

#if 0
#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
#endif

#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )
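
/* Usage sketch: packing two 16-bit samples into one word for the packed
   intrinsics above; __PKHBT keeps the bottom halfword of its first argument
   and inserts the second shifted left. `pack2` is an illustrative name. */
#if 0
static uint32_t pack2(int16_t lo, int16_t hi)
{
  return __PKHBT((uint32_t)(uint16_t)lo, (uint32_t)(uint16_t)hi, 16);
}
#endif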

__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
 int32_t result;

 __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
 return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1) */
/*@} end of group CMSIS_SIMD_intrinsics */


#pragma GCC diagnostic pop

#endif /* __CMSIS_GCC_H */