/**************************************************************************//**
2
 * @file     cmsis_gcc.h
3
 * @brief    CMSIS compiler GCC header file
4
 * @version  V5.0.4
5
 * @date     09. April 2018
6
 ******************************************************************************/
7
/*
8
 * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
9
 *
10
 * SPDX-License-Identifier: Apache-2.0
11
 *
12
 * Licensed under the Apache License, Version 2.0 (the License); you may
13
 * not use this file except in compliance with the License.
14
 * You may obtain a copy of the License at
15
 *
16
 * www.apache.org/licenses/LICENSE-2.0
17
 *
18
 * Unless required by applicable law or agreed to in writing, software
19
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
20
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21
 * See the License for the specific language governing permissions and
22
 * limitations under the License.
23
 */
24
 
25
#ifndef __CMSIS_GCC_H
26
#define __CMSIS_GCC_H
27
 
28
/* ignore some GCC warnings */
29
#pragma GCC diagnostic push
30
#pragma GCC diagnostic ignored "-Wsign-conversion"
31
#pragma GCC diagnostic ignored "-Wconversion"
32
#pragma GCC diagnostic ignored "-Wunused-parameter"
33
 
34
/* Fallback for __has_builtin */
35
#ifndef __has_builtin
36
  #define __has_builtin(x) (0)
37
#endif
38
 
39
/* CMSIS compiler specific defines */
40
#ifndef   __ASM
41
  #define __ASM                                  __asm
42
#endif
43
#ifndef   __INLINE
44
  #define __INLINE                               inline
45
#endif
46
#ifndef   __STATIC_INLINE
47
  #define __STATIC_INLINE                        static inline
48
#endif
49
#ifndef   __STATIC_FORCEINLINE                 
50
  #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static inline
51
#endif                                           
52
#ifndef   __NO_RETURN
53
  #define __NO_RETURN                            __attribute__((__noreturn__))
54
#endif
55
#ifndef   __USED
56
  #define __USED                                 __attribute__((used))
57
#endif
58
#ifndef   __WEAK
59
  #define __WEAK                                 __attribute__((weak))
60
#endif
61
#ifndef   __PACKED
62
  #define __PACKED                               __attribute__((packed, aligned(1)))
63
#endif
64
#ifndef   __PACKED_STRUCT
65
  #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
66
#endif
67
#ifndef   __PACKED_UNION
68
  #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
69
#endif
70
#ifndef   __UNALIGNED_UINT32        /* deprecated */
71
  #pragma GCC diagnostic push
72
  #pragma GCC diagnostic ignored "-Wpacked"
73
  #pragma GCC diagnostic ignored "-Wattributes"
74
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
75
  #pragma GCC diagnostic pop
76
  #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
77
#endif
78
#ifndef   __UNALIGNED_UINT16_WRITE
79
  #pragma GCC diagnostic push
80
  #pragma GCC diagnostic ignored "-Wpacked"
81
  #pragma GCC diagnostic ignored "-Wattributes"
82
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
83
  #pragma GCC diagnostic pop
84
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
85
#endif
86
#ifndef   __UNALIGNED_UINT16_READ
87
  #pragma GCC diagnostic push
88
  #pragma GCC diagnostic ignored "-Wpacked"
89
  #pragma GCC diagnostic ignored "-Wattributes"
90
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
91
  #pragma GCC diagnostic pop
92
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
93
#endif
94
#ifndef   __UNALIGNED_UINT32_WRITE
95
  #pragma GCC diagnostic push
96
  #pragma GCC diagnostic ignored "-Wpacked"
97
  #pragma GCC diagnostic ignored "-Wattributes"
98
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
99
  #pragma GCC diagnostic pop
100
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
101
#endif
102
#ifndef   __UNALIGNED_UINT32_READ
103
  #pragma GCC diagnostic push
104
  #pragma GCC diagnostic ignored "-Wpacked"
105
  #pragma GCC diagnostic ignored "-Wattributes"
106
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
107
  #pragma GCC diagnostic pop
108
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
109
#endif
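 
/* Usage sketch (illustrative only, not part of CMSIS): the __UNALIGNED_* macros allow a
   packed, possibly misaligned byte buffer to be read and written without the compiler
   emitting wide accesses that could fault on unaligned addresses. The buffer layout and
   the example_* helper below are hypothetical. */
static inline uint32_t example_read_packed_record(const uint8_t *buf, uint16_t *out_id)
{
  *out_id = __UNALIGNED_UINT16_READ(buf);        /* 16-bit field at offset 0       */
  return __UNALIGNED_UINT32_READ(buf + 2U);      /* 32-bit field at (odd) offset 2 */
}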
110
#ifndef   __ALIGNED
111
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
112
#endif
113
#ifndef   __RESTRICT
114
  #define __RESTRICT                             __restrict
115
#endif
116
 
117
 
118
/* ###########################  Core Function Access  ########################### */
119
/** \ingroup  CMSIS_Core_FunctionInterface
120
    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
121
  @{
122
 */
123
 
124
/**
125
  \brief   Enable IRQ Interrupts
126
  \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
127
           Can only be executed in Privileged modes.
128
 */
129
__STATIC_FORCEINLINE void __enable_irq(void)
130
{
131
  __ASM volatile ("cpsie i" : : : "memory");
132
}
133
 
134
 
135
/**
136
  \brief   Disable IRQ Interrupts
137
  \details Disables IRQ interrupts by setting the I-bit in the CPSR.
138
           Can only be executed in Privileged modes.
139
 */
140
__STATIC_FORCEINLINE void __disable_irq(void)
141
{
142
  __ASM volatile ("cpsid i" : : : "memory");
143
}
144
 
145
 
146
/**
147
  \brief   Get Control Register
148
  \details Returns the content of the Control Register.
149
  \return               Control Register value
150
 */
151
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
152
{
153
  uint32_t result;
154
 
155
  __ASM volatile ("MRS %0, control" : "=r" (result) );
156
  return(result);
157
}
158
 
159
 
160
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
161
/**
162
  \brief   Get Control Register (non-secure)
163
  \details Returns the content of the non-secure Control Register when in secure mode.
164
  \return               non-secure Control Register value
165
 */
166
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
167
{
168
  uint32_t result;
169
 
170
  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
171
  return(result);
172
}
173
#endif
174
 
175
 
176
/**
177
  \brief   Set Control Register
178
  \details Writes the given value to the Control Register.
179
  \param [in]    control  Control Register value to set
180
 */
181
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
182
{
183
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
184
}
185
 
186
 
187
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
188
/**
189
  \brief   Set Control Register (non-secure)
190
  \details Writes the given value to the non-secure Control Register when in secure state.
191
  \param [in]    control  Control Register value to set
192
 */
193
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
194
{
195
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
196
}
197
#endif
198
 
199
 
200
/**
201
  \brief   Get IPSR Register
202
  \details Returns the content of the IPSR Register.
203
  \return               IPSR Register value
204
 */
205
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
206
{
207
  uint32_t result;
208
 
209
  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
210
  return(result);
211
}
212
 
213
 
214
/**
215
  \brief   Get APSR Register
216
  \details Returns the content of the APSR Register.
217
  \return               APSR Register value
218
 */
219
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
220
{
221
  uint32_t result;
222
 
223
  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
224
  return(result);
225
}
226
 
227
 
228
/**
229
  \brief   Get xPSR Register
230
  \details Returns the content of the xPSR Register.
231
  \return               xPSR Register value
232
 */
233
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
234
{
235
  uint32_t result;
236
 
237
  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
238
  return(result);
239
}
240
 
241
 
242
/**
243
  \brief   Get Process Stack Pointer
244
  \details Returns the current value of the Process Stack Pointer (PSP).
245
  \return               PSP Register value
246
 */
247
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
248
{
249
  uint32_t result;
250
 
251
  __ASM volatile ("MRS %0, psp"  : "=r" (result) );
252
  return(result);
253
}
254
 
255
 
256
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
257
/**
258
  \brief   Get Process Stack Pointer (non-secure)
259
  \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
260
  \return               PSP Register value
261
 */
262
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
263
{
264
  uint32_t result;
265
 
266
  __ASM volatile ("MRS %0, psp_ns"  : "=r" (result) );
267
  return(result);
268
}
269
#endif
270
 
271
 
272
/**
273
  \brief   Set Process Stack Pointer
274
  \details Assigns the given value to the Process Stack Pointer (PSP).
275
  \param [in]    topOfProcStack  Process Stack Pointer value to set
276
 */
277
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
278
{
279
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
280
}
281
 
282
 
283
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
284
/**
285
  \brief   Set Process Stack Pointer (non-secure)
286
  \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
287
  \param [in]    topOfProcStack  Process Stack Pointer value to set
288
 */
289
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
290
{
291
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
292
}
293
#endif
294
 
295
 
296
/**
297
  \brief   Get Main Stack Pointer
298
  \details Returns the current value of the Main Stack Pointer (MSP).
299
  \return               MSP Register value
300
 */
301
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
302
{
303
  uint32_t result;
304
 
305
  __ASM volatile ("MRS %0, msp" : "=r" (result) );
306
  return(result);
307
}
308
 
309
 
310
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
311
/**
312
  \brief   Get Main Stack Pointer (non-secure)
313
  \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
314
  \return               MSP Register value
315
 */
316
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
317
{
318
  uint32_t result;
319
 
320
  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
321
  return(result);
322
}
323
#endif
324
 
325
 
326
/**
327
  \brief   Set Main Stack Pointer
328
  \details Assigns the given value to the Main Stack Pointer (MSP).
329
  \param [in]    topOfMainStack  Main Stack Pointer value to set
330
 */
331
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
332
{
333
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
334
}
335
 
336
 
337
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
338
/**
339
  \brief   Set Main Stack Pointer (non-secure)
340
  \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
341
  \param [in]    topOfMainStack  Main Stack Pointer value to set
342
 */
343
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
344
{
345
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
346
}
347
#endif
348
 
349
 
350
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
351
/**
352
  \brief   Get Stack Pointer (non-secure)
353
  \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
354
  \return               SP Register value
355
 */
356
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
357
{
358
  uint32_t result;
359
 
360
  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
361
  return(result);
362
}
363
 
364
 
365
/**
366
  \brief   Set Stack Pointer (non-secure)
367
  \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
368
  \param [in]    topOfStack  Stack Pointer value to set
369
 */
370
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
371
{
372
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
373
}
374
#endif
375
 
376
 
377
/**
378
  \brief   Get Priority Mask
379
  \details Returns the current state of the priority mask bit from the Priority Mask Register.
380
  \return               Priority Mask value
381
 */
382
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
383
{
384
  uint32_t result;
385
 
386
  __ASM volatile ("MRS %0, primask" : "=r" (result) :: "memory");
387
  return(result);
388
}
389
 
390
 
391
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
392
/**
393
  \brief   Get Priority Mask (non-secure)
394
  \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
395
  \return               Priority Mask value
396
 */
397
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
398
{
399
  uint32_t result;
400
 
401
  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) :: "memory");
402
  return(result);
403
}
404
#endif
405
 
406
 
407
/**
408
  \brief   Set Priority Mask
409
  \details Assigns the given value to the Priority Mask Register.
410
  \param [in]    priMask  Priority Mask
411
 */
412
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
413
{
414
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
415
}
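 
 
/* Usage sketch (illustrative only, not part of CMSIS): a PRIMASK-based critical section
   that saves and restores the caller's mask state, so nested use is safe. As noted above,
   this requires privileged execution. The example_* helpers are hypothetical. */
static inline uint32_t example_enter_critical(void)
{
  uint32_t primask = __get_PRIMASK();   /* remember the current mask state        */
  __disable_irq();                      /* mask all configurable-priority IRQs    */
  return primask;
}
 
static inline void example_exit_critical(uint32_t primask)
{
  __set_PRIMASK(primask);               /* restore whatever the caller had before */
}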
416
 
417
 
418
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
419
/**
420
  \brief   Set Priority Mask (non-secure)
421
  \details Assigns the given value to the non-secure Priority Mask Register when in secure state.
422
  \param [in]    priMask  Priority Mask
423
 */
424
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
425
{
426
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
427
}
428
#endif
429
 
430
 
431
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
432
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
433
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
434
/**
435
  \brief   Enable FIQ
436
  \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
437
           Can only be executed in Privileged modes.
438
 */
439
__STATIC_FORCEINLINE void __enable_fault_irq(void)
440
{
441
  __ASM volatile ("cpsie f" : : : "memory");
442
}
443
 
444
 
445
/**
446
  \brief   Disable FIQ
447
  \details Disables FIQ interrupts by setting the F-bit in the CPSR.
448
           Can only be executed in Privileged modes.
449
 */
450
__STATIC_FORCEINLINE void __disable_fault_irq(void)
451
{
452
  __ASM volatile ("cpsid f" : : : "memory");
453
}
454
 
455
 
456
/**
457
  \brief   Get Base Priority
458
  \details Returns the current value of the Base Priority register.
459
  \return               Base Priority register value
460
 */
461
__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
462
{
463
  uint32_t result;
464
 
465
  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
466
  return(result);
467
}
468
 
469
 
470
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
471
/**
472
  \brief   Get Base Priority (non-secure)
473
  \details Returns the current value of the non-secure Base Priority register when in secure state.
474
  \return               Base Priority register value
475
 */
476
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
477
{
478
  uint32_t result;
479
 
480
  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
481
  return(result);
482
}
483
#endif
484
 
485
 
486
/**
487
  \brief   Set Base Priority
488
  \details Assigns the given value to the Base Priority register.
489
  \param [in]    basePri  Base Priority value to set
490
 */
491
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
492
{
493
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
494
}
495
 
496
 
497
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
498
/**
499
  \brief   Set Base Priority (non-secure)
500
  \details Assigns the given value to the non-secure Base Priority register when in secure state.
501
  \param [in]    basePri  Base Priority value to set
502
 */
503
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
504
{
505
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
506
}
507
#endif
508
 
509
 
510
/**
511
  \brief   Set Base Priority with condition
512
  \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
513
           or the new value increases the BASEPRI priority level.
514
  \param [in]    basePri  Base Priority value to set
515
 */
516
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
517
{
518
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
519
}
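 
 
/* Usage sketch (illustrative only, not part of CMSIS): because BASEPRI_MAX only ever
   raises the masking level, it suits critical sections that must block interrupts at or
   below a chosen priority while leaving higher-priority interrupts running. The caller
   supplies an already-shifted priority value appropriate for the device's implemented
   priority bits; the example_* helpers are hypothetical. */
static inline uint32_t example_raise_basepri(uint32_t new_basepri)
{
  uint32_t old_basepri = __get_BASEPRI();  /* remember the previous masking level   */
  __set_BASEPRI_MAX(new_basepri);          /* raise (never lower) the mask          */
  return old_basepri;
}
 
static inline void example_restore_basepri(uint32_t old_basepri)
{
  __set_BASEPRI(old_basepri);              /* unconditionally restore the old level */
}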
520
 
521
 
522
/**
523
  \brief   Get Fault Mask
524
  \details Returns the current value of the Fault Mask register.
525
  \return               Fault Mask register value
526
 */
527
__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
528
{
529
  uint32_t result;
530
 
531
  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
532
  return(result);
533
}
534
 
535
 
536
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
537
/**
538
  \brief   Get Fault Mask (non-secure)
539
  \details Returns the current value of the non-secure Fault Mask register when in secure state.
540
  \return               Fault Mask register value
541
 */
542
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
543
{
544
  uint32_t result;
545
 
546
  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
547
  return(result);
548
}
549
#endif
550
 
551
 
552
/**
553
  \brief   Set Fault Mask
554
  \details Assigns the given value to the Fault Mask register.
555
  \param [in]    faultMask  Fault Mask value to set
556
 */
557
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
558
{
559
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
560
}
561
 
562
 
563
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
564
/**
565
  \brief   Set Fault Mask (non-secure)
566
  \details Assigns the given value to the non-secure Fault Mask register when in secure state.
567
  \param [in]    faultMask  Fault Mask value to set
568
 */
569
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
570
{
571
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
572
}
573
#endif
574
 
575
#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
576
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
577
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
578
 
579
 
580
#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
581
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
582
 
583
/**
584
  \brief   Get Process Stack Pointer Limit
585
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
586
  Stack Pointer Limit register, hence zero is always returned in non-secure
587
  mode.
588
 
589
  \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
590
  \return               PSPLIM Register value
591
 */
592
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
593
{
594
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
595
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
596
    // without main extensions, the non-secure PSPLIM is RAZ/WI
597
  return 0U;
598
#else
599
  uint32_t result;
600
  __ASM volatile ("MRS %0, psplim"  : "=r" (result) );
601
  return result;
602
#endif
603
}
604
 
605
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
606
/**
607
  \brief   Get Process Stack Pointer Limit (non-secure)
608
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
609
  Stack Pointer Limit register, hence zero is always returned.
610
 
611
  \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
612
  \return               PSPLIM Register value
613
 */
614
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
615
{
616
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
617
  // without main extensions, the non-secure PSPLIM is RAZ/WI
618
  return 0U;
619
#else
620
  uint32_t result;
621
  __ASM volatile ("MRS %0, psplim_ns"  : "=r" (result) );
622
  return result;
623
#endif
624
}
625
#endif
626
 
627
 
628
/**
629
  \brief   Set Process Stack Pointer Limit
630
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
631
  Stack Pointer Limit register hence the write is silently ignored in non-secure
632
  mode.
633
 
634
  \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
635
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
636
 */
637
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
638
{
639
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
640
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
641
  // without main extensions, the non-secure PSPLIM is RAZ/WI
642
  (void)ProcStackPtrLimit;
643
#else
644
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
645
#endif
646
}
647
 
648
 
649
#if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
650
/**
651
  \brief   Set Process Stack Pointer Limit (non-secure)
652
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
653
  Stack Pointer Limit register hence the write is silently ignored.
654
 
655
  \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
656
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
657
 */
658
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
659
{
660
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
661
  // without main extensions, the non-secure PSPLIM is RAZ/WI
662
  (void)ProcStackPtrLimit;
663
#else
664
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
665
#endif
666
}
667
#endif
668
 
669
 
670
/**
671
  \brief   Get Main Stack Pointer Limit
672
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
673
  Stack Pointer Limit register, hence zero is always returned in non-secure
674
  mode.
675
 
676
  \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
677
  \return               MSPLIM Register value
678
 */
679
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
680
{
681
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
682
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
683
  // without main extensions, the non-secure MSPLIM is RAZ/WI
684
  return 0U;
685
#else
686
  uint32_t result;
687
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
688
  return result;
689
#endif
690
}
691
 
692
 
693
#if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
694
/**
695
  \brief   Get Main Stack Pointer Limit (non-secure)
696
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
697
  Stack Pointer Limit register, hence zero is always returned.
698
 
699
  \details Returns the current value of the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
700
  \return               MSPLIM Register value
701
 */
702
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
703
{
704
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
705
  // without main extensions, the non-secure MSPLIM is RAZ/WI
706
  return 0U;
707
#else
708
  uint32_t result;
709
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
710
  return result;
711
#endif
712
}
713
#endif
714
 
715
 
716
/**
717
  \brief   Set Main Stack Pointer Limit
718
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
719
  Stack Pointer Limit register hence the write is silently ignored in non-secure
720
  mode.
721
 
722
  \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
723
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
724
 */
725
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
726
{
727
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
728
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
729
  // without main extensions, the non-secure MSPLIM is RAZ/WI
730
  (void)MainStackPtrLimit;
731
#else
732
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
733
#endif
734
}
735
 
736
 
737
#if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
738
/**
739
  \brief   Set Main Stack Pointer Limit (non-secure)
740
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
741
  Stack Pointer Limit register hence the write is silently ignored.
742
 
743
  \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
744
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
745
 */
746
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
747
{
748
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
749
  // without main extensions, the non-secure MSPLIM is RAZ/WI
750
  (void)MainStackPtrLimit;
751
#else
752
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
753
#endif
754
}
755
#endif
756
 
757
#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
758
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
759
 
760
 
761
/**
762
  \brief   Get FPSCR
763
  \details Returns the current value of the Floating Point Status/Control register.
764
  \return               Floating Point Status/Control register value
765
 */
766
__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
767
{
768
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
769
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
770
#if __has_builtin(__builtin_arm_get_fpscr) 
771
// Re-enable using built-in when GCC has been fixed
772
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
773
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
774
  return __builtin_arm_get_fpscr();
775
#else
776
  uint32_t result;
777
 
778
  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
779
  return(result);
780
#endif
781
#else
782
  return(0U);
783
#endif
784
}
785
 
786
 
787
/**
788
  \brief   Set FPSCR
789
  \details Assigns the given value to the Floating Point Status/Control register.
790
  \param [in]    fpscr  Floating Point Status/Control value to set
791
 */
792
__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
793
{
794
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
795
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
796
#if __has_builtin(__builtin_arm_set_fpscr)
797
// Re-enable using built-in when GCC has been fixed
798
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
799
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
800
  __builtin_arm_set_fpscr(fpscr);
801
#else
802
  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory");
803
#endif
804
#else
805
  (void)fpscr;
806
#endif
807
}
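 
 
/* Usage sketch (illustrative only, not part of CMSIS): a read-modify-write of the FPSCR
   through the two accessors above. No particular FPSCR bit layout is assumed here - the
   caller passes the mask of status bits to clear (see the core reference manual for the
   cumulative exception flag positions). The example_* helper is hypothetical. */
static inline void example_clear_fpscr_bits(uint32_t flag_mask)
{
  uint32_t fpscr = __get_FPSCR();     /* reads as 0 when no FPU is present/used     */
  __set_FPSCR(fpscr & ~flag_mask);    /* write back with the requested bits cleared */
}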
808
 
809
 
810
/*@} end of CMSIS_Core_RegAccFunctions */
811
 
812
 
813
/* ##########################  Core Instruction Access  ######################### */
814
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
815
  Access to dedicated instructions
816
  @{
817
*/
818
 
819
/* Define macros for porting to both thumb1 and thumb2.
820
 * For thumb1, use low register (r0-r7), specified by constraint "l"
821
 * Otherwise, use general registers, specified by constraint "r" */
822
#if defined (__thumb__) && !defined (__thumb2__)
823
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
824
#define __CMSIS_GCC_RW_REG(r) "+l" (r)
825
#define __CMSIS_GCC_USE_REG(r) "l" (r)
826
#else
827
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
828
#define __CMSIS_GCC_RW_REG(r) "+r" (r)
829
#define __CMSIS_GCC_USE_REG(r) "r" (r)
830
#endif
831
 
832
/**
833
  \brief   No Operation
834
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
835
 */
836
#define __NOP()                             __ASM volatile ("nop")
837
 
838
/**
839
  \brief   Wait For Interrupt
840
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
841
 */
842
#define __WFI()                             __ASM volatile ("wfi")
843
 
844
 
845
/**
846
  \brief   Wait For Event
847
  \details Wait For Event is a hint instruction that permits the processor to enter
848
           a low-power state until one of a number of events occurs.
849
 */
850
#define __WFE()                             __ASM volatile ("wfe")
851
 
852
 
853
/**
854
  \brief   Send Event
855
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
856
 */
857
#define __SEV()                             __ASM volatile ("sev")
858
 
859
 
860
/**
861
  \brief   Instruction Synchronization Barrier
862
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
863
           so that all instructions following the ISB are fetched from cache or memory,
864
           after the instruction has been completed.
865
 */
866
__STATIC_FORCEINLINE void __ISB(void)
867
{
868
  __ASM volatile ("isb 0xF":::"memory");
869
}
870
 
871
 
872
/**
873
  \brief   Data Synchronization Barrier
874
  \details Acts as a special kind of Data Memory Barrier.
875
           It completes when all explicit memory accesses before this instruction complete.
876
 */
877
__STATIC_FORCEINLINE void __DSB(void)
878
{
879
  __ASM volatile ("dsb 0xF":::"memory");
880
}
881
 
882
 
883
/**
884
  \brief   Data Memory Barrier
885
  \details Ensures the apparent order of the explicit memory operations before
886
           and after the instruction, without ensuring their completion.
887
 */
888
__STATIC_FORCEINLINE void __DMB(void)
889
{
890
  __ASM volatile ("dmb 0xF":::"memory");
891
}
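 
 
/* Usage sketch (illustrative only, not part of CMSIS): the usual "check, then sleep"
   idiom built from the hints and barriers above. PRIMASK is set before the final check
   so a wake-up interrupt arriving in between is not lost; WFI still wakes the core on a
   pending (but masked) interrupt. The example_* helper is hypothetical and assumes the
   flag is set from an interrupt handler. */
static inline void example_wait_for_flag(volatile uint32_t *flag)
{
  while (*flag == 0U)
  {
    __disable_irq();       /* close the race window between check and sleep */
    if (*flag == 0U)
    {
      __DSB();             /* let outstanding memory accesses complete      */
      __WFI();             /* sleep; a pended interrupt wakes the core      */
    }
    __enable_irq();        /* take the pended wake-up interrupt now         */
  }
}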
892
 
893
 
894
/**
895
  \brief   Reverse byte order (32 bit)
896
  \details Reverses the byte order of an unsigned integer value. For example, 0x12345678 becomes 0x78563412.
897
  \param [in]    value  Value to reverse
898
  \return               Reversed value
899
 */
900
__STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
901
{
902
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
903
  return __builtin_bswap32(value);
904
#else
905
  uint32_t result;
906
 
907
  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
908
  return result;
909
#endif
910
}
911
 
912
 
913
/**
914
  \brief   Reverse byte order (16 bit)
915
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
916
  \param [in]    value  Value to reverse
917
  \return               Reversed value
918
 */
919
__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
920
{
921
  uint32_t result;
922
 
923
  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
924
  return result;
925
}
926
 
927
 
928
/**
929
  \brief   Reverse byte order (16 bit)
930
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
931
  \param [in]    value  Value to reverse
932
  \return               Reversed value
933
 */
934
__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
935
{
936
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
937
  return (int16_t)__builtin_bswap16(value);
938
#else
939
  int16_t result;
940
 
941
  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
942
  return result;
943
#endif
944
}
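 
 
/* Usage sketch (illustrative only, not part of CMSIS): on a little-endian Cortex-M core,
   __REV is the usual building block for converting big-endian (network byte order)
   fields. The example_* helper is hypothetical. */
static inline uint32_t example_be32_to_host(uint32_t big_endian_word)
{
  return __REV(big_endian_word);   /* e.g. 0x12345678 <-> 0x78563412 */
}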
945
 
946
 
947
/**
948
  \brief   Rotate Right in unsigned value (32 bit)
949
  \details Rotate Right provides the value of the contents of a register rotated by a variable number of bits.
950
  \param [in]    op1  Value to rotate
951
  \param [in]    op2  Number of Bits to rotate
952
  \return               Rotated value
953
 */
954
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
955
{
956
  op2 %= 32U;
957
  if (op2 == 0U)
958
  {
959
    return op1;
960
  }
961
  return (op1 >> op2) | (op1 << (32U - op2));
962
}
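 
/* Worked example (illustrative only): __ROR(0x12345678U, 8U) yields 0x78123456U - the
   low byte wraps around to the top. A rotate amount of 0, or any multiple of 32, returns
   the value unchanged, which is why op2 is reduced modulo 32 above. */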
963
 
964
 
965
/**
966
  \brief   Breakpoint
967
  \details Causes the processor to enter Debug state.
968
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
969
  \param [in]    value  is ignored by the processor.
970
                 If required, a debugger can use it to store additional information about the breakpoint.
971
 */
972
#define __BKPT(value)                       __ASM volatile ("bkpt "#value)
973
 
974
 
975
/**
976
  \brief   Reverse bit order of value
977
  \details Reverses the bit order of the given value.
978
  \param [in]    value  Value to reverse
979
  \return               Reversed value
980
 */
981
__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
982
{
983
  uint32_t result;
984
 
985
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
986
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
987
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
988
   __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
989
#else
990
  uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */
991
 
992
  result = value;                      /* r will be reversed bits of v; first get LSB of v */
993
  for (value >>= 1U; value != 0U; value >>= 1U)
994
  {
995
    result <<= 1U;
996
    result |= value & 1U;
997
    s--;
998
  }
999
  result <<= s;                        /* shift when v's highest bits are zero */
1000
#endif
1001
  return result;
1002
}
1003
 
1004
 
1005
/**
1006
  \brief   Count leading zeros
1007
  \details Counts the number of leading zeros of a data value.
1008
  \param [in]  value  Value to count the leading zeros
1009
  \return             number of leading zeros in value
1010
 */
1011
#define __CLZ             (uint8_t)__builtin_clz
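 
/* Usage sketch (illustrative only, not part of CMSIS): a common application of __CLZ is
   finding the index of the most significant set bit. Note that __builtin_clz has an
   undefined result for an argument of 0, so the input is guarded here. The example_*
   helper (and its choice of 0 for an empty input) is hypothetical. */
static inline uint32_t example_highest_set_bit(uint32_t value)
{
  return (value != 0U) ? (31U - (uint32_t)__CLZ(value)) : 0U;
}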
1012
 
1013
 
1014
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
1015
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
1016
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1017
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
1018
/**
1019
  \brief   LDR Exclusive (8 bit)
1020
  \details Executes an exclusive LDR instruction for an 8 bit value.
1021
  \param [in]    ptr  Pointer to data
1022
  \return             value of type uint8_t at (*ptr)
1023
 */
1024
__STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
1025
{
1026
    uint32_t result;
1027
 
1028
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1029
   __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
1030
#else
1031
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
1032
       accepted by the assembler, so the following, less efficient pattern has to be used.
1033
    */
1034
   __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
1035
#endif
1036
   return ((uint8_t) result);    /* Add explicit type cast here */
1037
}
1038
 
1039
 
1040
/**
1041
  \brief   LDR Exclusive (16 bit)
1042
  \details Executes an exclusive LDR instruction for 16 bit values.
1043
  \param [in]    ptr  Pointer to data
1044
  \return        value of type uint16_t at (*ptr)
1045
 */
1046
__STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
1047
{
1048
    uint32_t result;
1049
 
1050
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1051
   __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
1052
#else
1053
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
1054
       accepted by the assembler, so the following, less efficient pattern has to be used.
1055
    */
1056
   __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
1057
#endif
1058
   return ((uint16_t) result);    /* Add explicit type cast here */
1059
}
1060
 
1061
 
1062
/**
1063
  \brief   LDR Exclusive (32 bit)
1064
  \details Executes an exclusive LDR instruction for 32 bit values.
1065
  \param [in]    ptr  Pointer to data
1066
  \return        value of type uint32_t at (*ptr)
1067
 */
1068
__STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
1069
{
1070
    uint32_t result;
1071
 
1072
   __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
1073
   return(result);
1074
}
1075
 
1076
 
1077
/**
1078
  \brief   STR Exclusive (8 bit)
1079
  \details Executes an exclusive STR instruction for 8 bit values.
1080
  \param [in]  value  Value to store
1081
  \param [in]    ptr  Pointer to location
1082
  \return          0  Function succeeded
1083
  \return          1  Function failed
1084
 */
1085
__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
1086
{
1087
   uint32_t result;
1088
 
1089
   __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
1090
   return(result);
1091
}
1092
 
1093
 
1094
/**
1095
  \brief   STR Exclusive (16 bit)
1096
  \details Executes an exclusive STR instruction for 16 bit values.
1097
  \param [in]  value  Value to store
1098
  \param [in]    ptr  Pointer to location
1099
  \return          0  Function succeeded
1100
  \return          1  Function failed
1101
 */
1102
__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
1103
{
1104
   uint32_t result;
1105
 
1106
   __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
1107
   return(result);
1108
}
1109
 
1110
 
1111
/**
1112
  \brief   STR Exclusive (32 bit)
1113
  \details Executes an exclusive STR instruction for 32 bit values.
1114
  \param [in]  value  Value to store
1115
  \param [in]    ptr  Pointer to location
1116
  \return          0  Function succeeded
1117
  \return          1  Function failed
1118
 */
1119
__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
1120
{
1121
   uint32_t result;
1122
 
1123
   __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
1124
   return(result);
1125
}
1126
 
1127
 
1128
/**
1129
  \brief   Remove the exclusive lock
1130
  \details Removes the exclusive lock which is created by LDREX.
1131
 */
1132
__STATIC_FORCEINLINE void __CLREX(void)
1133
{
1134
  __ASM volatile ("clrex" ::: "memory");
1135
}
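 
 
/* Usage sketch (illustrative only, not part of CMSIS): the canonical LDREX/STREX retry
   loop. The exclusive store returns 1 if the reservation was lost (e.g. to an exception
   or another bus master), in which case the whole read-modify-write is retried. The
   example_* helper is hypothetical. */
static inline uint32_t example_atomic_increment(volatile uint32_t *counter)
{
  uint32_t value;
  do
  {
    value = __LDREXW(counter) + 1U;         /* load-exclusive and compute new value */
  } while (__STREXW(value, counter) != 0U); /* retry until the store succeeds       */
  return value;
}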
1136
 
1137
#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
1138
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
1139
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1140
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
1141
 
1142
 
1143
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
1144
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
1145
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
1146
/**
1147
  \brief   Signed Saturate
1148
  \details Saturates a signed value.
1149
  \param [in]  ARG1  Value to be saturated
1150
  \param [in]  ARG2  Bit position to saturate to (1..32)
1151
  \return             Saturated value
1152
 */
1153
#define __SSAT(ARG1,ARG2) \
1154
__extension__ \
1155
({                          \
1156
  int32_t __RES, __ARG1 = (ARG1); \
1157
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
1158
  __RES; \
1159
 })
1160
 
1161
 
1162
/**
1163
  \brief   Unsigned Saturate
1164
  \details Saturates an unsigned value.
1165
  \param [in]  ARG1  Value to be saturated
1166
  \param [in]  ARG2  Bit position to saturate to (0..31)
1167
  \return             Saturated value
1168
 */
1169
#define __USAT(ARG1,ARG2) \
1170
 __extension__ \
1171
({                          \
1172
  uint32_t __RES, __ARG1 = (ARG1); \
1173
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
1174
  __RES; \
1175
 })
1176
 
1177
 
1178
/**
1179
  \brief   Rotate Right with Extend (32 bit)
1180
  \details Moves each bit of a bitstring right by one bit.
1181
           The carry input is shifted in at the left end of the bitstring.
1182
  \param [in]    value  Value to rotate
1183
  \return               Rotated value
1184
 */
1185
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
1186
{
1187
  uint32_t result;
1188
 
1189
  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
1190
  return(result);
1191
}
1192
 
1193
 
1194
/**
1195
  \brief   LDRT Unprivileged (8 bit)
1196
  \details Executes an unprivileged LDRT instruction for an 8 bit value.
1197
  \param [in]    ptr  Pointer to data
1198
  \return             value of type uint8_t at (*ptr)
1199
 */
1200
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
1201
{
1202
    uint32_t result;
1203
 
1204
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1205
   __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
1206
#else
1207
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
1208
       accepted by the assembler, so the following, less efficient pattern has to be used.
1209
    */
1210
   __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
1211
#endif
1212
   return ((uint8_t) result);    /* Add explicit type cast here */
1213
}
1214
 
1215
 
1216
/**
1217
  \brief   LDRT Unprivileged (16 bit)
1218
  \details Executes an unprivileged LDRT instruction for 16 bit values.
1219
  \param [in]    ptr  Pointer to data
1220
  \return        value of type uint16_t at (*ptr)
1221
 */
1222
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
1223
{
1224
    uint32_t result;
1225
 
1226
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1227
   __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
1228
#else
1229
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
1230
       accepted by the assembler, so the following, less efficient pattern has to be used.
1231
    */
1232
   __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
1233
#endif
1234
   return ((uint16_t) result);    /* Add explicit type cast here */
1235
}
1236
 
1237
 
1238
/**
1239
  \brief   LDRT Unprivileged (32 bit)
1240
  \details Executes an unprivileged LDRT instruction for 32 bit values.
1241
  \param [in]    ptr  Pointer to data
1242
  \return        value of type uint32_t at (*ptr)
1243
 */
1244
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
1245
{
1246
    uint32_t result;
1247
 
1248
   __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
1249
   return(result);
1250
}
1251
 
1252
 
1253
/**
1254
  \brief   STRT Unprivileged (8 bit)
1255
  \details Executes an unprivileged STRT instruction for 8 bit values.
1256
  \param [in]  value  Value to store
1257
  \param [in]    ptr  Pointer to location
1258
 */
1259
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
1260
{
1261
   __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1262
}
1263
 
1264
 
1265
/**
1266
  \brief   STRT Unprivileged (16 bit)
1267
  \details Executes an unprivileged STRT instruction for 16 bit values.
1268
  \param [in]  value  Value to store
1269
  \param [in]    ptr  Pointer to location
1270
 */
1271
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
1272
{
1273
   __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1274
}
1275
 
1276
 
1277
/**
1278
  \brief   STRT Unprivileged (32 bit)
1279
  \details Executes an unprivileged STRT instruction for 32 bit values.
1280
  \param [in]  value  Value to store
1281
  \param [in]    ptr  Pointer to location
1282
 */
1283
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
1284
{
1285
   __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
1286
}
1287
 
1288
#else  /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
1289
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
1290
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
1291
 
1292
/**
1293
  \brief   Signed Saturate
1294
  \details Saturates a signed value.
1295
  \param [in]  value  Value to be saturated
1296
  \param [in]    sat  Bit position to saturate to (1..32)
1297
  \return             Saturated value
1298
 */
1299
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
1300
{
1301
  if ((sat >= 1U) && (sat <= 32U))
1302
  {
1303
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
1304
    const int32_t min = -1 - max ;
1305
    if (val > max)
1306
    {
1307
      return max;
1308
    }
1309
    else if (val < min)
1310
    {
1311
      return min;
1312
    }
1313
  }
1314
  return val;
1315
}
1316
 
1317
/**
1318
  \brief   Unsigned Saturate
1319
  \details Saturates an unsigned value.
1320
  \param [in]  value  Value to be saturated
1321
  \param [in]    sat  Bit position to saturate to (0..31)
1322
  \return             Saturated value
1323
 */
1324
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
1325
{
1326
  if (sat <= 31U)
1327
  {
1328
    const uint32_t max = ((1U << sat) - 1U);
1329
    if (val > (int32_t)max)
1330
    {
1331
      return max;
1332
    }
1333
    else if (val < 0)
1334
    {
1335
      return 0U;
1336
    }
1337
  }
1338
  return (uint32_t)val;
1339
}
1340
 
1341
#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
1342
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
1343
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
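 
 
/* Usage sketch (illustrative only, not part of CMSIS): __SSAT/__USAT clamp a wider
   intermediate result into a narrower range, e.g. saturating a 32-bit filter accumulator
   to the int16_t range before it is stored to a sample buffer. The example_* helper is
   hypothetical. */
static inline int16_t example_saturate_to_q15(int32_t accumulator)
{
  return (int16_t)__SSAT(accumulator, 16U);   /* clamp to [-32768, 32767] */
}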
1344
 
1345
 
1346
#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1347
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
1348
/**
1349
  \brief   Load-Acquire (8 bit)
1350
  \details Executes an LDAB instruction for an 8 bit value.
1351
  \param [in]    ptr  Pointer to data
1352
  \return             value of type uint8_t at (*ptr)
1353
 */
1354
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
1355
{
1356
    uint32_t result;
1357
 
1358
   __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
1359
   return ((uint8_t) result);
1360
}
1361
 
1362
 
1363
/**
1364
  \brief   Load-Acquire (16 bit)
1365
  \details Executes an LDAH instruction for 16 bit values.
1366
  \param [in]    ptr  Pointer to data
1367
  \return        value of type uint16_t at (*ptr)
1368
 */
1369
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
1370
{
1371
    uint32_t result;
1372
 
1373
   __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
1374
   return ((uint16_t) result);
1375
}
1376
 
1377
 
1378
/**
1379
  \brief   Load-Acquire (32 bit)
1380
  \details Executes an LDA instruction for 32 bit values.
1381
  \param [in]    ptr  Pointer to data
1382
  \return        value of type uint32_t at (*ptr)
1383
 */
1384
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
1385
{
1386
    uint32_t result;
1387
 
1388
   __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
1389
   return(result);
1390
}
1391
 
1392
 
1393
/**
1394
  \brief   Store-Release (8 bit)
1395
  \details Executes an STLB instruction for 8 bit values.
1396
  \param [in]  value  Value to store
1397
  \param [in]    ptr  Pointer to location
1398
 */
1399
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
1400
{
1401
   __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1402
}
1403
 
1404
 
1405
/**
1406
  \brief   Store-Release (16 bit)
1407
  \details Executes an STLH instruction for 16 bit values.
1408
  \param [in]  value  Value to store
1409
  \param [in]    ptr  Pointer to location
1410
 */
1411
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
1412
{
1413
   __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1414
}
1415
 
1416
 
1417
/**
1418
  \brief   Store-Release (32 bit)
1419
  \details Executes an STL instruction for 32 bit values.
1420
  \param [in]  value  Value to store
1421
  \param [in]    ptr  Pointer to location
1422
 */
1423
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
1424
{
1425
   __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1426
}
1427
 
1428
 
1429
/**
1430
  \brief   Load-Acquire Exclusive (8 bit)
1431
  \details Executes an LDAB exclusive instruction for an 8 bit value.
1432
  \param [in]    ptr  Pointer to data
1433
  \return             value of type uint8_t at (*ptr)
1434
 */
1435
__STATIC_FORCEINLINE uint8_t __LDAEXB(volatile uint8_t *ptr)
1436
{
1437
    uint32_t result;
1438
 
1439
   __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) );
1440
   return ((uint8_t) result);
1441
}
1442
 
1443
 
1444
/**
1445
  \brief   Load-Acquire Exclusive (16 bit)
1446
  \details Executes an LDAH exclusive instruction for 16 bit values.
1447
  \param [in]    ptr  Pointer to data
1448
  \return        value of type uint16_t at (*ptr)
1449
 */
1450
__STATIC_FORCEINLINE uint16_t __LDAEXH(volatile uint16_t *ptr)
1451
{
1452
    uint32_t result;
1453
 
1454
   __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) );
1455
   return ((uint16_t) result);
1456
}
1457
 
1458
 
1459
/**
1460
  \brief   Load-Acquire Exclusive (32 bit)
1461
  \details Executes an LDA exclusive instruction for 32 bit values.
1462
  \param [in]    ptr  Pointer to data
1463
  \return        value of type uint32_t at (*ptr)
1464
 */
1465
__STATIC_FORCEINLINE uint32_t __LDAEX(volatile uint32_t *ptr)
1466
{
1467
    uint32_t result;
1468
 
1469
   __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) );
1470
   return(result);
1471
}
1472
 
1473
 
1474
/**
1475
  \brief   Store-Release Exclusive (8 bit)
1476
  \details Executes an STLB exclusive instruction for 8 bit values.
1477
  \param [in]  value  Value to store
1478
  \param [in]    ptr  Pointer to location
1479
  \return          0  Function succeeded
1480
  \return          1  Function failed
1481
 */
1482
__STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
1483
{
1484
   uint32_t result;
1485
 
1486
   __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
1487
   return(result);
1488
}
1489
 
1490
 
1491
/**
1492
  \brief   Store-Release Exclusive (16 bit)
1493
  \details Executes an STLH exclusive instruction for 16 bit values.
1494
  \param [in]  value  Value to store
1495
  \param [in]    ptr  Pointer to location
1496
  \return          0  Function succeeded
1497
  \return          1  Function failed
1498
 */
1499
__STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
1500
{
1501
   uint32_t result;
1502
 
1503
   __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
1504
   return(result);
1505
}
1506
 
1507
 
1508
/**
1509
  \brief   Store-Release Exclusive (32 bit)
1510
  \details Executes an STL exclusive instruction for 32 bit values.
1511
  \param [in]  value  Value to store
1512
  \param [in]    ptr  Pointer to location
1513
  \return          0  Function succeeded
1514
  \return          1  Function failed
1515
 */
1516
__STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
1517
{
1518
   uint32_t result;
1519
 
1520
   __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
1521
   return(result);
1522
}
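 
 
/* Usage sketch (illustrative only, not part of CMSIS): a simple spinlock built from the
   acquire/release accesses above. __LDAEX gives acquire ordering when the lock is taken
   and __STL gives release ordering when it is freed, so no extra barriers are needed
   around the protected region. The example_* helpers are hypothetical. */
static inline void example_spin_lock(volatile uint32_t *lock)
{
  do
  {
    while (__LDAEX(lock) != 0U) { }       /* spin while the lock is held         */
  } while (__STLEX(1U, lock) != 0U);      /* retry if the exclusive store failed */
}
 
static inline void example_spin_unlock(volatile uint32_t *lock)
{
  __STL(0U, lock);                        /* store-release marks the lock free   */
}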
1523
 
1524
#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1525
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
1526
 
1527
/*@}*/ /* end of group CMSIS_Core_InstructionInterface */
1528
 
1529
 
1530
/* ###################  Compiler specific Intrinsics  ########################### */
1531
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
1532
  Access to dedicated SIMD instructions
1533
  @{
1534
*/
1535
 
1536
#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
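 
/* The intrinsics below map directly onto the DSP (SIMD) extension instructions of the
   same name. Each one treats its 32-bit operands as packed 8-bit or 16-bit lanes and
   operates on all lanes in parallel; see the Arm architecture reference manual for the
   exact per-lane signed/unsigned, saturating and halving behaviour. */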
1537
 
1538
__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
1539
{
1540
  uint32_t result;
1541
 
1542
  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1543
  return(result);
1544
}
1545
 
1546
__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
1547
{
1548
  uint32_t result;
1549
 
1550
  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1551
  return(result);
1552
}
1553
 
1554
__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
1555
{
1556
  uint32_t result;
1557
 
1558
  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1559
  return(result);
1560
}
1561
 
1562
__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
1563
{
1564
  uint32_t result;
1565
 
1566
  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1567
  return(result);
1568
}
1569
 
1570
__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
1571
{
1572
  uint32_t result;
1573
 
1574
  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1575
  return(result);
1576
}
1577
 
1578
__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
1579
{
1580
  uint32_t result;
1581
 
1582
  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1583
  return(result);
1584
}
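 
/* Usage sketch (illustrative only, not part of CMSIS): __UQADD8 adds four pairs of
   unsigned bytes with per-lane saturation at 0xFF - for instance brightening four packed
   8-bit pixels at once without per-byte overflow checks. The example_* helper is
   hypothetical. */
static inline uint32_t example_brighten_pixels(uint32_t four_pixels, uint8_t amount)
{
  uint32_t offset = (uint32_t)amount * 0x01010101U;  /* replicate amount into all lanes */
  return __UQADD8(four_pixels, offset);              /* saturating per-byte addition    */
}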
1585
 
1586
 
1587
__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
1588
{
1589
  uint32_t result;
1590
 
1591
  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1592
  return(result);
1593
}
1594
 
1595
__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
1596
{
1597
  uint32_t result;
1598
 
1599
  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1600
  return(result);
1601
}
1602
 
1603
__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
1604
{
1605
  uint32_t result;
1606
 
1607
  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1608
  return(result);
1609
}
1610
 
1611
__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
1612
{
1613
  uint32_t result;
1614
 
1615
  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1616
  return(result);
1617
}
1618
 
1619
__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
1620
{
1621
  uint32_t result;
1622
 
1623
  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1624
  return(result);
1625
}
1626
 
1627
__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
1628
{
1629
  uint32_t result;
1630
 
1631
  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1632
  return(result);
1633
}
1634
 
1635
 
1636
__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

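/* Two-lane 16-bit subtractions: ssub16/qsub16/shsub16 (signed, signed-saturating, signed-halving)
   and usub16/uqsub16/uhsub16 (unsigned counterparts). */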
__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

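/* Add-and-subtract with exchange (ASX): the halfwords of op2 are swapped, then the high half
   of the result is op1.high + op2.low and the low half is op1.low - op2.high; saturating,
   halving and unsigned variants follow the same pattern. */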
__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

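/* Subtract-and-add with exchange (SAX): the halfwords of op2 are swapped, then the high half
   of the result is op1.high - op2.low and the low half is op1.low + op2.high; saturating,
   halving and unsigned variants follow the same pattern. */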
__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

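/* usad8 computes the sum of absolute differences of the four unsigned byte lanes of op1 and op2;
   usada8 adds that sum to the accumulator op3. */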
__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

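/* Saturate both halfwords of ARG1 to ARG2 bits, signed (ssat16) or unsigned (usat16).
   These are macros rather than inline functions because the saturation width must be an
   assemble-time immediate (the "I" constraint). */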
#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

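/* Dual 16x16 signed multiplies: smuad adds the products of the two halfword pairs,
   smuadx swaps the halfwords of op2 first, and smlad/smladx accumulate op3 as well.
   Illustrative (hypothetical) use for one step of a packed Q15 dot product, assuming
   coeffs[] and samples[] each hold two int16_t values per 32-bit word:
     acc = __SMLAD(coeffs[i], samples[i], acc);
*/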
__STATIC_FORCEINLINE uint32_t __SMUAD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

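/* 64-bit accumulating forms: smlald/smlaldx add both halfword products to a 64-bit accumulator.
   The llreg_u union splits the accumulator across the two 32-bit registers the instruction
   expects, with the word order selected by the endianness test on __ARMEB__. */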
__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

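/* Dual multiply-subtract: smusd/smusdx compute (low product - high product) of the signed
   halfword pairs; smlsd/smlsdx additionally accumulate op3. */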
__STATIC_FORCEINLINE uint32_t __SMUSD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

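/* 64-bit accumulating multiply-subtract: smlsld/smlsldx add (low product - high product) to a
   64-bit accumulator, using the same endian-aware register pairing as __SMLALD. */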
__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

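/* sel assembles each result byte from op1 or op2 according to the APSR.GE flags left by a
   preceding GE-setting SIMD add or subtract (e.g. sadd8, usub16). */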
__STATIC_FORCEINLINE uint32_t __SEL  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

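/* 32-bit saturating arithmetic: qadd/qsub add or subtract and saturate the result to the
   signed 32-bit range, setting the Q flag if saturation occurs. */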
__STATIC_FORCEINLINE  int32_t __QADD( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE  int32_t __QSUB( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

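/* Halfword packing: __PKHBT keeps the bottom halfword of ARG1 and the (left-shifted) top
   halfword of ARG2; __PKHTB keeps the top halfword of ARG1 and the (right-shifted) bottom
   halfword of ARG2. The inline-assembly versions are kept under #if 0; the portable C macros
   that follow are used instead. */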
#if 0
#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
#endif

#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )

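/* smmla: signed 32x32 multiply keeping only the most significant 32 bits of the 64-bit product,
   added to op3. */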
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
 int32_t result;

 __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
 return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1) */
/*@} end of group CMSIS_SIMD_intrinsics */


#pragma GCC diagnostic pop

#endif /* __CMSIS_GCC_H */