Rev | Author | Line No. | Line |
---|---|---|---|
12 | mjames | 1 | /**************************************************************************//** |
2 | * @file cmsis_armcc_V6.h |
||
3 | * @brief CMSIS Cortex-M Core Function/Instruction Header File |
||
4 | * @version V4.30 |
||
5 | * @date 20. October 2015 |
||
6 | ******************************************************************************/ |
||
7 | /* Copyright (c) 2009 - 2015 ARM LIMITED |
||
8 | |||
9 | All rights reserved. |
||
10 | Redistribution and use in source and binary forms, with or without |
||
11 | modification, are permitted provided that the following conditions are met: |
||
12 | - Redistributions of source code must retain the above copyright |
||
13 | notice, this list of conditions and the following disclaimer. |
||
14 | - Redistributions in binary form must reproduce the above copyright |
||
15 | notice, this list of conditions and the following disclaimer in the |
||
16 | documentation and/or other materials provided with the distribution. |
||
17 | - Neither the name of ARM nor the names of its contributors may be used |
||
18 | to endorse or promote products derived from this software without |
||
19 | specific prior written permission. |
||
20 | * |
||
21 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" |
||
22 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE |
||
23 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE |
||
24 | ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE |
||
25 | LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR |
||
26 | CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF |
||
27 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS |
||
28 | INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN |
||
29 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) |
||
30 | ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
||
31 | POSSIBILITY OF SUCH DAMAGE. |
||
32 | ---------------------------------------------------------------------------*/ |
||
33 | |||
34 | |||
35 | #ifndef __CMSIS_ARMCC_V6_H |
||
36 | #define __CMSIS_ARMCC_V6_H |
||
37 | |||
38 | |||
39 | /* ########################### Core Function Access ########################### */ |
||
40 | /** \ingroup CMSIS_Core_FunctionInterface |
||
41 | \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions |
||
42 | @{ |
||
43 | */ |
||
44 | |||
45 | /** |
||
46 | \brief Enable IRQ Interrupts |
||
47 | \details Enables IRQ interrupts by clearing the I-bit in the CPSR. |
||
48 | Can only be executed in Privileged modes. |
||
49 | */ |
||
50 | __attribute__((always_inline)) __STATIC_INLINE void __enable_irq(void) |
||
51 | { |
||
52 | __ASM volatile ("cpsie i" : : : "memory"); |
||
53 | } |
||
54 | |||
55 | |||
56 | /** |
||
57 | \brief Disable IRQ Interrupts |
||
58 | \details Disables IRQ interrupts by setting the I-bit in the CPSR. |
||
59 | Can only be executed in Privileged modes. |
||
60 | */ |
||
61 | __attribute__((always_inline)) __STATIC_INLINE void __disable_irq(void) |
||
62 | { |
||
63 | __ASM volatile ("cpsid i" : : : "memory"); |
||
64 | } |
||
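As a usage illustration (not part of the original header), a short critical section built from the two intrinsics above might look as follows; `shared_counter` is a hypothetical variable and the sketch assumes interrupts were enabled on entry, since `__enable_irq()` is called unconditionally.

static volatile uint32_t shared_counter;

static void increment_shared(void)
{
  __disable_irq();          /* set PRIMASK: block all configurable-priority interrupts */
  shared_counter++;         /* keep the protected region short */
  __enable_irq();           /* clear PRIMASK again */
}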
65 | |||
66 | |||
67 | /** |
||
68 | \brief Get Control Register |
||
69 | \details Returns the content of the Control Register. |
||
70 | \return Control Register value |
||
71 | */ |
||
72 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_CONTROL(void) |
||
73 | { |
||
74 | uint32_t result; |
||
75 | |||
76 | __ASM volatile ("MRS %0, control" : "=r" (result) ); |
||
77 | return(result); |
||
78 | } |
||
79 | |||
80 | |||
81 | #if (__ARM_FEATURE_CMSE == 3U) |
||
82 | /** |
||
83 | \brief Get Control Register (non-secure) |
||
84 | \details Returns the content of the non-secure Control Register when in secure mode. |
||
85 | \return non-secure Control Register value |
||
86 | */ |
||
87 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_CONTROL_NS(void) |
||
88 | { |
||
89 | uint32_t result; |
||
90 | |||
91 | __ASM volatile ("MRS %0, control_ns" : "=r" (result) ); |
||
92 | return(result); |
||
93 | } |
||
94 | #endif |
||
95 | |||
96 | |||
97 | /** |
||
98 | \brief Set Control Register |
||
99 | \details Writes the given value to the Control Register. |
||
100 | \param [in] control Control Register value to set |
||
101 | */ |
||
102 | __attribute__((always_inline)) __STATIC_INLINE void __set_CONTROL(uint32_t control) |
||
103 | { |
||
104 | __ASM volatile ("MSR control, %0" : : "r" (control) : "memory"); |
||
105 | } |
||
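A hedged sketch of how __get_CONTROL/__set_CONTROL are typically combined: thread mode is switched to unprivileged execution by setting CONTROL.nPRIV (bit 0). The __ISB() used afterwards is the barrier macro defined later in this file; the function name is illustrative only.

static void drop_to_unprivileged(void)
{
  uint32_t control = __get_CONTROL();
  control |= 1U;                 /* CONTROL.nPRIV = 1: thread mode runs unprivileged */
  __set_CONTROL(control);
  __ISB();                       /* ensure following instructions use the new privilege level */
}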
106 | |||
107 | |||
108 | #if (__ARM_FEATURE_CMSE == 3U) |
||
109 | /** |
||
110 | \brief Set Control Register (non-secure) |
||
111 | \details Writes the given value to the non-secure Control Register when in secure state. |
||
112 | \param [in] control Control Register value to set |
||
113 | */ |
||
114 | __attribute__((always_inline)) __STATIC_INLINE void __TZ_set_CONTROL_NS(uint32_t control) |
||
115 | { |
||
116 | __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory"); |
||
117 | } |
||
118 | #endif |
||
119 | |||
120 | |||
121 | /** |
||
122 | \brief Get IPSR Register |
||
123 | \details Returns the content of the IPSR Register. |
||
124 | \return IPSR Register value |
||
125 | */ |
||
126 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_IPSR(void) |
||
127 | { |
||
128 | uint32_t result; |
||
129 | |||
130 | __ASM volatile ("MRS %0, ipsr" : "=r" (result) ); |
||
131 | return(result); |
||
132 | } |
||
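One common use of __get_IPSR (shown here only as an illustrative sketch) is detecting whether code runs from an exception handler: IPSR holds the active exception number and reads as zero in thread mode.

static inline uint32_t in_handler_mode(void)
{
  return (__get_IPSR() != 0U);   /* non-zero exception number => executing in an ISR */
}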
133 | |||
134 | |||
135 | #if (__ARM_FEATURE_CMSE == 3U) |
||
136 | /** |
||
137 | \brief Get IPSR Register (non-secure) |
||
138 | \details Returns the content of the non-secure IPSR Register when in secure state. |
||
139 | \return IPSR Register value |
||
140 | */ |
||
141 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_IPSR_NS(void) |
||
142 | { |
||
143 | uint32_t result; |
||
144 | |||
145 | __ASM volatile ("MRS %0, ipsr_ns" : "=r" (result) ); |
||
146 | return(result); |
||
147 | } |
||
148 | #endif |
||
149 | |||
150 | |||
151 | /** |
||
152 | \brief Get APSR Register |
||
153 | \details Returns the content of the APSR Register. |
||
154 | \return APSR Register value |
||
155 | */ |
||
156 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_APSR(void) |
||
157 | { |
||
158 | uint32_t result; |
||
159 | |||
160 | __ASM volatile ("MRS %0, apsr" : "=r" (result) ); |
||
161 | return(result); |
||
162 | } |
||
163 | |||
164 | |||
165 | #if (__ARM_FEATURE_CMSE == 3U) |
||
166 | /** |
||
167 | \brief Get APSR Register (non-secure) |
||
168 | \details Returns the content of the non-secure APSR Register when in secure state. |
||
169 | \return APSR Register value |
||
170 | */ |
||
171 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_APSR_NS(void) |
||
172 | { |
||
173 | uint32_t result; |
||
174 | |||
175 | __ASM volatile ("MRS %0, apsr_ns" : "=r" (result) ); |
||
176 | return(result); |
||
177 | } |
||
178 | #endif |
||
179 | |||
180 | |||
181 | /** |
||
182 | \brief Get xPSR Register |
||
183 | \details Returns the content of the xPSR Register. |
||
184 | \return xPSR Register value |
||
185 | */ |
||
186 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_xPSR(void) |
||
187 | { |
||
188 | uint32_t result; |
||
189 | |||
190 | __ASM volatile ("MRS %0, xpsr" : "=r" (result) ); |
||
191 | return(result); |
||
192 | } |
||
193 | |||
194 | |||
195 | #if (__ARM_FEATURE_CMSE == 3U) |
||
196 | /** |
||
197 | \brief Get xPSR Register (non-secure) |
||
198 | \details Returns the content of the non-secure xPSR Register when in secure state. |
||
199 | \return xPSR Register value |
||
200 | */ |
||
201 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_xPSR_NS(void) |
||
202 | { |
||
203 | uint32_t result; |
||
204 | |||
205 | __ASM volatile ("MRS %0, xpsr_ns" : "=r" (result) ); |
||
206 | return(result); |
||
207 | } |
||
208 | #endif |
||
209 | |||
210 | |||
211 | /** |
||
212 | \brief Get Process Stack Pointer |
||
213 | \details Returns the current value of the Process Stack Pointer (PSP). |
||
214 | \return PSP Register value |
||
215 | */ |
||
216 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_PSP(void) |
||
217 | { |
||
218 | register uint32_t result; |
||
219 | |||
220 | __ASM volatile ("MRS %0, psp" : "=r" (result) ); |
||
221 | return(result); |
||
222 | } |
||
223 | |||
224 | |||
225 | #if (__ARM_FEATURE_CMSE == 3U) |
||
226 | /** |
||
227 | \brief Get Process Stack Pointer (non-secure) |
||
228 | \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state. |
||
229 | \return PSP Register value |
||
230 | */ |
||
231 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_PSP_NS(void) |
||
232 | { |
||
233 | register uint32_t result; |
||
234 | |||
235 | __ASM volatile ("MRS %0, psp_ns" : "=r" (result) ); |
||
236 | return(result); |
||
237 | } |
||
238 | #endif |
||
239 | |||
240 | |||
241 | /** |
||
242 | \brief Set Process Stack Pointer |
||
243 | \details Assigns the given value to the Process Stack Pointer (PSP). |
||
244 | \param [in] topOfProcStack Process Stack Pointer value to set |
||
245 | */ |
||
246 | __attribute__((always_inline)) __STATIC_INLINE void __set_PSP(uint32_t topOfProcStack) |
||
247 | { |
||
248 | __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : "sp"); |
||
249 | } |
||
250 | |||
251 | |||
252 | #if (__ARM_FEATURE_CMSE == 3U) |
||
253 | /** |
||
254 | \brief Set Process Stack Pointer (non-secure) |
||
255 | \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state. |
||
256 | \param [in] topOfProcStack Process Stack Pointer value to set |
||
257 | */ |
||
258 | __attribute__((always_inline)) __STATIC_INLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack) |
||
259 | { |
||
260 | __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : "sp"); |
||
261 | } |
||
262 | #endif |
||
263 | |||
264 | |||
265 | /** |
||
266 | \brief Get Main Stack Pointer |
||
267 | \details Returns the current value of the Main Stack Pointer (MSP). |
||
268 | \return MSP Register value |
||
269 | */ |
||
270 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_MSP(void) |
||
271 | { |
||
272 | register uint32_t result; |
||
273 | |||
274 | __ASM volatile ("MRS %0, msp" : "=r" (result) ); |
||
275 | return(result); |
||
276 | } |
||
277 | |||
278 | |||
279 | #if (__ARM_FEATURE_CMSE == 3U) |
||
280 | /** |
||
281 | \brief Get Main Stack Pointer (non-secure) |
||
282 | \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state. |
||
283 | \return MSP Register value |
||
284 | */ |
||
285 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_MSP_NS(void) |
||
286 | { |
||
287 | register uint32_t result; |
||
288 | |||
289 | __ASM volatile ("MRS %0, msp_ns" : "=r" (result) ); |
||
290 | return(result); |
||
291 | } |
||
292 | #endif |
||
293 | |||
294 | |||
295 | /** |
||
296 | \brief Set Main Stack Pointer |
||
297 | \details Assigns the given value to the Main Stack Pointer (MSP). |
||
298 | \param [in] topOfMainStack Main Stack Pointer value to set |
||
299 | */ |
||
300 | __attribute__((always_inline)) __STATIC_INLINE void __set_MSP(uint32_t topOfMainStack) |
||
301 | { |
||
302 | __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : "sp"); |
||
303 | } |
||
304 | |||
305 | |||
306 | #if (__ARM_FEATURE_CMSE == 3U) |
||
307 | /** |
||
308 | \brief Set Main Stack Pointer (non-secure) |
||
309 | \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state. |
||
310 | \param [in] topOfMainStack Main Stack Pointer value to set |
||
311 | */ |
||
312 | __attribute__((always_inline)) __STATIC_INLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack) |
||
313 | { |
||
314 | __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : "sp"); |
||
315 | } |
||
316 | #endif |
||
317 | |||
318 | |||
319 | /** |
||
320 | \brief Get Priority Mask |
||
321 | \details Returns the current state of the priority mask bit from the Priority Mask Register. |
||
322 | \return Priority Mask value |
||
323 | */ |
||
324 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_PRIMASK(void) |
||
325 | { |
||
326 | uint32_t result; |
||
327 | |||
328 | __ASM volatile ("MRS %0, primask" : "=r" (result) ); |
||
329 | return(result); |
||
330 | } |
||
331 | |||
332 | |||
333 | #if (__ARM_FEATURE_CMSE == 3U) |
||
334 | /** |
||
335 | \brief Get Priority Mask (non-secure) |
||
336 | \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state. |
||
337 | \return Priority Mask value |
||
338 | */ |
||
339 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_PRIMASK_NS(void) |
||
340 | { |
||
341 | uint32_t result; |
||
342 | |||
343 | __ASM volatile ("MRS %0, primask_ns" : "=r" (result) ); |
||
344 | return(result); |
||
345 | } |
||
346 | #endif |
||
347 | |||
348 | |||
349 | /** |
||
350 | \brief Set Priority Mask |
||
351 | \details Assigns the given value to the Priority Mask Register. |
||
352 | \param [in] priMask Priority Mask |
||
353 | */ |
||
354 | __attribute__((always_inline)) __STATIC_INLINE void __set_PRIMASK(uint32_t priMask) |
||
355 | { |
||
356 | __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory"); |
||
357 | } |
||
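A nestable critical section is usually built on __get_PRIMASK/__set_PRIMASK rather than on a blind __enable_irq(), as sketched below; the helper names are illustrative.

static uint32_t critical_enter(void)
{
  uint32_t primask = __get_PRIMASK();   /* remember whether interrupts were already masked */
  __disable_irq();
  return primask;
}

static void critical_exit(uint32_t primask)
{
  __set_PRIMASK(primask);               /* restore the previous state, so sections nest safely */
}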
358 | |||
359 | |||
360 | #if (__ARM_FEATURE_CMSE == 3U) |
||
361 | /** |
||
362 | \brief Set Priority Mask (non-secure) |
||
363 | \details Assigns the given value to the non-secure Priority Mask Register when in secure state. |
||
364 | \param [in] priMask Priority Mask |
||
365 | */ |
||
366 | __attribute__((always_inline)) __STATIC_INLINE void __TZ_set_PRIMASK_NS(uint32_t priMask) |
||
367 | { |
||
368 | __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory"); |
||
369 | } |
||
370 | #endif |
||
371 | |||
372 | |||
373 | #if ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U)) /* ToDo: ARMCC_V6: check if this is ok for cortex >=3 */ |
||
374 | |||
375 | /** |
||
376 | \brief Enable FIQ |
||
377 | \details Enables FIQ interrupts by clearing the F-bit in the CPSR. |
||
378 | Can only be executed in Privileged modes. |
||
379 | */ |
||
380 | __attribute__((always_inline)) __STATIC_INLINE void __enable_fault_irq(void) |
||
381 | { |
||
382 | __ASM volatile ("cpsie f" : : : "memory"); |
||
383 | } |
||
384 | |||
385 | |||
386 | /** |
||
387 | \brief Disable FIQ |
||
388 | \details Disables FIQ interrupts by setting the F-bit in the CPSR. |
||
389 | Can only be executed in Privileged modes. |
||
390 | */ |
||
391 | __attribute__((always_inline)) __STATIC_INLINE void __disable_fault_irq(void) |
||
392 | { |
||
393 | __ASM volatile ("cpsid f" : : : "memory"); |
||
394 | } |
||
395 | |||
396 | |||
397 | /** |
||
398 | \brief Get Base Priority |
||
399 | \details Returns the current value of the Base Priority register. |
||
400 | \return Base Priority register value |
||
401 | */ |
||
402 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_BASEPRI(void) |
||
403 | { |
||
404 | uint32_t result; |
||
405 | |||
406 | __ASM volatile ("MRS %0, basepri" : "=r" (result) ); |
||
407 | return(result); |
||
408 | } |
||
409 | |||
410 | |||
411 | #if (__ARM_FEATURE_CMSE == 3U) |
||
412 | /** |
||
413 | \brief Get Base Priority (non-secure) |
||
414 | \details Returns the current value of the non-secure Base Priority register when in secure state. |
||
415 | \return Base Priority register value |
||
416 | */ |
||
417 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_BASEPRI_NS(void) |
||
418 | { |
||
419 | uint32_t result; |
||
420 | |||
421 | __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) ); |
||
422 | return(result); |
||
423 | } |
||
424 | #endif |
||
425 | |||
426 | |||
427 | /** |
||
428 | \brief Set Base Priority |
||
429 | \details Assigns the given value to the Base Priority register. |
||
430 | \param [in] value Base Priority value to set |
||
431 | */ |
||
432 | __attribute__((always_inline)) __STATIC_INLINE void __set_BASEPRI(uint32_t value) |
||
433 | { |
||
434 | __ASM volatile ("MSR basepri, %0" : : "r" (value) : "memory"); |
||
435 | } |
||
436 | |||
437 | |||
438 | #if (__ARM_FEATURE_CMSE == 3U) |
||
439 | /** |
||
440 | \brief Set Base Priority (non-secure) |
||
441 | \details Assigns the given value to the non-secure Base Priority register when in secure state. |
||
442 | \param [in] value Base Priority value to set |
||
443 | */ |
||
444 | __attribute__((always_inline)) __STATIC_INLINE void __TZ_set_BASEPRI_NS(uint32_t value) |
||
445 | { |
||
446 | __ASM volatile ("MSR basepri_ns, %0" : : "r" (value) : "memory"); |
||
447 | } |
||
448 | #endif |
||
449 | |||
450 | |||
451 | /** |
||
452 | \brief Set Base Priority with condition |
||
453 | \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled, |
||
454 | or the new value increases the BASEPRI priority level. |
||
455 | \param [in] value Base Priority value to set |
||
456 | */ |
||
457 | __attribute__((always_inline)) __STATIC_INLINE void __set_BASEPRI_MAX(uint32_t value) |
||
458 | { |
||
459 | __ASM volatile ("MSR basepri_max, %0" : : "r" (value) : "memory"); |
||
460 | } |
||
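An illustrative (non-normative) use of __set_BASEPRI_MAX: mask interrupts at or below a chosen priority while leaving higher-priority interrupts running. The value must sit in the implemented priority bits; 3 bits (a hypothetical __NVIC_PRIO_BITS == 3) is assumed purely for the example.

#define EXAMPLE_LOCK_PRIO   (4U << (8U - 3U))   /* assumed 3 implemented priority bits */

static void lock_low_prio_irqs(void)
{
  __set_BASEPRI_MAX(EXAMPLE_LOCK_PRIO);   /* only ever raises the masking level */
}

static void unlock_low_prio_irqs(void)
{
  __set_BASEPRI(0U);                      /* 0 disables BASEPRI masking completely */
}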
461 | |||
462 | |||
463 | #if (__ARM_FEATURE_CMSE == 3U) |
||
464 | /** |
||
465 | \brief Set Base Priority with condition (non-secure) |
||
466 | \details Assigns the given value to the non-secure Base Priority register when in secure state only if BASEPRI masking is disabled, |
||
467 | or the new value increases the BASEPRI priority level. |
||
468 | \param [in] value Base Priority value to set |
||
469 | */ |
||
470 | __attribute__((always_inline)) __STATIC_INLINE void __TZ_set_BASEPRI_MAX_NS(uint32_t value) |
||
471 | { |
||
472 | __ASM volatile ("MSR basepri_max_ns, %0" : : "r" (value) : "memory"); |
||
473 | } |
||
474 | #endif |
||
475 | |||
476 | |||
477 | /** |
||
478 | \brief Get Fault Mask |
||
479 | \details Returns the current value of the Fault Mask register. |
||
480 | \return Fault Mask register value |
||
481 | */ |
||
482 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_FAULTMASK(void) |
||
483 | { |
||
484 | uint32_t result; |
||
485 | |||
486 | __ASM volatile ("MRS %0, faultmask" : "=r" (result) ); |
||
487 | return(result); |
||
488 | } |
||
489 | |||
490 | |||
491 | #if (__ARM_FEATURE_CMSE == 3U) |
||
492 | /** |
||
493 | \brief Get Fault Mask (non-secure) |
||
494 | \details Returns the current value of the non-secure Fault Mask register when in secure state. |
||
495 | \return Fault Mask register value |
||
496 | */ |
||
497 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_FAULTMASK_NS(void) |
||
498 | { |
||
499 | uint32_t result; |
||
500 | |||
501 | __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) ); |
||
502 | return(result); |
||
503 | } |
||
504 | #endif |
||
505 | |||
506 | |||
507 | /** |
||
508 | \brief Set Fault Mask |
||
509 | \details Assigns the given value to the Fault Mask register. |
||
510 | \param [in] faultMask Fault Mask value to set |
||
511 | */ |
||
512 | __attribute__((always_inline)) __STATIC_INLINE void __set_FAULTMASK(uint32_t faultMask) |
||
513 | { |
||
514 | __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory"); |
||
515 | } |
||
516 | |||
517 | |||
518 | #if (__ARM_FEATURE_CMSE == 3U) |
||
519 | /** |
||
520 | \brief Set Fault Mask (non-secure) |
||
521 | \details Assigns the given value to the non-secure Fault Mask register when in secure state. |
||
522 | \param [in] faultMask Fault Mask value to set |
||
523 | */ |
||
524 | __attribute__((always_inline)) __STATIC_INLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask) |
||
525 | { |
||
526 | __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory"); |
||
527 | } |
||
528 | #endif |
||
529 | |||
530 | |||
531 | #endif /* ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U)) */ |
||
532 | |||
533 | |||
534 | #if (__ARM_ARCH_8M__ == 1U) |
||
535 | |||
536 | /** |
||
537 | \brief Get Process Stack Pointer Limit |
||
538 | \details Returns the current value of the Process Stack Pointer Limit (PSPLIM). |
||
539 | \return PSPLIM Register value |
||
540 | */ |
||
541 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_PSPLIM(void) |
||
542 | { |
||
543 | register uint32_t result; |
||
544 | |||
545 | __ASM volatile ("MRS %0, psplim" : "=r" (result) ); |
||
546 | return(result); |
||
547 | } |
||
548 | |||
549 | |||
550 | #if (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M') /* ToDo: ARMCC_V6: check predefined macro for mainline */ |
||
551 | /** |
||
552 | \brief Get Process Stack Pointer Limit (non-secure) |
||
553 | \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. |
||
554 | \return PSPLIM Register value |
||
555 | */ |
||
556 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_PSPLIM_NS(void) |
||
557 | { |
||
558 | register uint32_t result; |
||
559 | |||
560 | __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) ); |
||
561 | return(result); |
||
562 | } |
||
563 | #endif |
||
564 | |||
565 | |||
566 | /** |
||
567 | \brief Set Process Stack Pointer Limit |
||
568 | \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM). |
||
569 | \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set |
||
570 | */ |
||
571 | __attribute__((always_inline)) __STATIC_INLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit) |
||
572 | { |
||
573 | __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit)); |
||
574 | } |
||
575 | |||
576 | |||
577 | #if (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M') /* ToDo: ARMCC_V6: check predefined macro for mainline */ |
||
578 | /** |
||
579 | \brief Set Process Stack Pointer Limit (non-secure) |
||
580 | \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. |
||
581 | \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set |
||
582 | */ |
||
583 | __attribute__((always_inline)) __STATIC_INLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit) |
||
584 | { |
||
585 | __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit)); |
||
586 | } |
||
587 | #endif |
||
588 | |||
589 | |||
590 | /** |
||
591 | \brief Get Main Stack Pointer Limit |
||
592 | \details Returns the current value of the Main Stack Pointer Limit (MSPLIM). |
||
593 | \return MSPLIM Register value |
||
594 | */ |
||
595 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_MSPLIM(void) |
||
596 | { |
||
597 | register uint32_t result; |
||
598 | |||
599 | __ASM volatile ("MRS %0, msplim" : "=r" (result) ); |
||
600 | |||
601 | return(result); |
||
602 | } |
||
603 | |||
604 | |||
605 | #if (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M') /* ToDo: ARMCC_V6: check predefined macro for mainline */ |
||
606 | /** |
||
607 | \brief Get Main Stack Pointer Limit (non-secure) |
||
608 | \details Returns the current value of the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state. |
||
609 | \return MSPLIM Register value |
||
610 | */ |
||
611 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_MSPLIM_NS(void) |
||
612 | { |
||
613 | register uint32_t result; |
||
614 | |||
615 | __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) ); |
||
616 | return(result); |
||
617 | } |
||
618 | #endif |
||
619 | |||
620 | |||
621 | /** |
||
622 | \brief Set Main Stack Pointer Limit |
||
623 | \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM). |
||
624 | \param [in] MainStackPtrLimit Main Stack Pointer Limit value to set |
||
625 | */ |
||
626 | __attribute__((always_inline)) __STATIC_INLINE void __set_MSPLIM(uint32_t MainStackPtrLimit) |
||
627 | { |
||
628 | __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit)); |
||
629 | } |
||
630 | |||
631 | |||
632 | #if (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M') /* ToDo: ARMCC_V6: check predefined macro for mainline */ |
||
633 | /** |
||
634 | \brief Set Main Stack Pointer Limit (non-secure) |
||
635 | \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state. |
||
636 | \param [in] MainStackPtrLimit Main Stack Pointer Limit value to set |
||
637 | */ |
||
638 | __attribute__((always_inline)) __STATIC_INLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit) |
||
639 | { |
||
640 | __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit)); |
||
641 | } |
||
642 | #endif |
||
643 | |||
644 | #endif /* (__ARM_ARCH_8M__ == 1U) */ |
||
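On ARMv8-M only, the stack-limit intrinsics above are typically programmed while still running on the main stack, before the process stack is used. This is a hedged sketch with a hypothetical task stack; sizes and names are examples only.

static uint64_t task_stack[256];                         /* 2 KiB, 8-byte aligned example stack */

static void prepare_task_stack(void)
{
  __set_PSPLIM((uint32_t)&task_stack[0]);                /* lowest address the PSP may reach */
  __set_PSP((uint32_t)&task_stack[256]);                 /* initial PSP at the top of the stack */
}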
645 | |||
646 | |||
647 | #if ((__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U)) /* ToDo: ARMCC_V6: check if this is ok for cortex >=4 */ |
||
648 | |||
649 | /** |
||
650 | \brief Get FPSCR |
||
651 | \details Returns the current value of the Floating Point Status/Control register. |
||
652 | \return Floating Point Status/Control register value |
||
653 | */ |
||
654 | #define __get_FPSCR __builtin_arm_get_fpscr |
||
655 | #if 0 |
||
656 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_FPSCR(void) |
||
657 | { |
||
658 | #if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U) |
||
659 | uint32_t result; |
||
660 | |||
661 | __ASM volatile (""); /* Empty asm statement works as a scheduling barrier */ |
||
662 | __ASM volatile ("VMRS %0, fpscr" : "=r" (result) ); |
||
663 | __ASM volatile (""); |
||
664 | return(result); |
||
665 | #else |
||
666 | return(0); |
||
667 | #endif |
||
668 | } |
||
669 | #endif |
||
670 | |||
671 | #if (__ARM_FEATURE_CMSE == 3U) |
||
672 | /** |
||
673 | \brief Get FPSCR (non-secure) |
||
674 | \details Returns the current value of the non-secure Floating Point Status/Control register when in secure state. |
||
675 | \return Floating Point Status/Control register value |
||
676 | */ |
||
677 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_FPSCR_NS(void) |
||
678 | { |
||
679 | #if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U) |
||
680 | uint32_t result; |
||
681 | |||
682 | __ASM volatile (""); /* Empty asm statement works as a scheduling barrier */ |
||
683 | __ASM volatile ("VMRS %0, fpscr_ns" : "=r" (result) ); |
||
684 | __ASM volatile (""); |
||
685 | return(result); |
||
686 | #else |
||
687 | return(0); |
||
688 | #endif |
||
689 | } |
||
690 | #endif |
||
691 | |||
692 | |||
693 | /** |
||
694 | \brief Set FPSCR |
||
695 | \details Assigns the given value to the Floating Point Status/Control register. |
||
696 | \param [in] fpscr Floating Point Status/Control value to set |
||
697 | */ |
||
698 | #define __set_FPSCR __builtin_arm_set_fpscr |
||
699 | #if 0 |
||
700 | __attribute__((always_inline)) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr) |
||
701 | { |
||
702 | #if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U) |
||
703 | __ASM volatile (""); /* Empty asm statement works as a scheduling barrier */ |
||
704 | __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc"); |
||
705 | __ASM volatile (""); |
||
706 | #endif |
||
707 | } |
||
708 | #endif |
||
709 | |||
710 | #if (__ARM_FEATURE_CMSE == 3U) |
||
711 | /** |
||
712 | \brief Set FPSCR (non-secure) |
||
713 | \details Assigns the given value to the non-secure Floating Point Status/Control register when in secure state. |
||
714 | \param [in] fpscr Floating Point Status/Control value to set |
||
715 | */ |
||
716 | __attribute__((always_inline)) __STATIC_INLINE void __TZ_set_FPSCR_NS(uint32_t fpscr) |
||
717 | { |
||
718 | #if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U) |
||
719 | __ASM volatile (""); /* Empty asm statement works as a scheduling barrier */ |
||
720 | __ASM volatile ("VMSR fpscr_ns, %0" : : "r" (fpscr) : "vfpcc"); |
||
721 | __ASM volatile (""); |
||
722 | #endif |
||
723 | } |
||
724 | #endif |
||
725 | |||
726 | #endif /* ((__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U)) */ |
||
727 | |||
728 | |||
729 | |||
730 | /*@} end of CMSIS_Core_RegAccFunctions */ |
||
731 | |||
732 | |||
733 | /* ########################## Core Instruction Access ######################### */ |
||
734 | /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface |
||
735 | Access to dedicated instructions |
||
736 | @{ |
||
737 | */ |
||
738 | |||
739 | /* Define macros for porting to both thumb1 and thumb2. |
||
740 | * For thumb1, use low register (r0-r7), specified by constraint "l" |
||
741 | * Otherwise, use general registers, specified by constraint "r" */ |
||
742 | #if defined (__thumb__) && !defined (__thumb2__) |
||
743 | #define __CMSIS_GCC_OUT_REG(r) "=l" (r) |
||
744 | #define __CMSIS_GCC_USE_REG(r) "l" (r) |
||
745 | #else |
||
746 | #define __CMSIS_GCC_OUT_REG(r) "=r" (r) |
||
747 | #define __CMSIS_GCC_USE_REG(r) "r" (r) |
||
748 | #endif |
||
749 | |||
750 | /** |
||
751 | \brief No Operation |
||
752 | \details No Operation does nothing. This instruction can be used for code alignment purposes. |
||
753 | */ |
||
754 | #define __NOP __builtin_arm_nop |
||
755 | |||
756 | /** |
||
757 | \brief Wait For Interrupt |
||
758 | \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs. |
||
759 | */ |
||
760 | #define __WFI __builtin_arm_wfi |
||
761 | |||
762 | |||
763 | /** |
||
764 | \brief Wait For Event |
||
765 | \details Wait For Event is a hint instruction that permits the processor to enter |
||
766 | a low-power state until one of a number of events occurs. |
||
767 | */ |
||
768 | #define __WFE __builtin_arm_wfe |
||
769 | |||
770 | |||
771 | /** |
||
772 | \brief Send Event |
||
773 | \details Send Event is a hint instruction. It causes an event to be signaled to the CPU. |
||
774 | */ |
||
775 | #define __SEV __builtin_arm_sev |
||
776 | |||
777 | |||
778 | /** |
||
779 | \brief Instruction Synchronization Barrier |
||
780 | \details Instruction Synchronization Barrier flushes the pipeline in the processor, |
||
781 | so that all instructions following the ISB are fetched from cache or memory, |
||
782 | after the instruction has been completed. |
||
783 | */ |
||
784 | #define __ISB() __builtin_arm_isb(0xF); |
||
785 | |||
786 | /** |
||
787 | \brief Data Synchronization Barrier |
||
788 | \details Acts as a special kind of Data Memory Barrier. |
||
789 | It completes when all explicit memory accesses before this instruction complete. |
||
790 | */ |
||
791 | #define __DSB() __builtin_arm_dsb(0xF); |
||
792 | |||
793 | |||
794 | /** |
||
795 | \brief Data Memory Barrier |
||
796 | \details Ensures the apparent order of the explicit memory operations before |
||
797 | and after the instruction, without ensuring their completion. |
||
798 | */ |
||
799 | #define __DMB() __builtin_arm_dmb(0xF); |
||
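As an informal example of the barrier macros, __DMB() can order a buffer fill against the flag that publishes it to an interrupt handler; the variable and function names are made up for the sketch.

static volatile uint32_t buffer_ready;

static void publish_buffer(uint32_t *buf, uint32_t len)
{
  for (uint32_t i = 0U; i < len; i++) { buf[i] = i; }   /* produce the data */
  __DMB();                                              /* make the writes visible before the flag */
  buffer_ready = 1U;                                    /* consumer (e.g. an ISR) polls this flag */
}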
800 | |||
801 | |||
802 | /** |
||
803 | \brief Reverse byte order (32 bit) |
||
804 | \details Reverses the byte order in an integer value. |
||
805 | \param [in] value Value to reverse |
||
806 | \return Reversed value |
||
807 | */ |
||
808 | #define __REV __builtin_bswap32 |
||
809 | |||
810 | |||
811 | /** |
||
812 | \brief Reverse byte order (16 bit) |
||
813 | \details Reverses the byte order in two unsigned short values. |
||
814 | \param [in] value Value to reverse |
||
815 | \return Reversed value |
||
816 | */ |
||
817 | #define __REV16 __builtin_bswap16 /* ToDo: ARMCC_V6: check if __builtin_bswap16 could be used */ |
||
818 | #if 0 |
||
819 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __REV16(uint32_t value) |
||
820 | { |
||
821 | uint32_t result; |
||
822 | |||
823 | __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) ); |
||
824 | return(result); |
||
825 | } |
||
826 | #endif |
||
827 | |||
828 | |||
829 | /** |
||
830 | \brief Reverse byte order in signed short value |
||
831 | \details Reverses the byte order in a signed short value with sign extension to integer. |
||
832 | \param [in] value Value to reverse |
||
833 | \return Reversed value |
||
834 | */ |
||
835 | /* ToDo: ARMCC_V6: check if __builtin_bswap16 could be used */ |
||
836 | __attribute__((always_inline)) __STATIC_INLINE int32_t __REVSH(int32_t value) |
||
837 | { |
||
838 | int32_t result; |
||
839 | |||
840 | __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) ); |
||
841 | return(result); |
||
842 | } |
||
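A small, non-authoritative example of the byte-reverse intrinsics as mapped in this file (__REV16 is __builtin_bswap16 here, i.e. it swaps the two bytes of a 16-bit value): converting host-order values to big-endian wire order.

static uint32_t to_be32(uint32_t v)
{
  return __REV(v);                  /* 0x12345678 -> 0x78563412 on a little-endian core */
}

static uint16_t to_be16(uint16_t v)
{
  return (uint16_t)__REV16(v);      /* swaps the two bytes of the halfword */
}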
843 | |||
844 | |||
845 | /** |
||
846 | \brief Rotate Right in unsigned value (32 bit) |
||
847 | \details Rotate Right provides the value of the contents of a register rotated by a variable number of bits. |
||
848 | \param [in] op1 Value to rotate |
||
849 | \param [in] op2 Number of Bits to rotate |
||
850 | \return Rotated value |
||
851 | */ |
||
852 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2) |
||
853 | { |
||
854 | op2 %= 32U; return (op2 == 0U) ? op1 : ((op1 >> op2) | (op1 << (32U - op2))); /* guard op2 == 0 to avoid an undefined shift by 32 */ |
||
855 | } |
||
856 | |||
857 | |||
858 | /** |
||
859 | \brief Breakpoint |
||
860 | \details Causes the processor to enter Debug state. |
||
861 | Debug tools can use this to investigate system state when the instruction at a particular address is reached. |
||
862 | \param [in] value is ignored by the processor. |
||
863 | If required, a debugger can use it to store additional information about the breakpoint. |
||
864 | */ |
||
865 | #define __BKPT(value) __ASM volatile ("bkpt "#value) |
||
866 | |||
867 | |||
868 | /** |
||
869 | \brief Reverse bit order of value |
||
870 | \details Reverses the bit order of the given value. |
||
871 | \param [in] value Value to reverse |
||
872 | \return Reversed value |
||
873 | */ |
||
874 | /* ToDo: ARMCC_V6: check if __builtin_arm_rbit is supported */ |
||
875 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value) |
||
876 | { |
||
877 | uint32_t result; |
||
878 | |||
879 | #if ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U)) /* ToDo: ARMCC_V6: check if this is ok for cortex >=3 */ |
||
880 | __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) ); |
||
881 | #else |
||
882 | int32_t s = 4 /*sizeof(v)*/ * 8 - 1; /* extra shift needed at end */ |
||
883 | |||
884 | result = value; /* r will be reversed bits of v; first get LSB of v */ |
||
885 | for (value >>= 1U; value; value >>= 1U) |
||
886 | { |
||
887 | result <<= 1U; |
||
888 | result |= value & 1U; |
||
889 | s--; |
||
890 | } |
||
891 | result <<= s; /* shift when v's highest bits are zero */ |
||
892 | #endif |
||
893 | return(result); |
||
894 | } |
||
895 | |||
896 | |||
897 | /** |
||
898 | \brief Count leading zeros |
||
899 | \details Counts the number of leading zeros of a data value. |
||
900 | \param [in] value Value to count the leading zeros |
||
901 | \return number of leading zeros in value |
||
902 | */ |
||
903 | #define __CLZ __builtin_clz |
||
904 | |||
905 | |||
906 | #if ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U)) /* ToDo: ARMCC_V6: check if this is ok for cortex >=3 */ |
||
907 | |||
908 | /** |
||
909 | \brief LDR Exclusive (8 bit) |
||
910 | \details Executes an exclusive LDR instruction for an 8 bit value. |
||
911 | \param [in] ptr Pointer to data |
||
912 | \return value of type uint8_t at (*ptr) |
||
913 | */ |
||
914 | #define __LDREXB (uint8_t)__builtin_arm_ldrex |
||
915 | |||
916 | |||
917 | /** |
||
918 | \brief LDR Exclusive (16 bit) |
||
919 | \details Executes an exclusive LDR instruction for 16 bit values. |
||
920 | \param [in] ptr Pointer to data |
||
921 | \return value of type uint16_t at (*ptr) |
||
922 | */ |
||
923 | #define __LDREXH (uint16_t)__builtin_arm_ldrex |
||
924 | |||
925 | |||
926 | /** |
||
927 | \brief LDR Exclusive (32 bit) |
||
928 | \details Executes an exclusive LDR instruction for 32 bit values. |
||
929 | \param [in] ptr Pointer to data |
||
930 | \return value of type uint32_t at (*ptr) |
||
931 | */ |
||
932 | #define __LDREXW (uint32_t)__builtin_arm_ldrex |
||
933 | |||
934 | |||
935 | /** |
||
936 | \brief STR Exclusive (8 bit) |
||
937 | \details Executes an exclusive STR instruction for 8 bit values. |
||
938 | \param [in] value Value to store |
||
939 | \param [in] ptr Pointer to location |
||
940 | \return 0 Function succeeded |
||
941 | \return 1 Function failed |
||
942 | */ |
||
943 | #define __STREXB (uint32_t)__builtin_arm_strex |
||
944 | |||
945 | |||
946 | /** |
||
947 | \brief STR Exclusive (16 bit) |
||
948 | \details Executes an exclusive STR instruction for 16 bit values. |
||
949 | \param [in] value Value to store |
||
950 | \param [in] ptr Pointer to location |
||
951 | \return 0 Function succeeded |
||
952 | \return 1 Function failed |
||
953 | */ |
||
954 | #define __STREXH (uint32_t)__builtin_arm_strex |
||
955 | |||
956 | |||
957 | /** |
||
958 | \brief STR Exclusive (32 bit) |
||
959 | \details Executes an exclusive STR instruction for 32 bit values. |
||
960 | \param [in] value Value to store |
||
961 | \param [in] ptr Pointer to location |
||
962 | \return 0 Function succeeded |
||
963 | \return 1 Function failed |
||
964 | */ |
||
965 | #define __STREXW (uint32_t)__builtin_arm_strex |
||
966 | |||
967 | |||
968 | /** |
||
969 | \brief Remove the exclusive lock |
||
970 | \details Removes the exclusive lock which is created by LDREX. |
||
971 | */ |
||
972 | #define __CLREX __builtin_arm_clrex |
||
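The exclusive-access macros above are normally used in a load/modify/store-retry loop; the helper below is an illustrative sketch, not part of CMSIS.

static void atomic_add(volatile uint32_t *addr, uint32_t delta)
{
  uint32_t value;
  do {
    value = __LDREXW(addr) + delta;          /* load with the exclusive monitor set */
  } while (__STREXW(value, addr) != 0U);     /* 0 = store succeeded, 1 = monitor lost, retry */
}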
973 | |||
974 | |||
975 | /** |
||
976 | \brief Signed Saturate |
||
977 | \details Saturates a signed value. |
||
978 | \param [in] value Value to be saturated |
||
979 | \param [in] sat Bit position to saturate to (1..32) |
||
980 | \return Saturated value |
||
981 | */ |
||
982 | /*#define __SSAT __builtin_arm_ssat*/ |
||
983 | #define __SSAT(ARG1,ARG2) \ |
||
984 | ({ \ |
||
985 | int32_t __RES, __ARG1 = (ARG1); \ |
||
986 | __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \ |
||
987 | __RES; \ |
||
988 | }) |
||
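For illustration, __SSAT is commonly used to clamp an intermediate result back into a narrower fixed-point range, e.g. a saturating Q15 addition (hypothetical helper):

static int16_t q15_add_sat(int16_t a, int16_t b)
{
  return (int16_t)__SSAT((int32_t)a + (int32_t)b, 16);   /* clamp the 17-bit sum to [-32768, 32767] */
}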
989 | |||
990 | |||
991 | /** |
||
992 | \brief Unsigned Saturate |
||
993 | \details Saturates an unsigned value. |
||
994 | \param [in] value Value to be saturated |
||
995 | \param [in] sat Bit position to saturate to (0..31) |
||
996 | \return Saturated value |
||
997 | */ |
||
998 | #define __USAT __builtin_arm_usat |
||
999 | #if 0 |
||
1000 | #define __USAT(ARG1,ARG2) \ |
||
1001 | ({ \ |
||
1002 | uint32_t __RES, __ARG1 = (ARG1); \ |
||
1003 | __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \ |
||
1004 | __RES; \ |
||
1005 | }) |
||
1006 | #endif |
||
1007 | |||
1008 | |||
1009 | /** |
||
1010 | \brief Rotate Right with Extend (32 bit) |
||
1011 | \details Moves each bit of a bitstring right by one bit. |
||
1012 | The carry input is shifted in at the left end of the bitstring. |
||
1013 | \param [in] value Value to rotate |
||
1014 | \return Rotated value |
||
1015 | */ |
||
1016 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __RRX(uint32_t value) |
||
1017 | { |
||
1018 | uint32_t result; |
||
1019 | |||
1020 | __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) ); |
||
1021 | return(result); |
||
1022 | } |
||
1023 | |||
1024 | |||
1025 | /** |
||
1026 | \brief LDRT Unprivileged (8 bit) |
||
1027 | \details Executes an Unprivileged LDRT instruction for an 8 bit value. |
||
1028 | \param [in] ptr Pointer to data |
||
1029 | \return value of type uint8_t at (*ptr) |
||
1030 | */ |
||
1031 | __attribute__((always_inline)) __STATIC_INLINE uint8_t __LDRBT(volatile uint8_t *ptr) |
||
1032 | { |
||
1033 | uint32_t result; |
||
1034 | |||
1035 | __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) ); |
||
1036 | return ((uint8_t) result); /* Add explicit type cast here */ |
||
1037 | } |
||
1038 | |||
1039 | |||
1040 | /** |
||
1041 | \brief LDRT Unprivileged (16 bit) |
||
1042 | \details Executes a Unprivileged LDRT instruction for 16 bit values. |
||
1043 | \param [in] ptr Pointer to data |
||
1044 | \return value of type uint16_t at (*ptr) |
||
1045 | */ |
||
1046 | __attribute__((always_inline)) __STATIC_INLINE uint16_t __LDRHT(volatile uint16_t *ptr) |
||
1047 | { |
||
1048 | uint32_t result; |
||
1049 | |||
1050 | __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) ); |
||
1051 | return ((uint16_t) result); /* Add explicit type cast here */ |
||
1052 | } |
||
1053 | |||
1054 | |||
1055 | /** |
||
1056 | \brief LDRT Unprivileged (32 bit) |
||
1057 | \details Executes an Unprivileged LDRT instruction for 32 bit values. |
||
1058 | \param [in] ptr Pointer to data |
||
1059 | \return value of type uint32_t at (*ptr) |
||
1060 | */ |
||
1061 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __LDRT(volatile uint32_t *ptr) |
||
1062 | { |
||
1063 | uint32_t result; |
||
1064 | |||
1065 | __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) ); |
||
1066 | return(result); |
||
1067 | } |
||
1068 | |||
1069 | |||
1070 | /** |
||
1071 | \brief STRT Unprivileged (8 bit) |
||
1072 | \details Executes an Unprivileged STRT instruction for 8 bit values. |
||
1073 | \param [in] value Value to store |
||
1074 | \param [in] ptr Pointer to location |
||
1075 | */ |
||
1076 | __attribute__((always_inline)) __STATIC_INLINE void __STRBT(uint8_t value, volatile uint8_t *ptr) |
||
1077 | { |
||
1078 | __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) ); |
||
1079 | } |
||
1080 | |||
1081 | |||
1082 | /** |
||
1083 | \brief STRT Unprivileged (16 bit) |
||
1084 | \details Executes an Unprivileged STRT instruction for 16 bit values. |
||
1085 | \param [in] value Value to store |
||
1086 | \param [in] ptr Pointer to location |
||
1087 | */ |
||
1088 | __attribute__((always_inline)) __STATIC_INLINE void __STRHT(uint16_t value, volatile uint16_t *ptr) |
||
1089 | { |
||
1090 | __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) ); |
||
1091 | } |
||
1092 | |||
1093 | |||
1094 | /** |
||
1095 | \brief STRT Unprivileged (32 bit) |
||
1096 | \details Executes an Unprivileged STRT instruction for 32 bit values. |
||
1097 | \param [in] value Value to store |
||
1098 | \param [in] ptr Pointer to location |
||
1099 | */ |
||
1100 | __attribute__((always_inline)) __STATIC_INLINE void __STRT(uint32_t value, volatile uint32_t *ptr) |
||
1101 | { |
||
1102 | __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) ); |
||
1103 | } |
||
1104 | |||
1105 | #endif /* ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U)) */ |
||
1106 | |||
1107 | |||
1108 | #if (__ARM_ARCH_8M__ == 1U) |
||
1109 | |||
1110 | /** |
||
1111 | \brief Load-Acquire (8 bit) |
||
1112 | \details Executes a LDAB instruction for 8 bit value. |
||
1113 | \param [in] ptr Pointer to data |
||
1114 | \return value of type uint8_t at (*ptr) |
||
1115 | */ |
||
1116 | __attribute__((always_inline)) __STATIC_INLINE uint8_t __LDAB(volatile uint8_t *ptr) |
||
1117 | { |
||
1118 | uint32_t result; |
||
1119 | |||
1120 | __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) ); |
||
1121 | return ((uint8_t) result); |
||
1122 | } |
||
1123 | |||
1124 | |||
1125 | /** |
||
1126 | \brief Load-Acquire (16 bit) |
||
1127 | \details Executes a LDAH instruction for 16 bit values. |
||
1128 | \param [in] ptr Pointer to data |
||
1129 | \return value of type uint16_t at (*ptr) |
||
1130 | */ |
||
1131 | __attribute__((always_inline)) __STATIC_INLINE uint16_t __LDAH(volatile uint16_t *ptr) |
||
1132 | { |
||
1133 | uint32_t result; |
||
1134 | |||
1135 | __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) ); |
||
1136 | return ((uint16_t) result); |
||
1137 | } |
||
1138 | |||
1139 | |||
1140 | /** |
||
1141 | \brief Load-Acquire (32 bit) |
||
1142 | \details Executes a LDA instruction for 32 bit values. |
||
1143 | \param [in] ptr Pointer to data |
||
1144 | \return value of type uint32_t at (*ptr) |
||
1145 | */ |
||
1146 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __LDA(volatile uint32_t *ptr) |
||
1147 | { |
||
1148 | uint32_t result; |
||
1149 | |||
1150 | __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) ); |
||
1151 | return(result); |
||
1152 | } |
||
1153 | |||
1154 | |||
1155 | /** |
||
1156 | \brief Store-Release (8 bit) |
||
1157 | \details Executes a STLB instruction for 8 bit values. |
||
1158 | \param [in] value Value to store |
||
1159 | \param [in] ptr Pointer to location |
||
1160 | */ |
||
1161 | __attribute__((always_inline)) __STATIC_INLINE void __STLB(uint8_t value, volatile uint8_t *ptr) |
||
1162 | { |
||
1163 | __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) ); |
||
1164 | } |
||
1165 | |||
1166 | |||
1167 | /** |
||
1168 | \brief Store-Release (16 bit) |
||
1169 | \details Executes a STLH instruction for 16 bit values. |
||
1170 | \param [in] value Value to store |
||
1171 | \param [in] ptr Pointer to location |
||
1172 | */ |
||
1173 | __attribute__((always_inline)) __STATIC_INLINE void __STLH(uint16_t value, volatile uint16_t *ptr) |
||
1174 | { |
||
1175 | __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) ); |
||
1176 | } |
||
1177 | |||
1178 | |||
1179 | /** |
||
1180 | \brief Store-Release (32 bit) |
||
1181 | \details Executes a STL instruction for 32 bit values. |
||
1182 | \param [in] value Value to store |
||
1183 | \param [in] ptr Pointer to location |
||
1184 | */ |
||
1185 | __attribute__((always_inline)) __STATIC_INLINE void __STL(uint32_t value, volatile uint32_t *ptr) |
||
1186 | { |
||
1187 | __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) ); |
||
1188 | } |
||
1189 | |||
1190 | |||
1191 | /** |
||
1192 | \brief Load-Acquire Exclusive (8 bit) |
||
1193 | \details Executes a LDAB exclusive instruction for 8 bit value. |
||
1194 | \param [in] ptr Pointer to data |
||
1195 | \return value of type uint8_t at (*ptr) |
||
1196 | */ |
||
1197 | #define __LDAEXB (uint8_t)__builtin_arm_ldaex |
||
1198 | |||
1199 | |||
1200 | /** |
||
1201 | \brief Load-Acquire Exclusive (16 bit) |
||
1202 | \details Executes a LDAH exclusive instruction for 16 bit values. |
||
1203 | \param [in] ptr Pointer to data |
||
1204 | \return value of type uint16_t at (*ptr) |
||
1205 | */ |
||
1206 | #define __LDAEXH (uint16_t)__builtin_arm_ldaex |
||
1207 | |||
1208 | |||
1209 | /** |
||
1210 | \brief Load-Acquire Exclusive (32 bit) |
||
1211 | \details Executes a LDA exclusive instruction for 32 bit values. |
||
1212 | \param [in] ptr Pointer to data |
||
1213 | \return value of type uint32_t at (*ptr) |
||
1214 | */ |
||
1215 | #define __LDAEX (uint32_t)__builtin_arm_ldaex |
||
1216 | |||
1217 | |||
1218 | /** |
||
1219 | \brief Store-Release Exclusive (8 bit) |
||
1220 | \details Executes a STLB exclusive instruction for 8 bit values. |
||
1221 | \param [in] value Value to store |
||
1222 | \param [in] ptr Pointer to location |
||
1223 | \return 0 Function succeeded |
||
1224 | \return 1 Function failed |
||
1225 | */ |
||
1226 | #define __STLEXB (uint32_t)__builtin_arm_stlex |
||
1227 | |||
1228 | |||
1229 | /** |
||
1230 | \brief Store-Release Exclusive (16 bit) |
||
1231 | \details Executes a STLH exclusive instruction for 16 bit values. |
||
1232 | \param [in] value Value to store |
||
1233 | \param [in] ptr Pointer to location |
||
1234 | \return 0 Function succeeded |
||
1235 | \return 1 Function failed |
||
1236 | */ |
||
1237 | #define __STLEXH (uint32_t)__builtin_arm_stlex |
||
1238 | |||
1239 | |||
1240 | /** |
||
1241 | \brief Store-Release Exclusive (32 bit) |
||
1242 | \details Executes a STL exclusive instruction for 32 bit values. |
||
1243 | \param [in] value Value to store |
||
1244 | \param [in] ptr Pointer to location |
||
1245 | \return 0 Function succeeded |
||
1246 | \return 1 Function failed |
||
1247 | */ |
||
1248 | #define __STLEX (uint32_t)__builtin_arm_stlex |
||
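A hedged sketch of the acquire/release exclusives in use (ARMv8-M only): a minimal spinlock. Names are illustrative; a production lock would also consider WFE/SEV and interrupt context.

static volatile uint32_t lock_word;               /* 0 = free, 1 = taken */

static void spin_lock(void)
{
  do {
    while (__LDAEX(&lock_word) != 0U) { }         /* load-acquire; spin while the lock is held */
  } while (__STLEX(1U, &lock_word) != 0U);        /* try to claim it; retry if exclusivity was lost */
}

static void spin_unlock(void)
{
  __STL(0U, &lock_word);                          /* store-release: critical-section writes become visible first */
}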
1249 | |||
1250 | #endif /* (__ARM_ARCH_8M__ == 1U) */ |
||
1251 | |||
1252 | /*@}*/ /* end of group CMSIS_Core_InstructionInterface */ |
||
1253 | |||
1254 | |||
1255 | /* ################### Compiler specific Intrinsics ########################### */ |
||
1256 | /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics |
||
1257 | Access to dedicated SIMD instructions |
||
1258 | @{ |
||
1259 | */ |
||
1260 | |||
1261 | #if (__ARM_FEATURE_DSP == 1U) /* ToDo: ARMCC_V6: This should be ARCH >= ARMv7-M + SIMD */ |
||
1262 | |||
1263 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __SADD8(uint32_t op1, uint32_t op2) |
||
1264 | { |
||
1265 | uint32_t result; |
||
1266 | |||
1267 | __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1268 | return(result); |
||
1269 | } |
||
1270 | |||
1271 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __QADD8(uint32_t op1, uint32_t op2) |
||
1272 | { |
||
1273 | uint32_t result; |
||
1274 | |||
1275 | __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1276 | return(result); |
||
1277 | } |
||
1278 | |||
1279 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2) |
||
1280 | { |
||
1281 | uint32_t result; |
||
1282 | |||
1283 | __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1284 | return(result); |
||
1285 | } |
||
1286 | |||
1287 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __UADD8(uint32_t op1, uint32_t op2) |
||
1288 | { |
||
1289 | uint32_t result; |
||
1290 | |||
1291 | __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1292 | return(result); |
||
1293 | } |
||
1294 | |||
1295 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2) |
||
1296 | { |
||
1297 | uint32_t result; |
||
1298 | |||
1299 | __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1300 | return(result); |
||
1301 | } |
||
1302 | |||
1303 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2) |
||
1304 | { |
||
1305 | uint32_t result; |
||
1306 | |||
1307 | __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1308 | return(result); |
||
1309 | } |
||
1310 | |||
1311 | |||
1312 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2) |
||
1313 | { |
||
1314 | uint32_t result; |
||
1315 | |||
1316 | __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1317 | return(result); |
||
1318 | } |
||
1319 | |||
1320 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2) |
||
1321 | { |
||
1322 | uint32_t result; |
||
1323 | |||
1324 | __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1325 | return(result); |
||
1326 | } |
||
1327 | |||
1328 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2) |
||
1329 | { |
||
1330 | uint32_t result; |
||
1331 | |||
1332 | __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1333 | return(result); |
||
1334 | } |
||
1335 | |||
1336 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __USUB8(uint32_t op1, uint32_t op2) |
||
1337 | { |
||
1338 | uint32_t result; |
||
1339 | |||
1340 | __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1341 | return(result); |
||
1342 | } |
||
1343 | |||
1344 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2) |
||
1345 | { |
||
1346 | uint32_t result; |
||
1347 | |||
1348 | __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1349 | return(result); |
||
1350 | } |
||
1351 | |||
1352 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2) |
||
1353 | { |
||
1354 | uint32_t result; |
||
1355 | |||
1356 | __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1357 | return(result); |
||
1358 | } |
||
1359 | |||
1360 | |||
1361 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __SADD16(uint32_t op1, uint32_t op2) |
||
1362 | { |
||
1363 | uint32_t result; |
||
1364 | |||
1365 | __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1366 | return(result); |
||
1367 | } |
||
1368 | |||
1369 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __QADD16(uint32_t op1, uint32_t op2) |
||
1370 | { |
||
1371 | uint32_t result; |
||
1372 | |||
1373 | __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1374 | return(result); |
||
1375 | } |
||
1376 | |||
1377 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2) |
||
1378 | { |
||
1379 | uint32_t result; |
||
1380 | |||
1381 | __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1382 | return(result); |
||
1383 | } |
||
1384 | |||
1385 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __UADD16(uint32_t op1, uint32_t op2) |
||
1386 | { |
||
1387 | uint32_t result; |
||
1388 | |||
1389 | __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1390 | return(result); |
||
1391 | } |
||
1392 | |||
1393 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2) |
||
1394 | { |
||
1395 | uint32_t result; |
||
1396 | |||
1397 | __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1398 | return(result); |
||
1399 | } |
||
1400 | |||
1401 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2) |
||
1402 | { |
||
1403 | uint32_t result; |
||
1404 | |||
1405 | __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1406 | return(result); |
||
1407 | } |
||
1408 | |||
1409 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2) |
||
1410 | { |
||
1411 | uint32_t result; |
||
1412 | |||
1413 | __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1414 | return(result); |
||
1415 | } |
||
1416 | |||
1417 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2) |
||
1418 | { |
||
1419 | uint32_t result; |
||
1420 | |||
1421 | __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1422 | return(result); |
||
1423 | } |
||
1424 | |||
1425 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2) |
||
1426 | { |
||
1427 | uint32_t result; |
||
1428 | |||
1429 | __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1430 | return(result); |
||
1431 | } |
||
1432 | |||
1433 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __USUB16(uint32_t op1, uint32_t op2) |
||
1434 | { |
||
1435 | uint32_t result; |
||
1436 | |||
1437 | __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1438 | return(result); |
||
1439 | } |
||
1440 | |||
1441 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2) |
||
1442 | { |
||
1443 | uint32_t result; |
||
1444 | |||
1445 | __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1446 | return(result); |
||
1447 | } |
||
1448 | |||
1449 | __attribute__((always_inline)) __STATIC_INLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2) |
||
1450 | { |
||
1451 | uint32_t result; |
||
1452 | |||
1453 | __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); |
||
1454 | return(result); |
||
1455 | } |
||
1456 | |||
__attribute__((always_inline)) __STATIC_INLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
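/* Subtract/add with exchange (SAX): the halfwords of op2 are swapped, then
   the top halfwords are subtracted and the bottom halfwords are added.
   Provided in plain (SSAX/USAX), saturating (QSAX/UQSAX) and halving
   (SHSAX/UHSAX) forms. */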
__attribute__((always_inline)) __STATIC_INLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
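/* Sum of absolute differences of the four unsigned bytes in op1 and op2
   (USAD8), optionally accumulated into op3 (USADA8). */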
__attribute__((always_inline)) __STATIC_INLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
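/* Signed (SSAT16) and unsigned (USAT16) saturation of both halfwords to a
   given bit width. These are macros rather than inline functions because the
   "I" operand constraint expects the saturation bound to be a compile-time
   constant that can be encoded as an immediate.

   Illustrative use (hypothetical variable names, not part of this header):

     uint32_t packed = __PKHBT(x0, x1, 16);   // pack two halfwords
     uint32_t sat    = __SSAT16(packed, 12);  // saturate both lanes to 12 bits
*/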
#define __SSAT16(ARG1,ARG2) \
({ \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

#define __USAT16(ARG1,ARG2) \
({ \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
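/* Dual byte extension: __UXTB16/__SXTB16 zero- or sign-extend bytes 0 and 2
   of op1 into two 16-bit halfwords; __UXTAB16/__SXTAB16 extend bytes 0 and 2
   of op2 and add them to the corresponding halfwords of op1. */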
__attribute__((always_inline)) __STATIC_INLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
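/* Dual signed 16 x 16 multiplies: __SMUAD multiplies the corresponding
   halfwords of op1 and op2 and adds the two products; __SMLAD additionally
   adds the 32-bit accumulator op3. The ...X variants exchange the halfwords
   of op2 before multiplying.

   Illustrative sketch (hypothetical helper, not part of this header): a
   two-element Q15 dot product of samples packed as {low, high} halfwords.

     static inline int32_t dot2_q15(uint32_t packed_x, uint32_t packed_y, int32_t acc)
     {
       return (int32_t)__SMLAD(packed_x, packed_y, (uint32_t)acc);
     }
*/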
__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
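/* __SMLALD/__SMLALDX perform the same dual multiplies but accumulate into a
   64-bit value. The llreg_u union splits the 64-bit accumulator into the two
   32-bit registers the smlald instruction expects; the __ARMEB__ test keeps
   the word order correct on big-endian builds. */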
__attribute__((always_inline)) __STATIC_INLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__ /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else             /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__attribute__((always_inline)) __STATIC_INLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__ /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else             /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
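/* Dual multiply with subtraction: __SMUSD subtracts the product of the top
   halfwords from the product of the bottom halfwords; __SMLSD adds that
   difference to the 32-bit accumulator op3. The ...X variants exchange the
   halfwords of op2 first. */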
__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
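/* __SMLSLD/__SMLSLDX are the 64-bit accumulating forms of the dual
   multiply-subtract, using the same llreg_u union and endianness handling
   as __SMLALD above. */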
__attribute__((always_inline)) __STATIC_INLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__ /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else             /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__attribute__((always_inline)) __STATIC_INLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__ /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else             /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
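/* __SEL picks each result byte from op1 or op2 according to the APSR.GE
   flags set by a preceding SIMD instruction. */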
__attribute__((always_inline)) __STATIC_INLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
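/* 32-bit saturating addition (__QADD) and subtraction (__QSUB); the Q flag
   is set if saturation occurs. */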
__attribute__((always_inline)) __STATIC_INLINE int32_t __QADD( int32_t op1, int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE int32_t __QSUB( int32_t op1, int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
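/* Pack-halfword macros: __PKHBT packs the bottom halfword of ARG1 into bits
   [15:0] and the top halfword of (ARG2 << ARG3) into bits [31:16]; __PKHTB
   packs the top halfword of ARG1 into bits [31:16] and the bottom halfword
   of ARG2 arithmetically shifted right by ARG3 into bits [15:0]. The shift
   amount must be a compile-time constant ("I" constraint); the separate
   ARG3 == 0 branch in __PKHTB avoids emitting an ASR #0, which would encode
   as a shift by 32. */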
#define __PKHBT(ARG1,ARG2,ARG3) \
({ \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({ \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })
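/* __SMMLA: signed most-significant-word multiply-accumulate, i.e.
   op3 + (((int64_t)op1 * op2) >> 32), keeping only the upper 32 bits of the
   64-bit product. */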
__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1U) */
/*@} end of group CMSIS_SIMD_intrinsics */


#endif /* __CMSIS_ARMCC_V6_H */