/**************************************************************************//**
 * @file     cmsis_iccarm.h
 * @brief    CMSIS compiler ICCARM (IAR Compiler for Arm) header file
 * @version  V5.0.7
 * @date     19. June 2018
 ******************************************************************************/

//------------------------------------------------------------------------------
//
// Copyright (c) 2017-2018 IAR Systems
//
// Licensed under the Apache License, Version 2.0 (the "License")
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//------------------------------------------------------------------------------


#ifndef __CMSIS_ICCARM_H__
#define __CMSIS_ICCARM_H__

#ifndef __ICCARM__
  #error This file should only be compiled by ICCARM
#endif

#pragma system_include

#define __IAR_FT _Pragma("inline=forced") __intrinsic

#if (__VER__ >= 8000000)
  #define __ICCARM_V8 1
#else
  #define __ICCARM_V8 0
#endif

#ifndef __ALIGNED
  #if __ICCARM_V8
    #define __ALIGNED(x) __attribute__((aligned(x)))
  #elif (__VER__ >= 7080000)
    /* Needs IAR language extensions */
    #define __ALIGNED(x) __attribute__((aligned(x)))
  #else
    #warning No compiler-specific solution for __ALIGNED. __ALIGNED is ignored.
    #define __ALIGNED(x)
  #endif
#endif

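/* Illustrative note (editorial, not part of the original header): __ALIGNED
   over-aligns an object, e.g. for DMA or stack buffers; the name below is
   hypothetical.

     static uint32_t stack_space[256] __ALIGNED(8);   // 8-byte-aligned buffer
*/
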
/* Define compiler macros for CPU architecture, used in CMSIS 5.
 */
#if __ARM_ARCH_6M__ || __ARM_ARCH_7M__ || __ARM_ARCH_7EM__ || __ARM_ARCH_8M_BASE__ || __ARM_ARCH_8M_MAIN__
/* Macros already defined */
#else
  #if defined(__ARM8M_MAINLINE__) || defined(__ARM8EM_MAINLINE__)
    #define __ARM_ARCH_8M_MAIN__ 1
  #elif defined(__ARM8M_BASELINE__)
    #define __ARM_ARCH_8M_BASE__ 1
  #elif defined(__ARM_ARCH_PROFILE) && __ARM_ARCH_PROFILE == 'M'
    #if __ARM_ARCH == 6
      #define __ARM_ARCH_6M__ 1
    #elif __ARM_ARCH == 7
      #if __ARM_FEATURE_DSP
        #define __ARM_ARCH_7EM__ 1
      #else
        #define __ARM_ARCH_7M__ 1
      #endif
    #endif /* __ARM_ARCH */
  #endif /* __ARM_ARCH_PROFILE == 'M' */
#endif

/* Alternative core deduction for older ICCARM versions */
#if !defined(__ARM_ARCH_6M__) && !defined(__ARM_ARCH_7M__) && !defined(__ARM_ARCH_7EM__) && \
    !defined(__ARM_ARCH_8M_BASE__) && !defined(__ARM_ARCH_8M_MAIN__)
  #if defined(__ARM6M__) && (__CORE__ == __ARM6M__)
    #define __ARM_ARCH_6M__ 1
  #elif defined(__ARM7M__) && (__CORE__ == __ARM7M__)
    #define __ARM_ARCH_7M__ 1
  #elif defined(__ARM7EM__) && (__CORE__ == __ARM7EM__)
    #define __ARM_ARCH_7EM__ 1
  #elif defined(__ARM8M_BASELINE__) && (__CORE__ == __ARM8M_BASELINE__)
    #define __ARM_ARCH_8M_BASE__ 1
  #elif defined(__ARM8M_MAINLINE__) && (__CORE__ == __ARM8M_MAINLINE__)
    #define __ARM_ARCH_8M_MAIN__ 1
  #elif defined(__ARM8EM_MAINLINE__) && (__CORE__ == __ARM8EM_MAINLINE__)
    #define __ARM_ARCH_8M_MAIN__ 1
  #else
    #error "Unknown target."
  #endif
#endif



#if defined(__ARM_ARCH_6M__) && __ARM_ARCH_6M__==1
  #define __IAR_M0_FAMILY 1
#elif defined(__ARM_ARCH_8M_BASE__) && __ARM_ARCH_8M_BASE__==1
  #define __IAR_M0_FAMILY 1
#else
  #define __IAR_M0_FAMILY 0
#endif


#ifndef __ASM
  #define __ASM __asm
#endif

#ifndef __INLINE
  #define __INLINE inline
#endif

#ifndef __NO_RETURN
  #if __ICCARM_V8
    #define __NO_RETURN __attribute__((__noreturn__))
  #else
    #define __NO_RETURN _Pragma("object_attribute=__noreturn")
  #endif
#endif

#ifndef __PACKED
  #if __ICCARM_V8
    #define __PACKED __attribute__((packed, aligned(1)))
  #else
    /* Needs IAR language extensions */
    #define __PACKED __packed
  #endif
#endif

#ifndef __PACKED_STRUCT
  #if __ICCARM_V8
    #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
  #else
    /* Needs IAR language extensions */
    #define __PACKED_STRUCT __packed struct
  #endif
#endif

#ifndef __PACKED_UNION
  #if __ICCARM_V8
    #define __PACKED_UNION union __attribute__((packed, aligned(1)))
  #else
    /* Needs IAR language extensions */
    #define __PACKED_UNION __packed union
  #endif
#endif

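/* Illustrative note (editorial, not part of the original header):
   __PACKED_STRUCT declares a structure without inter-member padding; the type
   below is hypothetical.

     __PACKED_STRUCT msg_hdr_t { uint8_t type; uint32_t length; };
     // sizeof(struct msg_hdr_t) == 5 here, rather than the padded 8
*/
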
#ifndef __RESTRICT
  #define __RESTRICT __restrict
#endif

#ifndef __STATIC_INLINE
  #define __STATIC_INLINE static inline
#endif

#ifndef __FORCEINLINE
  #define __FORCEINLINE _Pragma("inline=forced")
#endif

#ifndef __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE __FORCEINLINE __STATIC_INLINE
#endif

#ifndef __UNALIGNED_UINT16_READ
#pragma language=save
#pragma language=extended
__IAR_FT uint16_t __iar_uint16_read(void const *ptr)
{
  return *(__packed uint16_t*)(ptr);
}
#pragma language=restore
#define __UNALIGNED_UINT16_READ(PTR) __iar_uint16_read(PTR)
#endif


#ifndef __UNALIGNED_UINT16_WRITE
#pragma language=save
#pragma language=extended
__IAR_FT void __iar_uint16_write(void const *ptr, uint16_t val)
{
  *(__packed uint16_t*)(ptr) = val;
}
#pragma language=restore
#define __UNALIGNED_UINT16_WRITE(PTR,VAL) __iar_uint16_write(PTR,VAL)
#endif

#ifndef __UNALIGNED_UINT32_READ
#pragma language=save
#pragma language=extended
__IAR_FT uint32_t __iar_uint32_read(void const *ptr)
{
  return *(__packed uint32_t*)(ptr);
}
#pragma language=restore
#define __UNALIGNED_UINT32_READ(PTR) __iar_uint32_read(PTR)
#endif

#ifndef __UNALIGNED_UINT32_WRITE
#pragma language=save
#pragma language=extended
__IAR_FT void __iar_uint32_write(void const *ptr, uint32_t val)
{
  *(__packed uint32_t*)(ptr) = val;
}
#pragma language=restore
#define __UNALIGNED_UINT32_WRITE(PTR,VAL) __iar_uint32_write(PTR,VAL)
#endif

#ifndef __UNALIGNED_UINT32   /* deprecated */
#pragma language=save
#pragma language=extended
__packed struct __iar_u32 { uint32_t v; };
#pragma language=restore
#define __UNALIGNED_UINT32(PTR) (((struct __iar_u32 *)(PTR))->v)
#endif

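/* Illustrative note (editorial, not part of the original header): these
   helpers access values at addresses that need not be naturally aligned; the
   buffer below is hypothetical.

     uint8_t  frame[8];
     uint32_t v = __UNALIGNED_UINT32_READ(&frame[1]);    // unaligned load
     __UNALIGNED_UINT32_WRITE(&frame[3], 0x12345678U);   // unaligned store
*/
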
#ifndef __USED
  #if __ICCARM_V8
    #define __USED __attribute__((used))
  #else
    #define __USED _Pragma("__root")
  #endif
#endif

#ifndef __WEAK
  #if __ICCARM_V8
    #define __WEAK __attribute__((weak))
  #else
    #define __WEAK _Pragma("__weak")
  #endif
#endif


#ifndef __ICCARM_INTRINSICS_VERSION__
  #define __ICCARM_INTRINSICS_VERSION__  0
#endif

#if __ICCARM_INTRINSICS_VERSION__ == 2

  #if defined(__CLZ)
    #undef __CLZ
  #endif
  #if defined(__REVSH)
    #undef __REVSH
  #endif
  #if defined(__RBIT)
    #undef __RBIT
  #endif
  #if defined(__SSAT)
    #undef __SSAT
  #endif
  #if defined(__USAT)
    #undef __USAT
  #endif

  #include "iccarm_builtin.h"

  #define __disable_fault_irq __iar_builtin_disable_fiq
  #define __disable_irq       __iar_builtin_disable_interrupt
  #define __enable_fault_irq  __iar_builtin_enable_fiq
  #define __enable_irq        __iar_builtin_enable_interrupt
  #define __arm_rsr           __iar_builtin_rsr
  #define __arm_wsr           __iar_builtin_wsr


  #define __get_APSR()      (__arm_rsr("APSR"))
  #define __get_BASEPRI()   (__arm_rsr("BASEPRI"))
  #define __get_CONTROL()   (__arm_rsr("CONTROL"))
  #define __get_FAULTMASK() (__arm_rsr("FAULTMASK"))

  #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
       (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
    #define __get_FPSCR()      (__arm_rsr("FPSCR"))
    #define __set_FPSCR(VALUE) (__arm_wsr("FPSCR", (VALUE)))
  #else
    #define __get_FPSCR()      ( 0 )
    #define __set_FPSCR(VALUE) ((void)VALUE)
  #endif

283 | |||
284 | #define __get_IPSR() (__arm_rsr("IPSR")) |
||
285 | #define __get_MSP() (__arm_rsr("MSP")) |
||
286 | #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ |
||
287 | (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) |
||
288 | // without main extensions, the non-secure MSPLIM is RAZ/WI |
||
289 | #define __get_MSPLIM() (0U) |
||
290 | #else |
||
291 | #define __get_MSPLIM() (__arm_rsr("MSPLIM")) |
||
292 | #endif |
||
293 | #define __get_PRIMASK() (__arm_rsr("PRIMASK")) |
||
294 | #define __get_PSP() (__arm_rsr("PSP")) |
||
295 | |||
296 | #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ |
||
297 | (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) |
||
298 | // without main extensions, the non-secure PSPLIM is RAZ/WI |
||
299 | #define __get_PSPLIM() (0U) |
||
300 | #else |
||
301 | #define __get_PSPLIM() (__arm_rsr("PSPLIM")) |
||
302 | #endif |
||
303 | |||
  #define __get_xPSR()      (__arm_rsr("xPSR"))

  #define __set_BASEPRI(VALUE)     (__arm_wsr("BASEPRI", (VALUE)))
  #define __set_BASEPRI_MAX(VALUE) (__arm_wsr("BASEPRI_MAX", (VALUE)))
  #define __set_CONTROL(VALUE)     (__arm_wsr("CONTROL", (VALUE)))
  #define __set_FAULTMASK(VALUE)   (__arm_wsr("FAULTMASK", (VALUE)))
  #define __set_MSP(VALUE)         (__arm_wsr("MSP", (VALUE)))

  #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
       (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
    // without main extensions, the non-secure MSPLIM is RAZ/WI
    #define __set_MSPLIM(VALUE)    ((void)(VALUE))
  #else
    #define __set_MSPLIM(VALUE)    (__arm_wsr("MSPLIM", (VALUE)))
  #endif
  #define __set_PRIMASK(VALUE)     (__arm_wsr("PRIMASK", (VALUE)))
  #define __set_PSP(VALUE)         (__arm_wsr("PSP", (VALUE)))
  #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
       (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
    // without main extensions, the non-secure PSPLIM is RAZ/WI
    #define __set_PSPLIM(VALUE)    ((void)(VALUE))
  #else
    #define __set_PSPLIM(VALUE)    (__arm_wsr("PSPLIM", (VALUE)))
  #endif

  #define __TZ_get_CONTROL_NS()        (__arm_rsr("CONTROL_NS"))
  #define __TZ_set_CONTROL_NS(VALUE)   (__arm_wsr("CONTROL_NS", (VALUE)))
  #define __TZ_get_PSP_NS()            (__arm_rsr("PSP_NS"))
  #define __TZ_set_PSP_NS(VALUE)       (__arm_wsr("PSP_NS", (VALUE)))
  #define __TZ_get_MSP_NS()            (__arm_rsr("MSP_NS"))
  #define __TZ_set_MSP_NS(VALUE)       (__arm_wsr("MSP_NS", (VALUE)))
  #define __TZ_get_SP_NS()             (__arm_rsr("SP_NS"))
  #define __TZ_set_SP_NS(VALUE)        (__arm_wsr("SP_NS", (VALUE)))
  #define __TZ_get_PRIMASK_NS()        (__arm_rsr("PRIMASK_NS"))
  #define __TZ_set_PRIMASK_NS(VALUE)   (__arm_wsr("PRIMASK_NS", (VALUE)))
  #define __TZ_get_BASEPRI_NS()        (__arm_rsr("BASEPRI_NS"))
  #define __TZ_set_BASEPRI_NS(VALUE)   (__arm_wsr("BASEPRI_NS", (VALUE)))
  #define __TZ_get_FAULTMASK_NS()      (__arm_rsr("FAULTMASK_NS"))
  #define __TZ_set_FAULTMASK_NS(VALUE) (__arm_wsr("FAULTMASK_NS", (VALUE)))

  #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
       (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
    // without main extensions, the non-secure PSPLIM is RAZ/WI
    #define __TZ_get_PSPLIM_NS()       (0U)
    #define __TZ_set_PSPLIM_NS(VALUE)  ((void)(VALUE))
  #else
    #define __TZ_get_PSPLIM_NS()       (__arm_rsr("PSPLIM_NS"))
    #define __TZ_set_PSPLIM_NS(VALUE)  (__arm_wsr("PSPLIM_NS", (VALUE)))
  #endif

  #define __TZ_get_MSPLIM_NS()         (__arm_rsr("MSPLIM_NS"))
  #define __TZ_set_MSPLIM_NS(VALUE)    (__arm_wsr("MSPLIM_NS", (VALUE)))

  #define __NOP     __iar_builtin_no_operation

  #define __CLZ     __iar_builtin_CLZ
  #define __CLREX   __iar_builtin_CLREX

  #define __DMB     __iar_builtin_DMB
  #define __DSB     __iar_builtin_DSB
  #define __ISB     __iar_builtin_ISB

  #define __LDREXB  __iar_builtin_LDREXB
  #define __LDREXH  __iar_builtin_LDREXH
  #define __LDREXW  __iar_builtin_LDREX

  #define __RBIT    __iar_builtin_RBIT
  #define __REV     __iar_builtin_REV
  #define __REV16   __iar_builtin_REV16

  __IAR_FT int16_t __REVSH(int16_t val)
  {
    return (int16_t) __iar_builtin_REVSH(val);
  }

  #define __ROR     __iar_builtin_ROR
  #define __RRX     __iar_builtin_RRX

  #define __SEV     __iar_builtin_SEV

  #if !__IAR_M0_FAMILY
    #define __SSAT  __iar_builtin_SSAT
  #endif

  #define __STREXB  __iar_builtin_STREXB
  #define __STREXH  __iar_builtin_STREXH
  #define __STREXW  __iar_builtin_STREX

  #if !__IAR_M0_FAMILY
    #define __USAT  __iar_builtin_USAT
  #endif

  #define __WFE     __iar_builtin_WFE
  #define __WFI     __iar_builtin_WFI

  #if __ARM_MEDIA__
    #define __SADD8   __iar_builtin_SADD8
    #define __QADD8   __iar_builtin_QADD8
    #define __SHADD8  __iar_builtin_SHADD8
    #define __UADD8   __iar_builtin_UADD8
    #define __UQADD8  __iar_builtin_UQADD8
    #define __UHADD8  __iar_builtin_UHADD8
    #define __SSUB8   __iar_builtin_SSUB8
    #define __QSUB8   __iar_builtin_QSUB8
    #define __SHSUB8  __iar_builtin_SHSUB8
    #define __USUB8   __iar_builtin_USUB8
    #define __UQSUB8  __iar_builtin_UQSUB8
    #define __UHSUB8  __iar_builtin_UHSUB8
    #define __SADD16  __iar_builtin_SADD16
    #define __QADD16  __iar_builtin_QADD16
    #define __SHADD16 __iar_builtin_SHADD16
    #define __UADD16  __iar_builtin_UADD16
    #define __UQADD16 __iar_builtin_UQADD16
    #define __UHADD16 __iar_builtin_UHADD16
    #define __SSUB16  __iar_builtin_SSUB16
    #define __QSUB16  __iar_builtin_QSUB16
    #define __SHSUB16 __iar_builtin_SHSUB16
    #define __USUB16  __iar_builtin_USUB16
    #define __UQSUB16 __iar_builtin_UQSUB16
    #define __UHSUB16 __iar_builtin_UHSUB16
    #define __SASX    __iar_builtin_SASX
    #define __QASX    __iar_builtin_QASX
    #define __SHASX   __iar_builtin_SHASX
    #define __UASX    __iar_builtin_UASX
    #define __UQASX   __iar_builtin_UQASX
    #define __UHASX   __iar_builtin_UHASX
    #define __SSAX    __iar_builtin_SSAX
    #define __QSAX    __iar_builtin_QSAX
    #define __SHSAX   __iar_builtin_SHSAX
    #define __USAX    __iar_builtin_USAX
    #define __UQSAX   __iar_builtin_UQSAX
    #define __UHSAX   __iar_builtin_UHSAX
    #define __USAD8   __iar_builtin_USAD8
    #define __USADA8  __iar_builtin_USADA8
    #define __SSAT16  __iar_builtin_SSAT16
    #define __USAT16  __iar_builtin_USAT16
    #define __UXTB16  __iar_builtin_UXTB16
    #define __UXTAB16 __iar_builtin_UXTAB16
    #define __SXTB16  __iar_builtin_SXTB16
    #define __SXTAB16 __iar_builtin_SXTAB16
    #define __SMUAD   __iar_builtin_SMUAD
    #define __SMUADX  __iar_builtin_SMUADX
    #define __SMMLA   __iar_builtin_SMMLA
    #define __SMLAD   __iar_builtin_SMLAD
    #define __SMLADX  __iar_builtin_SMLADX
    #define __SMLALD  __iar_builtin_SMLALD
    #define __SMLALDX __iar_builtin_SMLALDX
    #define __SMUSD   __iar_builtin_SMUSD
    #define __SMUSDX  __iar_builtin_SMUSDX
    #define __SMLSD   __iar_builtin_SMLSD
    #define __SMLSDX  __iar_builtin_SMLSDX
    #define __SMLSLD  __iar_builtin_SMLSLD
    #define __SMLSLDX __iar_builtin_SMLSLDX
    #define __SEL     __iar_builtin_SEL
    #define __QADD    __iar_builtin_QADD
    #define __QSUB    __iar_builtin_QSUB
    #define __PKHBT   __iar_builtin_PKHBT
    #define __PKHTB   __iar_builtin_PKHTB
  #endif

#else /* __ICCARM_INTRINSICS_VERSION__ == 2 */

  #if __IAR_M0_FAMILY
    /* Avoid clash between intrinsics.h and arm_math.h when compiling for Cortex-M0. */
    #define __CLZ       __cmsis_iar_clz_not_active
    #define __SSAT      __cmsis_iar_ssat_not_active
    #define __USAT      __cmsis_iar_usat_not_active
    #define __RBIT      __cmsis_iar_rbit_not_active
    #define __get_APSR  __cmsis_iar_get_APSR_not_active
  #endif


  #if (!((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
         (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     ))
    #define __get_FPSCR __cmsis_iar_get_FPSR_not_active
    #define __set_FPSCR __cmsis_iar_set_FPSR_not_active
  #endif

  #ifdef __INTRINSICS_INCLUDED
    #error intrinsics.h has already been included!
  #endif

  #include <intrinsics.h>

  #if __IAR_M0_FAMILY
    /* Avoid clash between intrinsics.h and arm_math.h when compiling for Cortex-M0. */
    #undef __CLZ
    #undef __SSAT
    #undef __USAT
    #undef __RBIT
    #undef __get_APSR

    __STATIC_INLINE uint8_t __CLZ(uint32_t data)
    {
      if (data == 0U) { return 32U; }

      uint32_t count = 0U;
      uint32_t mask = 0x80000000U;

      while ((data & mask) == 0U)
      {
        count += 1U;
        mask = mask >> 1U;
      }
      return count;
    }

    __STATIC_INLINE uint32_t __RBIT(uint32_t v)
    {
      uint8_t sc = 31U;
      uint32_t r = v;
      for (v >>= 1U; v; v >>= 1U)
      {
        r <<= 1U;
        r |= v & 1U;
        sc--;
      }
      return (r << sc);
    }

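    /* Illustrative note (editorial, not part of the original header): these
       software fallbacks mirror the CLZ and RBIT instructions, e.g.
         __CLZ(0x00010000U)  -> 15            (15 leading zero bits)
         __RBIT(0x80000001U) -> 0x80000001U   (bit order reversed)
    */
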
    __STATIC_INLINE uint32_t __get_APSR(void)
    {
      uint32_t res;
      __asm("MRS %0,APSR" : "=r" (res));
      return res;
    }

  #endif

  #if (!((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
         (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     ))
    #undef __get_FPSCR
    #undef __set_FPSCR
    #define __get_FPSCR()       (0)
    #define __set_FPSCR(VALUE)  ((void)VALUE)
  #endif

  #pragma diag_suppress=Pe940
  #pragma diag_suppress=Pe177

  #define __enable_irq    __enable_interrupt
  #define __disable_irq   __disable_interrupt
  #define __NOP           __no_operation

  #define __get_xPSR      __get_PSR

  #if (!defined(__ARM_ARCH_6M__) || __ARM_ARCH_6M__==0)

    __IAR_FT uint32_t __LDREXW(uint32_t volatile *ptr)
    {
      return __LDREX((unsigned long *)ptr);
    }

    __IAR_FT uint32_t __STREXW(uint32_t value, uint32_t volatile *ptr)
    {
      return __STREX(value, (unsigned long *)ptr);
    }
  #endif


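  /* Illustrative note (editorial, not part of the original header): exclusive
     accesses are normally paired in a retry loop; `counter` is hypothetical.

       uint32_t v;
       do {
         v = __LDREXW(&counter) + 1U;     // load-exclusive, then modify
       } while (__STREXW(v, &counter));   // store returns 1 (fails) on contention
  */
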
  /* __CORTEX_M is defined in core_cm0.h, core_cm3.h and core_cm4.h. */
  #if (__CORTEX_M >= 0x03)

    __IAR_FT uint32_t __RRX(uint32_t value)
    {
      uint32_t result;
      __ASM("RRX %0, %1" : "=r"(result) : "r" (value) : "cc");
      return(result);
    }

    __IAR_FT void __set_BASEPRI_MAX(uint32_t value)
    {
      __asm volatile("MSR BASEPRI_MAX,%0"::"r" (value));
    }


    #define __enable_fault_irq  __enable_fiq
    #define __disable_fault_irq __disable_fiq


  #endif /* (__CORTEX_M >= 0x03) */

  __IAR_FT uint32_t __ROR(uint32_t op1, uint32_t op2)
  {
    op2 %= 32U;   /* avoid an undefined shift by 32 when op2 is 0 or a multiple of 32 */
    if (op2 == 0U) { return op1; }
    return (op1 >> op2) | (op1 << (32U - op2));
  }

  #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
       (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

    __IAR_FT uint32_t __get_MSPLIM(void)
    {
      uint32_t res;
    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
         (!defined (__ARM_FEATURE_CMSE  ) || (__ARM_FEATURE_CMSE   < 3)))
      // without main extensions, the non-secure MSPLIM is RAZ/WI
      res = 0U;
    #else
      __asm volatile("MRS %0,MSPLIM" : "=r" (res));
    #endif
      return res;
    }

    __IAR_FT void __set_MSPLIM(uint32_t value)
    {
    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
         (!defined (__ARM_FEATURE_CMSE  ) || (__ARM_FEATURE_CMSE   < 3)))
      // without main extensions, the non-secure MSPLIM is RAZ/WI
      (void)value;
    #else
      __asm volatile("MSR MSPLIM,%0" :: "r" (value));
    #endif
    }

    __IAR_FT uint32_t __get_PSPLIM(void)
    {
      uint32_t res;
    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
         (!defined (__ARM_FEATURE_CMSE  ) || (__ARM_FEATURE_CMSE   < 3)))
      // without main extensions, the non-secure PSPLIM is RAZ/WI
      res = 0U;
    #else
      __asm volatile("MRS %0,PSPLIM" : "=r" (res));
    #endif
      return res;
    }

    __IAR_FT void __set_PSPLIM(uint32_t value)
    {
    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
         (!defined (__ARM_FEATURE_CMSE  ) || (__ARM_FEATURE_CMSE   < 3)))
      // without main extensions, the non-secure PSPLIM is RAZ/WI
      (void)value;
    #else
      __asm volatile("MSR PSPLIM,%0" :: "r" (value));
    #endif
    }

    __IAR_FT uint32_t __TZ_get_CONTROL_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS %0,CONTROL_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_CONTROL_NS(uint32_t value)
    {
      __asm volatile("MSR CONTROL_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_PSP_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS %0,PSP_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_PSP_NS(uint32_t value)
    {
      __asm volatile("MSR PSP_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_MSP_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS %0,MSP_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_MSP_NS(uint32_t value)
    {
      __asm volatile("MSR MSP_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_SP_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS %0,SP_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_SP_NS(uint32_t value)
    {
      __asm volatile("MSR SP_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_PRIMASK_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS %0,PRIMASK_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_PRIMASK_NS(uint32_t value)
    {
      __asm volatile("MSR PRIMASK_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_BASEPRI_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS %0,BASEPRI_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_BASEPRI_NS(uint32_t value)
    {
      __asm volatile("MSR BASEPRI_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_FAULTMASK_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS %0,FAULTMASK_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_FAULTMASK_NS(uint32_t value)
    {
      __asm volatile("MSR FAULTMASK_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_PSPLIM_NS(void)
    {
      uint32_t res;
    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
         (!defined (__ARM_FEATURE_CMSE  ) || (__ARM_FEATURE_CMSE   < 3)))
      // without main extensions, the non-secure PSPLIM is RAZ/WI
      res = 0U;
    #else
      __asm volatile("MRS %0,PSPLIM_NS" : "=r" (res));
    #endif
      return res;
    }

    __IAR_FT void __TZ_set_PSPLIM_NS(uint32_t value)
    {
    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
         (!defined (__ARM_FEATURE_CMSE  ) || (__ARM_FEATURE_CMSE   < 3)))
      // without main extensions, the non-secure PSPLIM is RAZ/WI
      (void)value;
    #else
      __asm volatile("MSR PSPLIM_NS,%0" :: "r" (value));
    #endif
    }

    __IAR_FT uint32_t __TZ_get_MSPLIM_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS %0,MSPLIM_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_MSPLIM_NS(uint32_t value)
    {
      __asm volatile("MSR MSPLIM_NS,%0" :: "r" (value));
    }

  #endif /* __ARM_ARCH_8M_MAIN__ or __ARM_ARCH_8M_BASE__ */

#endif /* __ICCARM_INTRINSICS_VERSION__ == 2 */

#define __BKPT(value) __asm volatile ("BKPT %0" : : "i"(value))

#if __IAR_M0_FAMILY
__STATIC_INLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat >= 1U) && (sat <= 32U))
  {
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max;
    if (val > max)
    {
      return max;
    }
    else if (val < min)
    {
      return min;
    }
  }
  return val;
}

__STATIC_INLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  if (sat <= 31U)
  {
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max)
    {
      return max;
    }
    else if (val < 0)
    {
      return 0U;
    }
  }
  return (uint32_t)val;
}
#endif

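/* Illustrative note (editorial, not part of the original header): saturating
   to `sat` bits clamps the value into the representable range, e.g.
     __SSAT( 200, 8) ->  127   (largest 8-bit signed value)
     __SSAT(-200, 8) -> -128   (smallest 8-bit signed value)
     __USAT(-5, 8)   ->    0   (unsigned saturation floors at zero)
*/
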
#if (__CORTEX_M >= 0x03)   /* __CORTEX_M is defined in core_cm0.h, core_cm3.h and core_cm4.h. */

  __IAR_FT uint8_t __LDRBT(volatile uint8_t *addr)
  {
    uint32_t res;
    __ASM("LDRBT %0, [%1]" : "=r" (res) : "r" (addr) : "memory");
    return ((uint8_t)res);
  }

  __IAR_FT uint16_t __LDRHT(volatile uint16_t *addr)
  {
    uint32_t res;
    __ASM("LDRHT %0, [%1]" : "=r" (res) : "r" (addr) : "memory");
    return ((uint16_t)res);
  }

  __IAR_FT uint32_t __LDRT(volatile uint32_t *addr)
  {
    uint32_t res;
    __ASM("LDRT %0, [%1]" : "=r" (res) : "r" (addr) : "memory");
    return res;
  }

  __IAR_FT void __STRBT(uint8_t value, volatile uint8_t *addr)
  {
    __ASM("STRBT %1, [%0]" : : "r" (addr), "r" ((uint32_t)value) : "memory");
  }

  __IAR_FT void __STRHT(uint16_t value, volatile uint16_t *addr)
  {
    __ASM("STRHT %1, [%0]" : : "r" (addr), "r" ((uint32_t)value) : "memory");
  }

  __IAR_FT void __STRT(uint32_t value, volatile uint32_t *addr)
  {
    __ASM("STRT %1, [%0]" : : "r" (addr), "r" (value) : "memory");
  }

#endif /* (__CORTEX_M >= 0x03) */

#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )


  __IAR_FT uint8_t __LDAB(volatile uint8_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("LDAB %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
    return ((uint8_t)res);
  }

  __IAR_FT uint16_t __LDAH(volatile uint16_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("LDAH %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
    return ((uint16_t)res);
  }

  __IAR_FT uint32_t __LDA(volatile uint32_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("LDA %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
    return res;
  }

  __IAR_FT void __STLB(uint8_t value, volatile uint8_t *ptr)
  {
    __ASM volatile ("STLB %1, [%0]" :: "r" (ptr), "r" (value) : "memory");
  }

  __IAR_FT void __STLH(uint16_t value, volatile uint16_t *ptr)
  {
    __ASM volatile ("STLH %1, [%0]" :: "r" (ptr), "r" (value) : "memory");
  }

  __IAR_FT void __STL(uint32_t value, volatile uint32_t *ptr)
  {
    __ASM volatile ("STL %1, [%0]" :: "r" (ptr), "r" (value) : "memory");
  }

  __IAR_FT uint8_t __LDAEXB(volatile uint8_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("LDAEXB %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
    return ((uint8_t)res);
  }

  __IAR_FT uint16_t __LDAEXH(volatile uint16_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("LDAEXH %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
    return ((uint16_t)res);
  }

  __IAR_FT uint32_t __LDAEX(volatile uint32_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("LDAEX %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
    return res;
  }

  __IAR_FT uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("STLEXB %0, %2, [%1]" : "=r" (res) : "r" (ptr), "r" (value) : "memory");
    return res;
  }

  __IAR_FT uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("STLEXH %0, %2, [%1]" : "=r" (res) : "r" (ptr), "r" (value) : "memory");
    return res;
  }

  __IAR_FT uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("STLEX %0, %2, [%1]" : "=r" (res) : "r" (ptr), "r" (value) : "memory");
    return res;
  }

#endif /* __ARM_ARCH_8M_MAIN__ or __ARM_ARCH_8M_BASE__ */

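/* Illustrative note (editorial, not part of the original header): the
   load-acquire and store-release variants carry memory-ordering semantics,
   e.g. for a one-way flag handshake; `shared_data` and `ready` are
   hypothetical.

     shared_data = 42U;
     __STL(1U, &ready);                // release: earlier stores complete first
     ...
     while (__LDA(&ready) == 0U) { }   // acquire: later loads stay after this
*/
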
#undef __IAR_FT
#undef __IAR_M0_FAMILY
#undef __ICCARM_V8

#pragma diag_default=Pe940
#pragma diag_default=Pe177

#endif /* __CMSIS_ICCARM_H__ */