/**************************************************************************//**
 * @file     cmsis_iccarm.h
 * @brief    CMSIS compiler ICCARM (IAR Compiler for Arm) header file
 * @version  V5.0.6
 * @date     02. March 2018
 ******************************************************************************/

//------------------------------------------------------------------------------
//
// Copyright (c) 2017-2018 IAR Systems
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//------------------------------------------------------------------------------

#ifndef __CMSIS_ICCARM_H__
#define __CMSIS_ICCARM_H__

#ifndef __ICCARM__
  #error This file should only be compiled by ICCARM
#endif

#pragma system_include

/* Force inlining of the intrinsic wrapper functions defined in this file. */
#define __IAR_FT _Pragma("inline=forced") __intrinsic

/* __ICCARM_V8: compiler version 8.x or later (different attribute support). */
#if (__VER__ >= 8000000)
  #define __ICCARM_V8 1
#else
  #define __ICCARM_V8 0
#endif

#pragma language=extended

#ifndef __ALIGNED
  #if __ICCARM_V8
    #define __ALIGNED(x) __attribute__((aligned(x)))
  #elif (__VER__ >= 7080000)
    /* Needs IAR language extensions */
    #define __ALIGNED(x) __attribute__((aligned(x)))
  #else
    #warning No compiler specific solution for __ALIGNED. __ALIGNED is ignored.
    #define __ALIGNED(x)
  #endif
#endif
56 | |||
57 | |||
58 | /* Define compiler macros for CPU architecture, used in CMSIS 5. |
||
59 | */ |
||
60 | #if __ARM_ARCH_7A__ |
||
61 | /* Macro already defined */ |
||
62 | #else |
||
63 | #if defined(__ARM7A__) |
||
64 | #define __ARM_ARCH_7A__ 1 |
||
65 | #endif |
||
66 | #endif |
||
67 | |||
68 | #ifndef __ASM |
||
69 | #define __ASM __asm |
||
70 | #endif |
||
71 | |||
72 | #ifndef __INLINE |
||
73 | #define __INLINE inline |
||
74 | #endif |
||
75 | |||
76 | #ifndef __NO_RETURN |
||
77 | #if __ICCARM_V8 |
||
78 | #define __NO_RETURN __attribute__((__noreturn__)) |
||
79 | #else |
||
80 | #define __NO_RETURN _Pragma("object_attribute=__noreturn") |
||
81 | #endif |
||
82 | #endif |
||
83 | |||
84 | #ifndef __PACKED |
||
85 | /* Needs IAR language extensions */ |
||
86 | #if __ICCARM_V8 |
||
87 | #define __PACKED __attribute__((packed, aligned(1))) |
||
88 | #else |
||
89 | #define __PACKED __packed |
||
90 | #endif |
||
91 | #endif |
||
92 | |||
93 | #ifndef __PACKED_STRUCT |
||
94 | /* Needs IAR language extensions */ |
||
95 | #if __ICCARM_V8 |
||
96 | #define __PACKED_STRUCT struct __attribute__((packed, aligned(1))) |
||
97 | #else |
||
98 | #define __PACKED_STRUCT __packed struct |
||
99 | #endif |
||
100 | #endif |
||
101 | |||
102 | #ifndef __PACKED_UNION |
||
103 | /* Needs IAR language extensions */ |
||
104 | #if __ICCARM_V8 |
||
105 | #define __PACKED_UNION union __attribute__((packed, aligned(1))) |
||
106 | #else |
||
107 | #define __PACKED_UNION __packed union |
||
108 | #endif |
||
109 | #endif |
||
110 | |||

#ifndef __RESTRICT
  #define __RESTRICT __restrict
#endif

#ifndef __STATIC_INLINE
  #define __STATIC_INLINE static inline
#endif

#ifndef __FORCEINLINE
  #define __FORCEINLINE _Pragma("inline=forced")
#endif

#ifndef __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE __FORCEINLINE __STATIC_INLINE
#endif

#ifndef CMSIS_DEPRECATED
  #define CMSIS_DEPRECATED __attribute__((deprecated))
#endif

#ifndef __UNALIGNED_UINT16_READ
  #pragma language=save
  #pragma language=extended
  __IAR_FT uint16_t __iar_uint16_read(void const *ptr)
  {
    return *(__packed uint16_t*)(ptr);
  }
  #pragma language=restore
  #define __UNALIGNED_UINT16_READ(PTR) __iar_uint16_read(PTR)
#endif

#ifndef __UNALIGNED_UINT16_WRITE
  #pragma language=save
  #pragma language=extended
  __IAR_FT void __iar_uint16_write(void const *ptr, uint16_t val)
  {
    *(__packed uint16_t*)(ptr) = val;
  }
  #pragma language=restore
  #define __UNALIGNED_UINT16_WRITE(PTR,VAL) __iar_uint16_write(PTR,VAL)
#endif

#ifndef __UNALIGNED_UINT32_READ
  #pragma language=save
  #pragma language=extended
  __IAR_FT uint32_t __iar_uint32_read(void const *ptr)
  {
    return *(__packed uint32_t*)(ptr);
  }
  #pragma language=restore
  #define __UNALIGNED_UINT32_READ(PTR) __iar_uint32_read(PTR)
#endif

#ifndef __UNALIGNED_UINT32_WRITE
  #pragma language=save
  #pragma language=extended
  __IAR_FT void __iar_uint32_write(void const *ptr, uint32_t val)
  {
    *(__packed uint32_t*)(ptr) = val;
  }
  #pragma language=restore
  #define __UNALIGNED_UINT32_WRITE(PTR,VAL) __iar_uint32_write(PTR,VAL)
#endif
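
/* Usage sketch (illustrative, not part of the original header): read and
 * write a native-endian 32-bit value at an arbitrarily aligned buffer offset:
 *
 *   uint8_t  buf[8];
 *   uint32_t v = __UNALIGNED_UINT32_READ(&buf[1]);
 *   __UNALIGNED_UINT32_WRITE(&buf[3], v);
 */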
175 | |||
176 | #if 0 |
||
177 | #ifndef __UNALIGNED_UINT32 /* deprecated */ |
||
178 | #pragma language=save |
||
179 | #pragma language=extended |
||
180 | __packed struct __iar_u32 { uint32_t v; }; |
||
181 | #pragma language=restore |
||
182 | #define __UNALIGNED_UINT32(PTR) (((struct __iar_u32 *)(PTR))->v) |
||
183 | #endif |
||
184 | #endif |
||
185 | |||
186 | #ifndef __USED |
||
187 | #if __ICCARM_V8 |
||
188 | #define __USED __attribute__((used)) |
||
189 | #else |
||
190 | #define __USED _Pragma("__root") |
||
191 | #endif |
||
192 | #endif |
||
193 | |||
194 | #ifndef __WEAK |
||
195 | #if __ICCARM_V8 |
||
196 | #define __WEAK __attribute__((weak)) |
||
197 | #else |
||
198 | #define __WEAK _Pragma("__weak") |
||
199 | #endif |
||
200 | #endif |
||
201 | |||
202 | |||
203 | #ifndef __ICCARM_INTRINSICS_VERSION__ |
||
204 | #define __ICCARM_INTRINSICS_VERSION__ 0 |
||
205 | #endif |
||
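
/* With __ICCARM_INTRINSICS_VERSION__ == 2 (newer ICCARM releases) the CMSIS
 * intrinsics map directly onto the compiler's __iar_builtin_* functions from
 * iccarm_builtin.h; otherwise they are provided via <intrinsics.h> plus the
 * inline-assembly wrappers in the #else branch further below.
 */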
206 | |||
207 | #if __ICCARM_INTRINSICS_VERSION__ == 2 |
||
208 | |||
209 | #if defined(__CLZ) |
||
210 | #undef __CLZ |
||
211 | #endif |
||
212 | #if defined(__REVSH) |
||
213 | #undef __REVSH |
||
214 | #endif |
||
215 | #if defined(__RBIT) |
||
216 | #undef __RBIT |
||
217 | #endif |
||
218 | #if defined(__SSAT) |
||
219 | #undef __SSAT |
||
220 | #endif |
||
221 | #if defined(__USAT) |
||
222 | #undef __USAT |
||
223 | #endif |
||
224 | |||
225 | #include "iccarm_builtin.h" |
||
226 | |||
227 | #define __enable_irq __iar_builtin_enable_interrupt |
||
228 | #define __disable_irq __iar_builtin_disable_interrupt |
||
229 | #define __enable_fault_irq __iar_builtin_enable_fiq |
||
230 | #define __disable_fault_irq __iar_builtin_disable_fiq |
||
231 | #define __arm_rsr __iar_builtin_rsr |
||
232 | #define __arm_wsr __iar_builtin_wsr |
||
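
  /* __arm_rsr/__arm_wsr read and write a system register that is named by a
   * string argument, e.g. __arm_rsr("CPSR"); the accessor macros below are
   * built on top of them. */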
233 | |||
234 | #if __FPU_PRESENT |
||
235 | #define __get_FPSCR() (__arm_rsr("FPSCR")) |
||
236 | #else |
||
237 | #define __get_FPSCR() ( 0 ) |
||
238 | #endif |
||
239 | |||
240 | #define __set_FPSCR(VALUE) (__arm_wsr("FPSCR", VALUE)) |
||
241 | |||
242 | #define __get_CPSR() (__arm_rsr("CPSR")) |
||
243 | #define __get_mode() (__get_CPSR() & 0x1FU) |
||
244 | |||
245 | #define __set_CPSR(VALUE) (__arm_wsr("CPSR", (VALUE))) |
||
246 | #define __set_mode(VALUE) (__arm_wsr("CPSR_c", (VALUE))) |
||
247 | |||
248 | |||
249 | #define __get_FPEXC() (__arm_rsr("FPEXC")) |
||
250 | #define __set_FPEXC(VALUE) (__arm_wsr("FPEXC", VALUE)) |
||
251 | |||
252 | #define __get_CP(cp, op1, RT, CRn, CRm, op2) \ |
||
253 | ((RT) = __arm_rsr("p" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2)) |
||
254 | |||
255 | #define __set_CP(cp, op1, RT, CRn, CRm, op2) \ |
||
256 | (__arm_wsr("p" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2, (RT))) |
||
257 | |||
258 | #define __get_CP64(cp, op1, Rt, CRm) \ |
||
259 | __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" ) |
||
260 | |||
261 | #define __set_CP64(cp, op1, Rt, CRm) \ |
||
262 | __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" ) |
||
263 | |||
  #include "cmsis_cp15.h"

  #define __NOP    __iar_builtin_no_operation

  #define __CLZ    __iar_builtin_CLZ
  #define __CLREX  __iar_builtin_CLREX

  #define __DMB    __iar_builtin_DMB
  #define __DSB    __iar_builtin_DSB
  #define __ISB    __iar_builtin_ISB

  #define __LDREXB __iar_builtin_LDREXB
  #define __LDREXH __iar_builtin_LDREXH
  #define __LDREXW __iar_builtin_LDREX

  #define __RBIT   __iar_builtin_RBIT
  #define __REV    __iar_builtin_REV
  #define __REV16  __iar_builtin_REV16

  __IAR_FT int16_t __REVSH(int16_t val)
  {
    return (int16_t) __iar_builtin_REVSH(val);
  }

  #define __ROR    __iar_builtin_ROR
  #define __RRX    __iar_builtin_RRX

  #define __SEV    __iar_builtin_SEV

  #define __SSAT   __iar_builtin_SSAT

  #define __STREXB __iar_builtin_STREXB
  #define __STREXH __iar_builtin_STREXH
  #define __STREXW __iar_builtin_STREX

  #define __USAT   __iar_builtin_USAT

  #define __WFE    __iar_builtin_WFE
  #define __WFI    __iar_builtin_WFI

  #define __SADD8   __iar_builtin_SADD8
  #define __QADD8   __iar_builtin_QADD8
  #define __SHADD8  __iar_builtin_SHADD8
  #define __UADD8   __iar_builtin_UADD8
  #define __UQADD8  __iar_builtin_UQADD8
  #define __UHADD8  __iar_builtin_UHADD8
  #define __SSUB8   __iar_builtin_SSUB8
  #define __QSUB8   __iar_builtin_QSUB8
  #define __SHSUB8  __iar_builtin_SHSUB8
  #define __USUB8   __iar_builtin_USUB8
  #define __UQSUB8  __iar_builtin_UQSUB8
  #define __UHSUB8  __iar_builtin_UHSUB8
  #define __SADD16  __iar_builtin_SADD16
  #define __QADD16  __iar_builtin_QADD16
  #define __SHADD16 __iar_builtin_SHADD16
  #define __UADD16  __iar_builtin_UADD16
  #define __UQADD16 __iar_builtin_UQADD16
  #define __UHADD16 __iar_builtin_UHADD16
  #define __SSUB16  __iar_builtin_SSUB16
  #define __QSUB16  __iar_builtin_QSUB16
  #define __SHSUB16 __iar_builtin_SHSUB16
  #define __USUB16  __iar_builtin_USUB16
  #define __UQSUB16 __iar_builtin_UQSUB16
  #define __UHSUB16 __iar_builtin_UHSUB16
  #define __SASX    __iar_builtin_SASX
  #define __QASX    __iar_builtin_QASX
  #define __SHASX   __iar_builtin_SHASX
  #define __UASX    __iar_builtin_UASX
  #define __UQASX   __iar_builtin_UQASX
  #define __UHASX   __iar_builtin_UHASX
  #define __SSAX    __iar_builtin_SSAX
  #define __QSAX    __iar_builtin_QSAX
  #define __SHSAX   __iar_builtin_SHSAX
  #define __USAX    __iar_builtin_USAX
  #define __UQSAX   __iar_builtin_UQSAX
  #define __UHSAX   __iar_builtin_UHSAX
  #define __USAD8   __iar_builtin_USAD8
  #define __USADA8  __iar_builtin_USADA8
  #define __SSAT16  __iar_builtin_SSAT16
  #define __USAT16  __iar_builtin_USAT16
  #define __UXTB16  __iar_builtin_UXTB16
  #define __UXTAB16 __iar_builtin_UXTAB16
  #define __SXTB16  __iar_builtin_SXTB16
  #define __SXTAB16 __iar_builtin_SXTAB16
  #define __SMUAD   __iar_builtin_SMUAD
  #define __SMUADX  __iar_builtin_SMUADX
  #define __SMMLA   __iar_builtin_SMMLA
  #define __SMLAD   __iar_builtin_SMLAD
  #define __SMLADX  __iar_builtin_SMLADX
  #define __SMLALD  __iar_builtin_SMLALD
  #define __SMLALDX __iar_builtin_SMLALDX
  #define __SMUSD   __iar_builtin_SMUSD
  #define __SMUSDX  __iar_builtin_SMUSDX
  #define __SMLSD   __iar_builtin_SMLSD
  #define __SMLSDX  __iar_builtin_SMLSDX
  #define __SMLSLD  __iar_builtin_SMLSLD
  #define __SMLSLDX __iar_builtin_SMLSLDX
  #define __SEL     __iar_builtin_SEL
  #define __QADD    __iar_builtin_QADD
  #define __QSUB    __iar_builtin_QSUB
  #define __PKHBT   __iar_builtin_PKHBT
  #define __PKHTB   __iar_builtin_PKHTB

#else /* __ICCARM_INTRINSICS_VERSION__ == 2 */

  #if !__FPU_PRESENT
  /* Rename the FPSCR intrinsic out of the way when no FPU is present. */
  #define __get_FPSCR __cmsis_iar_get_FPSR_not_active
  #endif

  #ifdef __INTRINSICS_INCLUDED
  #error intrinsics.h is already included
  #endif

  #include <intrinsics.h>

  #if !__FPU_PRESENT
  #define __get_FPSCR() (0)
  #endif

  #pragma diag_suppress=Pe940   /* missing return statement at end of non-void function */
  #pragma diag_suppress=Pe177   /* declared but never referenced */

  #define __enable_irq        __enable_interrupt
  #define __disable_irq       __disable_interrupt
  #define __enable_fault_irq  __enable_fiq
  #define __disable_fault_irq __disable_fiq
  #define __NOP               __no_operation

  #define __get_xPSR          __get_PSR

  __IAR_FT void __set_mode(uint32_t mode)
  {
    __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory");
  }

  __IAR_FT uint32_t __LDREXW(uint32_t volatile *ptr)
  {
    return __LDREX((unsigned long *)ptr);
  }

  __IAR_FT uint32_t __STREXW(uint32_t value, uint32_t volatile *ptr)
  {
    return __STREX(value, (unsigned long *)ptr);
  }

409 | |||
410 | __IAR_FT uint32_t __RRX(uint32_t value) |
||
411 | { |
||
412 | uint32_t result; |
||
413 | __ASM("RRX %0, %1" : "=r"(result) : "r" (value) : "cc"); |
||
414 | return(result); |
||
415 | } |
||
416 | |||
417 | |||
418 | __IAR_FT uint32_t __ROR(uint32_t op1, uint32_t op2) |
||
419 | { |
||
420 | return (op1 >> op2) | (op1 << ((sizeof(op1)*8)-op2)); |
||
421 | } |
||
422 | |||
423 | __IAR_FT uint32_t __get_FPEXC(void) |
||
424 | { |
||
425 | #if (__FPU_PRESENT == 1) |
||
426 | uint32_t result; |
||
427 | __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory"); |
||
428 | return(result); |
||
429 | #else |
||
430 | return(0); |
||
431 | #endif |
||
432 | } |
||
433 | |||
434 | __IAR_FT void __set_FPEXC(uint32_t fpexc) |
||
435 | { |
||
436 | #if (__FPU_PRESENT == 1) |
||
437 | __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory"); |
||
438 | #endif |
||
439 | } |
||
440 | |||
441 | |||
442 | #define __get_CP(cp, op1, Rt, CRn, CRm, op2) \ |
||
443 | __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" ) |
||
444 | #define __set_CP(cp, op1, Rt, CRn, CRm, op2) \ |
||
445 | __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" ) |
||
446 | #define __get_CP64(cp, op1, Rt, CRm) \ |
||
447 | __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" ) |
||
448 | #define __set_CP64(cp, op1, Rt, CRm) \ |
||
449 | __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" ) |
||
450 | |||
451 | #include "cmsis_cp15.h" |
||
452 | |||
453 | #endif /* __ICCARM_INTRINSICS_VERSION__ == 2 */ |
||
454 | |||
455 | #define __BKPT(value) __asm volatile ("BKPT %0" : : "i"(value)) |
||
456 | |||
457 | |||
458 | __IAR_FT uint32_t __get_SP_usr(void) |
||
459 | { |
||
460 | uint32_t cpsr; |
||
461 | uint32_t result; |
||
462 | __ASM volatile( |
||
463 | "MRS %0, cpsr \n" |
||
464 | "CPS #0x1F \n" // no effect in USR mode |
||
465 | "MOV %1, sp \n" |
||
466 | "MSR cpsr_c, %2 \n" // no effect in USR mode |
||
467 | "ISB" : "=r"(cpsr), "=r"(result) : "r"(cpsr) : "memory" |
||
468 | ); |
||
469 | return result; |
||
470 | } |
||
471 | |||
472 | __IAR_FT void __set_SP_usr(uint32_t topOfProcStack) |
||
473 | { |
||
474 | uint32_t cpsr; |
||
475 | __ASM volatile( |
||
476 | "MRS %0, cpsr \n" |
||
477 | "CPS #0x1F \n" // no effect in USR mode |
||
478 | "MOV sp, %1 \n" |
||
479 | "MSR cpsr_c, %2 \n" // no effect in USR mode |
||
480 | "ISB" : "=r"(cpsr) : "r" (topOfProcStack), "r"(cpsr) : "memory" |
||
481 | ); |
||
482 | } |
||
483 | |||
484 | #define __get_mode() (__get_CPSR() & 0x1FU) |
||
485 | |||
486 | __STATIC_INLINE |
||
487 | void __FPU_Enable(void) |
||
488 | { |
||
489 | __ASM volatile( |
||
490 | //Permit access to VFP/NEON, registers by modifying CPACR |
||
491 | " MRC p15,0,R1,c1,c0,2 \n" |
||
492 | " ORR R1,R1,#0x00F00000 \n" |
||
493 | " MCR p15,0,R1,c1,c0,2 \n" |
||
494 | |||
495 | //Ensure that subsequent instructions occur in the context of VFP/NEON access permitted |
||
496 | " ISB \n" |
||
497 | |||
498 | //Enable VFP/NEON |
||
499 | " VMRS R1,FPEXC \n" |
||
500 | " ORR R1,R1,#0x40000000 \n" |
||
501 | " VMSR FPEXC,R1 \n" |
||
502 | |||
503 | //Initialise VFP/NEON registers to 0 |
||
504 | " MOV R2,#0 \n" |
||
505 | |||
506 | //Initialise D16 registers to 0 |
||
507 | " VMOV D0, R2,R2 \n" |
||
508 | " VMOV D1, R2,R2 \n" |
||
509 | " VMOV D2, R2,R2 \n" |
||
510 | " VMOV D3, R2,R2 \n" |
||
511 | " VMOV D4, R2,R2 \n" |
||
512 | " VMOV D5, R2,R2 \n" |
||
513 | " VMOV D6, R2,R2 \n" |
||
514 | " VMOV D7, R2,R2 \n" |
||
515 | " VMOV D8, R2,R2 \n" |
||
516 | " VMOV D9, R2,R2 \n" |
||
517 | " VMOV D10,R2,R2 \n" |
||
518 | " VMOV D11,R2,R2 \n" |
||
519 | " VMOV D12,R2,R2 \n" |
||
520 | " VMOV D13,R2,R2 \n" |
||
521 | " VMOV D14,R2,R2 \n" |
||
522 | " VMOV D15,R2,R2 \n" |
||
523 | |||
524 | #ifdef __ARM_ADVANCED_SIMD__ |
||
525 | //Initialise D32 registers to 0 |
||
526 | " VMOV D16,R2,R2 \n" |
||
527 | " VMOV D17,R2,R2 \n" |
||
528 | " VMOV D18,R2,R2 \n" |
||
529 | " VMOV D19,R2,R2 \n" |
||
530 | " VMOV D20,R2,R2 \n" |
||
531 | " VMOV D21,R2,R2 \n" |
||
532 | " VMOV D22,R2,R2 \n" |
||
533 | " VMOV D23,R2,R2 \n" |
||
534 | " VMOV D24,R2,R2 \n" |
||
535 | " VMOV D25,R2,R2 \n" |
||
536 | " VMOV D26,R2,R2 \n" |
||
537 | " VMOV D27,R2,R2 \n" |
||
538 | " VMOV D28,R2,R2 \n" |
||
539 | " VMOV D29,R2,R2 \n" |
||
540 | " VMOV D30,R2,R2 \n" |
||
541 | " VMOV D31,R2,R2 \n" |
||
542 | #endif |
||
543 | |||
544 | //Initialise FPSCR to a known state |
||
545 | " VMRS R2,FPSCR \n" |
||
546 | " MOV32 R3,#0x00086060 \n" //Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero. |
||
547 | " AND R2,R2,R3 \n" |
||
548 | " VMSR FPSCR,R2 \n"); |
||
549 | } |
||
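
/* Note (not in the original header): __FPU_Enable is typically called once
 * during startup, before any code that touches VFP/NEON registers executes. */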
550 | |||
551 | |||
552 | |||
553 | #undef __IAR_FT |
||
554 | #undef __ICCARM_V8 |
||
555 | |||
556 | #pragma diag_default=Pe940 |
||
557 | #pragma diag_default=Pe177 |
||
558 | |||
559 | #endif /* __CMSIS_ICCARM_H__ */ |