/*
 * Copyright (c) 2009-2024 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CMSIS_GCC_A_H
#define __CMSIS_GCC_A_H

#ifndef __CMSIS_GCC_H
  #error "This file must not be included directly"
#endif

/* ignore some GCC warnings */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wconversion"
#pragma GCC diagnostic ignored "-Wunused-parameter"

/** \defgroup CMSIS_Core_intrinsics CMSIS Core Intrinsics
  Access to dedicated SIMD instructions
  @{
*/

/** \brief  Get CPSR Register
    \return CPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_CPSR(void)
{
  uint32_t result;
  __ASM volatile("MRS %0, cpsr" : "=r" (result) );
  return (result);
}

/** \brief  Set CPSR Register
    \param [in]  cpsr  CPSR value to set
 */
__STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr)
{
  __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory");
}

/** \brief  Get Mode
    \return Processor Mode
 */
__STATIC_FORCEINLINE uint32_t __get_mode(void)
{
  return (__get_CPSR() & 0x1FU);
}

/** \brief  Set Mode
    \param [in]  mode  Mode value to set
 */
__STATIC_FORCEINLINE void __set_mode(uint32_t mode)
{
  __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory");
}

/** \brief  Get Stack Pointer
    \return Stack Pointer value
 */
__STATIC_FORCEINLINE uint32_t __get_SP(void)
{
  uint32_t result;
  __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory");
  return result;
}

/** \brief  Set Stack Pointer
    \param [in]  stack  Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP(uint32_t stack)
{
  __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory");
}

/** \brief  Get USR/SYS Stack Pointer
    \return USR/SYS Stack Pointer value
 */
__STATIC_FORCEINLINE uint32_t __get_SP_usr(void)
{
  uint32_t cpsr = __get_CPSR();
  uint32_t result;
  __ASM volatile(
    "CPS     #0x1F  \n"
    "MOV     %0, sp   " : "=r"(result) : : "memory"
   );
  __set_CPSR(cpsr);
  __ISB();
  return result;
}

/** \brief  Set USR/SYS Stack Pointer
    \param [in]  topOfProcStack  USR/SYS Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack)
{
  uint32_t cpsr = __get_CPSR();
  __ASM volatile(
    "CPS     #0x1F  \n"
    "MOV     sp, %0   " : : "r" (topOfProcStack) : "memory"
   );
  __set_CPSR(cpsr);
  __ISB();
}

/** \brief  Get FPEXC
    \return Floating Point Exception Control register value
 */
__STATIC_FORCEINLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
  uint32_t result;
  __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory");
  return (result);
#else
  return (0);
#endif
}

/** \brief  Set FPEXC
    \param [in]  fpexc  Floating Point Exception Control value to set
 */
__STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
  __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");
#endif
}

/*
 * Include common core functions to access Coprocessor 15 registers
 */

#define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" )
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" )
#define __get_CP64(cp, op1, Rt, CRm)         __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" )
#define __set_CP64(cp, op1, Rt, CRm)         __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" )

#include "cmsis_cp15.h"

/** \brief  Enable Floating Point Unit

  Critical section, called from undef handler, so systick is disabled
 */
__STATIC_INLINE void __FPU_Enable(void)
{
  // Permit access to VFP/NEON registers by modifying CPACR
  const uint32_t cpacr = __get_CPACR();
  __set_CPACR(cpacr | 0x00F00000ul);
  __ISB();

  // Enable VFP/NEON
  const uint32_t fpexc = __get_FPEXC();
  __set_FPEXC(fpexc | 0x40000000ul);

  __ASM volatile(
    // Initialise VFP/NEON registers to 0
    "        MOV     R2,#0             \n"

    // Initialise D0-D15 registers to 0
    "        VMOV    D0, R2,R2         \n"
    "        VMOV    D1, R2,R2         \n"
    "        VMOV    D2, R2,R2         \n"
    "        VMOV    D3, R2,R2         \n"
    "        VMOV    D4, R2,R2         \n"
    "        VMOV    D5, R2,R2         \n"
    "        VMOV    D6, R2,R2         \n"
    "        VMOV    D7, R2,R2         \n"
    "        VMOV    D8, R2,R2         \n"
    "        VMOV    D9, R2,R2         \n"
    "        VMOV    D10,R2,R2         \n"
    "        VMOV    D11,R2,R2         \n"
    "        VMOV    D12,R2,R2         \n"
    "        VMOV    D13,R2,R2         \n"
    "        VMOV    D14,R2,R2         \n"
    "        VMOV    D15,R2,R2         \n"

#if (defined(__ARM_NEON) && (__ARM_NEON == 1))
    // Initialise D16-D31 registers to 0
    "        VMOV    D16,R2,R2         \n"
    "        VMOV    D17,R2,R2         \n"
    "        VMOV    D18,R2,R2         \n"
    "        VMOV    D19,R2,R2         \n"
    "        VMOV    D20,R2,R2         \n"
    "        VMOV    D21,R2,R2         \n"
    "        VMOV    D22,R2,R2         \n"
    "        VMOV    D23,R2,R2         \n"
    "        VMOV    D24,R2,R2         \n"
    "        VMOV    D25,R2,R2         \n"
    "        VMOV    D26,R2,R2         \n"
    "        VMOV    D27,R2,R2         \n"
    "        VMOV    D28,R2,R2         \n"
    "        VMOV    D29,R2,R2         \n"
    "        VMOV    D30,R2,R2         \n"
    "        VMOV    D31,R2,R2         \n"
#endif
    : : : "cc", "r2"
  );

  // Initialise FPSCR to a known state
  const uint32_t fpscr = __get_FPSCR();
  __set_FPSCR(fpscr & 0x00086060ul);
}

/*@} end of group CMSIS_Core_intrinsics */

#pragma GCC diagnostic pop

#endif /* __CMSIS_GCC_A_H */